1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2018-2019 Realtek Corporation
3 */
4
5 #include <linux/module.h>
6 #include "main.h"
7 #include "coex.h"
8 #include "fw.h"
9 #include "tx.h"
10 #include "rx.h"
11 #include "phy.h"
12 #include "rtw8822c.h"
13 #include "rtw8822c_table.h"
14 #include "mac.h"
15 #include "reg.h"
16 #include "debug.h"
17 #include "util.h"
18 #include "bf.h"
19 #include "efuse.h"
20
21 #define IQK_DONE_8822C 0xaa
22
23 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
24 u8 rx_path, bool is_tx2_path);
25
26 static void rtw8822ce_efuse_parsing(struct rtw_efuse *efuse,
27 struct rtw8822c_efuse *map)
28 {
29 ether_addr_copy(efuse->addr, map->e.mac_addr);
30 }
31
32 static void rtw8822cu_efuse_parsing(struct rtw_efuse *efuse,
33 struct rtw8822c_efuse *map)
34 {
35 ether_addr_copy(efuse->addr, map->u.mac_addr);
36 }
37
38 static void rtw8822cs_efuse_parsing(struct rtw_efuse *efuse,
39 struct rtw8822c_efuse *map)
40 {
41 ether_addr_copy(efuse->addr, map->s.mac_addr);
42 }
43
44 static int rtw8822c_read_efuse(struct rtw_dev *rtwdev, u8 *log_map)
45 {
46 struct rtw_efuse *efuse = &rtwdev->efuse;
47 struct rtw8822c_efuse *map;
48 int i;
49
50 map = (struct rtw8822c_efuse *)log_map;
51
52 efuse->rfe_option = map->rfe_option;
53 efuse->rf_board_option = map->rf_board_option;
54 efuse->crystal_cap = map->xtal_k & XCAP_MASK;
55 efuse->channel_plan = map->channel_plan;
56 efuse->country_code[0] = map->country_code[0];
57 efuse->country_code[1] = map->country_code[1];
58 efuse->bt_setting = map->rf_bt_setting;
59 efuse->regd = map->rf_board_option & 0x7;
60 efuse->thermal_meter[RF_PATH_A] = map->path_a_thermal;
61 efuse->thermal_meter[RF_PATH_B] = map->path_b_thermal;
62 efuse->thermal_meter_k =
63 (map->path_a_thermal + map->path_b_thermal) >> 1;
64 efuse->power_track_type = (map->tx_pwr_calibrate_rate >> 4) & 0xf;
65
66 for (i = 0; i < 4; i++)
67 efuse->txpwr_idx_table[i] = map->txpwr_idx_table[i];
68
69 switch (rtw_hci_type(rtwdev)) {
70 case RTW_HCI_TYPE_PCIE:
71 rtw8822ce_efuse_parsing(efuse, map);
72 break;
73 case RTW_HCI_TYPE_USB:
74 rtw8822cu_efuse_parsing(efuse, map);
75 break;
76 case RTW_HCI_TYPE_SDIO:
77 rtw8822cs_efuse_parsing(efuse, map);
78 break;
79 default:
80 /* unsupported now */
81 return -ENOTSUPP;
82 }
83
84 return 0;
85 }
86
87 static void rtw8822c_header_file_init(struct rtw_dev *rtwdev, bool pre)
88 {
89 rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
90 rtw_write32_set(rtwdev, REG_3WIRE, BIT_3WIRE_PI_ON);
91 rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_TX_EN | BIT_3WIRE_RX_EN);
92 rtw_write32_set(rtwdev, REG_3WIRE2, BIT_3WIRE_PI_ON);
93
94 if (pre)
95 rtw_write32_clr(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
96 else
97 rtw_write32_set(rtwdev, REG_ENCCK, BIT_CCK_OFDM_BLK_EN);
98 }
99
100 static void rtw8822c_bb_reset(struct rtw_dev *rtwdev)
101 {
102 rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
103 rtw_write16_clr(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
104 rtw_write16_set(rtwdev, REG_SYS_FUNC_EN, BIT_FEN_BB_RSTB);
105 }
106
107 static void rtw8822c_dac_backup_reg(struct rtw_dev *rtwdev,
108 struct rtw_backup_info *backup,
109 struct rtw_backup_info *backup_rf)
110 {
111 u32 path, i;
112 u32 val;
113 u32 reg;
114 u32 rf_addr[DACK_RF_8822C] = {0x8f};
115 u32 addrs[DACK_REG_8822C] = {0x180c, 0x1810, 0x410c, 0x4110,
116 0x1c3c, 0x1c24, 0x1d70, 0x9b4,
117 0x1a00, 0x1a14, 0x1d58, 0x1c38,
118 0x1e24, 0x1e28, 0x1860, 0x4160};
119
120 for (i = 0; i < DACK_REG_8822C; i++) {
121 backup[i].len = 4;
122 backup[i].reg = addrs[i];
123 backup[i].val = rtw_read32(rtwdev, addrs[i]);
124 }
125
126 for (path = 0; path < DACK_PATH_8822C; path++) {
127 for (i = 0; i < DACK_RF_8822C; i++) {
128 reg = rf_addr[i];
129 val = rtw_read_rf(rtwdev, path, reg, RFREG_MASK);
130 backup_rf[path * i + i].reg = reg;
131 backup_rf[path * i + i].val = val;
132 }
133 }
134 }
135
136 static void rtw8822c_dac_restore_reg(struct rtw_dev *rtwdev,
137 struct rtw_backup_info *backup,
138 struct rtw_backup_info *backup_rf)
139 {
140 u32 path, i;
141 u32 val;
142 u32 reg;
143
144 rtw_restore_reg(rtwdev, backup, DACK_REG_8822C);
145
146 for (path = 0; path < DACK_PATH_8822C; path++) {
147 for (i = 0; i < DACK_RF_8822C; i++) {
148 val = backup_rf[path * i + i].val;
149 reg = backup_rf[path * i + i].reg;
150 rtw_write_rf(rtwdev, path, reg, RFREG_MASK, val);
151 }
152 }
153 }
154
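/* The DACK sample values handled by the helpers below are 10-bit
 * two's-complement readings: raw values at or above 0x200 represent
 * negative samples (value - 0x400), so min/max tracking, sorting and
 * offset averaging compare them as signed quantities.
 */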
155 static void rtw8822c_rf_minmax_cmp(struct rtw_dev *rtwdev, u32 value,
156 u32 *min, u32 *max)
157 {
158 if (value >= 0x200) {
159 if (*min >= 0x200) {
160 if (*min > value)
161 *min = value;
162 } else {
163 *min = value;
164 }
165 if (*max >= 0x200) {
166 if (*max < value)
167 *max = value;
168 }
169 } else {
170 if (*min < 0x200) {
171 if (*min > value)
172 *min = value;
173 }
174
175 if (*max >= 0x200) {
176 *max = value;
177 } else {
178 if (*max < value)
179 *max = value;
180 }
181 }
182 }
183
184 static void __rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *v1, u32 *v2)
185 {
186 if (*v1 >= 0x200 && *v2 >= 0x200) {
187 if (*v1 > *v2)
188 swap(*v1, *v2);
189 } else if (*v1 < 0x200 && *v2 < 0x200) {
190 if (*v1 > *v2)
191 swap(*v1, *v2);
192 } else if (*v1 < 0x200 && *v2 >= 0x200) {
193 swap(*v1, *v2);
194 }
195 }
196
197 static void rtw8822c_dac_iq_sort(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
198 {
199 u32 i, j;
200
201 for (i = 0; i < DACK_SN_8822C - 1; i++) {
202 for (j = 0; j < (DACK_SN_8822C - 1 - i) ; j++) {
203 __rtw8822c_dac_iq_sort(rtwdev, &iv[j], &iv[j + 1]);
204 __rtw8822c_dac_iq_sort(rtwdev, &qv[j], &qv[j + 1]);
205 }
206 }
207 }
208
209 static void rtw8822c_dac_iq_offset(struct rtw_dev *rtwdev, u32 *vec, u32 *val)
210 {
211 u32 p, m, t, i;
212
213 m = 0;
214 p = 0;
215 for (i = 10; i < DACK_SN_8822C - 10; i++) {
216 if (vec[i] > 0x200)
217 m = (0x400 - vec[i]) + m;
218 else
219 p = vec[i] + p;
220 }
221
222 if (p > m) {
223 t = p - m;
224 t = t / (DACK_SN_8822C - 20);
225 } else {
226 t = m - p;
227 t = t / (DACK_SN_8822C - 20);
228 if (t != 0x0)
229 t = 0x400 - t;
230 }
231
232 *val = t;
233 }
234
235 static u32 rtw8822c_get_path_write_addr(u8 path)
236 {
237 u32 base_addr;
238
239 switch (path) {
240 case RF_PATH_A:
241 base_addr = 0x1800;
242 break;
243 case RF_PATH_B:
244 base_addr = 0x4100;
245 break;
246 default:
247 WARN_ON(1);
248 return -1;
249 }
250
251 return base_addr;
252 }
253
254 static u32 rtw8822c_get_path_read_addr(u8 path)
255 {
256 u32 base_addr;
257
258 switch (path) {
259 case RF_PATH_A:
260 base_addr = 0x2800;
261 break;
262 case RF_PATH_B:
263 base_addr = 0x4500;
264 break;
265 default:
266 WARN_ON(1);
267 return -1;
268 }
269
270 return base_addr;
271 }
272
273 static bool rtw8822c_dac_iq_check(struct rtw_dev *rtwdev, u32 value)
274 {
275 bool ret = true;
276
277 if ((value >= 0x200 && (0x400 - value) > 0x64) ||
278 (value < 0x200 && value > 0x64)) {
279 ret = false;
280 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] Error overflow\n");
281 }
282
283 return ret;
284 }
285
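/* Collect DACK_SN_8822C I/Q sample pairs from register 0x2dbc (I in
 * bits 21:12, Q in bits 9:0), keeping only samples that pass the DC
 * range check; the loop is bounded so a stuck readback cannot spin
 * forever.
 */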
286 static void rtw8822c_dac_cal_iq_sample(struct rtw_dev *rtwdev, u32 *iv, u32 *qv)
287 {
288 u32 temp;
289 int i = 0, cnt = 0;
290
291 while (i < DACK_SN_8822C && cnt < 10000) {
292 cnt++;
293 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
294 iv[i] = (temp & 0x3ff000) >> 12;
295 qv[i] = temp & 0x3ff;
296
297 if (rtw8822c_dac_iq_check(rtwdev, iv[i]) &&
298 rtw8822c_dac_iq_check(rtwdev, qv[i]))
299 i++;
300 }
301 }
302
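/* Iteratively tighten the sampled I/Q sets: compute the signed spread
 * of each set, sort the samples, and if the spread still exceeds 5 LSB
 * overwrite the two extreme slots with fresh readings, for at most 100
 * rounds. The surviving samples are then averaged into the final I/Q
 * offsets.
 */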
303 static void rtw8822c_dac_cal_iq_search(struct rtw_dev *rtwdev,
304 u32 *iv, u32 *qv,
305 u32 *i_value, u32 *q_value)
306 {
307 u32 i_max = 0, q_max = 0, i_min = 0, q_min = 0;
308 u32 i_delta, q_delta;
309 u32 temp;
310 int i, cnt = 0;
311
312 do {
313 i_min = iv[0];
314 i_max = iv[0];
315 q_min = qv[0];
316 q_max = qv[0];
317 for (i = 0; i < DACK_SN_8822C; i++) {
318 rtw8822c_rf_minmax_cmp(rtwdev, iv[i], &i_min, &i_max);
319 rtw8822c_rf_minmax_cmp(rtwdev, qv[i], &q_min, &q_max);
320 }
321
322 if (i_max < 0x200 && i_min < 0x200)
323 i_delta = i_max - i_min;
324 else if (i_max >= 0x200 && i_min >= 0x200)
325 i_delta = i_max - i_min;
326 else
327 i_delta = i_max + (0x400 - i_min);
328
329 if (q_max < 0x200 && q_min < 0x200)
330 q_delta = q_max - q_min;
331 else if (q_max >= 0x200 && q_min >= 0x200)
332 q_delta = q_max - q_min;
333 else
334 q_delta = q_max + (0x400 - q_min);
335
336 rtw_dbg(rtwdev, RTW_DBG_RFK,
337 "[DACK] i: min=0x%08x, max=0x%08x, delta=0x%08x\n",
338 i_min, i_max, i_delta);
339 rtw_dbg(rtwdev, RTW_DBG_RFK,
340 "[DACK] q: min=0x%08x, max=0x%08x, delta=0x%08x\n",
341 q_min, q_max, q_delta);
342
343 rtw8822c_dac_iq_sort(rtwdev, iv, qv);
344
345 if (i_delta > 5 || q_delta > 5) {
346 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
347 iv[0] = (temp & 0x3ff000) >> 12;
348 qv[0] = temp & 0x3ff;
349 temp = rtw_read32_mask(rtwdev, 0x2dbc, 0x3fffff);
350 iv[DACK_SN_8822C - 1] = (temp & 0x3ff000) >> 12;
351 qv[DACK_SN_8822C - 1] = temp & 0x3ff;
352 } else {
353 break;
354 }
355 } while (cnt++ < 100);
356
357 rtw8822c_dac_iq_offset(rtwdev, iv, i_value);
358 rtw8822c_dac_iq_offset(rtwdev, qv, q_value);
359 }
360
361 static void rtw8822c_dac_cal_rf_mode(struct rtw_dev *rtwdev,
362 u32 *i_value, u32 *q_value)
363 {
364 u32 iv[DACK_SN_8822C], qv[DACK_SN_8822C];
365 u32 rf_a, rf_b;
366
367 rf_a = rtw_read_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK);
368 rf_b = rtw_read_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK);
369
370 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-A=0x%05x\n", rf_a);
371 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] RF path-B=0x%05x\n", rf_b);
372
373 rtw8822c_dac_cal_iq_sample(rtwdev, iv, qv);
374 rtw8822c_dac_cal_iq_search(rtwdev, iv, qv, i_value, q_value);
375 }
376
377 static void rtw8822c_dac_bb_setting(struct rtw_dev *rtwdev)
378 {
379 rtw_write32_mask(rtwdev, 0x1d58, 0xff8, 0x1ff);
380 rtw_write32_mask(rtwdev, 0x1a00, 0x3, 0x2);
381 rtw_write32_mask(rtwdev, 0x1a14, 0x300, 0x3);
382 rtw_write32(rtwdev, 0x1d70, 0x7e7e7e7e);
383 rtw_write32_mask(rtwdev, 0x180c, 0x3, 0x0);
384 rtw_write32_mask(rtwdev, 0x410c, 0x3, 0x0);
385 rtw_write32(rtwdev, 0x1b00, 0x00000008);
386 rtw_write8(rtwdev, 0x1bcc, 0x3f);
387 rtw_write32(rtwdev, 0x1b00, 0x0000000a);
388 rtw_write8(rtwdev, 0x1bcc, 0x3f);
389 rtw_write32_mask(rtwdev, 0x1e24, BIT(31), 0x0);
390 rtw_write32_mask(rtwdev, 0x1e28, 0xf, 0x3);
391 }
392
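/* ADC DC offset calibration: measure the residual I/Q DC level,
 * program its two's-complement compensation at offset 0x68 of the
 * path base address and re-measure, repeating up to 10 times until
 * the residual on both rails drops below 5 LSB.
 */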
393 static void rtw8822c_dac_cal_adc(struct rtw_dev *rtwdev,
394 u8 path, u32 *adc_ic, u32 *adc_qc)
395 {
396 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
397 u32 ic = 0, qc = 0, temp = 0;
398 u32 base_addr;
399 u32 path_sel;
400 int i;
401
402 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK path(%d)\n", path);
403
404 base_addr = rtw8822c_get_path_write_addr(path);
405 switch (path) {
406 case RF_PATH_A:
407 path_sel = 0xa0000;
408 break;
409 case RF_PATH_B:
410 path_sel = 0x80000;
411 break;
412 default:
413 WARN_ON(1);
414 return;
415 }
416
417 /* ADCK step1 */
418 rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x0);
419 if (path == RF_PATH_B)
420 rtw_write32(rtwdev, base_addr + 0x30, 0x30db8041);
421 rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
422 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
423 rtw_write32(rtwdev, base_addr + 0x10, 0x02dd08c4);
424 rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
425 rtw_write_rf(rtwdev, RF_PATH_A, 0x0, RFREG_MASK, 0x10000);
426 rtw_write_rf(rtwdev, RF_PATH_B, 0x0, RFREG_MASK, 0x10000);
427 for (i = 0; i < 10; i++) {
428 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK count=%d\n", i);
429 rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8003);
430 rtw_write32(rtwdev, 0x1c24, 0x00010002);
431 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
432 rtw_dbg(rtwdev, RTW_DBG_RFK,
433 "[DACK] before: i=0x%x, q=0x%x\n", ic, qc);
434
435 /* compensation value */
436 if (ic != 0x0) {
437 ic = 0x400 - ic;
438 *adc_ic = ic;
439 }
440 if (qc != 0x0) {
441 qc = 0x400 - qc;
442 *adc_qc = qc;
443 }
444 temp = (ic & 0x3ff) | ((qc & 0x3ff) << 10);
445 rtw_write32(rtwdev, base_addr + 0x68, temp);
446 dm_info->dack_adck[path] = temp;
447 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] ADCK 0x%08x=0x%08x\n",
448 base_addr + 0x68, temp);
449 /* check ADC DC offset */
450 rtw_write32(rtwdev, 0x1c3c, path_sel + 0x8103);
451 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
452 rtw_dbg(rtwdev, RTW_DBG_RFK,
453 "[DACK] after: i=0x%08x, q=0x%08x\n", ic, qc);
454 if (ic >= 0x200)
455 ic = 0x400 - ic;
456 if (qc >= 0x200)
457 qc = 0x400 - qc;
458 if (ic < 5 && qc < 5)
459 break;
460 }
461
462 /* ADCK step2 */
463 rtw_write32(rtwdev, 0x1c3c, 0x00000003);
464 rtw_write32(rtwdev, base_addr + 0x0c, 0x10000260);
465 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
466
467 /* release pull low switch on IQ path */
468 rtw_write_rf(rtwdev, path, 0x8f, BIT(13), 0x1);
469 }
470
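/* The per-path DAC calibration runs in steps: step1 kicks off the
 * hardware calibration and waits for it to report ready, step2 reads
 * the residual I/Q and converts it into compensation codes, step3
 * writes those codes back and re-checks the DAC DC offset, and step4
 * undoes the calibration-mode settings.
 */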
471 static void rtw8822c_dac_cal_step1(struct rtw_dev *rtwdev, u8 path)
472 {
473 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
474 u32 base_addr;
475 u32 read_addr;
476
477 base_addr = rtw8822c_get_path_write_addr(path);
478 read_addr = rtw8822c_get_path_read_addr(path);
479
480 rtw_write32(rtwdev, base_addr + 0x68, dm_info->dack_adck[path]);
481 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
482 if (path == RF_PATH_A) {
483 rtw_write32(rtwdev, base_addr + 0x60, 0xf0040ff0);
484 rtw_write32(rtwdev, 0x1c38, 0xffffffff);
485 }
486 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
487 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
488 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
489 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff81);
490 rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
491 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
492 rtw_write32(rtwdev, base_addr + 0xd8, 0x0008ff81);
493 rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
494 rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
495 mdelay(2);
496 rtw_write32(rtwdev, base_addr + 0xbc, 0x000aff8d);
497 mdelay(2);
498 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
499 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
500 mdelay(1);
501 rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
502 rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
503 mdelay(20);
504 if (!check_hw_ready(rtwdev, read_addr + 0x08, 0x7fff80, 0xffff) ||
505 !check_hw_ready(rtwdev, read_addr + 0x34, 0x7fff80, 0xffff))
506 rtw_err(rtwdev, "failed to wait for dack ready\n");
507 rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
508 mdelay(1);
509 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
510 rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
511 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
512 rtw_write32(rtwdev, base_addr + 0xbc, 0x0008ff87);
513 rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
514 }
515
516 static void rtw8822c_dac_cal_step2(struct rtw_dev *rtwdev,
517 u8 path, u32 *ic_out, u32 *qc_out)
518 {
519 u32 base_addr;
520 u32 ic, qc, ic_in, qc_in;
521
522 base_addr = rtw8822c_get_path_write_addr(path);
523 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, 0x0);
524 rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, 0x8);
525 rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, 0x0);
526 rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, 0x8);
527
528 rtw_write32(rtwdev, 0x1b00, 0x00000008);
529 rtw_write8(rtwdev, 0x1bcc, 0x03f);
530 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
531 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
532 rtw_write32(rtwdev, 0x1c3c, 0x00088103);
533
534 rtw8822c_dac_cal_rf_mode(rtwdev, &ic_in, &qc_in);
535 ic = ic_in;
536 qc = qc_in;
537
538 /* compensation value */
539 if (ic != 0x0)
540 ic = 0x400 - ic;
541 if (qc != 0x0)
542 qc = 0x400 - qc;
543 if (ic < 0x300) {
544 ic = ic * 2 * 6 / 5;
545 ic = ic + 0x80;
546 } else {
547 ic = (0x400 - ic) * 2 * 6 / 5;
548 ic = 0x7f - ic;
549 }
550 if (qc < 0x300) {
551 qc = qc * 2 * 6 / 5;
552 qc = qc + 0x80;
553 } else {
554 qc = (0x400 - qc) * 2 * 6 / 5;
555 qc = 0x7f - qc;
556 }
557
558 *ic_out = ic;
559 *qc_out = qc;
560
561 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] before i=0x%x, q=0x%x\n", ic_in, qc_in);
562 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] after i=0x%x, q=0x%x\n", ic, qc);
563 }
564
565 static void rtw8822c_dac_cal_step3(struct rtw_dev *rtwdev, u8 path,
566 u32 adc_ic, u32 adc_qc,
567 u32 *ic_in, u32 *qc_in,
568 u32 *i_out, u32 *q_out)
569 {
570 u32 base_addr;
571 u32 read_addr;
572 u32 ic, qc;
573 u32 temp;
574
575 base_addr = rtw8822c_get_path_write_addr(path);
576 read_addr = rtw8822c_get_path_read_addr(path);
577 ic = *ic_in;
578 qc = *qc_in;
579
580 rtw_write32(rtwdev, base_addr + 0x0c, 0xdff00220);
581 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
582 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
583 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb88);
584 rtw_write32(rtwdev, base_addr + 0xbc, 0xc008ff81);
585 rtw_write32(rtwdev, base_addr + 0xc0, 0x0003d208);
586 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xf0000000, ic & 0xf);
587 rtw_write32_mask(rtwdev, base_addr + 0xc0, 0xf, (ic & 0xf0) >> 4);
588 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb88);
589 rtw_write32(rtwdev, base_addr + 0xd8, 0xe008ff81);
590 rtw_write32(rtwdev, base_addr + 0xdc, 0x0003d208);
591 rtw_write32_mask(rtwdev, base_addr + 0xd8, 0xf0000000, qc & 0xf);
592 rtw_write32_mask(rtwdev, base_addr + 0xdc, 0xf, (qc & 0xf0) >> 4);
593 rtw_write32(rtwdev, base_addr + 0xb8, 0x60000000);
594 mdelay(2);
595 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x6);
596 mdelay(2);
597 rtw_write32(rtwdev, base_addr + 0xb0, 0x0a11fb89);
598 rtw_write32(rtwdev, base_addr + 0xcc, 0x0a11fb89);
599 mdelay(1);
600 rtw_write32(rtwdev, base_addr + 0xb8, 0x62000000);
601 rtw_write32(rtwdev, base_addr + 0xd4, 0x62000000);
602 mdelay(20);
603 if (!check_hw_ready(rtwdev, read_addr + 0x24, 0x07f80000, ic) ||
604 !check_hw_ready(rtwdev, read_addr + 0x50, 0x07f80000, qc))
605 rtw_err(rtwdev, "failed to write IQ vector to hardware\n");
606 rtw_write32(rtwdev, base_addr + 0xb8, 0x02000000);
607 mdelay(1);
608 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0xe, 0x3);
609 rtw_write32(rtwdev, 0x9b4, 0xdb6db600);
610
611 /* check DAC DC offset */
612 temp = ((adc_ic + 0x10) & 0x3ff) | (((adc_qc + 0x10) & 0x3ff) << 10);
613 rtw_write32(rtwdev, base_addr + 0x68, temp);
614 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c5);
615 rtw_write32(rtwdev, base_addr + 0x60, 0xf0000000);
616 rtw8822c_dac_cal_rf_mode(rtwdev, &ic, &qc);
617 if (ic >= 0x10)
618 ic = ic - 0x10;
619 else
620 ic = 0x400 - (0x10 - ic);
621
622 if (qc >= 0x10)
623 qc = qc - 0x10;
624 else
625 qc = 0x400 - (0x10 - qc);
626
627 *i_out = ic;
628 *q_out = qc;
629
630 if (ic >= 0x200)
631 ic = 0x400 - ic;
632 if (qc >= 0x200)
633 qc = 0x400 - qc;
634
635 *ic_in = ic;
636 *qc_in = qc;
637
638 rtw_dbg(rtwdev, RTW_DBG_RFK,
639 "[DACK] after DACK i=0x%x, q=0x%x\n", *i_out, *q_out);
640 }
641
642 static void rtw8822c_dac_cal_step4(struct rtw_dev *rtwdev, u8 path)
643 {
644 u32 base_addr = rtw8822c_get_path_write_addr(path);
645
646 rtw_write32(rtwdev, base_addr + 0x68, 0x0);
647 rtw_write32(rtwdev, base_addr + 0x10, 0x02d508c4);
648 rtw_write32_mask(rtwdev, base_addr + 0xbc, 0x1, 0x0);
649 rtw_write32_mask(rtwdev, base_addr + 0x30, BIT(30), 0x1);
650 }
651
652 static void rtw8822c_dac_cal_backup_vec(struct rtw_dev *rtwdev,
653 u8 path, u8 vec, u32 w_addr, u32 r_addr)
654 {
655 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
656 u16 val;
657 u32 i;
658
659 if (WARN_ON(vec >= 2))
660 return;
661
662 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
663 rtw_write32_mask(rtwdev, w_addr, 0xf0000000, i);
664 val = (u16)rtw_read32_mask(rtwdev, r_addr, 0x7fc0000);
665 dm_info->dack_msbk[path][vec][i] = val;
666 }
667 }
668
669 static void rtw8822c_dac_cal_backup_path(struct rtw_dev *rtwdev, u8 path)
670 {
671 u32 w_off = 0x1c;
672 u32 r_off = 0x2c;
673 u32 w_addr, r_addr;
674
675 if (WARN_ON(path >= 2))
676 return;
677
678 /* backup I vector */
679 w_addr = rtw8822c_get_path_write_addr(path) + 0xb0;
680 r_addr = rtw8822c_get_path_read_addr(path) + 0x10;
681 rtw8822c_dac_cal_backup_vec(rtwdev, path, 0, w_addr, r_addr);
682
683 /* backup Q vector */
684 w_addr = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
685 r_addr = rtw8822c_get_path_read_addr(path) + 0x10 + r_off;
686 rtw8822c_dac_cal_backup_vec(rtwdev, path, 1, w_addr, r_addr);
687 }
688
689 static void rtw8822c_dac_cal_backup_dck(struct rtw_dev *rtwdev)
690 {
691 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
692 u8 val;
693
694 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000);
695 dm_info->dack_dck[RF_PATH_A][0][0] = val;
696 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_I_1, 0xf);
697 dm_info->dack_dck[RF_PATH_A][0][1] = val;
698 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000);
699 dm_info->dack_dck[RF_PATH_A][1][0] = val;
700 val = (u8)rtw_read32_mask(rtwdev, REG_DCKA_Q_1, 0xf);
701 dm_info->dack_dck[RF_PATH_A][1][1] = val;
702
703 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000);
704 dm_info->dack_dck[RF_PATH_B][0][0] = val;
705 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_I_1, 0xf);
706 dm_info->dack_dck[RF_PATH_B][1][0] = val;
707 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000);
708 dm_info->dack_dck[RF_PATH_B][0][1] = val;
709 val = (u8)rtw_read32_mask(rtwdev, REG_DCKB_Q_1, 0xf);
710 dm_info->dack_dck[RF_PATH_B][1][1] = val;
711 }
712
713 static void rtw8822c_dac_cal_backup(struct rtw_dev *rtwdev)
714 {
715 u32 temp[3];
716
717 temp[0] = rtw_read32(rtwdev, 0x1860);
718 temp[1] = rtw_read32(rtwdev, 0x4160);
719 temp[2] = rtw_read32(rtwdev, 0x9b4);
720
721 /* set clock */
722 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
723
724 /* backup path-A I/Q */
725 rtw_write32_clr(rtwdev, 0x1830, BIT(30));
726 rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
727 rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_A);
728
729 /* backup path-B I/Q */
730 rtw_write32_clr(rtwdev, 0x4130, BIT(30));
731 rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
732 rtw8822c_dac_cal_backup_path(rtwdev, RF_PATH_B);
733
734 rtw8822c_dac_cal_backup_dck(rtwdev);
735 rtw_write32_set(rtwdev, 0x1830, BIT(30));
736 rtw_write32_set(rtwdev, 0x4130, BIT(30));
737
738 rtw_write32(rtwdev, 0x1860, temp[0]);
739 rtw_write32(rtwdev, 0x4160, temp[1]);
740 rtw_write32(rtwdev, 0x9b4, temp[2]);
741 }
742
743 static void rtw8822c_dac_cal_restore_dck(struct rtw_dev *rtwdev)
744 {
745 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
746 u8 val;
747
748 rtw_write32_set(rtwdev, REG_DCKA_I_0, BIT(19));
749 val = dm_info->dack_dck[RF_PATH_A][0][0];
750 rtw_write32_mask(rtwdev, REG_DCKA_I_0, 0xf0000000, val);
751 val = dm_info->dack_dck[RF_PATH_A][0][1];
752 rtw_write32_mask(rtwdev, REG_DCKA_I_1, 0xf, val);
753
754 rtw_write32_set(rtwdev, REG_DCKA_Q_0, BIT(19));
755 val = dm_info->dack_dck[RF_PATH_A][1][0];
756 rtw_write32_mask(rtwdev, REG_DCKA_Q_0, 0xf0000000, val);
757 val = dm_info->dack_dck[RF_PATH_A][1][1];
758 rtw_write32_mask(rtwdev, REG_DCKA_Q_1, 0xf, val);
759
760 rtw_write32_set(rtwdev, REG_DCKB_I_0, BIT(19));
761 val = dm_info->dack_dck[RF_PATH_B][0][0];
762 rtw_write32_mask(rtwdev, REG_DCKB_I_0, 0xf0000000, val);
763 val = dm_info->dack_dck[RF_PATH_B][0][1];
764 rtw_write32_mask(rtwdev, REG_DCKB_I_1, 0xf, val);
765
766 rtw_write32_set(rtwdev, REG_DCKB_Q_0, BIT(19));
767 val = dm_info->dack_dck[RF_PATH_B][1][0];
768 rtw_write32_mask(rtwdev, REG_DCKB_Q_0, 0xf0000000, val);
769 val = dm_info->dack_dck[RF_PATH_B][1][1];
770 rtw_write32_mask(rtwdev, REG_DCKB_Q_1, 0xf, val);
771 }
772
773 static void rtw8822c_dac_cal_restore_prepare(struct rtw_dev *rtwdev)
774 {
775 rtw_write32(rtwdev, 0x9b4, 0xdb66db00);
776
777 rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x0);
778 rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x0);
779 rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x0);
780 rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x0);
781
782 rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x0);
783 rtw_write32_mask(rtwdev, 0x1860, 0xfc000000, 0x3c);
784 rtw_write32_mask(rtwdev, 0x18b4, BIT(0), 0x1);
785 rtw_write32_mask(rtwdev, 0x18d0, BIT(0), 0x1);
786
787 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x0);
788 rtw_write32_mask(rtwdev, 0x4160, 0xfc000000, 0x3c);
789 rtw_write32_mask(rtwdev, 0x41b4, BIT(0), 0x1);
790 rtw_write32_mask(rtwdev, 0x41d0, BIT(0), 0x1);
791
792 rtw_write32_mask(rtwdev, 0x18b0, 0xf00, 0x0);
793 rtw_write32_mask(rtwdev, 0x18c0, BIT(14), 0x0);
794 rtw_write32_mask(rtwdev, 0x18cc, 0xf00, 0x0);
795 rtw_write32_mask(rtwdev, 0x18dc, BIT(14), 0x0);
796
797 rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x0);
798 rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x0);
799 rtw_write32_mask(rtwdev, 0x18b0, BIT(0), 0x1);
800 rtw_write32_mask(rtwdev, 0x18cc, BIT(0), 0x1);
801
802 rtw8822c_dac_cal_restore_dck(rtwdev);
803
804 rtw_write32_mask(rtwdev, 0x18c0, 0x38000, 0x7);
805 rtw_write32_mask(rtwdev, 0x18dc, 0x38000, 0x7);
806 rtw_write32_mask(rtwdev, 0x41c0, 0x38000, 0x7);
807 rtw_write32_mask(rtwdev, 0x41dc, 0x38000, 0x7);
808
809 rtw_write32_mask(rtwdev, 0x18b8, BIT(26) | BIT(25), 0x1);
810 rtw_write32_mask(rtwdev, 0x18d4, BIT(26) | BIT(25), 0x1);
811
812 rtw_write32_mask(rtwdev, 0x41b0, 0xf00, 0x0);
813 rtw_write32_mask(rtwdev, 0x41c0, BIT(14), 0x0);
814 rtw_write32_mask(rtwdev, 0x41cc, 0xf00, 0x0);
815 rtw_write32_mask(rtwdev, 0x41dc, BIT(14), 0x0);
816
817 rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x0);
818 rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x0);
819 rtw_write32_mask(rtwdev, 0x41b0, BIT(0), 0x1);
820 rtw_write32_mask(rtwdev, 0x41cc, BIT(0), 0x1);
821
822 rtw_write32_mask(rtwdev, 0x41b8, BIT(26) | BIT(25), 0x1);
823 rtw_write32_mask(rtwdev, 0x41d4, BIT(26) | BIT(25), 0x1);
824 }
825
826 static bool rtw8822c_dac_cal_restore_wait(struct rtw_dev *rtwdev,
827 u32 target_addr, u32 toggle_addr)
828 {
829 u32 cnt = 0;
830
831 do {
832 rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x0);
833 rtw_write32_mask(rtwdev, toggle_addr, BIT(26) | BIT(25), 0x2);
834
835 if (rtw_read32_mask(rtwdev, target_addr, 0xf) == 0x6)
836 return true;
837
838 } while (cnt++ < 100);
839
840 return false;
841 }
842
843 static bool rtw8822c_dac_cal_restore_path(struct rtw_dev *rtwdev, u8 path)
844 {
845 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
846 u32 w_off = 0x1c;
847 u32 r_off = 0x2c;
848 u32 w_i, r_i, w_q, r_q;
849 u32 value;
850 u32 i;
851
852 w_i = rtw8822c_get_path_write_addr(path) + 0xb0;
853 r_i = rtw8822c_get_path_read_addr(path) + 0x08;
854 w_q = rtw8822c_get_path_write_addr(path) + 0xb0 + w_off;
855 r_q = rtw8822c_get_path_read_addr(path) + 0x08 + r_off;
856
857 if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_i, w_i + 0x8))
858 return false;
859
860 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
861 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
862 value = dm_info->dack_msbk[path][0][i];
863 rtw_write32_mask(rtwdev, w_i + 0x4, 0xff8, value);
864 rtw_write32_mask(rtwdev, w_i, 0xf0000000, i);
865 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x1);
866 }
867
868 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(2), 0x0);
869
870 if (!rtw8822c_dac_cal_restore_wait(rtwdev, r_q, w_q + 0x8))
871 return false;
872
873 for (i = 0; i < DACK_MSBK_BACKUP_NUM; i++) {
874 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
875 value = dm_info->dack_msbk[path][1][i];
876 rtw_write32_mask(rtwdev, w_q + 0x4, 0xff8, value);
877 rtw_write32_mask(rtwdev, w_q, 0xf0000000, i);
878 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x1);
879 }
880 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(2), 0x0);
881
882 rtw_write32_mask(rtwdev, w_i + 0x8, BIT(26) | BIT(25), 0x0);
883 rtw_write32_mask(rtwdev, w_q + 0x8, BIT(26) | BIT(25), 0x0);
884 rtw_write32_mask(rtwdev, w_i + 0x4, BIT(0), 0x0);
885 rtw_write32_mask(rtwdev, w_q + 0x4, BIT(0), 0x0);
886
887 return true;
888 }
889
890 static bool __rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
891 {
892 if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_A))
893 return false;
894
895 if (!rtw8822c_dac_cal_restore_path(rtwdev, RF_PATH_B))
896 return false;
897
898 return true;
899 }
900
901 static bool rtw8822c_dac_cal_restore(struct rtw_dev *rtwdev)
902 {
903 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
904 u32 temp[3];
905
906 /* sample the first element of both paths' IQ vectors */
907 if (dm_info->dack_msbk[RF_PATH_A][0][0] == 0 &&
908 dm_info->dack_msbk[RF_PATH_A][1][0] == 0 &&
909 dm_info->dack_msbk[RF_PATH_B][0][0] == 0 &&
910 dm_info->dack_msbk[RF_PATH_B][1][0] == 0)
911 return false;
912
913 temp[0] = rtw_read32(rtwdev, 0x1860);
914 temp[1] = rtw_read32(rtwdev, 0x4160);
915 temp[2] = rtw_read32(rtwdev, 0x9b4);
916
917 rtw8822c_dac_cal_restore_prepare(rtwdev);
918 if (!check_hw_ready(rtwdev, 0x2808, 0x7fff80, 0xffff) ||
919 !check_hw_ready(rtwdev, 0x2834, 0x7fff80, 0xffff) ||
920 !check_hw_ready(rtwdev, 0x4508, 0x7fff80, 0xffff) ||
921 !check_hw_ready(rtwdev, 0x4534, 0x7fff80, 0xffff))
922 return false;
923
924 if (!__rtw8822c_dac_cal_restore(rtwdev)) {
925 rtw_err(rtwdev, "failed to restore dack vectors\n");
926 return false;
927 }
928
929 rtw_write32_mask(rtwdev, 0x1830, BIT(30), 0x1);
930 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
931 rtw_write32(rtwdev, 0x1860, temp[0]);
932 rtw_write32(rtwdev, 0x4160, temp[1]);
933 rtw_write32_mask(rtwdev, 0x18b0, BIT(27), 0x1);
934 rtw_write32_mask(rtwdev, 0x18cc, BIT(27), 0x1);
935 rtw_write32_mask(rtwdev, 0x41b0, BIT(27), 0x1);
936 rtw_write32_mask(rtwdev, 0x41cc, BIT(27), 0x1);
937 rtw_write32(rtwdev, 0x9b4, temp[2]);
938
939 return true;
940 }
941
942 static void rtw8822c_rf_dac_cal(struct rtw_dev *rtwdev)
943 {
944 struct rtw_backup_info backup_rf[DACK_RF_8822C * DACK_PATH_8822C];
945 struct rtw_backup_info backup[DACK_REG_8822C];
946 u32 ic = 0, qc = 0, i;
947 u32 i_a = 0x0, q_a = 0x0, i_b = 0x0, q_b = 0x0;
948 u32 ic_a = 0x0, qc_a = 0x0, ic_b = 0x0, qc_b = 0x0;
949 u32 adc_ic_a = 0x0, adc_qc_a = 0x0, adc_ic_b = 0x0, adc_qc_b = 0x0;
950
951 if (rtw8822c_dac_cal_restore(rtwdev))
952 return;
953
954 /* not able to restore the previous results, do a full calibration */
955
956 rtw8822c_dac_backup_reg(rtwdev, backup, backup_rf);
957
958 rtw8822c_dac_bb_setting(rtwdev);
959
960 /* path-A */
961 rtw8822c_dac_cal_adc(rtwdev, RF_PATH_A, &adc_ic_a, &adc_qc_a);
962 for (i = 0; i < 10; i++) {
963 rtw8822c_dac_cal_step1(rtwdev, RF_PATH_A);
964 rtw8822c_dac_cal_step2(rtwdev, RF_PATH_A, &ic, &qc);
965 ic_a = ic;
966 qc_a = qc;
967
968 rtw8822c_dac_cal_step3(rtwdev, RF_PATH_A, adc_ic_a, adc_qc_a,
969 &ic, &qc, &i_a, &q_a);
970
971 if (ic < 5 && qc < 5)
972 break;
973 }
974 rtw8822c_dac_cal_step4(rtwdev, RF_PATH_A);
975
976 /* path-B */
977 rtw8822c_dac_cal_adc(rtwdev, RF_PATH_B, &adc_ic_b, &adc_qc_b);
978 for (i = 0; i < 10; i++) {
979 rtw8822c_dac_cal_step1(rtwdev, RF_PATH_B);
980 rtw8822c_dac_cal_step2(rtwdev, RF_PATH_B, &ic, &qc);
981 ic_b = ic;
982 qc_b = qc;
983
984 rtw8822c_dac_cal_step3(rtwdev, RF_PATH_B, adc_ic_b, adc_qc_b,
985 &ic, &qc, &i_b, &q_b);
986
987 if (ic < 5 && qc < 5)
988 break;
989 }
990 rtw8822c_dac_cal_step4(rtwdev, RF_PATH_B);
991
992 rtw_write32(rtwdev, 0x1b00, 0x00000008);
993 rtw_write32_mask(rtwdev, 0x4130, BIT(30), 0x1);
994 rtw_write8(rtwdev, 0x1bcc, 0x0);
995 rtw_write32(rtwdev, 0x1b00, 0x0000000a);
996 rtw_write8(rtwdev, 0x1bcc, 0x0);
997
998 rtw8822c_dac_restore_reg(rtwdev, backup, backup_rf);
999
1000 /* backup results to restore, saving a lot of time */
1001 rtw8822c_dac_cal_backup(rtwdev);
1002
1003 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: ic=0x%x, qc=0x%x\n", ic_a, qc_a);
1004 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: ic=0x%x, qc=0x%x\n", ic_b, qc_b);
1005 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path A: i=0x%x, q=0x%x\n", i_a, q_a);
1006 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DACK] path B: i=0x%x, q=0x%x\n", i_b, q_b);
1007 }
1008
1009 static void rtw8822c_rf_x2_check(struct rtw_dev *rtwdev)
1010 {
1011 u8 x2k_busy;
1012
1013 mdelay(1);
1014 x2k_busy = rtw_read_rf(rtwdev, RF_PATH_A, 0xb8, BIT(15));
1015 if (x2k_busy == 1) {
1016 rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0xC4440);
1017 rtw_write_rf(rtwdev, RF_PATH_A, 0xba, RFREG_MASK, 0x6840D);
1018 rtw_write_rf(rtwdev, RF_PATH_A, 0xb8, RFREG_MASK, 0x80440);
1019 mdelay(1);
1020 }
1021 }
1022
1023 static void rtw8822c_set_power_trim(struct rtw_dev *rtwdev, s8 bb_gain[2][8])
1024 {
1025 #define RF_SET_POWER_TRIM(_path, _seq, _idx) \
1026 do { \
1027 rtw_write_rf(rtwdev, _path, 0x33, RFREG_MASK, _seq); \
1028 rtw_write_rf(rtwdev, _path, 0x3f, RFREG_MASK, \
1029 bb_gain[_path][_idx]); \
1030 } while (0)
1031 u8 path;
1032
1033 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1034 rtw_write_rf(rtwdev, path, 0xee, BIT(19), 1);
1035 RF_SET_POWER_TRIM(path, 0x0, 0);
1036 RF_SET_POWER_TRIM(path, 0x1, 1);
1037 RF_SET_POWER_TRIM(path, 0x2, 2);
1038 RF_SET_POWER_TRIM(path, 0x3, 2);
1039 RF_SET_POWER_TRIM(path, 0x4, 3);
1040 RF_SET_POWER_TRIM(path, 0x5, 4);
1041 RF_SET_POWER_TRIM(path, 0x6, 5);
1042 RF_SET_POWER_TRIM(path, 0x7, 6);
1043 RF_SET_POWER_TRIM(path, 0x8, 7);
1044 RF_SET_POWER_TRIM(path, 0x9, 3);
1045 RF_SET_POWER_TRIM(path, 0xa, 4);
1046 RF_SET_POWER_TRIM(path, 0xb, 5);
1047 RF_SET_POWER_TRIM(path, 0xc, 6);
1048 RF_SET_POWER_TRIM(path, 0xd, 7);
1049 RF_SET_POWER_TRIM(path, 0xe, 7);
1050 rtw_write_rf(rtwdev, path, 0xee, BIT(19), 0);
1051 }
1052 #undef RF_SET_POWER_TRIM
1053 }
1054
1055 static void rtw8822c_power_trim(struct rtw_dev *rtwdev)
1056 {
1057 u8 pg_pwr = 0xff, i, path, idx;
1058 s8 bb_gain[2][8] = {};
1059 u16 rf_efuse_2g[3] = {PPG_2GL_TXAB, PPG_2GM_TXAB, PPG_2GH_TXAB};
1060 u16 rf_efuse_5g[2][5] = {{PPG_5GL1_TXA, PPG_5GL2_TXA, PPG_5GM1_TXA,
1061 PPG_5GM2_TXA, PPG_5GH1_TXA},
1062 {PPG_5GL1_TXB, PPG_5GL2_TXB, PPG_5GM1_TXB,
1063 PPG_5GM2_TXB, PPG_5GH1_TXB} };
1064 bool set = false;
1065
1066 for (i = 0; i < ARRAY_SIZE(rf_efuse_2g); i++) {
1067 rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[i], &pg_pwr);
1068 if (pg_pwr == EFUSE_READ_FAIL)
1069 continue;
1070 set = true;
1071 bb_gain[RF_PATH_A][i] = FIELD_GET(PPG_2G_A_MASK, pg_pwr);
1072 bb_gain[RF_PATH_B][i] = FIELD_GET(PPG_2G_B_MASK, pg_pwr);
1073 }
1074
1075 for (i = 0; i < ARRAY_SIZE(rf_efuse_5g[0]); i++) {
1076 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1077 rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path][i],
1078 &pg_pwr);
1079 if (pg_pwr == EFUSE_READ_FAIL)
1080 continue;
1081 set = true;
1082 idx = i + ARRAY_SIZE(rf_efuse_2g);
1083 bb_gain[path][idx] = FIELD_GET(PPG_5G_MASK, pg_pwr);
1084 }
1085 }
1086 if (set)
1087 rtw8822c_set_power_trim(rtwdev, bb_gain);
1088
1089 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1090 }
1091
1092 static void rtw8822c_thermal_trim(struct rtw_dev *rtwdev)
1093 {
1094 u16 rf_efuse[2] = {PPG_THERMAL_A, PPG_THERMAL_B};
1095 u8 pg_therm = 0xff, thermal[2] = {0}, path;
1096
1097 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1098 rtw_read8_physical_efuse(rtwdev, rf_efuse[path], &pg_therm);
1099 if (pg_therm == EFUSE_READ_FAIL)
1100 return;
1101 /* The efuse value of BIT(0) shall be moved to BIT(3), and the values
1102 * of BIT(1) to BIT(3) should be right-shifted by 1 bit.
1103 */
1104 thermal[path] = FIELD_GET(GENMASK(3, 1), pg_therm);
1105 thermal[path] |= FIELD_PREP(BIT(3), pg_therm & BIT(0));
1106 rtw_write_rf(rtwdev, path, 0x43, RF_THEMAL_MASK, thermal[path]);
1107 }
1108 }
1109
1110 static void rtw8822c_pa_bias(struct rtw_dev *rtwdev)
1111 {
1112 u16 rf_efuse_2g[2] = {PPG_PABIAS_2GA, PPG_PABIAS_2GB};
1113 u16 rf_efuse_5g[2] = {PPG_PABIAS_5GA, PPG_PABIAS_5GB};
1114 u8 pg_pa_bias = 0xff, path;
1115
1116 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1117 rtw_read8_physical_efuse(rtwdev, rf_efuse_2g[path],
1118 &pg_pa_bias);
1119 if (pg_pa_bias == EFUSE_READ_FAIL)
1120 return;
1121 pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1122 rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_2G_MASK, pg_pa_bias);
1123 }
1124 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1125 rtw_read8_physical_efuse(rtwdev, rf_efuse_5g[path],
1126 &pg_pa_bias);
1127 pg_pa_bias = FIELD_GET(PPG_PABIAS_MASK, pg_pa_bias);
1128 rtw_write_rf(rtwdev, path, RF_PA, RF_PABIAS_5G_MASK, pg_pa_bias);
1129 }
1130 }
1131
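/* WiFi/BT RFK handshake: before a calibration, wait for any BT IQK in
 * progress to finish, then tell the firmware a WiFi RFK is starting
 * and poll for its acknowledgement; after the calibration, inform the
 * firmware it has finished and poll for the acknowledgement again.
 */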
1132 static void rtw8822c_rfk_handshake(struct rtw_dev *rtwdev, bool is_before_k)
1133 {
1134 struct rtw_dm_info *dm = &rtwdev->dm_info;
1135 u8 u1b_tmp;
1136 u8 u4b_tmp;
1137 int ret;
1138
1139 if (is_before_k) {
1140 rtw_dbg(rtwdev, RTW_DBG_RFK,
1141 "[RFK] WiFi / BT RFK handshake start!!\n");
1142
1143 if (!dm->is_bt_iqk_timeout) {
1144 ret = read_poll_timeout(rtw_read32_mask, u4b_tmp,
1145 u4b_tmp == 0, 20, 600000, false,
1146 rtwdev, REG_PMC_DBG_CTRL1,
1147 BITS_PMC_BT_IQK_STS);
1148 if (ret) {
1149 rtw_dbg(rtwdev, RTW_DBG_RFK,
1150 "[RFK] Wait BT IQK finish timeout!!\n");
1151 dm->is_bt_iqk_timeout = true;
1152 }
1153 }
1154
1155 rtw_fw_inform_rfk_status(rtwdev, true);
1156
1157 ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1158 u1b_tmp == 1, 20, 100000, false,
1159 rtwdev, REG_ARFR4, BIT_WL_RFK);
1160 if (ret)
1161 rtw_dbg(rtwdev, RTW_DBG_RFK,
1162 "[RFK] Send WiFi RFK start H2C cmd FAIL!!\n");
1163 } else {
1164 rtw_fw_inform_rfk_status(rtwdev, false);
1165 ret = read_poll_timeout(rtw_read8_mask, u1b_tmp,
1166 u1b_tmp == 1, 20, 100000, false,
1167 rtwdev, REG_ARFR4,
1168 BIT_WL_RFK);
1169 if (ret)
1170 rtw_dbg(rtwdev, RTW_DBG_RFK,
1171 "[RFK] Send WiFi RFK finish H2C cmd FAIL!!\n");
1172
1173 rtw_dbg(rtwdev, RTW_DBG_RFK,
1174 "[RFK] WiFi / BT RFK handshake finish!!\n");
1175 }
1176 }
1177
1178 static void rtw8822c_rfk_power_save(struct rtw_dev *rtwdev,
1179 bool is_power_save)
1180 {
1181 u8 path;
1182
1183 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1184 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1185 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_PS_EN,
1186 is_power_save ? 0 : 1);
1187 }
1188 }
1189
1190 static void rtw8822c_txgapk_backup_bb_reg(struct rtw_dev *rtwdev, const u32 reg[],
1191 u32 reg_backup[], u32 reg_num)
1192 {
1193 u32 i;
1194
1195 for (i = 0; i < reg_num; i++) {
1196 reg_backup[i] = rtw_read32(rtwdev, reg[i]);
1197
1198 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Backup BB 0x%x = 0x%x\n",
1199 reg[i], reg_backup[i]);
1200 }
1201 }
1202
1203 static void rtw8822c_txgapk_reload_bb_reg(struct rtw_dev *rtwdev,
1204 const u32 reg[], u32 reg_backup[],
1205 u32 reg_num)
1206 {
1207 u32 i;
1208
1209 for (i = 0; i < reg_num; i++) {
1210 rtw_write32(rtwdev, reg[i], reg_backup[i]);
1211 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Reload BB 0x%x = 0x%x\n",
1212 reg[i], reg_backup[i]);
1213 }
1214 }
1215
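/* Return true only when neither RF path reports the given mode in its
 * TRXAGC mode field; used by the TX-pause poll below to wait until
 * both paths have left that mode.
 */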
1216 static bool check_rf_status(struct rtw_dev *rtwdev, u8 status)
1217 {
1218 u8 reg_rf0_a, reg_rf0_b;
1219
1220 reg_rf0_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A,
1221 RF_MODE_TRXAGC, BIT_RF_MODE);
1222 reg_rf0_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B,
1223 RF_MODE_TRXAGC, BIT_RF_MODE);
1224
1225 if (reg_rf0_a == status || reg_rf0_b == status)
1226 return false;
1227
1228 return true;
1229 }
1230
1231 static void rtw8822c_txgapk_tx_pause(struct rtw_dev *rtwdev)
1232 {
1233 bool status;
1234 int ret;
1235
1236 rtw_write8(rtwdev, REG_TXPAUSE, BIT_AC_QUEUE);
1237 rtw_write32_mask(rtwdev, REG_TX_FIFO, BIT_STOP_TX, 0x2);
1238
1239 ret = read_poll_timeout_atomic(check_rf_status, status, status,
1240 2, 5000, false, rtwdev, 2);
1241 if (ret)
1242 rtw_warn(rtwdev, "failed to pause TX\n");
1243
1244 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] Tx pause!!\n");
1245 }
1246
1247 static void rtw8822c_txgapk_bb_dpk(struct rtw_dev *rtwdev, u8 path)
1248 {
1249 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1250
1251 rtw_write32_mask(rtwdev, REG_ENFN, BIT_IQK_DPK_EN, 0x1);
1252 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1253 BIT_IQK_DPK_CLOCK_SRC, 0x1);
1254 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2,
1255 BIT_IQK_DPK_RESET_SRC, 0x1);
1256 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_EN_IOQ_IQK_DPK, 0x1);
1257 rtw_write32_mask(rtwdev, REG_CH_DELAY_EXTR2, BIT_TST_IQK2SET_SRC, 0x0);
1258 rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x1ff);
1259
1260 if (path == RF_PATH_A) {
1261 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1262 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1263 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x1);
1264 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1265 BIT_TX_SCALE_0DB, 0x1);
1266 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x0);
1267 } else if (path == RF_PATH_B) {
1268 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1269 BIT_RFTXEN_GCK_FORCE_ON, 0x1);
1270 rtw_write32_mask(rtwdev, REG_3WIRE2,
1271 BIT_DIS_SHARERX_TXGAT, 0x1);
1272 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1273 BIT_TX_SCALE_0DB, 0x1);
1274 rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x0);
1275 }
1276 rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x2);
1277 }
1278
1279 static void rtw8822c_txgapk_afe_dpk(struct rtw_dev *rtwdev, u8 path)
1280 {
1281 u32 reg;
1282
1283 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1284
1285 if (path == RF_PATH_A) {
1286 reg = REG_ANAPAR_A;
1287 } else if (path == RF_PATH_B) {
1288 reg = REG_ANAPAR_B;
1289 } else {
1290 rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1291 return;
1292 }
1293
1294 rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, MASKDWORD);
1295 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1296 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700f0001);
1297 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x701f0001);
1298 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x702f0001);
1299 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x703f0001);
1300 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x704f0001);
1301 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705f0001);
1302 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x706f0001);
1303 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707f0001);
1304 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708f0001);
1305 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709f0001);
1306 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70af0001);
1307 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bf0001);
1308 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cf0001);
1309 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70df0001);
1310 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ef0001);
1311 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1312 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ff0001);
1313 }
1314
1315 static void rtw8822c_txgapk_afe_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1316 {
1317 u32 reg;
1318
1319 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1320
1321 if (path == RF_PATH_A) {
1322 reg = REG_ANAPAR_A;
1323 } else if (path == RF_PATH_B) {
1324 reg = REG_ANAPAR_B;
1325 } else {
1326 rtw_err(rtwdev, "[TXGAPK] unknown path %d!!\n", path);
1327 return;
1328 }
1329 rtw_write32_mask(rtwdev, REG_IQK_CTRL, MASKDWORD, 0xffa1005e);
1330 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x700b8041);
1331 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70144041);
1332 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70244041);
1333 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70344041);
1334 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70444041);
1335 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x705b8041);
1336 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70644041);
1337 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x707b8041);
1338 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x708b8041);
1339 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x709b8041);
1340 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70ab8041);
1341 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70bb8041);
1342 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70cb8041);
1343 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70db8041);
1344 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70eb8041);
1345 rtw_write32_mask(rtwdev, reg, MASKDWORD, 0x70fb8041);
1346 }
1347
1348 static void rtw8822c_txgapk_bb_dpk_restore(struct rtw_dev *rtwdev, u8 path)
1349 {
1350 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1351
1352 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x0);
1353 rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TIA_BYPASS, 0x0);
1354 rtw_write_rf(rtwdev, path, RF_DIS_BYPASS_TXBB, BIT_TXBB, 0x0);
1355
1356 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1357 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1358 rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1359 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1360 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x1);
1361 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1362 rtw_write32_mask(rtwdev, REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1363 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, 0x00);
1364 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, 0x0);
1365 rtw_write32_mask(rtwdev, REG_CCA_OFF, BIT_CCA_ON_BY_PW, 0x0);
1366
1367 if (path == RF_PATH_A) {
1368 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_A,
1369 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1370 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_DIS_SHARERX_TXGAT, 0x0);
1371 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_A,
1372 BIT_TX_SCALE_0DB, 0x0);
1373 rtw_write32_mask(rtwdev, REG_3WIRE, BIT_3WIRE_EN, 0x3);
1374 } else if (path == RF_PATH_B) {
1375 rtw_write32_mask(rtwdev, REG_RFTXEN_GCK_B,
1376 BIT_RFTXEN_GCK_FORCE_ON, 0x0);
1377 rtw_write32_mask(rtwdev, REG_3WIRE2,
1378 BIT_DIS_SHARERX_TXGAT, 0x0);
1379 rtw_write32_mask(rtwdev, REG_DIS_SHARE_RX_B,
1380 BIT_TX_SCALE_0DB, 0x0);
1381 rtw_write32_mask(rtwdev, REG_3WIRE2, BIT_3WIRE_EN, 0x3);
1382 }
1383
1384 rtw_write32_mask(rtwdev, REG_CCKSB, BIT_BBMODE, 0x0);
1385 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_CFIR_EN, 0x5);
1386 }
1387
1388 static bool _rtw8822c_txgapk_gain_valid(struct rtw_dev *rtwdev, u32 gain)
1389 {
1390 if ((FIELD_GET(BIT_GAIN_TX_PAD_H, gain) >= 0xc) &&
1391 (FIELD_GET(BIT_GAIN_TX_PAD_L, gain) >= 0xe))
1392 return true;
1393
1394 return false;
1395 }
1396
1397 static void _rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev,
1398 u8 band, u8 path)
1399 {
1400 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1401 u32 v, tmp_3f = 0;
1402 u8 gain, check_txgain;
1403
1404 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1405
1406 switch (band) {
1407 case RF_BAND_2G_OFDM:
1408 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1409 break;
1410 case RF_BAND_5G_L:
1411 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1412 break;
1413 case RF_BAND_5G_M:
1414 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1415 break;
1416 case RF_BAND_5G_H:
1417 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1418 break;
1419 default:
1420 break;
1421 }
1422
1423 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, MASKBYTE0, 0x88);
1424
1425 check_txgain = 0;
1426 for (gain = 0; gain < RF_GAIN_NUM; gain++) {
1427 v = txgapk->rf3f_bp[band][gain][path];
1428 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1429 if (!check_txgain) {
1430 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1431 check_txgain = 1;
1432 }
1433 rtw_dbg(rtwdev, RTW_DBG_RFK,
1434 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1435 txgapk->rf3f_bp[band][gain][path]);
1436 } else {
1437 tmp_3f = txgapk->rf3f_bp[band][gain][path];
1438 }
1439
1440 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN, tmp_3f);
1441 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_I_GAIN, gain);
1442 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x1);
1443 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_GAIN_RST, 0x0);
1444
1445 rtw_dbg(rtwdev, RTW_DBG_RFK,
1446 "[TXGAPK] Band=%d 0x1b98[11:0]=0x%03X path=%d\n",
1447 band, tmp_3f, path);
1448 }
1449 }
1450
1451 static void rtw8822c_txgapk_write_gain_bb_table(struct rtw_dev *rtwdev)
1452 {
1453 u8 path, band;
1454
1455 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1456 __func__, rtwdev->dm_info.gapk.channel);
1457
1458 for (band = 0; band < RF_BAND_MAX; band++) {
1459 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1460 _rtw8822c_txgapk_write_gain_bb_table(rtwdev,
1461 band, path);
1462 }
1463 }
1464 }
1465
1466 static void rtw8822c_txgapk_read_offset(struct rtw_dev *rtwdev, u8 path)
1467 {
1468 static const u32 cfg1_1b00[2] = {0x00000d18, 0x00000d2a};
1469 static const u32 cfg2_1b00[2] = {0x00000d19, 0x00000d2b};
1470 static const u32 set_pi[2] = {REG_RSV_CTRL, REG_WLRF1};
1471 static const u32 path_setting[2] = {REG_ORITXCODE, REG_ORITXCODE2};
1472 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1473 u8 channel = txgapk->channel;
1474 u32 val;
1475 int i;
1476
1477 if (path >= ARRAY_SIZE(cfg1_1b00) ||
1478 path >= ARRAY_SIZE(cfg2_1b00) ||
1479 path >= ARRAY_SIZE(set_pi) ||
1480 path >= ARRAY_SIZE(path_setting)) {
1481 rtw_warn(rtwdev, "[TXGAPK] wrong path %d\n", path);
1482 return;
1483 }
1484
1485 rtw_write32_mask(rtwdev, REG_ANTMAP0, BIT_ANT_PATH, path + 1);
1486 rtw_write32_mask(rtwdev, REG_TXLGMAP, MASKDWORD, 0xe4e40000);
1487 rtw_write32_mask(rtwdev, REG_TXANTSEG, BIT_ANTSEG, 0x3);
1488 rtw_write32_mask(rtwdev, path_setting[path], MASK20BITS, 0x33312);
1489 rtw_write32_mask(rtwdev, path_setting[path], BIT_PATH_EN, 0x1);
1490 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x0);
1491 rtw_write_rf(rtwdev, path, RF_LUTDBG, BIT_TXA_TANK, 0x1);
1492 rtw_write_rf(rtwdev, path, RF_IDAC, BIT_TX_MODE, 0x820);
1493 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1494 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1495
1496 rtw_write32_mask(rtwdev, REG_TX_TONE_IDX, MASKBYTE0, 0x018);
1497 fsleep(1000);
1498 if (channel >= 1 && channel <= 14)
1499 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_2G_SWING);
1500 else
1501 rtw_write32_mask(rtwdev, REG_R_CONFIG, MASKBYTE0, BIT_5G_SWING);
1502 fsleep(1000);
1503
1504 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg1_1b00[path]);
1505 rtw_write32_mask(rtwdev, REG_NCTL0, MASKDWORD, cfg2_1b00[path]);
1506
1507 read_poll_timeout(rtw_read32_mask, val,
1508 val == 0x55, 1000, 100000, false,
1509 rtwdev, REG_RPT_CIP, BIT_RPT_CIP_STATUS);
1510
1511 rtw_write32_mask(rtwdev, set_pi[path], BITS_RFC_DIRECT, 0x2);
1512 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1513 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_EN, 0x1);
1514 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x12);
1515 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x3);
1516 val = rtw_read32(rtwdev, REG_STAT_RPT);
1517
1518 txgapk->offset[0][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1519 txgapk->offset[1][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1520 txgapk->offset[2][path] = (s8)FIELD_GET(BIT_GAPK_RPT2, val);
1521 txgapk->offset[3][path] = (s8)FIELD_GET(BIT_GAPK_RPT3, val);
1522 txgapk->offset[4][path] = (s8)FIELD_GET(BIT_GAPK_RPT4, val);
1523 txgapk->offset[5][path] = (s8)FIELD_GET(BIT_GAPK_RPT5, val);
1524 txgapk->offset[6][path] = (s8)FIELD_GET(BIT_GAPK_RPT6, val);
1525 txgapk->offset[7][path] = (s8)FIELD_GET(BIT_GAPK_RPT7, val);
1526
1527 rtw_write32_mask(rtwdev, REG_TX_GAIN_SET, BIT_GAPK_RPT_IDX, 0x4);
1528 val = rtw_read32(rtwdev, REG_STAT_RPT);
1529
1530 txgapk->offset[8][path] = (s8)FIELD_GET(BIT_GAPK_RPT0, val);
1531 txgapk->offset[9][path] = (s8)FIELD_GET(BIT_GAPK_RPT1, val);
1532
1533 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1534 if (txgapk->offset[i][path] & BIT(3))
1535 txgapk->offset[i][path] = txgapk->offset[i][path] |
1536 0xf0;
1537 for (i = 0; i < RF_HW_OFFSET_NUM; i++)
1538 rtw_dbg(rtwdev, RTW_DBG_RFK,
1539 "[TXGAPK] offset %d %d path=%d\n",
1540 txgapk->offset[i][path], i, path);
1541 }
1542
1543 static void rtw8822c_txgapk_calculate_offset(struct rtw_dev *rtwdev, u8 path)
1544 {
1545 static const u32 bb_reg[] = {REG_ANTMAP0, REG_TXLGMAP, REG_TXANTSEG,
1546 REG_ORITXCODE, REG_ORITXCODE2};
1547 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1548 u8 channel = txgapk->channel;
1549 u32 reg_backup[ARRAY_SIZE(bb_reg)] = {0};
1550
1551 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s channel=%d\n",
1552 __func__, channel);
1553
1554 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1555 reg_backup, ARRAY_SIZE(bb_reg));
1556
1557 if (channel >= 1 && channel <= 14) {
1558 rtw_write32_mask(rtwdev,
1559 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1560 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1561 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1562 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1563 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1564 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x5000f);
1565 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x0);
1566 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x1);
1567 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0f);
1568 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1569 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
1570 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1571 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1572
1573 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x00);
1574 rtw_write32_mask(rtwdev, REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x0);
1575
1576 rtw8822c_txgapk_read_offset(rtwdev, path);
1577 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1578
1579 } else {
1580 rtw_write32_mask(rtwdev,
1581 REG_SINGLE_TONE_SW, BIT_IRQ_TEST_MODE, 0x0);
1582 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SEL_PATH, path);
1583 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x3f);
1584 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
1585 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
1586 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50011);
1587 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x3);
1588 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x3);
1589 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
1590 rtw_write_rf(rtwdev, path,
1591 RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0x2);
1592 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0x12);
1593 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
1594 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
1595 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x1);
1596 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x5);
1597
1598 rtw_write32_mask(rtwdev, REG_IQKSTAT, MASKBYTE0, 0x0);
1599
1600 if (channel >= 36 && channel <= 64)
1601 rtw_write32_mask(rtwdev,
1602 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x2);
1603 else if (channel >= 100 && channel <= 144)
1604 rtw_write32_mask(rtwdev,
1605 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x3);
1606 else if (channel >= 149 && channel <= 177)
1607 rtw_write32_mask(rtwdev,
1608 REG_TABLE_SEL, BIT_Q_GAIN_SEL, 0x4);
1609
1610 rtw8822c_txgapk_read_offset(rtwdev, path);
1611 rtw_dbg(rtwdev, RTW_DBG_RFK, "=============================\n");
1612 }
1613 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1614 reg_backup, ARRAY_SIZE(bb_reg));
1615 }
1616
1617 static void rtw8822c_txgapk_rf_restore(struct rtw_dev *rtwdev, u8 path)
1618 {
1619 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1620
1621 if (path >= rtwdev->hal.rf_path_num)
1622 return;
1623
1624 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RF_MODE, 0x3);
1625 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x0);
1626 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT_PW_EXT_TIA, 0x0);
1627 }
1628
1629 static u32 rtw8822c_txgapk_cal_gain(struct rtw_dev *rtwdev, u32 gain, s8 offset)
1630 {
1631 u32 gain_x2, new_gain;
1632
1633 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1634
1635 if (_rtw8822c_txgapk_gain_valid(rtwdev, gain)) {
1636 new_gain = gain;
1637 rtw_dbg(rtwdev, RTW_DBG_RFK,
1638 "[TXGAPK] gain=0x%03X(>=0xCEX) offset=%d new_gain=0x%03X\n",
1639 gain, offset, new_gain);
1640 return new_gain;
1641 }
1642
1643 gain_x2 = (gain << 1) + offset;
1644 new_gain = (gain_x2 >> 1) | (gain_x2 & BIT(0) ? BIT_GAIN_EXT : 0);
1645
1646 rtw_dbg(rtwdev, RTW_DBG_RFK,
1647 "[TXGAPK] gain=0x%X offset=%d new_gain=0x%X\n",
1648 gain, offset, new_gain);
1649
1650 return new_gain;
1651 }
1652
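/* Accumulate the calibrated offsets over the gain table for each path and
 * program the corrected 0x3f gain words into the RF LUT through
 * RF_LUTWE2/RF_LUTWA/RF_LUTWD0. The LUT base written to 0x33 (RF_LUTWA)
 * depends on which band the current channel belongs to.
 */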
1653 static void rtw8822c_txgapk_write_tx_gain(struct rtw_dev *rtwdev)
1654 {
1655 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1656 u32 i, j, tmp = 0x20, tmp_3f, v;
1657 s8 offset_tmp[RF_GAIN_NUM] = {0};
1658 u8 path, band = RF_BAND_2G_OFDM, channel = txgapk->channel;
1659
1660 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1661
1662 if (channel >= 1 && channel <= 14) {
1663 tmp = 0x20;
1664 band = RF_BAND_2G_OFDM;
1665 } else if (channel >= 36 && channel <= 64) {
1666 tmp = 0x200;
1667 band = RF_BAND_5G_L;
1668 } else if (channel >= 100 && channel <= 144) {
1669 tmp = 0x280;
1670 band = RF_BAND_5G_M;
1671 } else if (channel >= 149 && channel <= 177) {
1672 tmp = 0x300;
1673 band = RF_BAND_5G_H;
1674 } else {
1675 rtw_err(rtwdev, "[TXGAPK] unknown channel %d!!\n", channel);
1676 return;
1677 }
1678
1679 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1680 for (i = 0; i < RF_GAIN_NUM; i++) {
1681 offset_tmp[i] = 0;
1682 for (j = i; j < RF_GAIN_NUM; j++) {
1683 v = txgapk->rf3f_bp[band][j][path];
1684 if (_rtw8822c_txgapk_gain_valid(rtwdev, v))
1685 continue;
1686
1687 offset_tmp[i] += txgapk->offset[j][path];
1688 txgapk->fianl_offset[i][path] = offset_tmp[i];
1689 }
1690
1691 v = txgapk->rf3f_bp[band][i][path];
1692 if (_rtw8822c_txgapk_gain_valid(rtwdev, v)) {
1693 rtw_dbg(rtwdev, RTW_DBG_RFK,
1694 "[TXGAPK] tx_gain=0x%03X >= 0xCEX\n",
1695 txgapk->rf3f_bp[band][i][path]);
1696 } else {
1697 txgapk->rf3f_fs[path][i] = offset_tmp[i];
1698 rtw_dbg(rtwdev, RTW_DBG_RFK,
1699 "[TXGAPK] offset %d %d\n",
1700 offset_tmp[i], i);
1701 }
1702 }
1703
1704 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x10000);
1705 for (i = 0; i < RF_GAIN_NUM; i++) {
1706 rtw_write_rf(rtwdev, path,
1707 RF_LUTWA, RFREG_MASK, tmp + i);
1708
1709 tmp_3f = rtw8822c_txgapk_cal_gain(rtwdev,
1710 txgapk->rf3f_bp[band][i][path],
1711 offset_tmp[i]);
1712 rtw_write_rf(rtwdev, path, RF_LUTWD0,
1713 BIT_GAIN_EXT | BIT_DATA_L, tmp_3f);
1714
1715 rtw_dbg(rtwdev, RTW_DBG_RFK,
1716 "[TXGAPK] 0x33=0x%05X 0x3f=0x%04X\n",
1717 tmp + i, tmp_3f);
1718 }
1719 rtw_write_rf(rtwdev, path, RF_LUTWE2, RFREG_MASK, 0x0);
1720 }
1721 }
1722
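/* Capture the default TX gain table (RF 0x5f readbacks) for every band and
 * path once. The 3-wire interface is disabled while RF 0x18 is temporarily
 * switched to a representative channel of each band; the results are kept
 * in txgapk->rf3f_bp[] and mirrored into the BB gain table.
 */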
1723 static void rtw8822c_txgapk_save_all_tx_gain_table(struct rtw_dev *rtwdev)
1724 {
1725 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1726 static const u32 three_wire[2] = {REG_3WIRE, REG_3WIRE2};
1727 static const u8 ch_num[RF_BAND_MAX] = {1, 1, 36, 100, 149};
1728 static const u8 band_num[RF_BAND_MAX] = {0x0, 0x0, 0x1, 0x3, 0x5};
1729 static const u8 cck[RF_BAND_MAX] = {0x1, 0x0, 0x0, 0x0, 0x0};
1730 u8 path, band, gain, rf0_idx;
1731 u32 rf18, v;
1732
1733 if (rtwdev->dm_info.dm_flags & BIT(RTW_DM_CAP_TXGAPK))
1734 return;
1735
1736 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1737
1738 if (txgapk->read_txgain == 1) {
1739 rtw_dbg(rtwdev, RTW_DBG_RFK,
1740 "[TXGAPK] Already Read txgapk->read_txgain return!!!\n");
1741 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1742 return;
1743 }
1744
1745 for (band = 0; band < RF_BAND_MAX; band++) {
1746 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1747 rf18 = rtw_read_rf(rtwdev, path, RF_CFGCH, RFREG_MASK);
1748
1749 rtw_write32_mask(rtwdev,
1750 three_wire[path], BIT_3WIRE_EN, 0x0);
1751 rtw_write_rf(rtwdev, path,
1752 RF_CFGCH, MASKBYTE0, ch_num[band]);
1753 rtw_write_rf(rtwdev, path,
1754 RF_CFGCH, BIT_BAND, band_num[band]);
1755 rtw_write_rf(rtwdev, path,
1756 RF_BW_TRXBB, BIT_DBG_CCK_CCA, cck[band]);
1757 rtw_write_rf(rtwdev, path,
1758 RF_BW_TRXBB, BIT_TX_CCK_IND, cck[band]);
1759 gain = 0;
1760 for (rf0_idx = 1; rf0_idx < 32; rf0_idx += 3) {
1761 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC,
1762 MASKBYTE0, rf0_idx);
1763 v = rtw_read_rf(rtwdev, path,
1764 RF_TX_RESULT, RFREG_MASK);
1765 txgapk->rf3f_bp[band][gain][path] = v & BIT_DATA_L;
1766
1767 rtw_dbg(rtwdev, RTW_DBG_RFK,
1768 "[TXGAPK] 0x5f=0x%03X band=%d path=%d\n",
1769 txgapk->rf3f_bp[band][gain][path],
1770 band, path);
1771 gain++;
1772 }
1773 rtw_write_rf(rtwdev, path, RF_CFGCH, RFREG_MASK, rf18);
1774 rtw_write32_mask(rtwdev,
1775 three_wire[path], BIT_3WIRE_EN, 0x3);
1776 }
1777 }
1778 rtw8822c_txgapk_write_gain_bb_table(rtwdev);
1779 txgapk->read_txgain = 1;
1780 }
1781
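/* Top-level TX gap-K calibration: make sure the default gain tables have
 * been captured, skip the flow for TSSI-based power tracking types (4-7),
 * pause TX, run the per-path DPK setup and offset measurement, and finally
 * write the corrected gains back.
 */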
1782 static void rtw8822c_txgapk(struct rtw_dev *rtwdev)
1783 {
1784 static const u32 bb_reg[2] = {REG_TX_PTCL_CTRL, REG_TX_FIFO};
1785 struct rtw_gapk_info *txgapk = &rtwdev->dm_info.gapk;
1786 u32 bb_reg_backup[2];
1787 u8 path;
1788
1789 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] ======>%s\n", __func__);
1790
1791 rtw8822c_txgapk_save_all_tx_gain_table(rtwdev);
1792
1793 if (txgapk->read_txgain == 0) {
1794 rtw_dbg(rtwdev, RTW_DBG_RFK,
1795 "[TXGAPK] txgapk->read_txgain == 0 return!!!\n");
1796 return;
1797 }
1798
1799 if (rtwdev->efuse.power_track_type >= 4 &&
1800 rtwdev->efuse.power_track_type <= 7) {
1801 rtw_dbg(rtwdev, RTW_DBG_RFK,
1802 "[TXGAPK] Normal Mode in TSSI mode. return!!!\n");
1803 return;
1804 }
1805
1806 rtw8822c_txgapk_backup_bb_reg(rtwdev, bb_reg,
1807 bb_reg_backup, ARRAY_SIZE(bb_reg));
1808 rtw8822c_txgapk_tx_pause(rtwdev);
1809 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
1810 txgapk->channel = rtw_read_rf(rtwdev, path,
1811 RF_CFGCH, RFREG_MASK) & MASKBYTE0;
1812 rtw8822c_txgapk_bb_dpk(rtwdev, path);
1813 rtw8822c_txgapk_afe_dpk(rtwdev, path);
1814 rtw8822c_txgapk_calculate_offset(rtwdev, path);
1815 rtw8822c_txgapk_rf_restore(rtwdev, path);
1816 rtw8822c_txgapk_afe_dpk_restore(rtwdev, path);
1817 rtw8822c_txgapk_bb_dpk_restore(rtwdev, path);
1818 }
1819 rtw8822c_txgapk_write_tx_gain(rtwdev);
1820 rtw8822c_txgapk_reload_bb_reg(rtwdev, bb_reg,
1821 bb_reg_backup, ARRAY_SIZE(bb_reg));
1822 }
1823
1824 static void rtw8822c_do_gapk(struct rtw_dev *rtwdev)
1825 {
1826 struct rtw_dm_info *dm = &rtwdev->dm_info;
1827
1828 if (dm->dm_flags & BIT(RTW_DM_CAP_TXGAPK)) {
1829 rtw_dbg(rtwdev, RTW_DBG_RFK, "[TXGAPK] feature disable!!!\n");
1830 return;
1831 }
1832 rtw8822c_rfk_handshake(rtwdev, true);
1833 rtw8822c_txgapk(rtwdev);
1834 rtw8822c_rfk_handshake(rtwdev, false);
1835 }
1836
1837 static void rtw8822c_rf_init(struct rtw_dev *rtwdev)
1838 {
1839 rtw8822c_rf_dac_cal(rtwdev);
1840 rtw8822c_rf_x2_check(rtwdev);
1841 rtw8822c_thermal_trim(rtwdev);
1842 rtw8822c_power_trim(rtwdev);
1843 rtw8822c_pa_bias(rtwdev);
1844 }
1845
1846 static void rtw8822c_pwrtrack_init(struct rtw_dev *rtwdev)
1847 {
1848 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1849 u8 path;
1850
1851 for (path = RF_PATH_A; path < RTW_RF_PATH_MAX; path++) {
1852 dm_info->delta_power_index[path] = 0;
1853 ewma_thermal_init(&dm_info->avg_thermal[path]);
1854 dm_info->thermal_avg[path] = 0xff;
1855 }
1856
1857 dm_info->pwr_trk_triggered = false;
1858 dm_info->thermal_meter_k = rtwdev->efuse.thermal_meter_k;
1859 dm_info->thermal_meter_lck = rtwdev->efuse.thermal_meter_k;
1860 }
1861
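/* PHY bring-up: power on the BB/RF blocks, load the PHY parameter tables,
 * program the crystal cap from efuse, latch the CCK gain index boundaries
 * used by phy status parsing, and run the one-time RF, power-tracking and
 * beamforming init.
 */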
1862 static void rtw8822c_phy_set_param(struct rtw_dev *rtwdev)
1863 {
1864 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
1865 struct rtw_hal *hal = &rtwdev->hal;
1866 u8 crystal_cap;
1867 u8 cck_gi_u_bnd_msb = 0;
1868 u8 cck_gi_u_bnd_lsb = 0;
1869 u8 cck_gi_l_bnd_msb = 0;
1870 u8 cck_gi_l_bnd_lsb = 0;
1871 bool is_tx2_path;
1872
1873 /* power on BB/RF domain */
1874 rtw_write8_set(rtwdev, REG_SYS_FUNC_EN,
1875 BIT_FEN_BB_GLB_RST | BIT_FEN_BB_RSTB);
1876 rtw_write8_set(rtwdev, REG_RF_CTRL,
1877 BIT_RF_EN | BIT_RF_RSTB | BIT_RF_SDM_RSTB);
1878 rtw_write32_set(rtwdev, REG_WLRF1, BIT_WLRF1_BBRF_EN);
1879
1880 /* disable low rate DPD */
1881 rtw_write32_mask(rtwdev, REG_DIS_DPD, DIS_DPD_MASK, DIS_DPD_RATEALL);
1882
1883 /* pre init before header files config */
1884 rtw8822c_header_file_init(rtwdev, true);
1885
1886 rtw_phy_load_tables(rtwdev);
1887
1888 crystal_cap = rtwdev->efuse.crystal_cap & 0x7f;
1889 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, 0xfffc00,
1890 crystal_cap | (crystal_cap << 7));
1891
1892 /* post init after header files config */
1893 rtw8822c_header_file_init(rtwdev, false);
1894
1895 is_tx2_path = false;
1896 rtw8822c_config_trx_mode(rtwdev, hal->antenna_tx, hal->antenna_rx,
1897 is_tx2_path);
1898 rtw_phy_init(rtwdev);
1899
1900 cck_gi_u_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc000);
1901 cck_gi_u_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1aa8, 0xf0000);
1902 cck_gi_l_bnd_msb = (u8)rtw_read32_mask(rtwdev, 0x1a98, 0xc0);
1903 cck_gi_l_bnd_lsb = (u8)rtw_read32_mask(rtwdev, 0x1a70, 0x0f000000);
1904
1905 dm_info->cck_gi_u_bnd = ((cck_gi_u_bnd_msb << 4) | (cck_gi_u_bnd_lsb));
1906 dm_info->cck_gi_l_bnd = ((cck_gi_l_bnd_msb << 4) | (cck_gi_l_bnd_lsb));
1907
1908 rtw8822c_rf_init(rtwdev);
1909 rtw8822c_pwrtrack_init(rtwdev);
1910
1911 rtw_bf_phy_init(rtwdev);
1912 }
1913
1914 #define WLAN_TXQ_RPT_EN 0x1F
1915 #define WLAN_SLOT_TIME 0x09
1916 #define WLAN_PIFS_TIME 0x1C
1917 #define WLAN_SIFS_CCK_CONT_TX 0x0A
1918 #define WLAN_SIFS_OFDM_CONT_TX 0x0E
1919 #define WLAN_SIFS_CCK_TRX 0x0A
1920 #define WLAN_SIFS_OFDM_TRX 0x10
1921 #define WLAN_NAV_MAX 0xC8
1922 #define WLAN_RDG_NAV 0x05
1923 #define WLAN_TXOP_NAV 0x1B
1924 #define WLAN_CCK_RX_TSF 0x30
1925 #define WLAN_OFDM_RX_TSF 0x30
1926 #define WLAN_TBTT_PROHIBIT 0x04 /* unit : 32us */
1927 #define WLAN_TBTT_HOLD_TIME 0x064 /* unit : 32us */
1928 #define WLAN_DRV_EARLY_INT 0x04
1929 #define WLAN_BCN_CTRL_CLT0 0x10
1930 #define WLAN_BCN_DMA_TIME 0x02
1931 #define WLAN_BCN_MAX_ERR 0xFF
1932 #define WLAN_SIFS_CCK_DUR_TUNE 0x0A
1933 #define WLAN_SIFS_OFDM_DUR_TUNE 0x10
1934 #define WLAN_SIFS_CCK_CTX 0x0A
1935 #define WLAN_SIFS_CCK_IRX 0x0A
1936 #define WLAN_SIFS_OFDM_CTX 0x0E
1937 #define WLAN_SIFS_OFDM_IRX 0x0E
1938 #define WLAN_EIFS_DUR_TUNE 0x40
1939 #define WLAN_EDCA_VO_PARAM 0x002FA226
1940 #define WLAN_EDCA_VI_PARAM 0x005EA328
1941 #define WLAN_EDCA_BE_PARAM 0x005EA42B
1942 #define WLAN_EDCA_BK_PARAM 0x0000A44F
1943
1944 #define WLAN_RX_FILTER0 0xFFFFFFFF
1945 #define WLAN_RX_FILTER2 0xFFFF
1946 #define WLAN_RCR_CFG 0xE400220E
1947 #define WLAN_RXPKT_MAX_SZ 12288
1948 #define WLAN_RXPKT_MAX_SZ_512 (WLAN_RXPKT_MAX_SZ >> 9)
1949
1950 #define WLAN_AMPDU_MAX_TIME 0x70
1951 #define WLAN_RTS_LEN_TH 0xFF
1952 #define WLAN_RTS_TX_TIME_TH 0x08
1953 #define WLAN_MAX_AGG_PKT_LIMIT 0x3f
1954 #define WLAN_RTS_MAX_AGG_PKT_LIMIT 0x3f
1955 #define WLAN_PRE_TXCNT_TIME_TH 0x1E0
1956 #define FAST_EDCA_VO_TH 0x06
1957 #define FAST_EDCA_VI_TH 0x06
1958 #define FAST_EDCA_BE_TH 0x06
1959 #define FAST_EDCA_BK_TH 0x06
1960 #define WLAN_BAR_RETRY_LIMIT 0x01
1961 #define WLAN_BAR_ACK_TYPE 0x05
1962 #define WLAN_RA_TRY_RATE_AGG_LIMIT 0x08
1963 #define WLAN_RESP_TXRATE 0x84
1964 #define WLAN_ACK_TO 0x21
1965 #define WLAN_ACK_TO_CCK 0x6A
1966 #define WLAN_DATA_RATE_FB_CNT_1_4 0x01000000
1967 #define WLAN_DATA_RATE_FB_CNT_5_8 0x08070504
1968 #define WLAN_RTS_RATE_FB_CNT_5_8 0x08070504
1969 #define WLAN_DATA_RATE_FB_RATE0 0xFE01F010
1970 #define WLAN_DATA_RATE_FB_RATE0_H 0x40000000
1971 #define WLAN_RTS_RATE_FB_RATE1 0x003FF010
1972 #define WLAN_RTS_RATE_FB_RATE1_H 0x40000000
1973 #define WLAN_RTS_RATE_FB_RATE4 0x0600F010
1974 #define WLAN_RTS_RATE_FB_RATE4_H 0x400003E0
1975 #define WLAN_RTS_RATE_FB_RATE5 0x0600F015
1976 #define WLAN_RTS_RATE_FB_RATE5_H 0x000000E0
1977 #define WLAN_MULTI_ADDR 0xFFFFFFFF
1978
1979 #define WLAN_TX_FUNC_CFG1 0x30
1980 #define WLAN_TX_FUNC_CFG2 0x30
1981 #define WLAN_MAC_OPT_NORM_FUNC1 0x98
1982 #define WLAN_MAC_OPT_LB_FUNC1 0x80
1983 #define WLAN_MAC_OPT_FUNC2 0xb0810041
1984 #define WLAN_MAC_INT_MIG_CFG 0x33330000
1985
1986 #define WLAN_SIFS_CFG (WLAN_SIFS_CCK_CONT_TX | \
1987 (WLAN_SIFS_OFDM_CONT_TX << BIT_SHIFT_SIFS_OFDM_CTX) | \
1988 (WLAN_SIFS_CCK_TRX << BIT_SHIFT_SIFS_CCK_TRX) | \
1989 (WLAN_SIFS_OFDM_TRX << BIT_SHIFT_SIFS_OFDM_TRX))
1990
1991 #define WLAN_SIFS_DUR_TUNE (WLAN_SIFS_CCK_DUR_TUNE | \
1992 (WLAN_SIFS_OFDM_DUR_TUNE << 8))
1993
1994 #define WLAN_TBTT_TIME (WLAN_TBTT_PROHIBIT |\
1995 (WLAN_TBTT_HOLD_TIME << BIT_SHIFT_TBTT_HOLD_TIME_AP))
1996
1997 #define WLAN_NAV_CFG (WLAN_RDG_NAV | (WLAN_TXOP_NAV << 16))
1998 #define WLAN_RX_TSF_CFG (WLAN_CCK_RX_TSF | (WLAN_OFDM_RX_TSF) << 8)
1999
2000 #define MAC_CLK_SPEED 80 /* 80M */
2001 #define EFUSE_PCB_INFO_OFFSET 0xCA
2002
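/* One-time MAC register init: TX queue/report control, SIFS/EDCA timing,
 * rate fallback tables, protocol and beacon settings, RX filters, and the
 * low-power RX packet-shaping filter (RXPSF) configuration.
 */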
2003 static int rtw8822c_mac_init(struct rtw_dev *rtwdev)
2004 {
2005 u8 value8;
2006 u16 value16;
2007 u32 value32;
2008 u16 pre_txcnt;
2009
2010 /* txq control */
2011 value8 = rtw_read8(rtwdev, REG_FWHW_TXQ_CTRL);
2012 value8 |= (BIT(7) & ~BIT(1) & ~BIT(2));
2013 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL, value8);
2014 rtw_write8(rtwdev, REG_FWHW_TXQ_CTRL + 1, WLAN_TXQ_RPT_EN);
2015 /* sifs control */
2016 rtw_write16(rtwdev, REG_SPEC_SIFS, WLAN_SIFS_DUR_TUNE);
2017 rtw_write32(rtwdev, REG_SIFS, WLAN_SIFS_CFG);
2018 rtw_write16(rtwdev, REG_RESP_SIFS_CCK,
2019 WLAN_SIFS_CCK_CTX | WLAN_SIFS_CCK_IRX << 8);
2020 rtw_write16(rtwdev, REG_RESP_SIFS_OFDM,
2021 WLAN_SIFS_OFDM_CTX | WLAN_SIFS_OFDM_IRX << 8);
2022 /* rate fallback control */
2023 rtw_write32(rtwdev, REG_DARFRC, WLAN_DATA_RATE_FB_CNT_1_4);
2024 rtw_write32(rtwdev, REG_DARFRCH, WLAN_DATA_RATE_FB_CNT_5_8);
2025 rtw_write32(rtwdev, REG_RARFRCH, WLAN_RTS_RATE_FB_CNT_5_8);
2026 rtw_write32(rtwdev, REG_ARFR0, WLAN_DATA_RATE_FB_RATE0);
2027 rtw_write32(rtwdev, REG_ARFRH0, WLAN_DATA_RATE_FB_RATE0_H);
2028 rtw_write32(rtwdev, REG_ARFR1_V1, WLAN_RTS_RATE_FB_RATE1);
2029 rtw_write32(rtwdev, REG_ARFRH1_V1, WLAN_RTS_RATE_FB_RATE1_H);
2030 rtw_write32(rtwdev, REG_ARFR4, WLAN_RTS_RATE_FB_RATE4);
2031 rtw_write32(rtwdev, REG_ARFRH4, WLAN_RTS_RATE_FB_RATE4_H);
2032 rtw_write32(rtwdev, REG_ARFR5, WLAN_RTS_RATE_FB_RATE5);
2033 rtw_write32(rtwdev, REG_ARFRH5, WLAN_RTS_RATE_FB_RATE5_H);
2034 /* protocol configuration */
2035 rtw_write8(rtwdev, REG_AMPDU_MAX_TIME_V1, WLAN_AMPDU_MAX_TIME);
2036 rtw_write8_set(rtwdev, REG_TX_HANG_CTRL, BIT_EN_EOF_V1);
2037 pre_txcnt = WLAN_PRE_TXCNT_TIME_TH | BIT_EN_PRECNT;
2038 rtw_write8(rtwdev, REG_PRECNT_CTRL, (u8)(pre_txcnt & 0xFF));
2039 rtw_write8(rtwdev, REG_PRECNT_CTRL + 1, (u8)(pre_txcnt >> 8));
2040 value32 = WLAN_RTS_LEN_TH | (WLAN_RTS_TX_TIME_TH << 8) |
2041 (WLAN_MAX_AGG_PKT_LIMIT << 16) |
2042 (WLAN_RTS_MAX_AGG_PKT_LIMIT << 24);
2043 rtw_write32(rtwdev, REG_PROT_MODE_CTRL, value32);
2044 rtw_write16(rtwdev, REG_BAR_MODE_CTRL + 2,
2045 WLAN_BAR_RETRY_LIMIT | WLAN_RA_TRY_RATE_AGG_LIMIT << 8);
2046 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING, FAST_EDCA_VO_TH);
2047 rtw_write8(rtwdev, REG_FAST_EDCA_VOVI_SETTING + 2, FAST_EDCA_VI_TH);
2048 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING, FAST_EDCA_BE_TH);
2049 rtw_write8(rtwdev, REG_FAST_EDCA_BEBK_SETTING + 2, FAST_EDCA_BK_TH);
2050 /* close BA parser */
2051 rtw_write8_clr(rtwdev, REG_LIFETIME_EN, BIT_BA_PARSER_EN);
2052 rtw_write32_clr(rtwdev, REG_RRSR, BITS_RRSR_RSC);
2053
2054 /* EDCA configuration */
2055 rtw_write32(rtwdev, REG_EDCA_VO_PARAM, WLAN_EDCA_VO_PARAM);
2056 rtw_write32(rtwdev, REG_EDCA_VI_PARAM, WLAN_EDCA_VI_PARAM);
2057 rtw_write32(rtwdev, REG_EDCA_BE_PARAM, WLAN_EDCA_BE_PARAM);
2058 rtw_write32(rtwdev, REG_EDCA_BK_PARAM, WLAN_EDCA_BK_PARAM);
2059 rtw_write8(rtwdev, REG_PIFS, WLAN_PIFS_TIME);
2060 rtw_write8_clr(rtwdev, REG_TX_PTCL_CTRL + 1, BIT_SIFS_BK_EN >> 8);
2061 rtw_write8_set(rtwdev, REG_RD_CTRL + 1,
2062 (BIT_DIS_TXOP_CFE | BIT_DIS_LSIG_CFE |
2063 BIT_DIS_STBC_CFE) >> 8);
2064
2065 /* MAC clock configuration */
2066 rtw_write32_clr(rtwdev, REG_AFE_CTRL1, BIT_MAC_CLK_SEL);
2067 rtw_write8(rtwdev, REG_USTIME_TSF, MAC_CLK_SPEED);
2068 rtw_write8(rtwdev, REG_USTIME_EDCA, MAC_CLK_SPEED);
2069
2070 rtw_write8_set(rtwdev, REG_MISC_CTRL,
2071 BIT_EN_FREE_CNT | BIT_DIS_SECOND_CCA);
2072 rtw_write8_clr(rtwdev, REG_TIMER0_SRC_SEL, BIT_TSFT_SEL_TIMER0);
2073 rtw_write16(rtwdev, REG_TXPAUSE, 0x0000);
2074 rtw_write8(rtwdev, REG_SLOT, WLAN_SLOT_TIME);
2075 rtw_write32(rtwdev, REG_RD_NAV_NXT, WLAN_NAV_CFG);
2076 rtw_write16(rtwdev, REG_RXTSF_OFFSET_CCK, WLAN_RX_TSF_CFG);
2077 	/* Set beacon control - enable TSF and other related functions */
2078 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2079 /* Set send beacon related registers */
2080 rtw_write32(rtwdev, REG_TBTT_PROHIBIT, WLAN_TBTT_TIME);
2081 rtw_write8(rtwdev, REG_DRVERLYINT, WLAN_DRV_EARLY_INT);
2082 rtw_write8(rtwdev, REG_BCN_CTRL_CLINT0, WLAN_BCN_CTRL_CLT0);
2083 rtw_write8(rtwdev, REG_BCNDMATIM, WLAN_BCN_DMA_TIME);
2084 rtw_write8(rtwdev, REG_BCN_MAX_ERR, WLAN_BCN_MAX_ERR);
2085
2086 /* WMAC configuration */
2087 rtw_write32(rtwdev, REG_MAR, WLAN_MULTI_ADDR);
2088 rtw_write32(rtwdev, REG_MAR + 4, WLAN_MULTI_ADDR);
2089 rtw_write8(rtwdev, REG_BBPSF_CTRL + 2, WLAN_RESP_TXRATE);
2090 rtw_write8(rtwdev, REG_ACKTO, WLAN_ACK_TO);
2091 rtw_write8(rtwdev, REG_ACKTO_CCK, WLAN_ACK_TO_CCK);
2092 rtw_write16(rtwdev, REG_EIFS, WLAN_EIFS_DUR_TUNE);
2093 rtw_write8(rtwdev, REG_NAV_CTRL + 2, WLAN_NAV_MAX);
2094 rtw_write8(rtwdev, REG_WMAC_TRXPTCL_CTL_H + 2, WLAN_BAR_ACK_TYPE);
2095 rtw_write32(rtwdev, REG_RXFLTMAP0, WLAN_RX_FILTER0);
2096 rtw_write16(rtwdev, REG_RXFLTMAP2, WLAN_RX_FILTER2);
2097 rtw_write32(rtwdev, REG_RCR, WLAN_RCR_CFG);
2098 rtw_write8(rtwdev, REG_RX_PKT_LIMIT, WLAN_RXPKT_MAX_SZ_512);
2099 rtw_write8(rtwdev, REG_TCR + 2, WLAN_TX_FUNC_CFG2);
2100 rtw_write8(rtwdev, REG_TCR + 1, WLAN_TX_FUNC_CFG1);
2101 rtw_write32_set(rtwdev, REG_GENERAL_OPTION, BIT_DUMMY_FCS_READY_MASK_EN);
2102 rtw_write32(rtwdev, REG_WMAC_OPTION_FUNCTION + 8, WLAN_MAC_OPT_FUNC2);
2103 rtw_write8(rtwdev, REG_WMAC_OPTION_FUNCTION_1, WLAN_MAC_OPT_NORM_FUNC1);
2104
2105 /* init low power */
2106 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL + 2) & 0xF00F;
2107 value16 |= (BIT_RXGCK_VHT_FIFOTHR(1) | BIT_RXGCK_HT_FIFOTHR(1) |
2108 BIT_RXGCK_OFDM_FIFOTHR(1) | BIT_RXGCK_CCK_FIFOTHR(1)) >> 16;
2109 rtw_write16(rtwdev, REG_RXPSF_CTRL + 2, value16);
2110 value16 = 0;
2111 value16 = BIT_SET_RXPSF_PKTLENTHR(value16, 1);
2112 value16 |= BIT_RXPSF_CTRLEN | BIT_RXPSF_VHTCHKEN | BIT_RXPSF_HTCHKEN
2113 | BIT_RXPSF_OFDMCHKEN | BIT_RXPSF_CCKCHKEN
2114 | BIT_RXPSF_OFDMRST;
2115 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2116 rtw_write32(rtwdev, REG_RXPSF_TYPE_CTRL, 0xFFFFFFFF);
2117 /* rx ignore configuration */
2118 value16 = rtw_read16(rtwdev, REG_RXPSF_CTRL);
2119 value16 &= ~(BIT_RXPSF_MHCHKEN | BIT_RXPSF_CCKRST |
2120 BIT_RXPSF_CONT_ERRCHKEN);
2121 value16 = BIT_SET_RXPSF_ERRTHR(value16, 0x07);
2122 rtw_write16(rtwdev, REG_RXPSF_CTRL, value16);
2123 rtw_write8_set(rtwdev, REG_SND_PTCL_CTRL,
2124 BIT_DIS_CHK_VHTSIGB_CRC);
2125
2126 /* Interrupt migration configuration */
2127 rtw_write32(rtwdev, REG_INT_MIG, WLAN_MAC_INT_MIG_CFG);
2128
2129 return 0;
2130 }
2131
2132 #define FWCD_SIZE_REG_8822C 0x2000
2133 #define FWCD_SIZE_DMEM_8822C 0x10000
2134 #define FWCD_SIZE_IMEM_8822C 0x10000
2135 #define FWCD_SIZE_EMEM_8822C 0x20000
2136 #define FWCD_SIZE_ROM_8822C 0x10000
2137
2138 static const u32 __fwcd_segs_8822c[] = {
2139 FWCD_SIZE_REG_8822C,
2140 FWCD_SIZE_DMEM_8822C,
2141 FWCD_SIZE_IMEM_8822C,
2142 FWCD_SIZE_EMEM_8822C,
2143 FWCD_SIZE_ROM_8822C,
2144 };
2145
2146 static const struct rtw_fwcd_segs rtw8822c_fwcd_segs = {
2147 .segs = __fwcd_segs_8822c,
2148 .num = ARRAY_SIZE(__fwcd_segs_8822c),
2149 };
2150
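/* Dump the firmware crash context: the MAC register window followed by the
 * DMEM/IMEM/EMEM/ROM segments, using the sizes declared in
 * rtw8822c_fwcd_segs.
 */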
2151 static int rtw8822c_dump_fw_crash(struct rtw_dev *rtwdev)
2152 {
2153 #define __dump_fw_8822c(_dev, _mem) \
2154 rtw_dump_fw(_dev, OCPBASE_ ## _mem ## _88XX, \
2155 FWCD_SIZE_ ## _mem ## _8822C, RTW_FWCD_ ## _mem)
2156 int ret;
2157
2158 ret = rtw_dump_reg(rtwdev, 0x0, FWCD_SIZE_REG_8822C);
2159 if (ret)
2160 return ret;
2161 ret = __dump_fw_8822c(rtwdev, DMEM);
2162 if (ret)
2163 return ret;
2164 ret = __dump_fw_8822c(rtwdev, IMEM);
2165 if (ret)
2166 return ret;
2167 ret = __dump_fw_8822c(rtwdev, EMEM);
2168 if (ret)
2169 return ret;
2170 ret = __dump_fw_8822c(rtwdev, ROM);
2171 if (ret)
2172 return ret;
2173
2174 return 0;
2175
2176 #undef __dump_fw_8822c
2177 }
2178
2179 static void rtw8822c_rstb_3wire(struct rtw_dev *rtwdev, bool enable)
2180 {
2181 if (enable) {
2182 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x1);
2183 rtw_write32_mask(rtwdev, REG_ANAPAR_A, BIT_ANAPAR_UPDATE, 0x1);
2184 rtw_write32_mask(rtwdev, REG_ANAPAR_B, BIT_ANAPAR_UPDATE, 0x1);
2185 } else {
2186 rtw_write32_mask(rtwdev, REG_RSTB, BIT_RSTB_3WIRE, 0x0);
2187 }
2188 }
2189
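/* Program RF 0x18 (band, channel, bandwidth and RF/SI selection) and the RX
 * BB filter LUT on both paths. The 3-wire interface is released while RF
 * 0x18 is rewritten and re-enabled afterwards.
 */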
2190 static void rtw8822c_set_channel_rf(struct rtw_dev *rtwdev, u8 channel, u8 bw)
2191 {
2192 #define RF18_BAND_MASK (BIT(16) | BIT(9) | BIT(8))
2193 #define RF18_BAND_2G (0)
2194 #define RF18_BAND_5G (BIT(16) | BIT(8))
2195 #define RF18_CHANNEL_MASK (MASKBYTE0)
2196 #define RF18_RFSI_MASK (BIT(18) | BIT(17))
2197 #define RF18_RFSI_GE_CH80 (BIT(17))
2198 #define RF18_RFSI_GT_CH140 (BIT(18))
2199 #define RF18_BW_MASK (BIT(13) | BIT(12))
2200 #define RF18_BW_20M (BIT(13) | BIT(12))
2201 #define RF18_BW_40M (BIT(13))
2202 #define RF18_BW_80M (BIT(12))
2203
2204 u32 rf_reg18 = 0;
2205 u32 rf_rxbb = 0;
2206
2207 rf_reg18 = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
2208
2209 rf_reg18 &= ~(RF18_BAND_MASK | RF18_CHANNEL_MASK | RF18_RFSI_MASK |
2210 RF18_BW_MASK);
2211
2212 rf_reg18 |= (IS_CH_2G_BAND(channel) ? RF18_BAND_2G : RF18_BAND_5G);
2213 rf_reg18 |= (channel & RF18_CHANNEL_MASK);
2214 if (IS_CH_5G_BAND_4(channel))
2215 rf_reg18 |= RF18_RFSI_GT_CH140;
2216 else if (IS_CH_5G_BAND_3(channel))
2217 rf_reg18 |= RF18_RFSI_GE_CH80;
2218
2219 switch (bw) {
2220 case RTW_CHANNEL_WIDTH_5:
2221 case RTW_CHANNEL_WIDTH_10:
2222 case RTW_CHANNEL_WIDTH_20:
2223 default:
2224 rf_reg18 |= RF18_BW_20M;
2225 rf_rxbb = 0x18;
2226 break;
2227 case RTW_CHANNEL_WIDTH_40:
2228 /* RF bandwidth */
2229 rf_reg18 |= RF18_BW_40M;
2230 rf_rxbb = 0x10;
2231 break;
2232 case RTW_CHANNEL_WIDTH_80:
2233 rf_reg18 |= RF18_BW_80M;
2234 rf_rxbb = 0x8;
2235 break;
2236 }
2237
2238 rtw8822c_rstb_3wire(rtwdev, false);
2239
2240 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x01);
2241 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWA, 0x1f, 0x12);
2242 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWD0, 0xfffff, rf_rxbb);
2243 rtw_write_rf(rtwdev, RF_PATH_A, RF_LUTWE2, 0x04, 0x00);
2244
2245 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x01);
2246 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWA, 0x1f, 0x12);
2247 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWD0, 0xfffff, rf_rxbb);
2248 rtw_write_rf(rtwdev, RF_PATH_B, RF_LUTWE2, 0x04, 0x00);
2249
2250 rtw_write_rf(rtwdev, RF_PATH_A, RF_CFGCH, RFREG_MASK, rf_reg18);
2251 rtw_write_rf(rtwdev, RF_PATH_B, RF_CFGCH, RFREG_MASK, rf_reg18);
2252
2253 rtw8822c_rstb_3wire(rtwdev, true);
2254 }
2255
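/* Nudge the initial gain index on both paths down by 2 and back again so
 * that the updated AGC setting takes effect after a channel or TRX-mode
 * change.
 */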
2256 static void rtw8822c_toggle_igi(struct rtw_dev *rtwdev)
2257 {
2258 u32 igi;
2259
2260 igi = rtw_read32_mask(rtwdev, REG_RXIGI, 0x7f);
2261 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi - 2);
2262 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi - 2);
2263 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f, igi);
2264 rtw_write32_mask(rtwdev, REG_RXIGI, 0x7f00, igi);
2265 }
2266
2267 static void rtw8822c_set_channel_bb(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2268 u8 primary_ch_idx)
2269 {
2270 if (IS_CH_2G_BAND(channel)) {
2271 rtw_write32_clr(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2272 rtw_write32_set(rtwdev, REG_TXF4, BIT(20));
2273 rtw_write32_clr(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2274 rtw_write32_clr(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2275 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0xF);
2276
2277 switch (bw) {
2278 case RTW_CHANNEL_WIDTH_20:
2279 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2280 0x5);
2281 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2282 0x5);
2283 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2284 0x6);
2285 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2286 0x6);
2287 break;
2288 case RTW_CHANNEL_WIDTH_40:
2289 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_CCK,
2290 0x4);
2291 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_CCK,
2292 0x4);
2293 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2294 0x0);
2295 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2296 0x0);
2297 break;
2298 }
2299 if (channel == 13 || channel == 14)
2300 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x969);
2301 else if (channel == 11 || channel == 12)
2302 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x96a);
2303 else
2304 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x9aa);
2305 if (channel == 14) {
2306 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x3da0);
2307 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2308 0x4962c931);
2309 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x6aa3);
2310 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xaa7b);
2311 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xf3d7);
2312 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD, 0x0);
2313 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2314 0xff012455);
2315 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD, 0xffff);
2316 } else {
2317 rtw_write32_mask(rtwdev, REG_TXF0, MASKHWORD, 0x5284);
2318 rtw_write32_mask(rtwdev, REG_TXF1, MASKDWORD,
2319 0x3e18fec8);
2320 rtw_write32_mask(rtwdev, REG_TXF2, MASKLWORD, 0x0a88);
2321 rtw_write32_mask(rtwdev, REG_TXF3, MASKHWORD, 0xacc4);
2322 rtw_write32_mask(rtwdev, REG_TXF4, MASKLWORD, 0xc8b2);
2323 rtw_write32_mask(rtwdev, REG_TXF5, MASKDWORD,
2324 0x00faf0de);
2325 rtw_write32_mask(rtwdev, REG_TXF6, MASKDWORD,
2326 0x00122344);
2327 rtw_write32_mask(rtwdev, REG_TXF7, MASKDWORD,
2328 0x0fffffff);
2329 }
2330 if (channel == 13)
2331 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2332 else
2333 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x1);
2334 } else if (IS_CH_5G_BAND(channel)) {
2335 rtw_write32_set(rtwdev, REG_CCKTXONLY, BIT_BB_CCK_CHECK_EN);
2336 rtw_write32_set(rtwdev, REG_CCK_CHECK, BIT_CHECK_CCK_EN);
2337 rtw_write32_set(rtwdev, REG_BGCTRL, BITS_RX_IQ_WEIGHT);
2338 rtw_write32_clr(rtwdev, REG_TXF4, BIT(20));
2339 rtw_write32_mask(rtwdev, REG_CCAMSK, 0x3F000000, 0x22);
2340 rtw_write32_mask(rtwdev, REG_TXDFIR0, 0x70, 0x3);
2341 if (IS_CH_5G_BAND_1(channel) || IS_CH_5G_BAND_2(channel)) {
2342 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2343 0x1);
2344 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2345 0x1);
2346 } else if (IS_CH_5G_BAND_3(channel)) {
2347 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2348 0x2);
2349 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2350 0x2);
2351 } else if (IS_CH_5G_BAND_4(channel)) {
2352 rtw_write32_mask(rtwdev, REG_RXAGCCTL0, BITS_RXAGC_OFDM,
2353 0x3);
2354 rtw_write32_mask(rtwdev, REG_RXAGCCTL, BITS_RXAGC_OFDM,
2355 0x3);
2356 }
2357
2358 if (channel >= 36 && channel <= 51)
2359 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x494);
2360 else if (channel >= 52 && channel <= 55)
2361 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x493);
2362 else if (channel >= 56 && channel <= 111)
2363 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x453);
2364 else if (channel >= 112 && channel <= 119)
2365 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x452);
2366 else if (channel >= 120 && channel <= 172)
2367 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x412);
2368 else if (channel >= 173 && channel <= 177)
2369 rtw_write32_mask(rtwdev, REG_SCOTRK, 0xfff, 0x411);
2370 }
2371
2372 switch (bw) {
2373 case RTW_CHANNEL_WIDTH_20:
2374 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x19B);
2375 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2376 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x0);
2377 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x7);
2378 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x6);
2379 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2380 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2381 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2382 break;
2383 case RTW_CHANNEL_WIDTH_40:
2384 rtw_write32_mask(rtwdev, REG_CCKSB, BIT(4),
2385 (primary_ch_idx == RTW_SC_20_UPPER ? 1 : 0));
2386 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x5);
2387 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2388 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2389 (primary_ch_idx | (primary_ch_idx << 4)));
2390 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x1);
2391 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2392 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2393 break;
2394 case RTW_CHANNEL_WIDTH_80:
2395 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0xa);
2396 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xc0, 0x0);
2397 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xff00,
2398 (primary_ch_idx | (primary_ch_idx << 4)));
2399 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x6);
2400 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x1);
2401 break;
2402 case RTW_CHANNEL_WIDTH_5:
2403 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2404 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2405 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x1);
2406 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x4);
2407 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x4);
2408 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2409 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2410 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2411 break;
2412 case RTW_CHANNEL_WIDTH_10:
2413 rtw_write32_mask(rtwdev, REG_DFIRBW, 0x3FF0, 0x2AB);
2414 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xf, 0x0);
2415 rtw_write32_mask(rtwdev, REG_TXBWCTL, 0xffc0, 0x2);
2416 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700, 0x6);
2417 rtw_write32_mask(rtwdev, REG_TXCLK, 0x700000, 0x5);
2418 rtw_write32_mask(rtwdev, REG_CCK_SOURCE, BIT_NBI_EN, 0x0);
2419 rtw_write32_mask(rtwdev, REG_SBD, BITS_SUBTUNE, 0x1);
2420 rtw_write32_mask(rtwdev, REG_PT_CHSMO, BIT_PT_OPT, 0x0);
2421 break;
2422 }
2423 }
2424
2425 static void rtw8822c_set_channel(struct rtw_dev *rtwdev, u8 channel, u8 bw,
2426 u8 primary_chan_idx)
2427 {
2428 rtw8822c_set_channel_bb(rtwdev, channel, bw, primary_chan_idx);
2429 rtw_set_channel_mac(rtwdev, channel, bw, primary_chan_idx);
2430 rtw8822c_set_channel_rf(rtwdev, channel, bw);
2431 rtw8822c_toggle_igi(rtwdev);
2432 }
2433
2434 static void rtw8822c_config_cck_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2435 {
2436 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2437 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x0);
2438 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x0);
2439 } else if (rx_path == BB_PATH_AB) {
2440 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00600000, 0x1);
2441 rtw_write32_mask(rtwdev, REG_CCANRX, 0x00060000, 0x1);
2442 }
2443
2444 if (rx_path == BB_PATH_A)
2445 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x0);
2446 else if (rx_path == BB_PATH_B)
2447 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x5);
2448 else if (rx_path == BB_PATH_AB)
2449 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0x0f000000, 0x1);
2450 }
2451
2452 static void rtw8822c_config_ofdm_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2453 {
2454 if (rx_path == BB_PATH_A || rx_path == BB_PATH_B) {
2455 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x0);
2456 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x0);
2457 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x0);
2458 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x0);
2459 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x0);
2460 } else if (rx_path == BB_PATH_AB) {
2461 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x300, 0x1);
2462 rtw_write32_mask(rtwdev, REG_RXFNCTL, 0x600000, 0x1);
2463 rtw_write32_mask(rtwdev, REG_AGCSWSH, BIT(17), 0x1);
2464 rtw_write32_mask(rtwdev, REG_ANTWTPD, BIT(20), 0x1);
2465 rtw_write32_mask(rtwdev, REG_MRCM, BIT(24), 0x1);
2466 }
2467
2468 rtw_write32_mask(rtwdev, 0x824, 0x0f000000, rx_path);
2469 rtw_write32_mask(rtwdev, 0x824, 0x000f0000, rx_path);
2470 }
2471
2472 static void rtw8822c_config_rx_path(struct rtw_dev *rtwdev, u8 rx_path)
2473 {
2474 rtw8822c_config_cck_rx_path(rtwdev, rx_path);
2475 rtw8822c_config_ofdm_rx_path(rtwdev, rx_path);
2476 }
2477
2478 static void rtw8822c_config_cck_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2479 bool is_tx2_path)
2480 {
2481 if (tx_path == BB_PATH_A) {
2482 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2483 } else if (tx_path == BB_PATH_B) {
2484 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x4);
2485 } else {
2486 if (is_tx2_path)
2487 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0xc);
2488 else
2489 rtw_write32_mask(rtwdev, REG_RXCCKSEL, 0xf0000000, 0x8);
2490 }
2491 rtw8822c_bb_reset(rtwdev);
2492 }
2493
2494 static void rtw8822c_config_ofdm_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2495 enum rtw_bb_path tx_path_sel_1ss)
2496 {
2497 if (tx_path == BB_PATH_A) {
2498 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x11);
2499 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2500 } else if (tx_path == BB_PATH_B) {
2501 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x12);
2502 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xff, 0x0);
2503 } else {
2504 if (tx_path_sel_1ss == BB_PATH_AB) {
2505 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x33);
2506 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0404);
2507 } else if (tx_path_sel_1ss == BB_PATH_B) {
2508 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x32);
2509 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2510 } else if (tx_path_sel_1ss == BB_PATH_A) {
2511 rtw_write32_mask(rtwdev, REG_ANTMAP0, 0xff, 0x31);
2512 rtw_write32_mask(rtwdev, REG_TXLGMAP, 0xffff, 0x0400);
2513 }
2514 }
2515 rtw8822c_bb_reset(rtwdev);
2516 }
2517
2518 static void rtw8822c_config_tx_path(struct rtw_dev *rtwdev, u8 tx_path,
2519 enum rtw_bb_path tx_path_sel_1ss,
2520 enum rtw_bb_path tx_path_cck,
2521 bool is_tx2_path)
2522 {
2523 rtw8822c_config_cck_tx_path(rtwdev, tx_path_cck, is_tx2_path);
2524 rtw8822c_config_ofdm_tx_path(rtwdev, tx_path, tx_path_sel_1ss);
2525 rtw8822c_bb_reset(rtwdev);
2526 }
2527
2528 static void rtw8822c_config_trx_mode(struct rtw_dev *rtwdev, u8 tx_path,
2529 u8 rx_path, bool is_tx2_path)
2530 {
2531 if ((tx_path | rx_path) & BB_PATH_A)
2532 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x33312);
2533 else
2534 rtw_write32_mask(rtwdev, REG_ORITXCODE, MASK20BITS, 0x11111);
2535 if ((tx_path | rx_path) & BB_PATH_B)
2536 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x33312);
2537 else
2538 rtw_write32_mask(rtwdev, REG_ORITXCODE2, MASK20BITS, 0x11111);
2539
2540 rtw8822c_config_rx_path(rtwdev, rx_path);
2541 rtw8822c_config_tx_path(rtwdev, tx_path, BB_PATH_A, BB_PATH_A,
2542 is_tx2_path);
2543
2544 rtw8822c_toggle_igi(rtwdev);
2545 }
2546
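/* Parse a page-0 (CCK) phy status: convert the reported power and gain into
 * per-path RX power in dBm (offset by -110, corrected against the CCK gain
 * index boundaries), derive RSSI, and record the RX channel with a fixed
 * 20 MHz bandwidth.
 */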
2547 static void query_phy_status_page0(struct rtw_dev *rtwdev, u8 *phy_status,
2548 struct rtw_rx_pkt_stat *pkt_stat)
2549 {
2550 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2551 u8 l_bnd, u_bnd;
2552 u8 gain_a, gain_b;
2553 s8 rx_power[RTW_RF_PATH_MAX];
2554 s8 min_rx_power = -120;
2555 u8 rssi;
2556 u8 channel;
2557 int path;
2558
2559 rx_power[RF_PATH_A] = GET_PHY_STAT_P0_PWDB_A(phy_status);
2560 rx_power[RF_PATH_B] = GET_PHY_STAT_P0_PWDB_B(phy_status);
2561 l_bnd = dm_info->cck_gi_l_bnd;
2562 u_bnd = dm_info->cck_gi_u_bnd;
2563 gain_a = GET_PHY_STAT_P0_GAIN_A(phy_status);
2564 gain_b = GET_PHY_STAT_P0_GAIN_B(phy_status);
2565 if (gain_a < l_bnd)
2566 rx_power[RF_PATH_A] += (l_bnd - gain_a) << 1;
2567 else if (gain_a > u_bnd)
2568 rx_power[RF_PATH_A] -= (gain_a - u_bnd) << 1;
2569 if (gain_b < l_bnd)
2570 rx_power[RF_PATH_B] += (l_bnd - gain_b) << 1;
2571 else if (gain_b > u_bnd)
2572 rx_power[RF_PATH_B] -= (gain_b - u_bnd) << 1;
2573
2574 rx_power[RF_PATH_A] -= 110;
2575 rx_power[RF_PATH_B] -= 110;
2576
2577 channel = GET_PHY_STAT_P0_CHANNEL(phy_status);
2578 if (channel == 0)
2579 channel = rtwdev->hal.current_channel;
2580 rtw_set_rx_freq_band(pkt_stat, channel);
2581
2582 pkt_stat->rx_power[RF_PATH_A] = rx_power[RF_PATH_A];
2583 pkt_stat->rx_power[RF_PATH_B] = rx_power[RF_PATH_B];
2584
2585 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
2586 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2587 dm_info->rssi[path] = rssi;
2588 }
2589
2590 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 1);
2591 pkt_stat->bw = RTW_CHANNEL_WIDTH_20;
2592 pkt_stat->signal_power = max(pkt_stat->rx_power[RF_PATH_A],
2593 min_rx_power);
2594 }
2595
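/* Parse a page-1 (OFDM/HT/VHT) phy status: derive the bandwidth from the RX
 * subchannel field, fill per-path power, EVM, SNR and CFO, feed the path
 * diversity statistics, and hand the CFO samples to the tracking code.
 */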
2596 static void query_phy_status_page1(struct rtw_dev *rtwdev, u8 *phy_status,
2597 struct rtw_rx_pkt_stat *pkt_stat)
2598 {
2599 struct rtw_path_div *p_div = &rtwdev->dm_path_div;
2600 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2601 u8 rxsc, bw;
2602 s8 min_rx_power = -120;
2603 s8 rx_evm;
2604 u8 evm_dbm = 0;
2605 u8 rssi;
2606 int path;
2607 u8 channel;
2608
2609 if (pkt_stat->rate > DESC_RATE11M && pkt_stat->rate < DESC_RATEMCS0)
2610 rxsc = GET_PHY_STAT_P1_L_RXSC(phy_status);
2611 else
2612 rxsc = GET_PHY_STAT_P1_HT_RXSC(phy_status);
2613
2614 if (rxsc == 0)
2615 bw = rtwdev->hal.current_band_width;
2616 else if (rxsc >= 1 && rxsc <= 8)
2617 bw = RTW_CHANNEL_WIDTH_20;
2618 else if (rxsc >= 9 && rxsc <= 12)
2619 bw = RTW_CHANNEL_WIDTH_40;
2620 else
2621 bw = RTW_CHANNEL_WIDTH_80;
2622
2623 channel = GET_PHY_STAT_P1_CHANNEL(phy_status);
2624 rtw_set_rx_freq_band(pkt_stat, channel);
2625
2626 pkt_stat->rx_power[RF_PATH_A] = GET_PHY_STAT_P1_PWDB_A(phy_status) - 110;
2627 pkt_stat->rx_power[RF_PATH_B] = GET_PHY_STAT_P1_PWDB_B(phy_status) - 110;
2628 pkt_stat->rssi = rtw_phy_rf_power_2_rssi(pkt_stat->rx_power, 2);
2629 pkt_stat->bw = bw;
2630 pkt_stat->signal_power = max3(pkt_stat->rx_power[RF_PATH_A],
2631 pkt_stat->rx_power[RF_PATH_B],
2632 min_rx_power);
2633
2634 dm_info->curr_rx_rate = pkt_stat->rate;
2635
2636 pkt_stat->rx_evm[RF_PATH_A] = GET_PHY_STAT_P1_RXEVM_A(phy_status);
2637 pkt_stat->rx_evm[RF_PATH_B] = GET_PHY_STAT_P1_RXEVM_B(phy_status);
2638
2639 pkt_stat->rx_snr[RF_PATH_A] = GET_PHY_STAT_P1_RXSNR_A(phy_status);
2640 pkt_stat->rx_snr[RF_PATH_B] = GET_PHY_STAT_P1_RXSNR_B(phy_status);
2641
2642 pkt_stat->cfo_tail[RF_PATH_A] = GET_PHY_STAT_P1_CFO_TAIL_A(phy_status);
2643 pkt_stat->cfo_tail[RF_PATH_B] = GET_PHY_STAT_P1_CFO_TAIL_B(phy_status);
2644
2645 	for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
2646 rssi = rtw_phy_rf_power_2_rssi(&pkt_stat->rx_power[path], 1);
2647 dm_info->rssi[path] = rssi;
2648 if (path == RF_PATH_A) {
2649 p_div->path_a_sum += rssi;
2650 p_div->path_a_cnt++;
2651 } else if (path == RF_PATH_B) {
2652 p_div->path_b_sum += rssi;
2653 p_div->path_b_cnt++;
2654 }
2655 dm_info->rx_snr[path] = pkt_stat->rx_snr[path] >> 1;
2656 dm_info->cfo_tail[path] = (pkt_stat->cfo_tail[path] * 5) >> 1;
2657
2658 rx_evm = pkt_stat->rx_evm[path];
2659
2660 if (rx_evm < 0) {
2661 if (rx_evm == S8_MIN)
2662 evm_dbm = 0;
2663 else
2664 evm_dbm = ((u8)-rx_evm >> 1);
2665 }
2666 dm_info->rx_evm_dbm[path] = evm_dbm;
2667 }
2668 rtw_phy_parsing_cfo(rtwdev, pkt_stat);
2669 }
2670
2671 static void query_phy_status(struct rtw_dev *rtwdev, u8 *phy_status,
2672 struct rtw_rx_pkt_stat *pkt_stat)
2673 {
2674 u8 page;
2675
2676 page = *phy_status & 0xf;
2677
2678 switch (page) {
2679 case 0:
2680 query_phy_status_page0(rtwdev, phy_status, pkt_stat);
2681 break;
2682 case 1:
2683 query_phy_status_page1(rtwdev, phy_status, pkt_stat);
2684 break;
2685 default:
2686 rtw_warn(rtwdev, "unused phy status page (%d)\n", page);
2687 return;
2688 }
2689 }
2690
2691 static void rtw8822c_query_rx_desc(struct rtw_dev *rtwdev, u8 *rx_desc,
2692 struct rtw_rx_pkt_stat *pkt_stat,
2693 struct ieee80211_rx_status *rx_status)
2694 {
2695 struct ieee80211_hdr *hdr;
2696 u32 desc_sz = rtwdev->chip->rx_pkt_desc_sz;
2697 u8 *phy_status = NULL;
2698
2699 memset(pkt_stat, 0, sizeof(*pkt_stat));
2700
2701 pkt_stat->phy_status = GET_RX_DESC_PHYST(rx_desc);
2702 pkt_stat->icv_err = GET_RX_DESC_ICV_ERR(rx_desc);
2703 pkt_stat->crc_err = GET_RX_DESC_CRC32(rx_desc);
2704 pkt_stat->decrypted = !GET_RX_DESC_SWDEC(rx_desc) &&
2705 GET_RX_DESC_ENC_TYPE(rx_desc) != RX_DESC_ENC_NONE;
2706 pkt_stat->is_c2h = GET_RX_DESC_C2H(rx_desc);
2707 pkt_stat->pkt_len = GET_RX_DESC_PKT_LEN(rx_desc);
2708 pkt_stat->drv_info_sz = GET_RX_DESC_DRV_INFO_SIZE(rx_desc);
2709 pkt_stat->shift = GET_RX_DESC_SHIFT(rx_desc);
2710 pkt_stat->rate = GET_RX_DESC_RX_RATE(rx_desc);
2711 pkt_stat->cam_id = GET_RX_DESC_MACID(rx_desc);
2712 pkt_stat->ppdu_cnt = GET_RX_DESC_PPDU_CNT(rx_desc);
2713 pkt_stat->tsf_low = GET_RX_DESC_TSFL(rx_desc);
2714
2715 	/* drv_info_sz is in units of 8 bytes */
2716 pkt_stat->drv_info_sz *= 8;
2717
2718 	/* the rx/phy status of a C2H command packet is of no interest */
2719 if (pkt_stat->is_c2h)
2720 return;
2721
2722 hdr = (struct ieee80211_hdr *)(rx_desc + desc_sz + pkt_stat->shift +
2723 pkt_stat->drv_info_sz);
2724 pkt_stat->hdr = hdr;
2725 if (pkt_stat->phy_status) {
2726 phy_status = rx_desc + desc_sz + pkt_stat->shift;
2727 query_phy_status(rtwdev, phy_status, pkt_stat);
2728 }
2729
2730 rtw_rx_fill_rx_status(rtwdev, pkt_stat, hdr, rx_status, phy_status);
2731 }
2732
2733 static void
2734 rtw8822c_set_write_tx_power_ref(struct rtw_dev *rtwdev, u8 *tx_pwr_ref_cck,
2735 u8 *tx_pwr_ref_ofdm)
2736 {
2737 struct rtw_hal *hal = &rtwdev->hal;
2738 u32 txref_cck[2] = {0x18a0, 0x41a0};
2739 u32 txref_ofdm[2] = {0x18e8, 0x41e8};
2740 u8 path;
2741
2742 for (path = 0; path < hal->rf_path_num; path++) {
2743 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2744 rtw_write32_mask(rtwdev, txref_cck[path], 0x7f0000,
2745 tx_pwr_ref_cck[path]);
2746 }
2747 for (path = 0; path < hal->rf_path_num; path++) {
2748 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0);
2749 rtw_write32_mask(rtwdev, txref_ofdm[path], 0x1fc00,
2750 tx_pwr_ref_ofdm[path]);
2751 }
2752 }
2753
2754 static void rtw8822c_set_tx_power_diff(struct rtw_dev *rtwdev, u8 rate,
2755 s8 *diff_idx)
2756 {
2757 u32 offset_txagc = 0x3a00;
2758 u8 rate_idx = rate & 0xfc;
2759 u8 pwr_idx[4];
2760 u32 phy_pwr_idx;
2761 int i;
2762
2763 for (i = 0; i < 4; i++)
2764 pwr_idx[i] = diff_idx[i] & 0x7f;
2765
2766 phy_pwr_idx = pwr_idx[0] |
2767 (pwr_idx[1] << 8) |
2768 (pwr_idx[2] << 16) |
2769 (pwr_idx[3] << 24);
2770
2771 rtw_write32_mask(rtwdev, 0x1c90, BIT(15), 0x0);
2772 rtw_write32_mask(rtwdev, offset_txagc + rate_idx, MASKDWORD,
2773 phy_pwr_idx);
2774 }
2775
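/* Write the TX power references (11M for CCK, MCS7 for OFDM) for each path,
 * then program the per-rate power differences against those references,
 * using the smaller of the two paths' differences and packing four rates
 * into each 32-bit TX AGC register at offset 0x3a00.
 */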
2776 static void rtw8822c_set_tx_power_index(struct rtw_dev *rtwdev)
2777 {
2778 struct rtw_hal *hal = &rtwdev->hal;
2779 u8 rs, rate, j;
2780 u8 pwr_ref_cck[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATE11M],
2781 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATE11M]};
2782 u8 pwr_ref_ofdm[2] = {hal->tx_pwr_tbl[RF_PATH_A][DESC_RATEMCS7],
2783 hal->tx_pwr_tbl[RF_PATH_B][DESC_RATEMCS7]};
2784 s8 diff_a, diff_b;
2785 u8 pwr_a, pwr_b;
2786 s8 diff_idx[4];
2787
2788 rtw8822c_set_write_tx_power_ref(rtwdev, pwr_ref_cck, pwr_ref_ofdm);
2789 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++) {
2790 for (j = 0; j < rtw_rate_size[rs]; j++) {
2791 rate = rtw_rate_section[rs][j];
2792 pwr_a = hal->tx_pwr_tbl[RF_PATH_A][rate];
2793 pwr_b = hal->tx_pwr_tbl[RF_PATH_B][rate];
2794 if (rs == 0) {
2795 diff_a = (s8)pwr_a - (s8)pwr_ref_cck[0];
2796 diff_b = (s8)pwr_b - (s8)pwr_ref_cck[1];
2797 } else {
2798 diff_a = (s8)pwr_a - (s8)pwr_ref_ofdm[0];
2799 diff_b = (s8)pwr_b - (s8)pwr_ref_ofdm[1];
2800 }
2801 diff_idx[rate % 4] = min(diff_a, diff_b);
2802 if (rate % 4 == 3)
2803 rtw8822c_set_tx_power_diff(rtwdev, rate - 3,
2804 diff_idx);
2805 }
2806 }
2807 }
2808
2809 static int rtw8822c_set_antenna(struct rtw_dev *rtwdev,
2810 u32 antenna_tx,
2811 u32 antenna_rx)
2812 {
2813 struct rtw_hal *hal = &rtwdev->hal;
2814
2815 switch (antenna_tx) {
2816 case BB_PATH_A:
2817 case BB_PATH_B:
2818 case BB_PATH_AB:
2819 break;
2820 default:
2821 rtw_warn(rtwdev, "unsupported tx path 0x%x\n", antenna_tx);
2822 return -EINVAL;
2823 }
2824
2825 	/* RX on path B alone is not supported */
2826 switch (antenna_rx) {
2827 case BB_PATH_A:
2828 case BB_PATH_AB:
2829 break;
2830 default:
2831 rtw_warn(rtwdev, "unsupported rx path 0x%x\n", antenna_rx);
2832 return -EINVAL;
2833 }
2834
2835 hal->antenna_tx = antenna_tx;
2836 hal->antenna_rx = antenna_rx;
2837
2838 rtw8822c_config_trx_mode(rtwdev, antenna_tx, antenna_rx, false);
2839
2840 return 0;
2841 }
2842
2843 static void rtw8822c_cfg_ldo25(struct rtw_dev *rtwdev, bool enable)
2844 {
2845 u8 ldo_pwr;
2846
2847 ldo_pwr = rtw_read8(rtwdev, REG_ANAPARLDO_POW_MAC);
2848 ldo_pwr = enable ? ldo_pwr | BIT_LDOE25_PON : ldo_pwr & ~BIT_LDOE25_PON;
2849 rtw_write8(rtwdev, REG_ANAPARLDO_POW_MAC, ldo_pwr);
2850 }
2851
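/* Collect the CCK/OFDM false-alarm, CRC32 and CCA counters into dm_info and
 * reset the hardware counters: the FA counters via their reset bits, the
 * CRC/CCA counters by toggling RX clock gating around the all-counter reset.
 */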
2852 static void rtw8822c_false_alarm_statistics(struct rtw_dev *rtwdev)
2853 {
2854 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
2855 u32 cck_enable;
2856 u32 cck_fa_cnt;
2857 u32 crc32_cnt;
2858 u32 cca32_cnt;
2859 u32 ofdm_fa_cnt;
2860 u32 ofdm_fa_cnt1, ofdm_fa_cnt2, ofdm_fa_cnt3, ofdm_fa_cnt4, ofdm_fa_cnt5;
2861 u16 parity_fail, rate_illegal, crc8_fail, mcs_fail, sb_search_fail,
2862 fast_fsync, crc8_fail_vhta, mcs_fail_vht;
2863
2864 cck_enable = rtw_read32(rtwdev, REG_ENCCK) & BIT_CCK_BLK_EN;
2865 cck_fa_cnt = rtw_read16(rtwdev, REG_CCK_FACNT);
2866
2867 ofdm_fa_cnt1 = rtw_read32(rtwdev, REG_OFDM_FACNT1);
2868 ofdm_fa_cnt2 = rtw_read32(rtwdev, REG_OFDM_FACNT2);
2869 ofdm_fa_cnt3 = rtw_read32(rtwdev, REG_OFDM_FACNT3);
2870 ofdm_fa_cnt4 = rtw_read32(rtwdev, REG_OFDM_FACNT4);
2871 ofdm_fa_cnt5 = rtw_read32(rtwdev, REG_OFDM_FACNT5);
2872
2873 parity_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt1);
2874 rate_illegal = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt2);
2875 crc8_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt2);
2876 crc8_fail_vhta = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt3);
2877 mcs_fail = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt4);
2878 mcs_fail_vht = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt4);
2879 fast_fsync = FIELD_GET(GENMASK(15, 0), ofdm_fa_cnt5);
2880 sb_search_fail = FIELD_GET(GENMASK(31, 16), ofdm_fa_cnt5);
2881
2882 ofdm_fa_cnt = parity_fail + rate_illegal + crc8_fail + crc8_fail_vhta +
2883 mcs_fail + mcs_fail_vht + fast_fsync + sb_search_fail;
2884
2885 dm_info->cck_fa_cnt = cck_fa_cnt;
2886 dm_info->ofdm_fa_cnt = ofdm_fa_cnt;
2887 dm_info->total_fa_cnt = ofdm_fa_cnt;
2888 dm_info->total_fa_cnt += cck_enable ? cck_fa_cnt : 0;
2889
2890 crc32_cnt = rtw_read32(rtwdev, 0x2c04);
2891 dm_info->cck_ok_cnt = crc32_cnt & 0xffff;
2892 dm_info->cck_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2893 crc32_cnt = rtw_read32(rtwdev, 0x2c14);
2894 dm_info->ofdm_ok_cnt = crc32_cnt & 0xffff;
2895 dm_info->ofdm_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2896 crc32_cnt = rtw_read32(rtwdev, 0x2c10);
2897 dm_info->ht_ok_cnt = crc32_cnt & 0xffff;
2898 dm_info->ht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2899 crc32_cnt = rtw_read32(rtwdev, 0x2c0c);
2900 dm_info->vht_ok_cnt = crc32_cnt & 0xffff;
2901 dm_info->vht_err_cnt = (crc32_cnt & 0xffff0000) >> 16;
2902
2903 cca32_cnt = rtw_read32(rtwdev, 0x2c08);
2904 dm_info->ofdm_cca_cnt = ((cca32_cnt & 0xffff0000) >> 16);
2905 dm_info->cck_cca_cnt = cca32_cnt & 0xffff;
2906 dm_info->total_cca_cnt = dm_info->ofdm_cca_cnt;
2907 if (cck_enable)
2908 dm_info->total_cca_cnt += dm_info->cck_cca_cnt;
2909
2910 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 0);
2911 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_CCK_FA_RST, 2);
2912 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 0);
2913 rtw_write32_mask(rtwdev, REG_CCANRX, BIT_OFDM_FA_RST, 2);
2914
2915 /* disable rx clk gating to reset counters */
2916 rtw_write32_clr(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2917 rtw_write32_set(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2918 rtw_write32_clr(rtwdev, REG_CNT_CTRL, BIT_ALL_CNT_RST);
2919 rtw_write32_set(rtwdev, REG_RX_BREAK, BIT_COM_RX_GCK_EN);
2920 }
2921
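/* Synthesizer LCK: start the AAC calibration on path A, poll the AAC status
 * in RF_AAC_CTRL until it changes (up to 100 ms), then pulse the fast-lock
 * sequence via RF_FAST_LCK.
 */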
2922 static void rtw8822c_do_lck(struct rtw_dev *rtwdev)
2923 {
2924 u32 val;
2925
2926 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2927 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0FA);
2928 fsleep(1);
2929 rtw_write_rf(rtwdev, RF_PATH_A, RF_AAC_CTRL, RFREG_MASK, 0x80000);
2930 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_AAC, RFREG_MASK, 0x80001);
2931 read_poll_timeout(rtw_read_rf, val, val != 0x1, 1000, 100000,
2932 true, rtwdev, RF_PATH_A, RF_AAC_CTRL, 0x1000);
2933 rtw_write_rf(rtwdev, RF_PATH_A, RF_SYN_PFD, RFREG_MASK, 0x1F0F8);
2934 rtw_write_rf(rtwdev, RF_PATH_B, RF_SYN_CTRL, RFREG_MASK, 0x80010);
2935
2936 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2937 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x4f000);
2938 fsleep(1);
2939 rtw_write_rf(rtwdev, RF_PATH_A, RF_FAST_LCK, RFREG_MASK, 0x0f000);
2940 }
2941
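/* Ask the firmware to run IQ calibration and poll REG_RPT_CIP for the
 * IQK_DONE_8822C pattern (up to 300 ms) before clearing the IQK status.
 */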
2942 static void rtw8822c_do_iqk(struct rtw_dev *rtwdev)
2943 {
2944 struct rtw_iqk_para para = {0};
2945 u8 iqk_chk;
2946 int ret;
2947
2948 para.clear = 1;
2949 rtw_fw_do_iqk(rtwdev, ¶);
2950
2951 ret = read_poll_timeout(rtw_read8, iqk_chk, iqk_chk == IQK_DONE_8822C,
2952 20000, 300000, false, rtwdev, REG_RPT_CIP);
2953 if (ret)
2954 rtw_warn(rtwdev, "failed to poll iqk status bit\n");
2955
2956 rtw_write8(rtwdev, REG_IQKSTAT, 0x0);
2957 }
2958
2959 /* for coex */
2960 static void rtw8822c_coex_cfg_init(struct rtw_dev *rtwdev)
2961 {
2962 	/* enable TBTT interrupt */
2963 rtw_write8_set(rtwdev, REG_BCN_CTRL, BIT_EN_BCN_FUNCTION);
2964
2965 /* BT report packet sample rate */
2966 /* 0x790[5:0]=0x5 */
2967 rtw_write8_mask(rtwdev, REG_BT_TDMA_TIME, BIT_MASK_SAMPLE_RATE, 0x5);
2968
2969 /* enable BT counter statistics */
2970 rtw_write8(rtwdev, REG_BT_STAT_CTRL, 0x1);
2971
2972 	/* enable PTA (3-wire function from the BT side) */
2973 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_BT_PTA_EN);
2974 rtw_write32_set(rtwdev, REG_GPIO_MUXCFG, BIT_PO_BT_PTA_PINS);
2975
2976 	/* enable PTA (tx/rx signal from the WiFi side) */
2977 rtw_write8_set(rtwdev, REG_QUEUE_CTRL, BIT_PTA_WL_TX_EN);
2978 	/* WL tx signal to PTA does not take EDCCA into account */
2979 rtw_write8_clr(rtwdev, REG_QUEUE_CTRL, BIT_PTA_EDCCA_EN);
2980 	/* GNT_BT = 1 when both are selected */
2981 rtw_write16_set(rtwdev, REG_BT_COEX_V2, BIT_GNT_BT_POLARITY);
2982 /* BT_CCA = ~GNT_WL_BB, not or GNT_BT_BB, LTE_Rx */
2983 rtw_write8_clr(rtwdev, REG_DUMMY_PAGE4_V1, BIT_BTCCA_CTRL);
2984
2985 /* to avoid RF parameter error */
2986 rtw_write_rf(rtwdev, RF_PATH_B, RF_MODOPT, 0xfffff, 0x40000);
2987 }
2988
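/* Apply the GNT_BT related RF/BB workarounds whenever the WL coex mode
 * changes: pick the RF 0x1 TRX mask depending on cut version, 5G state and
 * antenna sharing, then set or clear the "ignore GNT_BT" and "no TX-BT
 * mask" bits per mode as described in the comments below.
 */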
2989 static void rtw8822c_coex_cfg_gnt_fix(struct rtw_dev *rtwdev)
2990 {
2991 struct rtw_coex *coex = &rtwdev->coex;
2992 struct rtw_coex_stat *coex_stat = &coex->stat;
2993 struct rtw_efuse *efuse = &rtwdev->efuse;
2994 u32 rf_0x1;
2995
2996 if (coex_stat->gnt_workaround_state == coex_stat->wl_coex_mode)
2997 return;
2998
2999 coex_stat->gnt_workaround_state = coex_stat->wl_coex_mode;
3000
3001 if ((coex_stat->kt_ver == 0 && coex->under_5g) || coex->freerun)
3002 rf_0x1 = 0x40021;
3003 else
3004 rf_0x1 = 0x40000;
3005
3006 /* BT at S1 for Shared-Ant */
3007 if (efuse->share_ant)
3008 rf_0x1 |= BIT(13);
3009
3010 rtw_write_rf(rtwdev, RF_PATH_B, 0x1, 0xfffff, rf_0x1);
3011
3012 /* WL-S0 2G RF TRX cannot be masked by GNT_BT
3013 	 * enable "WLS0 BB change RF mode if GNT_BT = 1" for shared-antenna type
3014 * disable:0x1860[3] = 1, enable:0x1860[3] = 0
3015 *
3016 * enable "DAC off if GNT_WL = 0" for non-shared-antenna
3017 * disable 0x1c30[22] = 0,
3018 * enable: 0x1c30[22] = 1, 0x1c38[12] = 0, 0x1c38[28] = 1
3019 */
3020 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3021 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3022 BIT_ANAPAR_BTPS >> 16, 0);
3023 } else {
3024 rtw_write8_mask(rtwdev, REG_ANAPAR + 2,
3025 BIT_ANAPAR_BTPS >> 16, 1);
3026 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 1,
3027 BIT_DAC_OFF_ENABLE, 0);
3028 rtw_write8_mask(rtwdev, REG_RSTB_SEL + 3,
3029 BIT_DAC_OFF_ENABLE, 1);
3030 }
3031
3032 	/* disable WL-S1 BB change RF mode if GNT_BT
3033 * since RF TRx mask can do it
3034 */
3035 rtw_write8_mask(rtwdev, REG_IGN_GNTBT4,
3036 BIT_PI_IGNORE_GNT_BT, 1);
3037
3038 	/* disable WL-S0 BB change RF mode if wifi is at 5G,
3039 * or antenna path is separated
3040 */
3041 if (coex_stat->wl_coex_mode == COEX_WLINK_2GFREE) {
3042 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3043 BIT_PI_IGNORE_GNT_BT, 1);
3044 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3045 BIT_NOMASK_TXBT_ENABLE, 1);
3046 } else if (coex_stat->wl_coex_mode == COEX_WLINK_5G ||
3047 coex->under_5g || !efuse->share_ant) {
3048 if (coex_stat->kt_ver >= 3) {
3049 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3050 BIT_PI_IGNORE_GNT_BT, 0);
3051 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3052 BIT_NOMASK_TXBT_ENABLE, 1);
3053 } else {
3054 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3055 BIT_PI_IGNORE_GNT_BT, 1);
3056 }
3057 } else {
3058 /* shared-antenna */
3059 rtw_write8_mask(rtwdev, REG_IGN_GNT_BT1,
3060 BIT_PI_IGNORE_GNT_BT, 0);
3061 if (coex_stat->kt_ver >= 3) {
3062 rtw_write8_mask(rtwdev, REG_NOMASK_TXBT,
3063 BIT_NOMASK_TXBT_ENABLE, 0);
3064 }
3065 }
3066 }
3067
3068 static void rtw8822c_coex_cfg_gnt_debug(struct rtw_dev *rtwdev)
3069 {
3070 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 2, BIT_BTGP_SPI_EN >> 16, 0);
3071 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 3, BIT_BTGP_JTAG_EN >> 24, 0);
3072 rtw_write8_mask(rtwdev, REG_GPIO_MUXCFG + 2, BIT_FSPI_EN >> 16, 0);
3073 rtw_write8_mask(rtwdev, REG_PAD_CTRL1 + 1, BIT_LED1DIS >> 8, 0);
3074 rtw_write8_mask(rtwdev, REG_SYS_SDIO_CTRL + 3, BIT_DBG_GNT_WL_BT >> 24, 0);
3075 }
3076
3077 static void rtw8822c_coex_cfg_rfe_type(struct rtw_dev *rtwdev)
3078 {
3079 struct rtw_coex *coex = &rtwdev->coex;
3080 struct rtw_coex_rfe *coex_rfe = &coex->rfe;
3081 struct rtw_efuse *efuse = &rtwdev->efuse;
3082
3083 coex_rfe->rfe_module_type = rtwdev->efuse.rfe_option;
3084 coex_rfe->ant_switch_polarity = 0;
3085 coex_rfe->ant_switch_exist = false;
3086 coex_rfe->ant_switch_with_bt = false;
3087 coex_rfe->ant_switch_diversity = false;
3088
3089 if (efuse->share_ant)
3090 coex_rfe->wlg_at_btg = true;
3091 else
3092 coex_rfe->wlg_at_btg = false;
3093
3094 	/* disable LTE coex on the WiFi side */
3095 rtw_coex_write_indirect_reg(rtwdev, LTE_COEX_CTRL, BIT_LTE_COEX_EN, 0x0);
3096 rtw_coex_write_indirect_reg(rtwdev, LTE_WL_TRX_CTRL, MASKLWORD, 0xffff);
3097 rtw_coex_write_indirect_reg(rtwdev, LTE_BT_TRX_CTRL, MASKLWORD, 0xffff);
3098 }
3099
3100 static void rtw8822c_coex_cfg_wl_tx_power(struct rtw_dev *rtwdev, u8 wl_pwr)
3101 {
3102 struct rtw_coex *coex = &rtwdev->coex;
3103 struct rtw_coex_dm *coex_dm = &coex->dm;
3104
3105 if (wl_pwr == coex_dm->cur_wl_pwr_lvl)
3106 return;
3107
3108 coex_dm->cur_wl_pwr_lvl = wl_pwr;
3109 }
3110
3111 static void rtw8822c_coex_cfg_wl_rx_gain(struct rtw_dev *rtwdev, bool low_gain)
3112 {
3113 struct rtw_coex *coex = &rtwdev->coex;
3114 struct rtw_coex_dm *coex_dm = &coex->dm;
3115
3116 if (low_gain == coex_dm->cur_wl_rx_low_gain_en)
3117 return;
3118
3119 coex_dm->cur_wl_rx_low_gain_en = low_gain;
3120
3121 if (coex_dm->cur_wl_rx_low_gain_en) {
3122 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table On!\n");
3123
3124 /* set Rx filter corner RCK offset */
3125 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x22);
3126 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x36);
3127 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x22);
3128 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x36);
3129
3130 } else {
3131 rtw_dbg(rtwdev, RTW_DBG_COEX, "[BTCoex], Hi-Li Table Off!\n");
3132
3133 /* set Rx filter corner RCK offset */
3134 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCKD, RFREG_MASK, 0x20);
3135 rtw_write_rf(rtwdev, RF_PATH_A, RF_RCK, RFREG_MASK, 0x0);
3136 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCKD, RFREG_MASK, 0x20);
3137 rtw_write_rf(rtwdev, RF_PATH_B, RF_RCK, RFREG_MASK, 0x0);
3138 }
3139 }
3140
3141 static void rtw8822c_bf_enable_bfee_su(struct rtw_dev *rtwdev,
3142 struct rtw_vif *vif,
3143 struct rtw_bfee *bfee)
3144 {
3145 u8 csi_rsc = 0;
3146 u32 tmp6dc;
3147
3148 rtw_bf_enable_bfee_su(rtwdev, vif, bfee);
3149
3150 tmp6dc = rtw_read32(rtwdev, REG_BBPSF_CTRL) |
3151 BIT_WMAC_USE_NDPARATE |
3152 (csi_rsc << 13);
3153 if (vif->net_type == RTW_NET_AP_MODE)
3154 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc | BIT(12));
3155 else
3156 rtw_write32(rtwdev, REG_BBPSF_CTRL, tmp6dc & ~BIT(12));
3157
3158 rtw_write32(rtwdev, REG_CSI_RRSR, 0x550);
3159 }
3160
3161 static void rtw8822c_bf_config_bfee_su(struct rtw_dev *rtwdev,
3162 struct rtw_vif *vif,
3163 struct rtw_bfee *bfee, bool enable)
3164 {
3165 if (enable)
3166 rtw8822c_bf_enable_bfee_su(rtwdev, vif, bfee);
3167 else
3168 rtw_bf_remove_bfee_su(rtwdev, bfee);
3169 }
3170
3171 static void rtw8822c_bf_config_bfee_mu(struct rtw_dev *rtwdev,
3172 struct rtw_vif *vif,
3173 struct rtw_bfee *bfee, bool enable)
3174 {
3175 if (enable)
3176 rtw_bf_enable_bfee_mu(rtwdev, vif, bfee);
3177 else
3178 rtw_bf_remove_bfee_mu(rtwdev, bfee);
3179 }
3180
3181 static void rtw8822c_bf_config_bfee(struct rtw_dev *rtwdev, struct rtw_vif *vif,
3182 struct rtw_bfee *bfee, bool enable)
3183 {
3184 if (bfee->role == RTW_BFEE_SU)
3185 rtw8822c_bf_config_bfee_su(rtwdev, vif, bfee, enable);
3186 else if (bfee->role == RTW_BFEE_MU)
3187 rtw8822c_bf_config_bfee_mu(rtwdev, vif, bfee, enable);
3188 else
3189 rtw_warn(rtwdev, "wrong bfee role\n");
3190 }
3191
3192 struct dpk_cfg_pair {
3193 u32 addr;
3194 u32 bitmask;
3195 u32 data;
3196 };
3197
3198 void rtw8822c_parse_tbl_dpk(struct rtw_dev *rtwdev,
3199 const struct rtw_table *tbl)
3200 {
3201 const struct dpk_cfg_pair *p = tbl->data;
3202 const struct dpk_cfg_pair *end = p + tbl->size / 3;
3203
3204 BUILD_BUG_ON(sizeof(struct dpk_cfg_pair) != sizeof(u32) * 3);
3205
3206 for (; p < end; p++)
3207 rtw_write32_mask(rtwdev, p->addr, p->bitmask, p->data);
3208 }
3209
3210 static void rtw8822c_dpk_set_gnt_wl(struct rtw_dev *rtwdev, bool is_before_k)
3211 {
3212 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3213
3214 if (is_before_k) {
3215 dpk_info->gnt_control = rtw_read32(rtwdev, 0x70);
3216 dpk_info->gnt_value = rtw_coex_read_indirect_reg(rtwdev, 0x38);
3217 rtw_write32_mask(rtwdev, 0x70, BIT(26), 0x1);
3218 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKBYTE1, 0x77);
3219 } else {
3220 rtw_coex_write_indirect_reg(rtwdev, 0x38, MASKDWORD,
3221 dpk_info->gnt_value);
3222 rtw_write32(rtwdev, 0x70, dpk_info->gnt_control);
3223 }
3224 }
3225
3226 static void
3227 rtw8822c_dpk_restore_registers(struct rtw_dev *rtwdev, u32 reg_num,
3228 struct rtw_backup_info *bckp)
3229 {
3230 rtw_restore_reg(rtwdev, bckp, reg_num);
3231 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3232 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0x4);
3233 }
3234
3235 static void
3236 rtw8822c_dpk_backup_registers(struct rtw_dev *rtwdev, u32 *reg,
3237 u32 reg_num, struct rtw_backup_info *bckp)
3238 {
3239 u32 i;
3240
3241 for (i = 0; i < reg_num; i++) {
3242 bckp[i].len = 4;
3243 bckp[i].reg = reg[i];
3244 bckp[i].val = rtw_read32(rtwdev, reg[i]);
3245 }
3246 }
3247
3248 static void rtw8822c_dpk_backup_rf_registers(struct rtw_dev *rtwdev,
3249 u32 *rf_reg,
3250 u32 rf_reg_bak[][2])
3251 {
3252 u32 i;
3253
3254 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3255 rf_reg_bak[i][RF_PATH_A] = rtw_read_rf(rtwdev, RF_PATH_A,
3256 rf_reg[i], RFREG_MASK);
3257 rf_reg_bak[i][RF_PATH_B] = rtw_read_rf(rtwdev, RF_PATH_B,
3258 rf_reg[i], RFREG_MASK);
3259 }
3260 }
3261
3262 static void rtw8822c_dpk_reload_rf_registers(struct rtw_dev *rtwdev,
3263 u32 *rf_reg,
3264 u32 rf_reg_bak[][2])
3265 {
3266 u32 i;
3267
3268 for (i = 0; i < DPK_RF_REG_NUM; i++) {
3269 rtw_write_rf(rtwdev, RF_PATH_A, rf_reg[i], RFREG_MASK,
3270 rf_reg_bak[i][RF_PATH_A]);
3271 rtw_write_rf(rtwdev, RF_PATH_B, rf_reg[i], RFREG_MASK,
3272 rf_reg_bak[i][RF_PATH_B]);
3273 }
3274 }
3275
3276 static void rtw8822c_dpk_information(struct rtw_dev *rtwdev)
3277 {
3278 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3279 u32 reg;
3280 u8 band_shift;
3281
3282 reg = rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK);
3283
3284 band_shift = FIELD_GET(BIT(16), reg);
3285 dpk_info->dpk_band = 1 << band_shift;
3286 dpk_info->dpk_ch = FIELD_GET(0xff, reg);
3287 dpk_info->dpk_bw = FIELD_GET(0x3000, reg);
3288 }
3289
3290 static void rtw8822c_dpk_rxbb_dc_cal(struct rtw_dev *rtwdev, u8 path)
3291 {
3292 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3293 udelay(5);
3294 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84801);
3295 usleep_range(600, 610);
3296 rtw_write_rf(rtwdev, path, 0x92, RFREG_MASK, 0x84800);
3297 }
3298
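/* Descriptive note (added): sanity-check the DPK DC offset and correlation
 * report. The I/Q DC residue and the correlation index are read from the
 * report register, the sign-magnitude values are converted, and the function
 * returns 1 (retry needed) when the DC residue exceeds 200 or the correlation
 * index falls outside 40..65.
 */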
3299 static u8 rtw8822c_dpk_dc_corr_check(struct rtw_dev *rtwdev, u8 path)
3300 {
3301 u16 dc_i, dc_q;
3302 u8 corr_idx;
3303
3304 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000900f0);
3305 dc_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3306 dc_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(11, 0));
3307
3308 if (dc_i & BIT(11))
3309 dc_i = 0x1000 - dc_i;
3310 if (dc_q & BIT(11))
3311 dc_q = 0x1000 - dc_q;
3312
3313 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3314 corr_idx = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(7, 0));
3315 rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(15, 8));
3316
3317 if (dc_i > 200 || dc_q > 200 || corr_idx < 40 || corr_idx > 65)
3318 return 1;
3319 else
3320 return 0;
3321
3322 }
3323
3324 static void rtw8822c_dpk_tx_pause(struct rtw_dev *rtwdev)
3325 {
3326 u8 reg_a, reg_b;
3327 u16 count = 0;
3328
3329 rtw_write8(rtwdev, 0x522, 0xff);
3330 rtw_write32_mask(rtwdev, 0x1e70, 0xf, 0x2);
3331
3332 do {
3333 reg_a = (u8)rtw_read_rf(rtwdev, RF_PATH_A, 0x00, 0xf0000);
3334 reg_b = (u8)rtw_read_rf(rtwdev, RF_PATH_B, 0x00, 0xf0000);
3335 udelay(2);
3336 count++;
3337 } while ((reg_a == 2 || reg_b == 2) && count < 2500);
3338 }
3339
3340 static void rtw8822c_dpk_mac_bb_setting(struct rtw_dev *rtwdev)
3341 {
3342 rtw8822c_dpk_tx_pause(rtwdev);
3343 rtw_load_table(rtwdev, &rtw8822c_dpk_mac_bb_tbl);
3344 }
3345
3346 static void rtw8822c_dpk_afe_setting(struct rtw_dev *rtwdev, bool is_do_dpk)
3347 {
3348 if (is_do_dpk)
3349 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_is_dpk_tbl);
3350 else
3351 rtw_load_table(rtwdev, &rtw8822c_dpk_afe_no_dpk_tbl);
3352 }
3353
3354 static void rtw8822c_dpk_pre_setting(struct rtw_dev *rtwdev)
3355 {
3356 u8 path;
3357
3358 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
3359 rtw_write_rf(rtwdev, path, RF_RXAGC_OFFSET, RFREG_MASK, 0x0);
3360 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3361 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G)
3362 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
3363 else
3364 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
3365 rtw_write32_mask(rtwdev, REG_DPD_LUT0, BIT_GLOSS_DB, 0x4);
3366 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x3);
3367 }
3368 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3369 rtw_write32(rtwdev, REG_DPD_CTL11, 0x3b23170b);
3370 rtw_write32(rtwdev, REG_DPD_CTL12, 0x775f5347);
3371 }
3372
3373 static u32 rtw8822c_dpk_rf_setting(struct rtw_dev *rtwdev, u8 path)
3374 {
3375 u32 ori_txbb;
3376
3377 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x50017);
3378 ori_txbb = rtw_read_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK);
3379
3380 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TX_GAIN, 0x1);
3381 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_PWR_TRIM, 0x1);
3382 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_BB_GAIN, 0x0);
3383 rtw_write_rf(rtwdev, path, RF_TX_GAIN, RFREG_MASK, ori_txbb);
3384
3385 if (rtwdev->dm_info.dpk_info.dpk_band == RTW_BAND_2G) {
3386 rtw_write_rf(rtwdev, path, RF_TX_GAIN_OFFSET, BIT_RF_GAIN, 0x1);
3387 rtw_write_rf(rtwdev, path, RF_RXG_GAIN, BIT_RXG_GAIN, 0x0);
3388 } else {
3389 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_TXA_LB_ATT, 0x0);
3390 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_ATT, 0x6);
3391 rtw_write_rf(rtwdev, path, RF_TXA_LB_SW, BIT_LB_SW, 0x1);
3392 rtw_write_rf(rtwdev, path, RF_RXA_MIX_GAIN, BIT_RXA_MIX_GAIN, 0);
3393 }
3394
3395 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3396 rtw_write_rf(rtwdev, path, RF_DEBUG, BIT_DE_TRXBW, 0x1);
3397 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_RXBB, 0x0);
3398
3399 if (rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80)
3400 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x2);
3401 else
3402 rtw_write_rf(rtwdev, path, RF_BW_TRXBB, BIT_BW_TXBB, 0x1);
3403
3404 rtw_write_rf(rtwdev, path, RF_EXT_TIA_BW, BIT(1), 0x1);
3405
3406 usleep_range(100, 110);
3407
3408 return ori_txbb & 0x1f;
3409 }
3410
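/* Descriptive note (added): build the one-shot NCTL command word for a DPK
 * action. The high byte encodes action, path and (for DO_DPK/DAGC) the
 * bandwidth offset; the low byte is the constant 0x48.
 */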
3411 static u16 rtw8822c_dpk_get_cmd(struct rtw_dev *rtwdev, u8 action, u8 path)
3412 {
3413 u16 cmd;
3414 u8 bw = rtwdev->dm_info.dpk_info.dpk_bw == DPK_CHANNEL_WIDTH_80 ? 2 : 0;
3415
3416 switch (action) {
3417 case RTW_DPK_GAIN_LOSS:
3418 cmd = 0x14 + path;
3419 break;
3420 case RTW_DPK_DO_DPK:
3421 cmd = 0x16 + path + bw;
3422 break;
3423 case RTW_DPK_DPK_ON:
3424 cmd = 0x1a + path;
3425 break;
3426 case RTW_DPK_DAGC:
3427 cmd = 0x1c + path + bw;
3428 break;
3429 default:
3430 return 0;
3431 }
3432
3433 return (cmd << 8) | 0x48;
3434 }
3435
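/* Descriptive note (added): fire a single DPK one-shot command and poll for
 * completion. RTW_DPK_CAL_PWR toggles the power-calibration bit directly;
 * every other action goes through the NCTL command built by
 * rtw8822c_dpk_get_cmd(). Returns 1 when the done flag never shows up.
 */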
3436 static u8 rtw8822c_dpk_one_shot(struct rtw_dev *rtwdev, u8 path, u8 action)
3437 {
3438 u16 dpk_cmd;
3439 u8 result = 0;
3440
3441 rtw8822c_dpk_set_gnt_wl(rtwdev, true);
3442
3443 if (action == RTW_DPK_CAL_PWR) {
3444 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x1);
3445 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(12), 0x0);
3446 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3447 msleep(10);
3448 if (!check_hw_ready(rtwdev, REG_STAT_RPT, BIT(31), 0x1)) {
3449 result = 1;
3450 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3451 }
3452 } else {
3453 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3454 0x8 | (path << 1));
3455 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3456
3457 dpk_cmd = rtw8822c_dpk_get_cmd(rtwdev, action, path);
3458 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd);
3459 rtw_write32(rtwdev, REG_NCTL0, dpk_cmd + 1);
3460 msleep(10);
3461 if (!check_hw_ready(rtwdev, 0x2d9c, 0xff, 0x55)) {
3462 result = 1;
3463 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] one-shot over 20ms\n");
3464 }
3465 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
3466 0x8 | (path << 1));
3467 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3468 }
3469
3470 rtw8822c_dpk_set_gnt_wl(rtwdev, false);
3471
3472 rtw_write8(rtwdev, 0x1b10, 0x0);
3473
3474 return result;
3475 }
3476
3477 static u16 rtw8822c_dpk_dgain_read(struct rtw_dev *rtwdev, u8 path)
3478 {
3479 u16 dgain;
3480
3481 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3482 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, 0x00ff0000, 0x0);
3483
3484 dgain = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, GENMASK(27, 16));
3485
3486 return dgain;
3487 }
3488
3489 static u8 rtw8822c_dpk_thermal_read(struct rtw_dev *rtwdev, u8 path)
3490 {
3491 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3492 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x0);
3493 rtw_write_rf(rtwdev, path, RF_T_METER, BIT(19), 0x1);
3494 udelay(15);
3495
3496 return (u8)rtw_read_rf(rtwdev, path, RF_T_METER, 0x0007e);
3497 }
3498
3499 static u32 rtw8822c_dpk_pas_read(struct rtw_dev *rtwdev, u8 path)
3500 {
3501 u32 i_val, q_val;
3502
3503 rtw_write32(rtwdev, REG_NCTL0, 0x8 | (path << 1));
3504 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3505 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060001);
3506 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3507 rtw_write32(rtwdev, 0x1b4c, 0x00080000);
3508
3509 q_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD);
3510 i_val = rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD);
3511
3512 if (i_val & BIT(15))
3513 i_val = 0x10000 - i_val;
3514 if (q_val & BIT(15))
3515 q_val = 0x10000 - q_val;
3516
3517 rtw_write32(rtwdev, 0x1b4c, 0x00000000);
3518
3519 return i_val * i_val + q_val * q_val;
3520 }
3521
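/* Descriptive note (added): approximate 100 * log2(val). The integer part
 * comes from the MSB position (__fls), the fractional part from a lookup
 * table indexed by the mantissa in 5% steps. Returns 0 for val == 0.
 */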
3522 static u32 rtw8822c_psd_log2base(u32 val)
3523 {
3524 u32 tmp, val_integerd_b, tindex;
3525 u32 result, val_fractiond_b;
3526 u32 table_fraction[21] = {0, 432, 332, 274, 232, 200, 174,
3527 151, 132, 115, 100, 86, 74, 62, 51,
3528 42, 32, 23, 15, 7, 0};
3529
3530 if (val == 0)
3531 return 0;
3532
3533 val_integerd_b = __fls(val) + 1;
3534
3535 tmp = (val * 100) / (1 << val_integerd_b);
3536 tindex = tmp / 5;
3537
3538 if (tindex >= ARRAY_SIZE(table_fraction))
3539 tindex = ARRAY_SIZE(table_fraction) - 1;
3540
3541 val_fractiond_b = table_fraction[tindex];
3542
3543 result = val_integerd_b * 100 - val_fractiond_b;
3544
3545 return result;
3546 }
3547
3548 static u8 rtw8822c_dpk_gainloss_result(struct rtw_dev *rtwdev, u8 path)
3549 {
3550 u8 result;
3551
3552 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3553 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x1);
3554 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x00060000);
3555
3556 result = (u8)rtw_read32_mask(rtwdev, REG_STAT_RPT, 0x000000f0);
3557
3558 rtw_write32_mask(rtwdev, 0x1b48, BIT(14), 0x0);
3559
3560 return result;
3561 }
3562
3563 static u8 rtw8822c_dpk_agc_gain_chk(struct rtw_dev *rtwdev, u8 path,
3564 u8 limited_pga)
3565 {
3566 u8 result = 0;
3567 u16 dgain;
3568
3569 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3570 dgain = rtw8822c_dpk_dgain_read(rtwdev, path);
3571
3572 if (dgain > 1535 && !limited_pga)
3573 return RTW_DPK_GAIN_LESS;
3574 else if (dgain < 768 && !limited_pga)
3575 return RTW_DPK_GAIN_LARGE;
3576 else
3577 return result;
3578 }
3579
3580 static u8 rtw8822c_dpk_agc_loss_chk(struct rtw_dev *rtwdev, u8 path)
3581 {
3582 u32 loss, loss_db;
3583
3584 loss = rtw8822c_dpk_pas_read(rtwdev, path);
3585 if (loss < 0x4000000)
3586 return RTW_DPK_GL_LESS;
3587 loss_db = 3 * rtw8822c_psd_log2base(loss >> 13) - 3870;
3588
3589 if (loss_db > 1000)
3590 return RTW_DPK_GL_LARGE;
3591 else if (loss_db < 250)
3592 return RTW_DPK_GL_LESS;
3593 else
3594 return RTW_DPK_AGC_OUT;
3595 }
3596
3597 struct rtw8822c_dpk_data {
3598 u8 txbb;
3599 u8 pga;
3600 u8 limited_pga;
3601 u8 agc_cnt;
3602 bool loss_only;
3603 bool gain_only;
3604 u8 path;
3605 };
3606
3607 static u8 rtw8822c_gain_check_state(struct rtw_dev *rtwdev,
3608 struct rtw8822c_dpk_data *data)
3609 {
3610 u8 state;
3611
3612 data->txbb = (u8)rtw_read_rf(rtwdev, data->path, RF_TX_GAIN,
3613 BIT_GAIN_TXBB);
3614 data->pga = (u8)rtw_read_rf(rtwdev, data->path, RF_MODE_TRXAGC,
3615 BIT_RXAGC);
3616
3617 if (data->loss_only) {
3618 state = RTW_DPK_LOSS_CHECK;
3619 goto check_end;
3620 }
3621
3622 state = rtw8822c_dpk_agc_gain_chk(rtwdev, data->path,
3623 data->limited_pga);
3624 if (state == RTW_DPK_GAIN_CHECK && data->gain_only)
3625 state = RTW_DPK_AGC_OUT;
3626 else if (state == RTW_DPK_GAIN_CHECK)
3627 state = RTW_DPK_LOSS_CHECK;
3628
3629 check_end:
3630 data->agc_cnt++;
3631 if (data->agc_cnt >= 6)
3632 state = RTW_DPK_AGC_OUT;
3633
3634 return state;
3635 }
3636
3637 static u8 rtw8822c_gain_large_state(struct rtw_dev *rtwdev,
3638 struct rtw8822c_dpk_data *data)
3639 {
3640 u8 pga = data->pga;
3641
3642 if (pga > 0xe)
3643 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3644 else if (pga > 0xb && pga < 0xf)
3645 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0x0);
3646 else if (pga < 0xc)
3647 data->limited_pga = 1;
3648
3649 return RTW_DPK_GAIN_CHECK;
3650 }
3651
3652 static u8 rtw8822c_gain_less_state(struct rtw_dev *rtwdev,
3653 struct rtw8822c_dpk_data *data)
3654 {
3655 u8 pga = data->pga;
3656
3657 if (pga < 0xc)
3658 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xc);
3659 else if (pga > 0xb && pga < 0xf)
3660 rtw_write_rf(rtwdev, data->path, RF_MODE_TRXAGC, BIT_RXAGC, 0xf);
3661 else if (pga > 0xe)
3662 data->limited_pga = 1;
3663
3664 return RTW_DPK_GAIN_CHECK;
3665 }
3666
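/* Descriptive note (added): step the TX BB gain according to the gain-loss
 * result. Back it off by 2 when the loss is too large, raise it by 3 when the
 * loss is too small, and bail out once the gain has already hit the
 * corresponding bound (0 going down, 0x1f going up).
 */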
3667 static u8 rtw8822c_gl_state(struct rtw_dev *rtwdev,
3668 struct rtw8822c_dpk_data *data, u8 is_large)
3669 {
3670 u8 txbb_bound[] = {0x1f, 0};
3671
3672 if (data->txbb == txbb_bound[is_large])
3673 return RTW_DPK_AGC_OUT;
3674
3675 if (is_large == 1)
3676 data->txbb -= 2;
3677 else
3678 data->txbb += 3;
3679
3680 rtw_write_rf(rtwdev, data->path, RF_TX_GAIN, BIT_GAIN_TXBB, data->txbb);
3681 data->limited_pga = 0;
3682
3683 return RTW_DPK_GAIN_CHECK;
3684 }
3685
3686 static u8 rtw8822c_gl_large_state(struct rtw_dev *rtwdev,
3687 struct rtw8822c_dpk_data *data)
3688 {
3689 return rtw8822c_gl_state(rtwdev, data, 1);
3690 }
3691
3692 static u8 rtw8822c_gl_less_state(struct rtw_dev *rtwdev,
3693 struct rtw8822c_dpk_data *data)
3694 {
3695 return rtw8822c_gl_state(rtwdev, data, 0);
3696 }
3697
3698 static u8 rtw8822c_loss_check_state(struct rtw_dev *rtwdev,
3699 struct rtw8822c_dpk_data *data)
3700 {
3701 u8 path = data->path;
3702 u8 state;
3703
3704 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_GAIN_LOSS);
3705 state = rtw8822c_dpk_agc_loss_chk(rtwdev, path);
3706
3707 return state;
3708 }
3709
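/* Descriptive note (added): DPK AGC state machine. Each handler returns the
 * next RTW_DPK_* state used to index this table; rtw8822c_dpk_pas_agc() keeps
 * walking the table until a handler returns RTW_DPK_AGC_OUT.
 */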
3710 static u8 (*dpk_state[])(struct rtw_dev *rtwdev,
3711 struct rtw8822c_dpk_data *data) = {
3712 rtw8822c_gain_check_state, rtw8822c_gain_large_state,
3713 rtw8822c_gain_less_state, rtw8822c_gl_large_state,
3714 rtw8822c_gl_less_state, rtw8822c_loss_check_state };
3715
3716 static u8 rtw8822c_dpk_pas_agc(struct rtw_dev *rtwdev, u8 path,
3717 bool gain_only, bool loss_only)
3718 {
3719 struct rtw8822c_dpk_data data = {0};
3720 u8 (*func)(struct rtw_dev *rtwdev, struct rtw8822c_dpk_data *data);
3721 u8 state = RTW_DPK_GAIN_CHECK;
3722
3723 data.loss_only = loss_only;
3724 data.gain_only = gain_only;
3725 data.path = path;
3726
3727 for (;;) {
3728 func = dpk_state[state];
3729 state = func(rtwdev, &data);
3730 if (state == RTW_DPK_AGC_OUT)
3731 break;
3732 }
3733
3734 return data.txbb;
3735 }
3736
3737 static bool rtw8822c_dpk_coef_iq_check(struct rtw_dev *rtwdev,
3738 u16 coef_i, u16 coef_q)
3739 {
3740 if (coef_i == 0x1000 || coef_i == 0x0fff ||
3741 coef_q == 0x1000 || coef_q == 0x0fff)
3742 return true;
3743
3744 return false;
3745 }
3746
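/* Descriptive note (added): read one DPD coefficient pair from the report
 * register (13 bits each, I from the high half-word, Q from the low one),
 * remap the Q component, and pack the result as (I << 16) | Q.
 */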
3747 static u32 rtw8822c_dpk_coef_transfer(struct rtw_dev *rtwdev)
3748 {
3749 u32 reg = 0;
3750 u16 coef_i = 0, coef_q = 0;
3751
3752 reg = rtw_read32(rtwdev, REG_STAT_RPT);
3753
3754 coef_i = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKHWORD) & 0x1fff;
3755 coef_q = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, MASKLWORD) & 0x1fff;
3756
3757 coef_q = ((0x2000 - coef_q) & 0x1fff) - 1;
3758
3759 reg = (coef_i << 16) | coef_q;
3760
3761 return reg;
3762 }
3763
3764 static const u32 rtw8822c_dpk_get_coef_tbl[] = {
3765 0x000400f0, 0x040400f0, 0x080400f0, 0x010400f0, 0x050400f0,
3766 0x090400f0, 0x020400f0, 0x060400f0, 0x0a0400f0, 0x030400f0,
3767 0x070400f0, 0x0b0400f0, 0x0c0400f0, 0x100400f0, 0x0d0400f0,
3768 0x110400f0, 0x0e0400f0, 0x120400f0, 0x0f0400f0, 0x130400f0,
3769 };
3770
3771 static void rtw8822c_dpk_coef_tbl_apply(struct rtw_dev *rtwdev, u8 path)
3772 {
3773 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3774 int i;
3775
3776 for (i = 0; i < 20; i++) {
3777 rtw_write32(rtwdev, REG_RXSRAM_CTL,
3778 rtw8822c_dpk_get_coef_tbl[i]);
3779 dpk_info->coef[path][i] = rtw8822c_dpk_coef_transfer(rtwdev);
3780 }
3781 }
3782
3783 static void rtw8822c_dpk_get_coef(struct rtw_dev *rtwdev, u8 path)
3784 {
3785 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3786
3787 if (path == RF_PATH_A) {
3788 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x0);
3789 rtw_write32(rtwdev, REG_DPD_CTL0_S0, 0x30000080);
3790 } else if (path == RF_PATH_B) {
3791 rtw_write32_mask(rtwdev, REG_DPD_CTL0, BIT(24), 0x1);
3792 rtw_write32(rtwdev, REG_DPD_CTL0_S1, 0x30000080);
3793 }
3794
3795 rtw8822c_dpk_coef_tbl_apply(rtwdev, path);
3796 }
3797
3798 static u8 rtw8822c_dpk_coef_read(struct rtw_dev *rtwdev, u8 path)
3799 {
3800 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3801 u8 addr, result = 1;
3802 u16 coef_i, coef_q;
3803
3804 for (addr = 0; addr < 20; addr++) {
3805 coef_i = FIELD_GET(0x1fff0000, dpk_info->coef[path][addr]);
3806 coef_q = FIELD_GET(0x1fff, dpk_info->coef[path][addr]);
3807
3808 if (rtw8822c_dpk_coef_iq_check(rtwdev, coef_i, coef_q)) {
3809 result = 0;
3810 break;
3811 }
3812 }
3813 return result;
3814 }
3815
3816 static void rtw8822c_dpk_coef_write(struct rtw_dev *rtwdev, u8 path, u8 result)
3817 {
3818 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3819 u16 reg[DPK_RF_PATH_NUM] = {0x1b0c, 0x1b64};
3820 u32 coef;
3821 u8 addr;
3822
3823 rtw_write32(rtwdev, REG_NCTL0, 0x0000000c);
3824 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3825
3826 for (addr = 0; addr < 20; addr++) {
3827 if (result == 0) {
3828 if (addr == 3)
3829 coef = 0x04001fff;
3830 else
3831 coef = 0x00001fff;
3832 } else {
3833 coef = dpk_info->coef[path][addr];
3834 }
3835 rtw_write32(rtwdev, reg[path] + addr * 4, coef);
3836 }
3837 }
3838
3839 static void rtw8822c_dpk_fill_result(struct rtw_dev *rtwdev, u32 dpk_txagc,
3840 u8 path, u8 result)
3841 {
3842 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3843
3844 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3845
3846 if (result)
3847 rtw_write8(rtwdev, REG_DPD_AGC, (u8)(dpk_txagc - 6));
3848 else
3849 rtw_write8(rtwdev, REG_DPD_AGC, 0x00);
3850
3851 dpk_info->result[path] = result;
3852 dpk_info->dpk_txagc[path] = rtw_read8(rtwdev, REG_DPD_AGC);
3853
3854 rtw8822c_dpk_coef_write(rtwdev, path, result);
3855 }
3856
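/* Descriptive note (added): gain-loss search for one path. Set up the RF
 * front end, run DAGC to settle the digital gain (retrying once if the
 * DC/correlation check fails), then let the PAS AGC state machine pick a TX
 * BB gain and derive the TX AGC value used for the actual DPK run.
 */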
3857 static u32 rtw8822c_dpk_gainloss(struct rtw_dev *rtwdev, u8 path)
3858 {
3859 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3860 u8 tx_agc, tx_bb, ori_txbb, ori_txagc, tx_agc_search, t1, t2;
3861
3862 ori_txbb = rtw8822c_dpk_rf_setting(rtwdev, path);
3863 ori_txagc = (u8)rtw_read_rf(rtwdev, path, RF_MODE_TRXAGC, BIT_TXAGC);
3864
3865 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3866 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3867 rtw8822c_dpk_dgain_read(rtwdev, path);
3868
3869 if (rtw8822c_dpk_dc_corr_check(rtwdev, path)) {
3870 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
3871 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DAGC);
3872 rtw8822c_dpk_dc_corr_check(rtwdev, path);
3873 }
3874
3875 t1 = rtw8822c_dpk_thermal_read(rtwdev, path);
3876 tx_bb = rtw8822c_dpk_pas_agc(rtwdev, path, false, true);
3877 tx_agc_search = rtw8822c_dpk_gainloss_result(rtwdev, path);
3878
3879 if (tx_bb < tx_agc_search)
3880 tx_bb = 0;
3881 else
3882 tx_bb = tx_bb - tx_agc_search;
3883
3884 rtw_write_rf(rtwdev, path, RF_TX_GAIN, BIT_GAIN_TXBB, tx_bb);
3885
3886 tx_agc = ori_txagc - (ori_txbb - tx_bb);
3887
3888 t2 = rtw8822c_dpk_thermal_read(rtwdev, path);
3889
3890 dpk_info->thermal_dpk_delta[path] = abs(t2 - t1);
3891
3892 return tx_agc;
3893 }
3894
3895 static u8 rtw8822c_dpk_by_path(struct rtw_dev *rtwdev, u32 tx_agc, u8 path)
3896 {
3897 u8 result;
3898
3899 result = rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DO_DPK);
3900
3901 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3902
3903 result = result | (u8)rtw_read32_mask(rtwdev, REG_DPD_CTL1_S0, BIT(26));
3904
3905 rtw_write_rf(rtwdev, path, RF_MODE_TRXAGC, RFREG_MASK, 0x33e14);
3906
3907 rtw8822c_dpk_get_coef(rtwdev, path);
3908
3909 return result;
3910 }
3911
3912 static void rtw8822c_dpk_cal_gs(struct rtw_dev *rtwdev, u8 path)
3913 {
3914 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3915 u32 tmp_gs = 0;
3916
3917 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3918 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_BYPASS_DPD, 0x0);
3919 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
3920 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x9);
3921 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x1);
3922 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3923 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_DPD_CLK, 0xf);
3924
3925 if (path == RF_PATH_A) {
3926 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
3927 0x1066680);
3928 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN, 0x1);
3929 } else {
3930 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
3931 0x1066680);
3932 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN, 0x1);
3933 }
3934
3935 if (dpk_info->dpk_bw == DPK_CHANNEL_WIDTH_80) {
3936 rtw_write32(rtwdev, REG_DPD_CTL16, 0x80001310);
3937 rtw_write32(rtwdev, REG_DPD_CTL16, 0x00001310);
3938 rtw_write32(rtwdev, REG_DPD_CTL16, 0x810000db);
3939 rtw_write32(rtwdev, REG_DPD_CTL16, 0x010000db);
3940 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3941 rtw_write32(rtwdev, REG_DPD_CTL15,
3942 0x05020000 | (BIT(path) << 28));
3943 } else {
3944 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8200190c);
3945 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0200190c);
3946 rtw_write32(rtwdev, REG_DPD_CTL16, 0x8301ee14);
3947 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0301ee14);
3948 rtw_write32(rtwdev, REG_DPD_CTL16, 0x0000b428);
3949 rtw_write32(rtwdev, REG_DPD_CTL15,
3950 0x05020008 | (BIT(path) << 28));
3951 }
3952
3953 rtw_write32_mask(rtwdev, REG_DPD_CTL0, MASKBYTE3, 0x8 | path);
3954
3955 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_CAL_PWR);
3956
3957 rtw_write32_mask(rtwdev, REG_DPD_CTL15, MASKBYTE3, 0x0);
3958 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
3959 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_IQ_SWITCH, 0x0);
3960 rtw_write32_mask(rtwdev, REG_R_CONFIG, BIT_INNER_LB, 0x0);
3961 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
3962
3963 if (path == RF_PATH_A)
3964 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, 0x5b);
3965 else
3966 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, 0x5b);
3967
3968 rtw_write32_mask(rtwdev, REG_RXSRAM_CTL, BIT_RPT_SEL, 0x0);
3969
3970 tmp_gs = (u16)rtw_read32_mask(rtwdev, REG_STAT_RPT, BIT_RPT_DGAIN);
3971 tmp_gs = (tmp_gs * 910) >> 10;
3972 tmp_gs = DIV_ROUND_CLOSEST(tmp_gs, 10);
3973
3974 if (path == RF_PATH_A)
3975 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF, tmp_gs);
3976 else
3977 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF, tmp_gs);
3978
3979 dpk_info->dpk_gs[path] = tmp_gs;
3980 }
3981
3982 static void rtw8822c_dpk_cal_coef1(struct rtw_dev *rtwdev)
3983 {
3984 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
3985 u32 offset[DPK_RF_PATH_NUM] = {0, 0x58};
3986 u32 i_scaling;
3987 u8 path;
3988
3989 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3990 rtw_write32(rtwdev, REG_RXSRAM_CTL, 0x000000f0);
3991 rtw_write32(rtwdev, REG_NCTL0, 0x00001148);
3992 rtw_write32(rtwdev, REG_NCTL0, 0x00001149);
3993
3994 check_hw_ready(rtwdev, 0x2d9c, MASKBYTE0, 0x55);
3995
3996 rtw_write8(rtwdev, 0x1b10, 0x0);
3997 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x0000000c);
3998
3999 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4000 i_scaling = 0x16c00 / dpk_info->dpk_gs[path];
4001
4002 rtw_write32_mask(rtwdev, 0x1b18 + offset[path], MASKHWORD,
4003 i_scaling);
4004 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4005 GENMASK(31, 28), 0x9);
4006 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4007 GENMASK(31, 28), 0x1);
4008 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0 + offset[path],
4009 GENMASK(31, 28), 0x0);
4010 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0 + offset[path],
4011 BIT(14), 0x0);
4012 }
4013 }
4014
4015 static void rtw8822c_dpk_on(struct rtw_dev *rtwdev, u8 path)
4016 {
4017 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4018
4019 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4020
4021 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0x8 | (path << 1));
4022 rtw_write32_mask(rtwdev, REG_IQK_CTL1, BIT_TX_CFIR, 0x0);
4023
4024 if (test_bit(path, dpk_info->dpk_path_ok))
4025 rtw8822c_dpk_cal_gs(rtwdev, path);
4026 }
4027
4028 static bool rtw8822c_dpk_check_pass(struct rtw_dev *rtwdev, bool is_fail,
4029 u32 dpk_txagc, u8 path)
4030 {
4031 bool result;
4032
4033 if (!is_fail) {
4034 if (rtw8822c_dpk_coef_read(rtwdev, path))
4035 result = true;
4036 else
4037 result = false;
4038 } else {
4039 result = false;
4040 }
4041
4042 rtw8822c_dpk_fill_result(rtwdev, dpk_txagc, path, result);
4043
4044 return result;
4045 }
4046
4047 static void rtw8822c_dpk_result_reset(struct rtw_dev *rtwdev)
4048 {
4049 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4050 u8 path;
4051
4052 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4053 clear_bit(path, dpk_info->dpk_path_ok);
4054 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4055 0x8 | (path << 1));
4056 rtw_write32_mask(rtwdev, 0x1b58, 0x0000007f, 0x0);
4057
4058 dpk_info->dpk_txagc[path] = 0;
4059 dpk_info->result[path] = 0;
4060 dpk_info->dpk_gs[path] = 0x5b;
4061 dpk_info->pre_pwsf[path] = 0;
4062 dpk_info->thermal_dpk[path] = rtw8822c_dpk_thermal_read(rtwdev,
4063 path);
4064 }
4065 }
4066
4067 static void rtw8822c_dpk_calibrate(struct rtw_dev *rtwdev, u8 path)
4068 {
4069 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4070 u32 dpk_txagc;
4071 u8 dpk_fail;
4072
4073 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk start\n", path);
4074
4075 dpk_txagc = rtw8822c_dpk_gainloss(rtwdev, path);
4076
4077 dpk_fail = rtw8822c_dpk_by_path(rtwdev, dpk_txagc, path);
4078
4079 if (!rtw8822c_dpk_check_pass(rtwdev, dpk_fail, dpk_txagc, path))
4080 rtw_err(rtwdev, "failed to do dpk calibration\n");
4081
4082 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] s%d dpk finish\n", path);
4083
4084 if (dpk_info->result[path])
4085 set_bit(path, dpk_info->dpk_path_ok);
4086 }
4087
4088 static void rtw8822c_dpk_path_select(struct rtw_dev *rtwdev)
4089 {
4090 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_A);
4091 rtw8822c_dpk_calibrate(rtwdev, RF_PATH_B);
4092 rtw8822c_dpk_on(rtwdev, RF_PATH_A);
4093 rtw8822c_dpk_on(rtwdev, RF_PATH_B);
4094 rtw8822c_dpk_cal_coef1(rtwdev);
4095 }
4096
4097 static void rtw8822c_dpk_enable_disable(struct rtw_dev *rtwdev)
4098 {
4099 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4100 u32 mask = BIT(15) | BIT(14);
4101
4102 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4103
4104 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, BIT_DPD_EN,
4105 dpk_info->is_dpk_pwr_on);
4106 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, BIT_DPD_EN,
4107 dpk_info->is_dpk_pwr_on);
4108
4109 if (test_bit(RF_PATH_A, dpk_info->dpk_path_ok)) {
4110 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S0, mask, 0x0);
4111 rtw_write8(rtwdev, REG_DPD_CTL0_S0, dpk_info->dpk_gs[RF_PATH_A]);
4112 }
4113 if (test_bit(RF_PATH_B, dpk_info->dpk_path_ok)) {
4114 rtw_write32_mask(rtwdev, REG_DPD_CTL1_S1, mask, 0x0);
4115 rtw_write8(rtwdev, REG_DPD_CTL0_S1, dpk_info->dpk_gs[RF_PATH_B]);
4116 }
4117 }
4118
4119 static void rtw8822c_dpk_reload_data(struct rtw_dev *rtwdev)
4120 {
4121 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4122 u8 path;
4123
4124 if (!test_bit(RF_PATH_A, dpk_info->dpk_path_ok) &&
4125 !test_bit(RF_PATH_B, dpk_info->dpk_path_ok) &&
4126 dpk_info->dpk_ch == 0)
4127 return;
4128
4129 for (path = 0; path < rtwdev->hal.rf_path_num; path++) {
4130 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4131 0x8 | (path << 1));
4132 if (dpk_info->dpk_band == RTW_BAND_2G)
4133 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f100000);
4134 else
4135 rtw_write32(rtwdev, REG_DPD_CTL1_S1, 0x1f0d0000);
4136
4137 rtw_write8(rtwdev, REG_DPD_AGC, dpk_info->dpk_txagc[path]);
4138
4139 rtw8822c_dpk_coef_write(rtwdev, path,
4140 test_bit(path, dpk_info->dpk_path_ok));
4141
4142 rtw8822c_dpk_one_shot(rtwdev, path, RTW_DPK_DPK_ON);
4143
4144 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE, 0xc);
4145
4146 if (path == RF_PATH_A)
4147 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S0, BIT_GS_PWSF,
4148 dpk_info->dpk_gs[path]);
4149 else
4150 rtw_write32_mask(rtwdev, REG_DPD_CTL0_S1, BIT_GS_PWSF,
4151 dpk_info->dpk_gs[path]);
4152 }
4153 rtw8822c_dpk_cal_coef1(rtwdev);
4154 }
4155
4156 static bool rtw8822c_dpk_reload(struct rtw_dev *rtwdev)
4157 {
4158 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4159 u8 channel;
4160
4161 dpk_info->is_reload = false;
4162
4163 channel = (u8)(rtw_read_rf(rtwdev, RF_PATH_A, 0x18, RFREG_MASK) & 0xff);
4164
4165 if (channel == dpk_info->dpk_ch) {
4166 rtw_dbg(rtwdev, RTW_DBG_RFK,
4167 "[DPK] DPK reload for CH%d!!\n", dpk_info->dpk_ch);
4168 rtw8822c_dpk_reload_data(rtwdev);
4169 dpk_info->is_reload = true;
4170 }
4171
4172 return dpk_info->is_reload;
4173 }
4174
4175 static void rtw8822c_do_dpk(struct rtw_dev *rtwdev)
4176 {
4177 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4178 struct rtw_backup_info bckp[DPK_BB_REG_NUM];
4179 u32 rf_reg_backup[DPK_RF_REG_NUM][DPK_RF_PATH_NUM];
4180 u32 bb_reg[DPK_BB_REG_NUM] = {
4181 0x520, 0x820, 0x824, 0x1c3c, 0x1d58, 0x1864,
4182 0x4164, 0x180c, 0x410c, 0x186c, 0x416c,
4183 0x1a14, 0x1e70, 0x80c, 0x1d70, 0x1e7c, 0x18a4, 0x41a4};
4184 u32 rf_reg[DPK_RF_REG_NUM] = {
4185 0x0, 0x1a, 0x55, 0x63, 0x87, 0x8f, 0xde};
4186 u8 path;
4187
4188 if (!dpk_info->is_dpk_pwr_on) {
4189 rtw_dbg(rtwdev, RTW_DBG_RFK, "[DPK] Skip DPK due to DPD PWR off\n");
4190 return;
4191 } else if (rtw8822c_dpk_reload(rtwdev)) {
4192 return;
4193 }
4194
4195 for (path = RF_PATH_A; path < DPK_RF_PATH_NUM; path++)
4196 ewma_thermal_init(&dpk_info->avg_thermal[path]);
4197
4198 rtw8822c_dpk_information(rtwdev);
4199
4200 rtw8822c_dpk_backup_registers(rtwdev, bb_reg, DPK_BB_REG_NUM, bckp);
4201 rtw8822c_dpk_backup_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4202
4203 rtw8822c_dpk_mac_bb_setting(rtwdev);
4204 rtw8822c_dpk_afe_setting(rtwdev, true);
4205 rtw8822c_dpk_pre_setting(rtwdev);
4206 rtw8822c_dpk_result_reset(rtwdev);
4207 rtw8822c_dpk_path_select(rtwdev);
4208 rtw8822c_dpk_afe_setting(rtwdev, false);
4209 rtw8822c_dpk_enable_disable(rtwdev);
4210
4211 rtw8822c_dpk_reload_rf_registers(rtwdev, rf_reg, rf_reg_backup);
4212 for (path = 0; path < rtwdev->hal.rf_path_num; path++)
4213 rtw8822c_dpk_rxbb_dc_cal(rtwdev, path);
4214 rtw8822c_dpk_restore_registers(rtwdev, DPK_BB_REG_NUM, bckp);
4215 }
4216
4217 static void rtw8822c_phy_calibration(struct rtw_dev *rtwdev)
4218 {
4219 rtw8822c_rfk_power_save(rtwdev, false);
4220 rtw8822c_do_gapk(rtwdev);
4221 rtw8822c_do_iqk(rtwdev);
4222 rtw8822c_do_dpk(rtwdev);
4223 rtw8822c_rfk_power_save(rtwdev, true);
4224 }
4225
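/* Descriptive note (added): track thermal drift after DPK. The averaged
 * thermal reading per path is compared against the value captured at
 * calibration time, and the resulting 7-bit offset (pwsf) is written only
 * when it changes.
 */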
4226 static void rtw8822c_dpk_track(struct rtw_dev *rtwdev)
4227 {
4228 struct rtw_dpk_info *dpk_info = &rtwdev->dm_info.dpk_info;
4229 u8 path;
4230 u8 thermal_value[DPK_RF_PATH_NUM] = {0};
4231 s8 offset[DPK_RF_PATH_NUM], delta_dpk[DPK_RF_PATH_NUM];
4232
4233 if (dpk_info->thermal_dpk[0] == 0 && dpk_info->thermal_dpk[1] == 0)
4234 return;
4235
4236 for (path = 0; path < DPK_RF_PATH_NUM; path++) {
4237 thermal_value[path] = rtw8822c_dpk_thermal_read(rtwdev, path);
4238 ewma_thermal_add(&dpk_info->avg_thermal[path],
4239 thermal_value[path]);
4240 thermal_value[path] =
4241 ewma_thermal_read(&dpk_info->avg_thermal[path]);
4242 delta_dpk[path] = dpk_info->thermal_dpk[path] -
4243 thermal_value[path];
4244 offset[path] = delta_dpk[path] -
4245 dpk_info->thermal_dpk_delta[path];
4246 offset[path] &= 0x7f;
4247
4248 if (offset[path] != dpk_info->pre_pwsf[path]) {
4249 rtw_write32_mask(rtwdev, REG_NCTL0, BIT_SUBPAGE,
4250 0x8 | (path << 1));
4251 rtw_write32_mask(rtwdev, 0x1b58, GENMASK(6, 0),
4252 offset[path]);
4253 dpk_info->pre_pwsf[path] = offset[path];
4254 }
4255 }
4256 }
4257
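/* Descriptive note (added): XCAP_EXTEND() replicates the 7-bit crystal-cap
 * value into both halves of the 14-bit XTAL buffer field; presumably the two
 * halves drive the XI/XO branches with the same setting.
 */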
4258 #define XCAP_EXTEND(val) ({typeof(val) _v = (val); _v | _v << 7; })
4259 static void rtw8822c_set_crystal_cap_reg(struct rtw_dev *rtwdev, u8 crystal_cap)
4260 {
4261 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4262 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4263 u32 val = 0;
4264
4265 val = XCAP_EXTEND(crystal_cap);
4266 cfo->crystal_cap = crystal_cap;
4267 rtw_write32_mask(rtwdev, REG_ANAPAR_XTAL_0, BIT_XCAP_0, val);
4268 }
4269
4270 static void rtw8822c_set_crystal_cap(struct rtw_dev *rtwdev, u8 crystal_cap)
4271 {
4272 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4273 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4274
4275 if (cfo->crystal_cap == crystal_cap)
4276 return;
4277
4278 rtw8822c_set_crystal_cap_reg(rtwdev, crystal_cap);
4279 }
4280
4281 static void rtw8822c_cfo_tracking_reset(struct rtw_dev *rtwdev)
4282 {
4283 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4284 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4285
4286 cfo->is_adjust = true;
4287
4288 if (cfo->crystal_cap > rtwdev->efuse.crystal_cap)
4289 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap - 1);
4290 else if (cfo->crystal_cap < rtwdev->efuse.crystal_cap)
4291 rtw8822c_set_crystal_cap(rtwdev, cfo->crystal_cap + 1);
4292 }
4293
4294 static void rtw8822c_cfo_init(struct rtw_dev *rtwdev)
4295 {
4296 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4297 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4298
4299 cfo->crystal_cap = rtwdev->efuse.crystal_cap;
4300 cfo->is_adjust = true;
4301 }
4302
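/* Descriptive note (added): REPORT_TO_KHZ() scales the raw CFO report by 2.5
 * (v * 2 + v / 2), which presumably converts the hardware reporting unit to
 * kHz as the name suggests.
 */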
4303 #define REPORT_TO_KHZ(val) ({typeof(val) _v = (val); (_v << 1) + (_v >> 1); })
4304 static s32 rtw8822c_cfo_calc_avg(struct rtw_dev *rtwdev, u8 path_num)
4305 {
4306 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4307 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4308 s32 cfo_avg, cfo_path_sum = 0, cfo_rpt_sum;
4309 u8 i;
4310
4311 for (i = 0; i < path_num; i++) {
4312 cfo_rpt_sum = REPORT_TO_KHZ(cfo->cfo_tail[i]);
4313
4314 if (cfo->cfo_cnt[i])
4315 cfo_avg = cfo_rpt_sum / cfo->cfo_cnt[i];
4316 else
4317 cfo_avg = 0;
4318
4319 cfo_path_sum += cfo_avg;
4320 }
4321
4322 for (i = 0; i < path_num; i++) {
4323 cfo->cfo_tail[i] = 0;
4324 cfo->cfo_cnt[i] = 0;
4325 }
4326
4327 return cfo_path_sum / path_num;
4328 }
4329
4330 static void rtw8822c_cfo_need_adjust(struct rtw_dev *rtwdev, s32 cfo_avg)
4331 {
4332 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4333 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4334
4335 if (!cfo->is_adjust) {
4336 if (abs(cfo_avg) > CFO_TRK_ENABLE_TH)
4337 cfo->is_adjust = true;
4338 } else {
4339 if (abs(cfo_avg) <= CFO_TRK_STOP_TH)
4340 cfo->is_adjust = false;
4341 }
4342
4343 if (!rtw_coex_disabled(rtwdev)) {
4344 cfo->is_adjust = false;
4345 rtw8822c_set_crystal_cap(rtwdev, rtwdev->efuse.crystal_cap);
4346 }
4347 }
4348
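/* Descriptive note (added): CFO tracking only runs when exactly one station
 * is connected and new packets have been counted. The per-path average
 * decides whether tracking stays enabled, and the crystal cap is stepped by
 * one per call within [0, XCAP_MASK].
 */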
4349 static void rtw8822c_cfo_track(struct rtw_dev *rtwdev)
4350 {
4351 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4352 struct rtw_cfo_track *cfo = &dm_info->cfo_track;
4353 u8 path_num = rtwdev->hal.rf_path_num;
4354 s8 crystal_cap = cfo->crystal_cap;
4355 s32 cfo_avg = 0;
4356
4357 if (rtwdev->sta_cnt != 1) {
4358 rtw8822c_cfo_tracking_reset(rtwdev);
4359 return;
4360 }
4361
4362 if (cfo->packet_count == cfo->packet_count_pre)
4363 return;
4364
4365 cfo->packet_count_pre = cfo->packet_count;
4366 cfo_avg = rtw8822c_cfo_calc_avg(rtwdev, path_num);
4367 rtw8822c_cfo_need_adjust(rtwdev, cfo_avg);
4368
4369 if (cfo->is_adjust) {
4370 if (cfo_avg > CFO_TRK_ADJ_TH)
4371 crystal_cap++;
4372 else if (cfo_avg < -CFO_TRK_ADJ_TH)
4373 crystal_cap--;
4374
4375 crystal_cap = clamp_t(s8, crystal_cap, 0, XCAP_MASK);
4376 rtw8822c_set_crystal_cap(rtwdev, (u8)crystal_cap);
4377 }
4378 }
4379
4380 static const struct rtw_phy_cck_pd_reg
4381 rtw8822c_cck_pd_reg[RTW_CHANNEL_WIDTH_40 + 1][RTW_RF_PATH_MAX] = {
4382 {
4383 {0x1ac8, 0x00ff, 0x1ad0, 0x01f},
4384 {0x1ac8, 0xff00, 0x1ad0, 0x3e0}
4385 },
4386 {
4387 {0x1acc, 0x00ff, 0x1ad0, 0x01F00000},
4388 {0x1acc, 0xff00, 0x1ad0, 0x3E000000}
4389 },
4390 };
4391
4392 #define RTW_CCK_PD_MAX 255
4393 #define RTW_CCK_CS_MAX 31
4394 #define RTW_CCK_CS_ERR1 27
4395 #define RTW_CCK_CS_ERR2 29
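/* Descriptive note (added): adjust the CCK power-detection threshold and
 * carrier-sense ratio by the given deltas, clamping PD to 255 and CS to 31,
 * and stepping past the two CS values marked as erroneous (27 and 29).
 */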
4396 static void
4397 rtw8822c_phy_cck_pd_set_reg(struct rtw_dev *rtwdev,
4398 s8 pd_diff, s8 cs_diff, u8 bw, u8 nrx)
4399 {
4400 u32 pd, cs;
4401
4402 if (WARN_ON(bw > RTW_CHANNEL_WIDTH_40 || nrx >= RTW_RF_PATH_MAX))
4403 return;
4404
4405 pd = rtw_read32_mask(rtwdev,
4406 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4407 rtw8822c_cck_pd_reg[bw][nrx].mask_pd);
4408 cs = rtw_read32_mask(rtwdev,
4409 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4410 rtw8822c_cck_pd_reg[bw][nrx].mask_cs);
4411 pd += pd_diff;
4412 cs += cs_diff;
4413 if (pd > RTW_CCK_PD_MAX)
4414 pd = RTW_CCK_PD_MAX;
4415 if (cs == RTW_CCK_CS_ERR1 || cs == RTW_CCK_CS_ERR2)
4416 cs++;
4417 else if (cs > RTW_CCK_CS_MAX)
4418 cs = RTW_CCK_CS_MAX;
4419 rtw_write32_mask(rtwdev,
4420 rtw8822c_cck_pd_reg[bw][nrx].reg_pd,
4421 rtw8822c_cck_pd_reg[bw][nrx].mask_pd,
4422 pd);
4423 rtw_write32_mask(rtwdev,
4424 rtw8822c_cck_pd_reg[bw][nrx].reg_cs,
4425 rtw8822c_cck_pd_reg[bw][nrx].mask_cs,
4426 cs);
4427
4428 rtw_dbg(rtwdev, RTW_DBG_PHY,
4429 "is_linked=%d, bw=%d, nrx=%d, cs_ratio=0x%x, pd_th=0x%x\n",
4430 rtw_is_assoc(rtwdev), bw, nrx, cs, pd);
4431 }
4432
4433 static void rtw8822c_phy_cck_pd_set(struct rtw_dev *rtwdev, u8 new_lvl)
4434 {
4435 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4436 s8 pd_lvl[CCK_PD_LV_MAX] = {0, 2, 4, 6, 8};
4437 s8 cs_lvl[CCK_PD_LV_MAX] = {0, 2, 2, 2, 4};
4438 u8 cur_lvl;
4439 u8 nrx, bw;
4440
4441 nrx = (u8)rtw_read32_mask(rtwdev, 0x1a2c, 0x60000);
4442 bw = (u8)rtw_read32_mask(rtwdev, 0x9b0, 0xc);
4443
4444 rtw_dbg(rtwdev, RTW_DBG_PHY, "lv: (%d) -> (%d) bw=%d nr=%d cck_fa_avg=%d\n",
4445 dm_info->cck_pd_lv[bw][nrx], new_lvl, bw, nrx,
4446 dm_info->cck_fa_avg);
4447
4448 if (dm_info->cck_pd_lv[bw][nrx] == new_lvl)
4449 return;
4450
4451 cur_lvl = dm_info->cck_pd_lv[bw][nrx];
4452
4453 /* update cck pd info */
4454 dm_info->cck_fa_avg = CCK_FA_AVG_RESET;
4455
4456 rtw8822c_phy_cck_pd_set_reg(rtwdev,
4457 pd_lvl[new_lvl] - pd_lvl[cur_lvl],
4458 cs_lvl[new_lvl] - cs_lvl[cur_lvl],
4459 bw, nrx);
4460 dm_info->cck_pd_lv[bw][nrx] = new_lvl;
4461 }
4462
4463 #define PWR_TRACK_MASK 0x7f
4464 static void rtw8822c_pwrtrack_set(struct rtw_dev *rtwdev, u8 rf_path)
4465 {
4466 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4467
4468 switch (rf_path) {
4469 case RF_PATH_A:
4470 rtw_write32_mask(rtwdev, 0x18a0, PWR_TRACK_MASK,
4471 dm_info->delta_power_index[rf_path]);
4472 break;
4473 case RF_PATH_B:
4474 rtw_write32_mask(rtwdev, 0x41a0, PWR_TRACK_MASK,
4475 dm_info->delta_power_index[rf_path]);
4476 break;
4477 default:
4478 break;
4479 }
4480 }
4481
4482 static void rtw8822c_pwr_track_stats(struct rtw_dev *rtwdev, u8 path)
4483 {
4484 u8 thermal_value;
4485
4486 if (rtwdev->efuse.thermal_meter[path] == 0xff)
4487 return;
4488
4489 thermal_value = rtw_read_rf(rtwdev, path, RF_T_METER, 0x7e);
4490 rtw_phy_pwrtrack_avg(rtwdev, thermal_value, path);
4491 }
4492
4493 static void rtw8822c_pwr_track_path(struct rtw_dev *rtwdev,
4494 struct rtw_swing_table *swing_table,
4495 u8 path)
4496 {
4497 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4498 u8 delta;
4499
4500 delta = rtw_phy_pwrtrack_get_delta(rtwdev, path);
4501 dm_info->delta_power_index[path] =
4502 rtw_phy_pwrtrack_get_pwridx(rtwdev, swing_table, path, path,
4503 delta);
4504 rtw8822c_pwrtrack_set(rtwdev, path);
4505 }
4506
4507 static void __rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4508 {
4509 struct rtw_swing_table swing_table;
4510 u8 i;
4511
4512 rtw_phy_config_swing_table(rtwdev, &swing_table);
4513
4514 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4515 rtw8822c_pwr_track_stats(rtwdev, i);
4516 if (rtw_phy_pwrtrack_need_lck(rtwdev))
4517 rtw8822c_do_lck(rtwdev);
4518 for (i = 0; i < rtwdev->hal.rf_path_num; i++)
4519 rtw8822c_pwr_track_path(rtwdev, &swing_table, i);
4520 }
4521
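/* Descriptive note (added): thermal power tracking is split across two calls.
 * The first invocation only kicks the thermal meters on both RF paths; the
 * actual compensation runs on the following call once a fresh reading is
 * available.
 */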
4522 static void rtw8822c_pwr_track(struct rtw_dev *rtwdev)
4523 {
4524 struct rtw_efuse *efuse = &rtwdev->efuse;
4525 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4526
4527 if (efuse->power_track_type != 0)
4528 return;
4529
4530 if (!dm_info->pwr_trk_triggered) {
4531 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4532 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x00);
4533 rtw_write_rf(rtwdev, RF_PATH_A, RF_T_METER, BIT(19), 0x01);
4534
4535 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4536 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x00);
4537 rtw_write_rf(rtwdev, RF_PATH_B, RF_T_METER, BIT(19), 0x01);
4538
4539 dm_info->pwr_trk_triggered = true;
4540 return;
4541 }
4542
4543 __rtw8822c_pwr_track(rtwdev);
4544 dm_info->pwr_trk_triggered = false;
4545 }
4546
4547 static void rtw8822c_adaptivity_init(struct rtw_dev *rtwdev)
4548 {
4549 rtw_phy_set_edcca_th(rtwdev, RTW8822C_EDCCA_MAX, RTW8822C_EDCCA_MAX);
4550
4551 /* mac edcca state setting */
4552 rtw_write32_clr(rtwdev, REG_TX_PTCL_CTRL, BIT_DIS_EDCCA);
4553 rtw_write32_set(rtwdev, REG_RD_CTRL, BIT_EDCCA_MSK_CNTDOWN_EN);
4554
4555 	/* edcca decision opt */
4556 rtw_write32_clr(rtwdev, REG_EDCCA_DECISION, BIT_EDCCA_OPTION);
4557 }
4558
4559 static void rtw8822c_adaptivity(struct rtw_dev *rtwdev)
4560 {
4561 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
4562 s8 l2h, h2l;
4563 u8 igi;
4564
4565 igi = dm_info->igi_history[0];
4566 if (dm_info->edcca_mode == RTW_EDCCA_NORMAL) {
4567 l2h = max_t(s8, igi + EDCCA_IGI_L2H_DIFF, EDCCA_TH_L2H_LB);
4568 h2l = l2h - EDCCA_L2H_H2L_DIFF_NORMAL;
4569 } else {
4570 if (igi < dm_info->l2h_th_ini - EDCCA_ADC_BACKOFF)
4571 l2h = igi + EDCCA_ADC_BACKOFF;
4572 else
4573 l2h = dm_info->l2h_th_ini;
4574 h2l = l2h - EDCCA_L2H_H2L_DIFF;
4575 }
4576
4577 rtw_phy_set_edcca_th(rtwdev, l2h, h2l);
4578 }
4579
4580 static void rtw8822c_fill_txdesc_checksum(struct rtw_dev *rtwdev,
4581 struct rtw_tx_pkt_info *pkt_info,
4582 u8 *txdesc)
4583 {
4584 const struct rtw_chip_info *chip = rtwdev->chip;
4585 size_t words;
4586
4587 words = (pkt_info->pkt_offset * 8 + chip->tx_pkt_desc_sz) / 2;
4588
4589 fill_txdesc_checksum_common(txdesc, words);
4590 }
4591
4592 static const struct rtw_pwr_seq_cmd trans_carddis_to_cardemu_8822c[] = {
4593 {0x0086,
4594 RTW_PWR_CUT_ALL_MSK,
4595 RTW_PWR_INTF_SDIO_MSK,
4596 RTW_PWR_ADDR_SDIO,
4597 RTW_PWR_CMD_WRITE, BIT(0), 0},
4598 {0x0086,
4599 RTW_PWR_CUT_ALL_MSK,
4600 RTW_PWR_INTF_SDIO_MSK,
4601 RTW_PWR_ADDR_SDIO,
4602 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4603 {0x002E,
4604 RTW_PWR_CUT_ALL_MSK,
4605 RTW_PWR_INTF_ALL_MSK,
4606 RTW_PWR_ADDR_MAC,
4607 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4608 {0x002D,
4609 RTW_PWR_CUT_ALL_MSK,
4610 RTW_PWR_INTF_ALL_MSK,
4611 RTW_PWR_ADDR_MAC,
4612 RTW_PWR_CMD_WRITE, BIT(0), 0},
4613 {0x007F,
4614 RTW_PWR_CUT_ALL_MSK,
4615 RTW_PWR_INTF_ALL_MSK,
4616 RTW_PWR_ADDR_MAC,
4617 RTW_PWR_CMD_WRITE, BIT(7), 0},
4618 {0x004A,
4619 RTW_PWR_CUT_ALL_MSK,
4620 RTW_PWR_INTF_USB_MSK,
4621 RTW_PWR_ADDR_MAC,
4622 RTW_PWR_CMD_WRITE, BIT(0), 0},
4623 {0x0005,
4624 RTW_PWR_CUT_ALL_MSK,
4625 RTW_PWR_INTF_ALL_MSK,
4626 RTW_PWR_ADDR_MAC,
4627 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4) | BIT(7), 0},
4628 {0xFFFF,
4629 RTW_PWR_CUT_ALL_MSK,
4630 RTW_PWR_INTF_ALL_MSK,
4631 0,
4632 RTW_PWR_CMD_END, 0, 0},
4633 };
4634
4635 static const struct rtw_pwr_seq_cmd trans_cardemu_to_act_8822c[] = {
4636 {0x0000,
4637 RTW_PWR_CUT_ALL_MSK,
4638 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4639 RTW_PWR_ADDR_MAC,
4640 RTW_PWR_CMD_WRITE, BIT(5), 0},
4641 {0x0005,
4642 RTW_PWR_CUT_ALL_MSK,
4643 RTW_PWR_INTF_ALL_MSK,
4644 RTW_PWR_ADDR_MAC,
4645 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3) | BIT(2)), 0},
4646 {0x0075,
4647 RTW_PWR_CUT_ALL_MSK,
4648 RTW_PWR_INTF_PCI_MSK,
4649 RTW_PWR_ADDR_MAC,
4650 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4651 {0x0006,
4652 RTW_PWR_CUT_ALL_MSK,
4653 RTW_PWR_INTF_ALL_MSK,
4654 RTW_PWR_ADDR_MAC,
4655 RTW_PWR_CMD_POLLING, BIT(1), BIT(1)},
4656 {0x0075,
4657 RTW_PWR_CUT_ALL_MSK,
4658 RTW_PWR_INTF_PCI_MSK,
4659 RTW_PWR_ADDR_MAC,
4660 RTW_PWR_CMD_WRITE, BIT(0), 0},
4661 {0xFF1A,
4662 RTW_PWR_CUT_ALL_MSK,
4663 RTW_PWR_INTF_USB_MSK,
4664 RTW_PWR_ADDR_MAC,
4665 RTW_PWR_CMD_WRITE, 0xFF, 0},
4666 {0x002E,
4667 RTW_PWR_CUT_ALL_MSK,
4668 RTW_PWR_INTF_ALL_MSK,
4669 RTW_PWR_ADDR_MAC,
4670 RTW_PWR_CMD_WRITE, BIT(3), 0},
4671 {0x0006,
4672 RTW_PWR_CUT_ALL_MSK,
4673 RTW_PWR_INTF_ALL_MSK,
4674 RTW_PWR_ADDR_MAC,
4675 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4676 {0x0005,
4677 RTW_PWR_CUT_ALL_MSK,
4678 RTW_PWR_INTF_ALL_MSK,
4679 RTW_PWR_ADDR_MAC,
4680 RTW_PWR_CMD_WRITE, (BIT(4) | BIT(3)), 0},
4681 {0x1018,
4682 RTW_PWR_CUT_ALL_MSK,
4683 RTW_PWR_INTF_ALL_MSK,
4684 RTW_PWR_ADDR_MAC,
4685 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4686 {0x0005,
4687 RTW_PWR_CUT_ALL_MSK,
4688 RTW_PWR_INTF_ALL_MSK,
4689 RTW_PWR_ADDR_MAC,
4690 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4691 {0x0005,
4692 RTW_PWR_CUT_ALL_MSK,
4693 RTW_PWR_INTF_ALL_MSK,
4694 RTW_PWR_ADDR_MAC,
4695 RTW_PWR_CMD_POLLING, BIT(0), 0},
4696 {0x0074,
4697 RTW_PWR_CUT_ALL_MSK,
4698 RTW_PWR_INTF_PCI_MSK,
4699 RTW_PWR_ADDR_MAC,
4700 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4701 {0x0071,
4702 RTW_PWR_CUT_ALL_MSK,
4703 RTW_PWR_INTF_PCI_MSK,
4704 RTW_PWR_ADDR_MAC,
4705 RTW_PWR_CMD_WRITE, BIT(4), 0},
4706 {0x0062,
4707 RTW_PWR_CUT_ALL_MSK,
4708 RTW_PWR_INTF_PCI_MSK,
4709 RTW_PWR_ADDR_MAC,
4710 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)),
4711 (BIT(7) | BIT(6) | BIT(5))},
4712 {0x0061,
4713 RTW_PWR_CUT_ALL_MSK,
4714 RTW_PWR_INTF_PCI_MSK,
4715 RTW_PWR_ADDR_MAC,
4716 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6) | BIT(5)), 0},
4717 {0x001F,
4718 RTW_PWR_CUT_ALL_MSK,
4719 RTW_PWR_INTF_ALL_MSK,
4720 RTW_PWR_ADDR_MAC,
4721 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4722 {0x00EF,
4723 RTW_PWR_CUT_ALL_MSK,
4724 RTW_PWR_INTF_ALL_MSK,
4725 RTW_PWR_ADDR_MAC,
4726 RTW_PWR_CMD_WRITE, (BIT(7) | BIT(6)), BIT(7)},
4727 {0x1045,
4728 RTW_PWR_CUT_ALL_MSK,
4729 RTW_PWR_INTF_ALL_MSK,
4730 RTW_PWR_ADDR_MAC,
4731 RTW_PWR_CMD_WRITE, BIT(4), BIT(4)},
4732 {0x0010,
4733 RTW_PWR_CUT_ALL_MSK,
4734 RTW_PWR_INTF_ALL_MSK,
4735 RTW_PWR_ADDR_MAC,
4736 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4737 {0x1064,
4738 RTW_PWR_CUT_ALL_MSK,
4739 RTW_PWR_INTF_ALL_MSK,
4740 RTW_PWR_ADDR_MAC,
4741 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4742 {0xFFFF,
4743 RTW_PWR_CUT_ALL_MSK,
4744 RTW_PWR_INTF_ALL_MSK,
4745 0,
4746 RTW_PWR_CMD_END, 0, 0},
4747 };
4748
4749 static const struct rtw_pwr_seq_cmd trans_act_to_cardemu_8822c[] = {
4750 {0x0093,
4751 RTW_PWR_CUT_ALL_MSK,
4752 RTW_PWR_INTF_ALL_MSK,
4753 RTW_PWR_ADDR_MAC,
4754 RTW_PWR_CMD_WRITE, BIT(3), 0},
4755 {0x001F,
4756 RTW_PWR_CUT_ALL_MSK,
4757 RTW_PWR_INTF_ALL_MSK,
4758 RTW_PWR_ADDR_MAC,
4759 RTW_PWR_CMD_WRITE, 0xFF, 0},
4760 {0x00EF,
4761 RTW_PWR_CUT_ALL_MSK,
4762 RTW_PWR_INTF_ALL_MSK,
4763 RTW_PWR_ADDR_MAC,
4764 RTW_PWR_CMD_WRITE, 0xFF, 0},
4765 {0x1045,
4766 RTW_PWR_CUT_ALL_MSK,
4767 RTW_PWR_INTF_ALL_MSK,
4768 RTW_PWR_ADDR_MAC,
4769 RTW_PWR_CMD_WRITE, BIT(4), 0},
4770 {0xFF1A,
4771 RTW_PWR_CUT_ALL_MSK,
4772 RTW_PWR_INTF_USB_MSK,
4773 RTW_PWR_ADDR_MAC,
4774 RTW_PWR_CMD_WRITE, 0xFF, 0x30},
4775 {0x0049,
4776 RTW_PWR_CUT_ALL_MSK,
4777 RTW_PWR_INTF_ALL_MSK,
4778 RTW_PWR_ADDR_MAC,
4779 RTW_PWR_CMD_WRITE, BIT(1), 0},
4780 {0x0006,
4781 RTW_PWR_CUT_ALL_MSK,
4782 RTW_PWR_INTF_ALL_MSK,
4783 RTW_PWR_ADDR_MAC,
4784 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4785 {0x0002,
4786 RTW_PWR_CUT_ALL_MSK,
4787 RTW_PWR_INTF_ALL_MSK,
4788 RTW_PWR_ADDR_MAC,
4789 RTW_PWR_CMD_WRITE, BIT(1), 0},
4790 {0x0005,
4791 RTW_PWR_CUT_ALL_MSK,
4792 RTW_PWR_INTF_ALL_MSK,
4793 RTW_PWR_ADDR_MAC,
4794 RTW_PWR_CMD_WRITE, BIT(1), BIT(1)},
4795 {0x0005,
4796 RTW_PWR_CUT_ALL_MSK,
4797 RTW_PWR_INTF_ALL_MSK,
4798 RTW_PWR_ADDR_MAC,
4799 RTW_PWR_CMD_POLLING, BIT(1), 0},
4800 {0x0000,
4801 RTW_PWR_CUT_ALL_MSK,
4802 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4803 RTW_PWR_ADDR_MAC,
4804 RTW_PWR_CMD_WRITE, BIT(5), BIT(5)},
4805 {0xFFFF,
4806 RTW_PWR_CUT_ALL_MSK,
4807 RTW_PWR_INTF_ALL_MSK,
4808 0,
4809 RTW_PWR_CMD_END, 0, 0},
4810 };
4811
4812 static const struct rtw_pwr_seq_cmd trans_cardemu_to_carddis_8822c[] = {
4813 {0x0005,
4814 RTW_PWR_CUT_ALL_MSK,
4815 RTW_PWR_INTF_SDIO_MSK,
4816 RTW_PWR_ADDR_MAC,
4817 RTW_PWR_CMD_WRITE, BIT(7), BIT(7)},
4818 {0x0007,
4819 RTW_PWR_CUT_ALL_MSK,
4820 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4821 RTW_PWR_ADDR_MAC,
4822 RTW_PWR_CMD_WRITE, 0xFF, 0x00},
4823 {0x0067,
4824 RTW_PWR_CUT_ALL_MSK,
4825 RTW_PWR_INTF_ALL_MSK,
4826 RTW_PWR_ADDR_MAC,
4827 RTW_PWR_CMD_WRITE, BIT(5), 0},
4828 {0x004A,
4829 RTW_PWR_CUT_ALL_MSK,
4830 RTW_PWR_INTF_USB_MSK,
4831 RTW_PWR_ADDR_MAC,
4832 RTW_PWR_CMD_WRITE, BIT(0), 0},
4833 {0x0081,
4834 RTW_PWR_CUT_ALL_MSK,
4835 RTW_PWR_INTF_ALL_MSK,
4836 RTW_PWR_ADDR_MAC,
4837 RTW_PWR_CMD_WRITE, BIT(7) | BIT(6), 0},
4838 {0x0090,
4839 RTW_PWR_CUT_ALL_MSK,
4840 RTW_PWR_INTF_ALL_MSK,
4841 RTW_PWR_ADDR_MAC,
4842 RTW_PWR_CMD_WRITE, BIT(1), 0},
4843 {0x0092,
4844 RTW_PWR_CUT_ALL_MSK,
4845 RTW_PWR_INTF_PCI_MSK,
4846 RTW_PWR_ADDR_MAC,
4847 RTW_PWR_CMD_WRITE, 0xFF, 0x20},
4848 {0x0093,
4849 RTW_PWR_CUT_ALL_MSK,
4850 RTW_PWR_INTF_PCI_MSK,
4851 RTW_PWR_ADDR_MAC,
4852 RTW_PWR_CMD_WRITE, 0xFF, 0x04},
4853 {0x0005,
4854 RTW_PWR_CUT_ALL_MSK,
4855 RTW_PWR_INTF_USB_MSK | RTW_PWR_INTF_SDIO_MSK,
4856 RTW_PWR_ADDR_MAC,
4857 RTW_PWR_CMD_WRITE, BIT(3) | BIT(4), BIT(3)},
4858 {0x0005,
4859 RTW_PWR_CUT_ALL_MSK,
4860 RTW_PWR_INTF_PCI_MSK,
4861 RTW_PWR_ADDR_MAC,
4862 RTW_PWR_CMD_WRITE, BIT(2), BIT(2)},
4863 {0x0086,
4864 RTW_PWR_CUT_ALL_MSK,
4865 RTW_PWR_INTF_SDIO_MSK,
4866 RTW_PWR_ADDR_SDIO,
4867 RTW_PWR_CMD_WRITE, BIT(0), BIT(0)},
4868 {0xFFFF,
4869 RTW_PWR_CUT_ALL_MSK,
4870 RTW_PWR_INTF_ALL_MSK,
4871 0,
4872 RTW_PWR_CMD_END, 0, 0},
4873 };
4874
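/*
 * Power on/off flows: NULL-terminated lists of the sub-sequences above,
 * executed in order during MAC power-up and power-down.
 */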
4875 static const struct rtw_pwr_seq_cmd *card_enable_flow_8822c[] = {
4876 trans_carddis_to_cardemu_8822c,
4877 trans_cardemu_to_act_8822c,
4878 NULL
4879 };
4880
4881 static const struct rtw_pwr_seq_cmd *card_disable_flow_8822c[] = {
4882 trans_act_to_cardemu_8822c,
4883 trans_cardemu_to_carddis_8822c,
4884 NULL
4885 };
4886
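/*
 * Interface PHY parameter tables.  A 0xFFFF offset terminates each table,
 * so these contain only the terminator and no USB/PCIe PHY fixups are
 * applied for 8822c.
 */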
4887 static const struct rtw_intf_phy_para usb2_param_8822c[] = {
4888 {0xFFFF, 0x00,
4889 RTW_IP_SEL_PHY,
4890 RTW_INTF_PHY_CUT_ALL,
4891 RTW_INTF_PHY_PLATFORM_ALL},
4892 };
4893
4894 static const struct rtw_intf_phy_para usb3_param_8822c[] = {
4895 {0xFFFF, 0x0000,
4896 RTW_IP_SEL_PHY,
4897 RTW_INTF_PHY_CUT_ALL,
4898 RTW_INTF_PHY_PLATFORM_ALL},
4899 };
4900
4901 static const struct rtw_intf_phy_para pcie_gen1_param_8822c[] = {
4902 {0xFFFF, 0x0000,
4903 RTW_IP_SEL_PHY,
4904 RTW_INTF_PHY_CUT_ALL,
4905 RTW_INTF_PHY_PLATFORM_ALL},
4906 };
4907
4908 static const struct rtw_intf_phy_para pcie_gen2_param_8822c[] = {
4909 {0xFFFF, 0x0000,
4910 RTW_IP_SEL_PHY,
4911 RTW_INTF_PHY_CUT_ALL,
4912 RTW_INTF_PHY_PLATFORM_ALL},
4913 };
4914
4915 static const struct rtw_intf_phy_para_table phy_para_table_8822c = {
4916 .usb2_para = usb2_param_8822c,
4917 .usb3_para = usb3_param_8822c,
4918 .gen1_para = pcie_gen1_param_8822c,
4919 .gen2_para = pcie_gen2_param_8822c,
4920 .n_usb2_para = ARRAY_SIZE(usb2_param_8822c),
4921 .n_usb3_para = ARRAY_SIZE(usb3_param_8822c),
4922 .n_gen1_para = ARRAY_SIZE(pcie_gen1_param_8822c),
4923 .n_gen2_para = ARRAY_SIZE(pcie_gen2_param_8822c),
4924 };
4925
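/*
 * RFE (RF front-end) definitions, indexed by the rfe_option value read
 * from efuse.  Each RTW_DEF_RFE(8822c, pg, lmt) entry selects the
 * power-by-rate (bb_pg) and TX power limit table variants for that RFE
 * type.
 */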
4926 static const struct rtw_rfe_def rtw8822c_rfe_defs[] = {
4927 [0] = RTW_DEF_RFE(8822c, 0, 0),
4928 [1] = RTW_DEF_RFE(8822c, 0, 0),
4929 [2] = RTW_DEF_RFE(8822c, 0, 0),
4930 [3] = RTW_DEF_RFE(8822c, 0, 0),
4931 [4] = RTW_DEF_RFE(8822c, 0, 0),
4932 [5] = RTW_DEF_RFE(8822c, 0, 5),
4933 [6] = RTW_DEF_RFE(8822c, 0, 0),
4934 };
4935
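/* Initial gain (IGI) control fields used by DIG, one entry per RF path. */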
4936 static const struct rtw_hw_reg rtw8822c_dig[] = {
4937 [0] = { .addr = 0x1d70, .mask = 0x7f },
4938 [1] = { .addr = 0x1d70, .mask = 0x7f00 },
4939 };
4940
4941 static const struct rtw_ltecoex_addr rtw8822c_ltecoex_addr = {
4942 .ctrl = LTECOEX_ACCESS_CTRL,
4943 .wdata = LTECOEX_WRITE_DATA,
4944 .rdata = LTECOEX_READ_DATA,
4945 };
4946
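/*
 * Reserved TX buffer page allocation per FIFO queue; which row is used
 * depends on the host interface and endpoint configuration (see the TRX
 * init code in mac.c).
 */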
4947 static const struct rtw_page_table page_table_8822c[] = {
4948 {64, 64, 64, 64, 1},
4949 {64, 64, 64, 64, 1},
4950 {64, 64, 0, 0, 1},
4951 {64, 64, 64, 0, 1},
4952 {64, 64, 64, 64, 1},
4953 };
4954
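/*
 * RQPN: mapping of the TX queues (VO/VI/BE/BK, management, high) onto DMA
 * priority queues, selected alongside the page table above.
 */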
4955 static const struct rtw_rqpn rqpn_table_8822c[] = {
4956 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4957 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4958 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4959 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4960 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4961 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4962 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4963 RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_HIGH,
4964 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4965 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4966 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4967 RTW_DMA_MAPPING_HIGH, RTW_DMA_MAPPING_HIGH},
4968 {RTW_DMA_MAPPING_NORMAL, RTW_DMA_MAPPING_NORMAL,
4969 RTW_DMA_MAPPING_LOW, RTW_DMA_MAPPING_LOW,
4970 RTW_DMA_MAPPING_EXTRA, RTW_DMA_MAPPING_HIGH},
4971 };
4972
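/*
 * Registers reporting the reserved/available page counts of each DMA
 * priority queue; .wsize selects wider (16-bit) reads of these counters.
 */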
4973 static struct rtw_prioq_addrs prioq_addrs_8822c = {
4974 .prio[RTW_DMA_MAPPING_EXTRA] = {
4975 .rsvd = REG_FIFOPAGE_INFO_4, .avail = REG_FIFOPAGE_INFO_4 + 2,
4976 },
4977 .prio[RTW_DMA_MAPPING_LOW] = {
4978 .rsvd = REG_FIFOPAGE_INFO_2, .avail = REG_FIFOPAGE_INFO_2 + 2,
4979 },
4980 .prio[RTW_DMA_MAPPING_NORMAL] = {
4981 .rsvd = REG_FIFOPAGE_INFO_3, .avail = REG_FIFOPAGE_INFO_3 + 2,
4982 },
4983 .prio[RTW_DMA_MAPPING_HIGH] = {
4984 .rsvd = REG_FIFOPAGE_INFO_1, .avail = REG_FIFOPAGE_INFO_1 + 2,
4985 },
4986 .wsize = true,
4987 };
4988
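/*
 * Chip-specific callbacks hooked up by the rtw88 core; entries left NULL
 * are treated as unsupported on this chip.
 */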
4989 static struct rtw_chip_ops rtw8822c_ops = {
4990 .phy_set_param = rtw8822c_phy_set_param,
4991 .read_efuse = rtw8822c_read_efuse,
4992 .query_rx_desc = rtw8822c_query_rx_desc,
4993 .set_channel = rtw8822c_set_channel,
4994 .mac_init = rtw8822c_mac_init,
4995 .dump_fw_crash = rtw8822c_dump_fw_crash,
4996 .read_rf = rtw_phy_read_rf,
4997 .write_rf = rtw_phy_write_rf_reg_mix,
4998 .set_tx_power_index = rtw8822c_set_tx_power_index,
4999 .set_antenna = rtw8822c_set_antenna,
5000 .cfg_ldo25 = rtw8822c_cfg_ldo25,
5001 .false_alarm_statistics = rtw8822c_false_alarm_statistics,
5002 .dpk_track = rtw8822c_dpk_track,
5003 .phy_calibration = rtw8822c_phy_calibration,
5004 .cck_pd_set = rtw8822c_phy_cck_pd_set,
5005 .pwr_track = rtw8822c_pwr_track,
5006 .config_bfee = rtw8822c_bf_config_bfee,
5007 .set_gid_table = rtw_bf_set_gid_table,
5008 .cfg_csi_rate = rtw_bf_cfg_csi_rate,
5009 .adaptivity_init = rtw8822c_adaptivity_init,
5010 .adaptivity = rtw8822c_adaptivity,
5011 .cfo_init = rtw8822c_cfo_init,
5012 .cfo_track = rtw8822c_cfo_track,
5013 .config_tx_path = rtw8822c_config_tx_path,
5014 .config_txrx_mode = rtw8822c_config_trx_mode,
5015 .fill_txdesc_checksum = rtw8822c_fill_txdesc_checksum,
5016
5017 .coex_set_init = rtw8822c_coex_cfg_init,
5018 .coex_set_ant_switch = NULL,
5019 .coex_set_gnt_fix = rtw8822c_coex_cfg_gnt_fix,
5020 .coex_set_gnt_debug = rtw8822c_coex_cfg_gnt_debug,
5021 .coex_set_rfe_type = rtw8822c_coex_cfg_rfe_type,
5022 .coex_set_wl_tx_power = rtw8822c_coex_cfg_wl_tx_power,
5023 .coex_set_wl_rx_gain = rtw8822c_coex_cfg_wl_rx_gain,
5024 };
5025
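/*
 * Coexistence slot tables: each case programs two 32-bit slot-priority
 * patterns (one for WL, one for BT) into the coex table registers.
 * Shared-antenna cases are numbered from 0, non-shared-antenna cases
 * from 100.
 */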
5026 /* Shared-Antenna Coex Table */
5027 static const struct coex_table_para table_sant_8822c[] = {
5028 {0xffffffff, 0xffffffff}, /* case-0 */
5029 {0x55555555, 0x55555555},
5030 {0x66555555, 0x66555555},
5031 {0xaaaaaaaa, 0xaaaaaaaa},
5032 {0x5a5a5a5a, 0x5a5a5a5a},
5033 {0xfafafafa, 0xfafafafa}, /* case-5 */
5034 {0x6a5a5555, 0xaaaaaaaa},
5035 {0x6a5a56aa, 0x6a5a56aa},
5036 {0x6a5a5a5a, 0x6a5a5a5a},
5037 {0x66555555, 0x5a5a5a5a},
5038 {0x66555555, 0x6a5a5a5a}, /* case-10 */
5039 {0x66555555, 0x6a5a5aaa},
5040 {0x66555555, 0x5a5a5aaa},
5041 {0x66555555, 0x6aaa5aaa},
5042 {0x66555555, 0xaaaa5aaa},
5043 {0x66555555, 0xaaaaaaaa}, /* case-15 */
5044 {0xffff55ff, 0xfafafafa},
5045 {0xffff55ff, 0x6afa5afa},
5046 {0xaaffffaa, 0xfafafafa},
5047 {0xaa5555aa, 0x5a5a5a5a},
5048 {0xaa5555aa, 0x6a5a5a5a}, /* case-20 */
5049 {0xaa5555aa, 0xaaaaaaaa},
5050 {0xffffffff, 0x5a5a5a5a},
5051 {0xffffffff, 0x5a5a5a5a},
5052 {0xffffffff, 0x55555555},
5053 {0xffffffff, 0x5a5a5aaa}, /* case-25 */
5054 {0x55555555, 0x5a5a5a5a},
5055 {0x55555555, 0xaaaaaaaa},
5056 {0x55555555, 0x6a5a6a5a},
5057 {0x66556655, 0x66556655},
5058 {0x66556aaa, 0x6a5a6aaa}, /* case-30 */
5059 {0xffffffff, 0x5aaa5aaa},
5060 {0x56555555, 0x5a5a5aaa},
5061 {0xdaffdaff, 0xdaffdaff},
5062 {0xddffddff, 0xddffddff},
5063 };
5064
5065 /* Non-Shared-Antenna Coex Table */
5066 static const struct coex_table_para table_nsant_8822c[] = {
5067 {0xffffffff, 0xffffffff}, /* case-100 */
5068 {0x55555555, 0x55555555},
5069 {0x66555555, 0x66555555},
5070 {0xaaaaaaaa, 0xaaaaaaaa},
5071 {0x5a5a5a5a, 0x5a5a5a5a},
5072 {0xfafafafa, 0xfafafafa}, /* case-105 */
5073 {0x5afa5afa, 0x5afa5afa},
5074 {0x55555555, 0xfafafafa},
5075 {0x66555555, 0xfafafafa},
5076 {0x66555555, 0x5a5a5a5a},
5077 {0x66555555, 0x6a5a5a5a}, /* case-110 */
5078 {0x66555555, 0xaaaaaaaa},
5079 {0xffff55ff, 0xfafafafa},
5080 {0xffff55ff, 0x5afa5afa},
5081 {0xffff55ff, 0xaaaaaaaa},
5082 {0xffff55ff, 0xffff55ff}, /* case-115 */
5083 {0xaaffffaa, 0x5afa5afa},
5084 {0xaaffffaa, 0xaaaaaaaa},
5085 {0xffffffff, 0xfafafafa},
5086 {0xffffffff, 0x5afa5afa},
5087 {0xffffffff, 0xaaaaaaaa}, /* case-120 */
5088 {0x55ff55ff, 0x5afa5afa},
5089 {0x55ff55ff, 0xaaaaaaaa},
5090 {0x55ff55ff, 0x55ff55ff}
5091 };
5092
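/*
 * PS-TDMA parameter sets: the five bytes of each case are handed to the
 * firmware (via H2C) to schedule the WL/BT time-division slots.
 */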
5093 /* Shared-Antenna TDMA */
5094 static const struct coex_tdma_para tdma_sant_8822c[] = {
5095 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-0 */
5096 { {0x61, 0x45, 0x03, 0x11, 0x11} }, /* case-1 */
5097 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5098 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5099 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5100 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-5 */
5101 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5102 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5103 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5104 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5105 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-10 */
5106 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5107 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5108 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5109 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5110 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-15 */
5111 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5112 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5113 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5114 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5115 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-20 */
5116 { {0x51, 0x4a, 0x03, 0x10, 0x50} },
5117 { {0x51, 0x0c, 0x03, 0x10, 0x54} },
5118 { {0x55, 0x08, 0x03, 0x10, 0x54} },
5119 { {0x65, 0x10, 0x03, 0x11, 0x10} },
5120 { {0x51, 0x10, 0x03, 0x10, 0x51} }, /* case-25 */
5121 { {0x51, 0x08, 0x03, 0x10, 0x50} },
5122 { {0x61, 0x08, 0x03, 0x11, 0x11} }
5123 };
5124
5125 /* Non-Shared-Antenna TDMA */
5126 static const struct coex_tdma_para tdma_nsant_8822c[] = {
5127 { {0x00, 0x00, 0x00, 0x00, 0x00} }, /* case-100 */
5128 { {0x61, 0x45, 0x03, 0x11, 0x11} },
5129 { {0x61, 0x3a, 0x03, 0x11, 0x11} },
5130 { {0x61, 0x30, 0x03, 0x11, 0x11} },
5131 { {0x61, 0x20, 0x03, 0x11, 0x11} },
5132 { {0x61, 0x10, 0x03, 0x11, 0x11} }, /* case-105 */
5133 { {0x61, 0x45, 0x03, 0x11, 0x10} },
5134 { {0x61, 0x3a, 0x03, 0x11, 0x10} },
5135 { {0x61, 0x30, 0x03, 0x11, 0x10} },
5136 { {0x61, 0x20, 0x03, 0x11, 0x10} },
5137 { {0x61, 0x10, 0x03, 0x11, 0x10} }, /* case-110 */
5138 { {0x61, 0x08, 0x03, 0x11, 0x14} },
5139 { {0x61, 0x08, 0x03, 0x10, 0x14} },
5140 { {0x51, 0x08, 0x03, 0x10, 0x54} },
5141 { {0x51, 0x08, 0x03, 0x10, 0x55} },
5142 { {0x51, 0x08, 0x07, 0x10, 0x54} }, /* case-115 */
5143 { {0x51, 0x45, 0x03, 0x10, 0x50} },
5144 { {0x51, 0x3a, 0x03, 0x10, 0x50} },
5145 { {0x51, 0x30, 0x03, 0x10, 0x50} },
5146 { {0x51, 0x20, 0x03, 0x10, 0x50} },
5147 { {0x51, 0x10, 0x03, 0x10, 0x50} }, /* case-120 */
5148 { {0x51, 0x08, 0x03, 0x10, 0x50} }
5149 };
5150
5151 /* RSSI thresholds in percent (dBm = percent - 100) */
5152 static const u8 wl_rssi_step_8822c[] = {60, 50, 44, 30};
5153 static const u8 bt_rssi_step_8822c[] = {8, 15, 20, 25};
5154 static const struct coex_5g_afh_map afh_5g_8822c[] = { {0, 0, 0} };
5155
5156 /* wl_tx_dec_power, bt_tx_dec_power, wl_rx_gain, bt_rx_lna_constrain */
5157 static const struct coex_rf_para rf_para_tx_8822c[] = {
5158 {0, 0, false, 7}, /* for normal */
5159 {0, 16, false, 7}, /* for WL-CPT */
5160 {8, 17, true, 4},
5161 {7, 18, true, 4},
5162 {6, 19, true, 4},
5163 {5, 20, true, 4},
5164 {0, 21, true, 4} /* for gaming HID */
5165 };
5166
5167 static const struct coex_rf_para rf_para_rx_8822c[] = {
5168 {0, 0, false, 7}, /* for normal */
5169 {0, 16, false, 7}, /* for WL-CPT */
5170 {3, 24, true, 5},
5171 {2, 26, true, 5},
5172 {1, 27, true, 5},
5173 {0, 28, true, 5},
5174 {0, 28, true, 5} /* for gaming HID */
5175 };
5176
5177 static_assert(ARRAY_SIZE(rf_para_tx_8822c) == ARRAY_SIZE(rf_para_rx_8822c));
5178
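/*
 * Thermal power tracking tables, indexed by the absolute difference
 * between the current thermal meter reading and the efuse calibration
 * value (up to RTW_PWR_TRK_TBL_SZ - 1 steps).  Roughly, the core applies
 * them as (sketch of the lookup in phy.c, not the literal code):
 *
 *	if (thermal_avg > efuse->thermal_meter[path])
 *		pwr_idx_delta = tbl_p[delta];
 *	else
 *		pwr_idx_delta = -tbl_n[delta];
 *
 * "a"/"b" denote RF paths A/B, and the 5G tables carry one row per 5G
 * sub-band group.
 */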
5179 static const u8
5180 rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5181 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5182 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5183 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5184 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5185 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5186 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5187 { 0, 1, 2, 3, 5, 6, 7, 8, 9, 10,
5188 11, 12, 13, 14, 15, 16, 18, 19, 20, 21,
5189 22, 23, 24, 25, 26, 27, 28, 29, 30, 32 },
5190 };
5191
5192 static const u8
5193 rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5194 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5195 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5196 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5197 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5198 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5199 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5200 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5201 10, 10, 11, 12, 13, 14, 15, 16, 17, 18,
5202 19, 20, 21, 22, 22, 23, 24, 25, 26, 27 },
5203 };
5204
5205 static const u8
5206 rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5207 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5208 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5209 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5210 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5211 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5212 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5213 { 0, 1, 2, 4, 5, 6, 7, 8, 9, 10,
5214 11, 13, 14, 15, 16, 17, 18, 19, 20, 21,
5215 23, 24, 25, 26, 27, 28, 29, 30, 31, 33 },
5216 };
5217
5218 static const u8
5219 rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_NUM][RTW_PWR_TRK_TBL_SZ] = {
5220 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5221 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5222 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5223 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5224 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5225 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5226 { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5227 10, 11, 12, 13, 14, 15, 16, 17, 18, 20,
5228 21, 22, 23, 24, 25, 26, 27, 28, 29, 30 },
5229 };
5230
5231 static const u8 rtw8822c_pwrtrk_2gb_n[RTW_PWR_TRK_TBL_SZ] = {
5232 0, 1, 2, 3, 4, 4, 5, 6, 7, 8,
5233 9, 9, 10, 11, 12, 13, 14, 15, 15, 16,
5234 17, 18, 19, 20, 20, 21, 22, 23, 24, 25
5235 };
5236
5237 static const u8 rtw8822c_pwrtrk_2gb_p[RTW_PWR_TRK_TBL_SZ] = {
5238 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5239 10, 11, 12, 13, 14, 14, 15, 16, 17, 18,
5240 19, 20, 21, 22, 23, 24, 25, 26, 27, 28
5241 };
5242
5243 static const u8 rtw8822c_pwrtrk_2ga_n[RTW_PWR_TRK_TBL_SZ] = {
5244 0, 1, 2, 2, 3, 4, 4, 5, 6, 6,
5245 7, 8, 8, 9, 9, 10, 11, 11, 12, 13,
5246 13, 14, 15, 15, 16, 17, 17, 18, 19, 19
5247 };
5248
5249 static const u8 rtw8822c_pwrtrk_2ga_p[RTW_PWR_TRK_TBL_SZ] = {
5250 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5251 10, 11, 11, 12, 13, 14, 15, 16, 17, 18,
5252 19, 20, 21, 22, 23, 24, 25, 25, 26, 27
5253 };
5254
5255 static const u8 rtw8822c_pwrtrk_2g_cck_b_n[RTW_PWR_TRK_TBL_SZ] = {
5256 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5257 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5258 17, 18, 19, 20, 21, 22, 23, 23, 24, 25
5259 };
5260
5261 static const u8 rtw8822c_pwrtrk_2g_cck_b_p[RTW_PWR_TRK_TBL_SZ] = {
5262 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
5263 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
5264 20, 21, 22, 23, 24, 25, 26, 27, 28, 29
5265 };
5266
5267 static const u8 rtw8822c_pwrtrk_2g_cck_a_n[RTW_PWR_TRK_TBL_SZ] = {
5268 0, 1, 2, 3, 3, 4, 5, 6, 6, 7,
5269 8, 9, 9, 10, 11, 12, 12, 13, 14, 15,
5270 15, 16, 17, 18, 18, 19, 20, 21, 21, 22
5271 };
5272
5273 static const u8 rtw8822c_pwrtrk_2g_cck_a_p[RTW_PWR_TRK_TBL_SZ] = {
5274 0, 1, 2, 3, 4, 5, 5, 6, 7, 8,
5275 9, 10, 11, 11, 12, 13, 14, 15, 16, 17,
5276 18, 18, 19, 20, 21, 22, 23, 24, 24, 25
5277 };
5278
5279 static const struct rtw_pwr_track_tbl rtw8822c_rtw_pwr_track_tbl = {
5280 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_1],
5281 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_2],
5282 .pwrtrk_5gb_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_n[RTW_PWR_TRK_5G_3],
5283 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_1],
5284 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_2],
5285 .pwrtrk_5gb_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5gb_p[RTW_PWR_TRK_5G_3],
5286 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_1],
5287 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_2],
5288 .pwrtrk_5ga_n[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_n[RTW_PWR_TRK_5G_3],
5289 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_1] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_1],
5290 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_2] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_2],
5291 .pwrtrk_5ga_p[RTW_PWR_TRK_5G_3] = rtw8822c_pwrtrk_5ga_p[RTW_PWR_TRK_5G_3],
5292 .pwrtrk_2gb_n = rtw8822c_pwrtrk_2gb_n,
5293 .pwrtrk_2gb_p = rtw8822c_pwrtrk_2gb_p,
5294 .pwrtrk_2ga_n = rtw8822c_pwrtrk_2ga_n,
5295 .pwrtrk_2ga_p = rtw8822c_pwrtrk_2ga_p,
5296 .pwrtrk_2g_cckb_n = rtw8822c_pwrtrk_2g_cck_b_n,
5297 .pwrtrk_2g_cckb_p = rtw8822c_pwrtrk_2g_cck_b_p,
5298 .pwrtrk_2g_ccka_n = rtw8822c_pwrtrk_2g_cck_a_n,
5299 .pwrtrk_2g_ccka_p = rtw8822c_pwrtrk_2g_cck_a_p,
5300 };
5301
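/*
 * EDCCA (energy-detect CCA) low-to-high / high-to-low threshold byte
 * fields in 0x84c.  The 0x80 offset is added when the adaptivity code
 * programs the signed dBm thresholds into the register.
 */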
5302 static struct rtw_hw_reg_offset rtw8822c_edcca_th[] = {
5303 [EDCCA_TH_L2H_IDX] = {
5304 {.addr = 0x84c, .mask = MASKBYTE2}, .offset = 0x80
5305 },
5306 [EDCCA_TH_H2L_IDX] = {
5307 {.addr = 0x84c, .mask = MASKBYTE3}, .offset = 0x80
5308 },
5309 };
5310
5311 #ifdef CONFIG_PM
5312 static const struct wiphy_wowlan_support rtw_wowlan_stub_8822c = {
5313 .flags = WIPHY_WOWLAN_MAGIC_PKT | WIPHY_WOWLAN_GTK_REKEY_FAILURE |
5314 WIPHY_WOWLAN_DISCONNECT | WIPHY_WOWLAN_SUPPORTS_GTK_REKEY |
5315 WIPHY_WOWLAN_NET_DETECT,
5316 .n_patterns = RTW_MAX_PATTERN_NUM,
5317 .pattern_max_len = RTW_MAX_PATTERN_SIZE,
5318 .pattern_min_len = 1,
5319 .max_nd_match_sets = 4,
5320 };
5321 #endif
5322
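/*
 * Registers dumped for the coex debug info; the {0, 0, RTW_REG_DOMAIN_NL}
 * entries act as line-break separators in the dump output.
 */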
5323 static const struct rtw_reg_domain coex_info_hw_regs_8822c[] = {
5324 {0x1860, BIT(3), RTW_REG_DOMAIN_MAC8},
5325 {0x4160, BIT(3), RTW_REG_DOMAIN_MAC8},
5326 {0x1c32, BIT(6), RTW_REG_DOMAIN_MAC8},
5327 {0x1c38, BIT(28), RTW_REG_DOMAIN_MAC32},
5328 {0, 0, RTW_REG_DOMAIN_NL},
5329 {0x430, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5330 {0x434, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5331 {0x42a, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5332 {0x426, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5333 {0x45e, BIT(3), RTW_REG_DOMAIN_MAC8},
5334 {0x454, MASKLWORD, RTW_REG_DOMAIN_MAC16},
5335 {0, 0, RTW_REG_DOMAIN_NL},
5336 {0x4c, BIT(24) | BIT(23), RTW_REG_DOMAIN_MAC32},
5337 {0x64, BIT(0), RTW_REG_DOMAIN_MAC8},
5338 {0x4c6, BIT(4), RTW_REG_DOMAIN_MAC8},
5339 {0x40, BIT(5), RTW_REG_DOMAIN_MAC8},
5340 {0x1, RFREG_MASK, RTW_REG_DOMAIN_RF_B},
5341 {0, 0, RTW_REG_DOMAIN_NL},
5342 {0x550, MASKDWORD, RTW_REG_DOMAIN_MAC32},
5343 {0x522, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5344 {0x953, BIT(1), RTW_REG_DOMAIN_MAC8},
5345 {0xc50, MASKBYTE0, RTW_REG_DOMAIN_MAC8},
5346 };
5347
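/*
 * Exported chip descriptor tying the tables and callbacks above together;
 * referenced by the bus-specific glue (e.g. the 8822CE PCIe driver) at
 * probe time.
 */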
5348 const struct rtw_chip_info rtw8822c_hw_spec = {
5349 .ops = &rtw8822c_ops,
5350 .id = RTW_CHIP_TYPE_8822C,
5351 .fw_name = "rtw88/rtw8822c_fw.bin",
5352 .wlan_cpu = RTW_WCPU_11AC,
5353 .tx_pkt_desc_sz = 48,
5354 .tx_buf_desc_sz = 16,
5355 .rx_pkt_desc_sz = 24,
5356 .rx_buf_desc_sz = 8,
5357 .phy_efuse_size = 512,
5358 .log_efuse_size = 768,
5359 .ptct_efuse_size = 124,
5360 .txff_size = 262144,
5361 .rxff_size = 24576,
5362 .fw_rxff_size = 12288,
5363 .rsvd_drv_pg_num = 16,
5364 .txgi_factor = 2,
5365 .is_pwr_by_rate_dec = false,
5366 .max_power_index = 0x7f,
5367 .csi_buf_pg_num = 50,
5368 .band = RTW_BAND_2G | RTW_BAND_5G,
5369 .page_size = TX_PAGE_SIZE,
5370 .dig_min = 0x20,
5371 .default_1ss_tx_path = BB_PATH_A,
5372 .path_div_supported = true,
5373 .ht_supported = true,
5374 .vht_supported = true,
5375 .lps_deep_mode_supported = BIT(LPS_DEEP_MODE_LCLK) | BIT(LPS_DEEP_MODE_PG),
5376 .sys_func_en = 0xD8,
5377 .pwr_on_seq = card_enable_flow_8822c,
5378 .pwr_off_seq = card_disable_flow_8822c,
5379 .page_table = page_table_8822c,
5380 .rqpn_table = rqpn_table_8822c,
5381 .prioq_addrs = &prioq_addrs_8822c,
5382 .intf_table = &phy_para_table_8822c,
5383 .dig = rtw8822c_dig,
5384 .dig_cck = NULL,
5385 .rf_base_addr = {0x3c00, 0x4c00},
5386 .rf_sipi_addr = {0x1808, 0x4108},
5387 .ltecoex_addr = &rtw8822c_ltecoex_addr,
5388 .mac_tbl = &rtw8822c_mac_tbl,
5389 .agc_tbl = &rtw8822c_agc_tbl,
5390 .bb_tbl = &rtw8822c_bb_tbl,
5391 .rfk_init_tbl = &rtw8822c_array_mp_cal_init_tbl,
5392 .rf_tbl = {&rtw8822c_rf_b_tbl, &rtw8822c_rf_a_tbl},
5393 .rfe_defs = rtw8822c_rfe_defs,
5394 .rfe_defs_size = ARRAY_SIZE(rtw8822c_rfe_defs),
5395 .en_dis_dpd = true,
5396 .dpd_ratemask = DIS_DPD_RATEALL,
5397 .pwr_track_tbl = &rtw8822c_rtw_pwr_track_tbl,
5398 .iqk_threshold = 8,
5399 .lck_threshold = 8,
5400 .bfer_su_max_num = 2,
5401 .bfer_mu_max_num = 1,
5402 .rx_ldpc = true,
5403 .tx_stbc = true,
5404 .edcca_th = rtw8822c_edcca_th,
5405 .l2h_th_ini_cs = 60,
5406 .l2h_th_ini_ad = 45,
5407 .ampdu_density = IEEE80211_HT_MPDU_DENSITY_2,
5408
5409 #ifdef CONFIG_PM
5410 .wow_fw_name = "rtw88/rtw8822c_wow_fw.bin",
5411 .wowlan_stub = &rtw_wowlan_stub_8822c,
5412 .max_sched_scan_ssids = 4,
5413 #endif
5414 .max_scan_ie_len = (RTW_PROBE_PG_CNT - 1) * TX_PAGE_SIZE,
5415 .coex_para_ver = 0x22020720,
5416 .bt_desired_ver = 0x20,
5417 .scbd_support = true,
5418 .new_scbd10_def = true,
5419 .ble_hid_profile_support = true,
5420 .wl_mimo_ps_support = true,
5421 .pstdma_type = COEX_PSTDMA_FORCE_LPSOFF,
5422 .bt_rssi_type = COEX_BTRSSI_DBM,
5423 .ant_isolation = 15,
5424 .rssi_tolerance = 2,
5425 .wl_rssi_step = wl_rssi_step_8822c,
5426 .bt_rssi_step = bt_rssi_step_8822c,
5427 .table_sant_num = ARRAY_SIZE(table_sant_8822c),
5428 .table_sant = table_sant_8822c,
5429 .table_nsant_num = ARRAY_SIZE(table_nsant_8822c),
5430 .table_nsant = table_nsant_8822c,
5431 .tdma_sant_num = ARRAY_SIZE(tdma_sant_8822c),
5432 .tdma_sant = tdma_sant_8822c,
5433 .tdma_nsant_num = ARRAY_SIZE(tdma_nsant_8822c),
5434 .tdma_nsant = tdma_nsant_8822c,
5435 .wl_rf_para_num = ARRAY_SIZE(rf_para_tx_8822c),
5436 .wl_rf_para_tx = rf_para_tx_8822c,
5437 .wl_rf_para_rx = rf_para_rx_8822c,
5438 .bt_afh_span_bw20 = 0x24,
5439 .bt_afh_span_bw40 = 0x36,
5440 .afh_5g_num = ARRAY_SIZE(afh_5g_8822c),
5441 .afh_5g = afh_5g_8822c,
5442
5443 .coex_info_hw_regs_num = ARRAY_SIZE(coex_info_hw_regs_8822c),
5444 .coex_info_hw_regs = coex_info_hw_regs_8822c,
5445
5446 .fw_fifo_addr = {0x780, 0x700, 0x780, 0x660, 0x650, 0x680},
5447 .fwcd_segs = &rtw8822c_fwcd_segs,
5448 };
5449 EXPORT_SYMBOL(rtw8822c_hw_spec);
5450
5451 MODULE_FIRMWARE("rtw88/rtw8822c_fw.bin");
5452 MODULE_FIRMWARE("rtw88/rtw8822c_wow_fw.bin");
5453
5454 MODULE_AUTHOR("Realtek Corporation");
5455 MODULE_DESCRIPTION("Realtek 802.11ac wireless 8822c driver");
5456 MODULE_LICENSE("Dual BSD/GPL");
5457