// SPDX-License-Identifier: GPL-2.0-only
/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

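/* Write an RF bank/register (encoded in @offset) over the RF CSR interface
 * used on MMIO devices. Serialized by phy_mutex; returns 0 on success or a
 * negative error code on failure.
 */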
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

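/* Write an RF register: USB devices go through the MCU register-pair
 * interface (which requires the MCU to be running), MMIO devices use the
 * direct CSR accessor above.
 */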
static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	if (mt76_is_usb(dev)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
			     &dev->mt76.state));
		return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}

static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (mt76_is_usb(dev)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
			     &dev->mt76.state));
		ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}

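/* Read-modify-write an RF register: bits set in @mask are cleared and @val is
 * OR-ed in. Returns the value written on success or a negative error code.
 * mt76x0_rf_set() and mt76x0_rf_clear() below are thin wrappers around it.
 */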
static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = mt76x0_rf_rr(dev, offset);
	if (ret < 0)
		return ret;

	val |= ret & ~mask;

	ret = mt76x0_rf_wr(dev, offset, val);
	return ret ? ret : val;
}

static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
	return mt76x0_rf_rmw(dev, offset, mask, 0);
}

static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
			const struct mt76_reg_pair *data,
			int n)
{
	while (n-- > 0) {
		mt76x0_rf_csr_wr(dev, data->reg, data->value);
		data++;
	}
}

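/* Write a whole table of RF register pairs: MMIO devices loop over the CSR
 * accessor, USB devices push the table through the MCU in a single call.
 */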
#define RF_RANDOM_WRITE(dev, tab) do {					\
	if (mt76_is_mmio(dev))						\
		mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));	\
	else								\
		mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)

int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
	return 0;
}

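/* Per-band RF setup: load the 2G or 5G channel-0 RF table and program the
 * band-specific RF bank 5/6 registers plus the TX ALC/gain-correction MAC
 * registers.
 */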
static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}

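/* Per-channel RF setup: look the channel up in the (SDM or regular) frequency
 * plan and program the bank 0 PLL registers (R24-R37), then apply the
 * bandwidth/band switch tables, optional external PA settings and the
 * band-specific TX gain/ALC registers.
 */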
static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_band)
{
	const struct mt76x0_freq_item *freq_item;
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	enum nl80211_band band;
	bool b_sdm = false;
	u32 mac_reg;
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			b_sdm = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (b_sdm)
				freq_item = &(mt76x0_sdm_frequency_plan[i]);
			else
				freq_item = &(mt76x0_frequency_plan[i]);

			mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
				      freq_item->pllR32_b7b5);

			/* R32<4:0> pll_den: (Denominator - 8) */
			mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
				      freq_item->pllR32_b4b0);

			/* R31<7:5> */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
				      freq_item->pllR31_b7b5);

			/* R31<4:0> pll_k (numerator) */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
				      freq_item->pllR31_b4b0);

			/* R30<7> sdm_reset_n */
			if (b_sdm) {
				mt76x0_rf_clear(dev, MT_RF(0, 30),
						MT_RF_SDM_RESET_MASK);
				mt76x0_rf_set(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK);
			} else {
				mt76x0_rf_rmw(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK,
					      freq_item->pllR30_b7);
			}

			/* R30<6:2> sdmmash_prbs,sin */
			mt76x0_rf_rmw(dev, MT_RF(0, 30),
				      MT_RF_SDM_MASH_PRBS_MASK,
				      freq_item->pllR30_b6b2);

			/* R30<1> sdm_bp */
			mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
				      freq_item->pllR30_b1 << 1);

			/* R30<0> R29<7:0> (hex) pll_n */
			mt76x0_rf_wr(dev, MT_RF(0, 29),
				     freq_item->pll_n & 0xff);

			mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
				      (freq_item->pll_n >> 8) & 0x1);

			/* R28<7:6> isi_iso */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
				      freq_item->pllR28_b7b6);

			/* R28<5:4> pfd_dly */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
				      freq_item->pllR28_b5b4);

			/* R28<3:2> clksel option */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
				      freq_item->pllR28_b3b2);

			/* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
			mt76x0_rf_wr(dev, MT_RF(0, 26),
				     freq_item->pll_sdm_k & 0xff);
			mt76x0_rf_wr(dev, MT_RF(0, 27),
				     (freq_item->pll_sdm_k >> 8) & 0xff);

			mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
				      (freq_item->pll_sdm_k >> 16) & 0x3);

			/* R24<1:0> xo_div */
			mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
				      freq_item->pllR24_b1b0);

			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	mt76_clear(dev, MT_RF_MISC, 0xc);

	band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
	if (mt76x02_ext_pa_enabled(dev, band)) {
		/* MT_RF_MISC (offset: 0x0518)
		 * [2] 1'b1: enable external A band PA
		 *     1'b0: disable external A band PA
		 * [3] 1'b1: enable external G band PA
		 *     1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND)
			mt76_set(dev, MT_RF_MISC, BIT(2));
		else
			mt76_set(dev, MT_RF_MISC, BIT(3));

		/* External PA */
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				mt76x0_rf_wr(dev,
					mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
					mt76x0_rf_ext_pa_tab[i].value);
	}

	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* Set Atten mode = 0 For Ext A band, Disable Tx Inc dcoc Cal. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}

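/* Apply the BBP switch-table entries that match the current band/bandwidth.
 * The AGC gain entry (MT_BBP(AGC, 8)) is additionally corrected for the
 * calibrated LNA gain before being written.
 */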
static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain;

			gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
			gain -= dev->cal.rx.lna_gain * 2;
			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}

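/* Program antenna selection and coexistence settings (MT_WLAN_FUN_CTRL,
 * MT_CMB_CTRL, MT_COEXCFG0/3) from the EEPROM antenna, CFG1 and NIC_CONF_2
 * words, covering single/dual antenna and antenna-diversity modes.
 */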
static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
	u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
	u16 ee_cfg1 = mt76x02_eeprom_get(dev, MT_EE_CFG1_INIT);
	u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
	u32 wlan, coex3;
	bool ant_div;

	wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
	coex3 = mt76_rr(dev, MT_COEXCFG3);

	ee_ant &= ~(BIT(14) | BIT(12));
	wlan  &= ~(BIT(6) | BIT(5));
	coex3 &= ~GENMASK(5, 2);

	if (ee_ant & MT_EE_ANTENNA_DUAL) {
		/* dual antenna mode */
		ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
			  (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
		if (ant_div)
			ee_ant |= BIT(12);
		else
			coex3 |= BIT(4);
		coex3 |= BIT(3);
		if (dev->mt76.cap.has_2ghz)
			wlan |= BIT(6);
	} else {
		/* single antenna mode */
		if (dev->mt76.cap.has_5ghz) {
			coex3 |= BIT(3) | BIT(4);
		} else {
			wlan |= BIT(6);
			coex3 |= BIT(1);
		}
	}

	if (is_mt7630(dev))
		ee_ant |= BIT(14) | BIT(11);

	mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
	mt76_rmw(dev, MT_CMB_CTRL, GENMASK(15, 0), ee_ant);
	mt76_rmw(dev, MT_CSR_EE_CFG1, GENMASK(15, 0), ee_cfg1);
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
	mt76_wr(dev, MT_COEXCFG3, coex3);
}

static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	default:
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
		/* TODO error */
		return;
	}

	mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}

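/* Measure the TSSI DC offset: with the ADDA path bypassed and TX driven from
 * DAC0, capture the DC level from MT_BBP(CORE, 35) into dev->cal.tssi_dc,
 * then restore normal operation.
 */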
static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 val;

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

	/* bypass ADDA control */
	mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
	mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	/* enable TX with DAC0 input */
	mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

	mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
	dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* stop bypass ADDA */
	mt76_wr(dev, MT_RF_BYPASS_0, 0);
	/* stop TX */
	mt76_wr(dev, MT_BBP(TXBE, 6), 0);
	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}

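/* Sample the live TSSI value (ltssi) and the three TX packet-info bytes from
 * the baseband; these feed the target power lookup below. Returns -ETIMEDOUT
 * if the measurement does not complete.
 */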
static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
			      u8 *info)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 val;

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		return -ETIMEDOUT;
	}

	*ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (chan->band == NL80211_BAND_5GHZ)
		*ltssi += 128;

	/* set packet info#1 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
	info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#2 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
	info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#3 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
	info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	return 0;
}

static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
				    int index, u8 tx_rate)
{
	u32 val, reg;

	reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
	val = mt76_rr(dev, reg);
	return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}

static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
			    u8 *info, s8 *target_power,
			    s8 *target_pa_power)
{
	u8 tx_rate, cur_power;

	cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
	switch (tx_mode) {
	case 0:
		/* cck rates */
		tx_rate = (info[0] & 0x60) >> 5;
		if (tx_rate > 3)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
		break;
	case 1: {
		u8 index;

		/* ofdm rates */
		tx_rate = (info[0] & 0xf0) >> 4;
		switch (tx_rate) {
		case 0xb:
			index = 0;
			break;
		case 0xf:
			index = 1;
			break;
		case 0xa:
			index = 2;
			break;
		case 0xe:
			index = 3;
			break;
		case 0x9:
			index = 4;
			break;
		case 0xd:
			index = 5;
			break;
		case 0x8:
			index = 6;
			break;
		case 0xc:
			index = 7;
			break;
		default:
			return -EINVAL;
		}

		*target_power = cur_power + dev->mt76.rate_power.ofdm[index];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
		break;
	}
	case 4:
		/* vht rates */
		tx_rate = info[1] & 0xf;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	default:
		/* ht rates */
		tx_rate = info[1] & 0x7f;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	}

	return 0;
}

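/* Fixed-point approximation that converts a linear TSSI reading to a dB-style
 * value for the compensation math below; saturates to -10000 when the input
 * cannot be normalized.
 */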
static s16 mt76x0_phy_lin2db(u16 val)
{
	u32 mantissa = val << 4;
	int ret, data;
	s16 exp = -4;

	while (mantissa < BIT(15)) {
		mantissa <<= 1;
		if (--exp < -20)
			return -10000;
	}
	while (mantissa > 0xffff) {
		mantissa >>= 1;
		if (++exp > 20)
			return -10000;
	}

	/* s(15,0) */
	if (mantissa <= 47104)
		data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
	else
		data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
	data = max_t(int, 0, data);

	ret = ((15 + exp) << 15) + data;
	ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
	return ret >> 10;
}

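/* Compute the TSSI-based TX power correction, in MT_TX_ALC_CFG_1 temperature
 * compensation units: combine the target power, the EEPROM TSSI slope/offset
 * for the current band and the measured TSSI (minus its DC offset), then add
 * the result to the current compensation value, clamped to [-32, 31].
 */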
static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
			   s8 target_power, s8 target_pa_power,
			   s16 ltssi)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	int tssi_target = target_power << 12, tssi_slope;
	int tssi_offset, tssi_db, ret;
	u32 data;
	u16 val;

	if (chan->band == NL80211_BAND_5GHZ) {
		u8 bound[7];
		int i, err;

		err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
					  sizeof(bound));
		if (err < 0)
			return err;

		for (i = 0; i < ARRAY_SIZE(bound); i++) {
			if (chan->hw_value <= bound[i] || !bound[i])
				break;
		}
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

		tssi_offset = val >> 8;
		if ((tssi_offset >= 64 && tssi_offset <= 127) ||
		    (tssi_offset & BIT(7)))
			tssi_offset -= BIT(8);
	} else {
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

		tssi_offset = val >> 8;
		if (tssi_offset & BIT(7))
			tssi_offset -= BIT(8);
	}
	tssi_slope = val & 0xff;

	switch (target_pa_power) {
	case 1:
		if (chan->band == NL80211_BAND_2GHZ)
			tssi_target += 29491; /* 3.6 * 8192 */
		/* fall through */
	case 0:
		break;
	default:
		tssi_target += 4424; /* 0.54 * 8192 */
		break;
	}

	if (!tx_mode) {
		data = mt76_rr(dev, MT_BBP(CORE, 1));
		if (is_mt7630(dev) && mt76_is_mmio(dev)) {
			int offset;

			/* 2.3 * 8192 or 1.5 * 8192 */
			offset = (data & BIT(5)) ? 18841 : 12288;
			tssi_target += offset;
		} else if (data & BIT(5)) {
			/* 0.8 * 8192 */
			tssi_target += 6554;
		}
	}

	data = mt76_rr(dev, MT_BBP(TXBE, 4));
	switch (data & 0x3) {
	case 1:
		tssi_target -= 49152; /* -6db * 8192 */
		break;
	case 2:
		tssi_target -= 98304; /* -12db * 8192 */
		break;
	case 3:
		tssi_target += 49152; /* 6db * 8192 */
		break;
	default:
		break;
	}

	tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
	if (chan->band == NL80211_BAND_5GHZ) {
		tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
		tssi_target -= tssi_db;
		if (ltssi > 254 && tssi_target > 0) {
			/* upper saturate */
			tssi_target = 0;
		}
	} else {
		tssi_db += (tssi_offset << 9); /* offset s3.4 */
		tssi_target -= tssi_db;
		/* upper-lower saturate */
		if ((ltssi > 126 && tssi_target > 0) ||
		    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
			tssi_target = 0;
		}
	}

	if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
	    dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
	    tssi_target > -4096 && tssi_target < 4096) {
		if ((tssi_target < 0 &&
		     tssi_target + dev->cal.tssi_target > 0) ||
		    (tssi_target > 0 &&
		     tssi_target + dev->cal.tssi_target <= 0))
			tssi_target = 0;
		else
			dev->cal.tssi_target = tssi_target;
	} else {
		dev->cal.tssi_target = tssi_target;
	}

	/* round the compensation value to the nearest compensation code */
	if (tssi_target > 0)
		tssi_target += 2048;
	else
		tssi_target -= 2048;
	tssi_target >>= 12;

	ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
	if (ret & BIT(5))
		ret -= BIT(6);
	ret += tssi_target;

	ret = min_t(int, 31, ret);
	return max_t(int, -32, ret);
}

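/* One TSSI compensation pass: sample TSSI and packet info, work out the
 * target power for the observed rate, and write the resulting correction to
 * the MT_TX_ALC_CFG_1 temperature compensation field.
 */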
static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
	s8 target_power, target_pa_power;
	u8 tssi_info[3], tx_mode;
	s16 ltssi;
	s8 val;

	if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
		return;

	tx_mode = tssi_info[0] & 0x7;
	if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
					&target_power, &target_pa_power) < 0)
		return;

	val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
					 target_pa_power, ltssi);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
	struct mt76_rate_power *t = &dev->mt76.rate_power;
	s8 info;

	mt76x0_get_tx_power_per_rate(dev, dev->mt76.chandef.chan, t);
	mt76x0_get_power_info(dev, dev->mt76.chandef.chan, &info);

	mt76x02_add_rate_power_offset(t, info);
	mt76x02_limit_rate_power(t, dev->mt76.txpower_conf);
	dev->mt76.txpower_cur = mt76x02_get_max_rate_power(t);
	mt76x02_add_rate_power_offset(t, -info);

	dev->target_power = info;
	mt76x02_phy_set_txpower(dev, info, info);
}

void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
	u32 val, tx_alc, reg_val;

	if (is_mt7630(dev))
		return;

	if (power_on) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
		usleep_range(10, 20);

		if (mt76x0_tssi_enabled(dev)) {
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_RX);
			mt76x0_phy_tssi_dc_calibrate(dev);
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_TX |
				MT_MAC_SYS_CTRL_ENABLE_RX);
		}
	}

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
	mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

	if (is_5ghz) {
		if (chan->hw_value < 100)
			val = 0x701;
		else if (chan->hw_value < 140)
			val = 0x801;
		else
			val = 0x901;
	} else {
		val = 0x600;
	}

	mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
	msleep(350);
	mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
	usleep_range(15000, 20000);

	mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);

int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	int ch_group_index, freq, freq1;
	u8 channel;
	u32 val;
	u16 rf_bw_band;

	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
	dev->mt76.chandef = *chandef;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}

	if (mt76_is_usb(dev)) {
		mt76x0_phy_bbp_set_bw(dev, chandef->width);
	} else {
		if (chandef->width == NL80211_CHAN_WIDTH_80 ||
		    chandef->width == NL80211_CHAN_WIDTH_40)
			val = 0x201;
		else
			val = 0x601;
		mt76_wr(dev, MT_TX_SW_CFG0, val);
	}
	mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
	mt76x02_phy_set_band(dev, chandef->chan->band,
			     ch_group_index & 1);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

	/* set Japan Tx filter at channel 14 */
	if (channel == 14)
		mt76_set(dev, MT_BBP(CORE, 1), 0x20);
	else
		mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

	mt76x0_read_rx_gain(dev);
	mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

	/* enable vco */
	mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
	if (scan)
		return 0;

	mt76x02_init_agc_gain(dev);
	mt76x0_phy_calibrate(dev, false);
	mt76x0_phy_set_txpower(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);

	return 0;
}

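/* Poll the on-chip temperature sensor through RF bank 0/7 and the baseband
 * core registers, then trigger a VCO recalibration or a full recalibration
 * when the temperature has drifted too far from the last calibration point.
 */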
static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	s8 val;

	rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

	mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
	mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
	mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		goto done;
	}

	val = mt76_rr(dev, MT_BBP(CORE, 35));
	val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

	if (abs(val - dev->cal.temp_vco) > 20) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
				      dev->mt76.chandef.chan->hw_value);
		dev->cal.temp_vco = val;
	}
	if (abs(val - dev->cal.temp) > 30) {
		mt76x0_phy_calibrate(dev, false);
		dev->cal.temp = val;
	}

done:
	mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}

static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
	u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

	if ((dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
	    !is_mt7630(dev))
		mt76x02_phy_dfs_adjust_agc(dev);
}

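/* Dynamic AGC tuning: track the minimum average RSSI, and when the low-gain
 * state changes, reprogram the AGC gain and reset the false-CCA counter;
 * otherwise just let the VGA adjustment nudge the current gain value.
 */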
static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
	bool gain_change;
	u8 gain_delta;
	int low_gain;

	dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76);
	if (!dev->cal.avg_rssi_all)
		dev->cal.avg_rssi_all = -75;

	low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

	gain_change = dev->cal.low_gain < 0 ||
		      (dev->cal.low_gain & 2) ^ (low_gain & 2);
	dev->cal.low_gain = low_gain;

	if (!gain_change) {
		if (mt76x02_phy_adjust_vga_gain(dev))
			mt76x0_phy_set_gain_val(dev);
		return;
	}

	dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
	gain_delta = (low_gain == 2) ? 10 : 0;

	dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
	mt76x0_phy_set_gain_val(dev);

	/* clear false CCA counters */
	mt76_rr(dev, MT_RX_STAT_1);
}

static void mt76x0_phy_calibration_work(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       cal_work.work);

	mt76x0_phy_update_channel_gain(dev);
	if (mt76x0_tssi_enabled(dev))
		mt76x0_phy_tssi_calibrate(dev);
	else
		mt76x0_phy_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     4 * MT_CALIBRATE_INTERVAL);
}

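/* Write an RF register table while patching a handful of entries whose value
 * depends on the chip variant (MT7630, MT7610E) and bus type (MMIO vs USB).
 */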
static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
				      const struct mt76_reg_pair *rp, int len)
{
	int i;

	for (i = 0; i < len; i++) {
		u32 reg = rp[i].reg;
		u8 val = rp[i].value;

		switch (reg) {
		case MT_RF(0, 3):
			if (mt76_is_mmio(dev)) {
				if (is_mt7630(dev))
					val = 0x70;
				else
					val = 0x63;
			} else {
				val = 0x73;
			}
			break;
		case MT_RF(0, 21):
			if (is_mt7610e(dev))
				val = 0x10;
			else
				val = 0x12;
			break;
		case MT_RF(5, 2):
			if (is_mt7630(dev))
				val = 0x1d;
			else if (is_mt7610e(dev))
				val = 0x00;
			else
				val = 0x0c;
			break;
		default:
			break;
		}
		mt76x0_rf_wr(dev, reg, val);
	}
}

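/* One-time RF initialization: load the central, channel and VGA register
 * tables, apply the default 20 MHz / G-band switch settings, program the
 * crystal frequency offset from calibration data and reset the DAC before
 * kicking off an initial VCO calibration.
 */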
static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
	int i;
	u8 val;

	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
				  ARRAY_SIZE(mt76x0_rf_central_tab));
	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
				  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/* Frequency calibration:
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	mt76x0_rf_wr(dev, MT_RF(0, 22),
		     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
	val = mt76x0_rf_rr(dev, MT_RF(0, 22));

	/* Reset procedure DAC during power-up:
	 * - set B0.R73<7>
	 * - clear B0.R73<7>
	 * - set B0.R73<7>
	 */
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

	/* vcocal_en: initiate VCO calibration (reset after completion) */
	mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

	mt76x0_phy_ant_select(dev);
	mt76x0_phy_rf_init(dev);
	mt76x02_phy_set_rxpath(dev);
	mt76x02_phy_set_txdac(dev);
}