/*
 * (c) Copyright 2002-2010, Ralink Technology, Inc.
 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2
 * as published by the Free Software Foundation
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <linux/kernel.h>
#include <linux/etherdevice.h>

#include "mt76x0.h"
#include "mcu.h"
#include "eeprom.h"
#include "phy.h"
#include "initvals.h"
#include "initvals_phy.h"
#include "../mt76x02_phy.h"

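/* Write an RF bank register through the RF_CSR_CFG window. This is the
 * direct access path used on MMIO devices; concurrent RF accesses are
 * serialized here via phy_mutex.
 */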
static int
mt76x0_rf_csr_wr(struct mt76x02_dev *dev, u32 offset, u8 value)
{
	int ret = 0;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
		ret = -ETIMEDOUT;
		goto out;
	}

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_WR |
		MT_RF_CSR_CFG_KICK);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

static int mt76x0_rf_csr_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret = -ETIMEDOUT;
	u32 val;
	u8 bank, reg;

	if (test_bit(MT76_REMOVED, &dev->mt76.state))
		return -ENODEV;

	bank = MT_RF_BANK(offset);
	reg = MT_RF_REG(offset);

	if (WARN_ON_ONCE(reg > 127) || WARN_ON_ONCE(bank > 8))
		return -EINVAL;

	mutex_lock(&dev->phy_mutex);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	mt76_wr(dev, MT_RF_CSR_CFG,
		FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
		FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
		MT_RF_CSR_CFG_KICK);

	if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
		goto out;

	val = mt76_rr(dev, MT_RF_CSR_CFG);
	if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
	    FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank)
		ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);

out:
	mutex_unlock(&dev->phy_mutex);

	if (ret < 0)
		dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
			bank, reg, ret);

	return ret;
}

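/* Generic RF register accessors: USB devices go through the MCU using
 * register pairs (the MCU firmware must already be running), MMIO
 * devices use the direct CSR window above.
 */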
static int
mt76x0_rf_wr(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	if (mt76_is_usb(dev)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
			.value = val,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
			     &dev->mt76.state));
		return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
	} else {
		return mt76x0_rf_csr_wr(dev, offset, val);
	}
}

static int mt76x0_rf_rr(struct mt76x02_dev *dev, u32 offset)
{
	int ret;
	u32 val;

	if (mt76_is_usb(dev)) {
		struct mt76_reg_pair pair = {
			.reg = offset,
		};

		WARN_ON_ONCE(!test_bit(MT76_STATE_MCU_RUNNING,
			     &dev->mt76.state));
		ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
		val = pair.value;
	} else {
		ret = val = mt76x0_rf_csr_rr(dev, offset);
	}

	return (ret < 0) ? ret : val;
}

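/* Read-modify-write helper for RF registers: bits in @mask are cleared
 * and @val is ORed in. Returns the value written back on success or a
 * negative error code.
 */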
static int
mt76x0_rf_rmw(struct mt76x02_dev *dev, u32 offset, u8 mask, u8 val)
{
	int ret;

	ret = mt76x0_rf_rr(dev, offset);
	if (ret < 0)
		return ret;

	val |= ret & ~mask;

	ret = mt76x0_rf_wr(dev, offset, val);
	return ret ? ret : val;
}

static int
mt76x0_rf_set(struct mt76x02_dev *dev, u32 offset, u8 val)
{
	return mt76x0_rf_rmw(dev, offset, 0, val);
}

static int
mt76x0_rf_clear(struct mt76x02_dev *dev, u32 offset, u8 mask)
{
	return mt76x0_rf_rmw(dev, offset, mask, 0);
}

static void
mt76x0_phy_rf_csr_wr_rp(struct mt76x02_dev *dev,
			const struct mt76_reg_pair *data,
			int n)
{
	while (n-- > 0) {
		mt76x0_rf_csr_wr(dev, data->reg, data->value);
		data++;
	}
}

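/* Write a table of RF register pairs, using the direct CSR path on
 * MMIO devices and the MCU burst write otherwise.
 */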
#define RF_RANDOM_WRITE(dev, tab) do {					\
	if (mt76_is_mmio(dev))						\
		mt76x0_phy_rf_csr_wr_rp(dev, tab, ARRAY_SIZE(tab));	\
	else								\
		mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, tab, ARRAY_SIZE(tab));\
} while (0)

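/* Poll the BBP version register until it reads neither 0x0 nor
 * 0xffffffff, i.e. until the baseband has come out of reset.
 */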
int mt76x0_phy_wait_bbp_ready(struct mt76x02_dev *dev)
{
	int i = 20;
	u32 val;

	do {
		val = mt76_rr(dev, MT_BBP(CORE, 0));
		if (val && ~val)
			break;
	} while (--i);

	if (!i) {
		dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
		return -EIO;
	}

	dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
	return 0;
}

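/* Load the band-specific RF defaults and TX ALC/gain correction
 * registers for 2.4 GHz or 5 GHz operation.
 */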
static void
mt76x0_phy_set_band(struct mt76x02_dev *dev, enum nl80211_band band)
{
	switch (band) {
	case NL80211_BAND_2GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x45);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x44);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
		break;
	case NL80211_BAND_5GHZ:
		RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);

		mt76x0_rf_wr(dev, MT_RF(5, 0), 0x44);
		mt76x0_rf_wr(dev, MT_RF(6, 0), 0x45);

		mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
		mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
		break;
	default:
		break;
	}
}

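/* Per-channel RF programming: look the channel up in the frequency
 * plan (or its SDM variant for channels that need it), program the
 * bank 0 PLL registers (R24-R37), apply the bandwidth and band switch
 * tables, and set up the external PA and TX attenuation.
 */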
static void
mt76x0_phy_set_chan_rf_params(struct mt76x02_dev *dev, u8 channel, u16 rf_bw_band)
{
	const struct mt76x0_freq_item *freq_item;
	u16 rf_band = rf_bw_band & 0xff00;
	u16 rf_bw = rf_bw_band & 0x00ff;
	enum nl80211_band band;
	bool b_sdm = false;
	u32 mac_reg;
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
		if (channel == mt76x0_sdm_channel[i]) {
			b_sdm = true;
			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
		if (channel == mt76x0_frequency_plan[i].channel) {
			rf_band = mt76x0_frequency_plan[i].band;

			if (b_sdm)
				freq_item = &(mt76x0_sdm_frequency_plan[i]);
			else
				freq_item = &(mt76x0_frequency_plan[i]);

			mt76x0_rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
			mt76x0_rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
			mt76x0_rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
			mt76x0_rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
			mt76x0_rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);

			mt76x0_rf_rmw(dev, MT_RF(0, 32), 0xe0,
				      freq_item->pllR32_b7b5);

			/* R32<4:0> pll_den: (Denominator - 8) */
			mt76x0_rf_rmw(dev, MT_RF(0, 32), MT_RF_PLL_DEN_MASK,
				      freq_item->pllR32_b4b0);

			/* R31<7:5> */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), 0xe0,
				      freq_item->pllR31_b7b5);

			/* R31<4:0> pll_k (Numerator) */
			mt76x0_rf_rmw(dev, MT_RF(0, 31), MT_RF_PLL_K_MASK,
				      freq_item->pllR31_b4b0);

			/* R30<7> sdm_reset_n */
			if (b_sdm) {
				mt76x0_rf_clear(dev, MT_RF(0, 30),
						MT_RF_SDM_RESET_MASK);
				mt76x0_rf_set(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK);
			} else {
				mt76x0_rf_rmw(dev, MT_RF(0, 30),
					      MT_RF_SDM_RESET_MASK,
					      freq_item->pllR30_b7);
			}

			/* R30<6:2> sdmmash_prbs,sin */
			mt76x0_rf_rmw(dev, MT_RF(0, 30),
				      MT_RF_SDM_MASH_PRBS_MASK,
				      freq_item->pllR30_b6b2);

			/* R30<1> sdm_bp */
			mt76x0_rf_rmw(dev, MT_RF(0, 30), MT_RF_SDM_BP_MASK,
				      freq_item->pllR30_b1 << 1);

			/* R30<0> R29<7:0> (hex) pll_n */
			mt76x0_rf_wr(dev, MT_RF(0, 29),
				     freq_item->pll_n & 0xff);

			mt76x0_rf_rmw(dev, MT_RF(0, 30), 0x1,
				      (freq_item->pll_n >> 8) & 0x1);

			/* R28<7:6> isi_iso */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_ISI_ISO_MASK,
				      freq_item->pllR28_b7b6);

			/* R28<5:4> pfd_dly */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_PFD_DLY_MASK,
				      freq_item->pllR28_b5b4);

			/* R28<3:2> clksel option */
			mt76x0_rf_rmw(dev, MT_RF(0, 28), MT_RF_CLK_SEL_MASK,
				      freq_item->pllR28_b3b2);

			/* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
			mt76x0_rf_wr(dev, MT_RF(0, 26),
				     freq_item->pll_sdm_k & 0xff);
			mt76x0_rf_wr(dev, MT_RF(0, 27),
				     (freq_item->pll_sdm_k >> 8) & 0xff);

			mt76x0_rf_rmw(dev, MT_RF(0, 28), 0x3,
				      (freq_item->pll_sdm_k >> 16) & 0x3);

			/* R24<1:0> xo_div */
			mt76x0_rf_rmw(dev, MT_RF(0, 24), MT_RF_XO_DIV_MASK,
				      freq_item->pllR24_b1b0);

			break;
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		} else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
			   (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_bw_switch_tab[i].value);
		}
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	mt76_clear(dev, MT_RF_MISC, 0xc);

	band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
	if (mt76x02_ext_pa_enabled(dev, band)) {
		/* MT_RF_MISC (offset: 0x0518)
		 * [2] 1'b1: enable external A band PA, 1'b0: disable external A band PA
		 * [3] 1'b1: enable external G band PA, 1'b0: disable external G band PA
		 */
		if (rf_band & RF_A_BAND)
			mt76_set(dev, MT_RF_MISC, BIT(2));
		else
			mt76_set(dev, MT_RF_MISC, BIT(3));

		/* External PA */
		for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
			if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
				mt76x0_rf_wr(dev,
					mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
					mt76x0_rf_ext_pa_tab[i].value);
	}

	if (rf_band & RF_G_BAND) {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
		/* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x896400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	} else {
		mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
		/* Set Atten mode = 0 For Ext A band, Disable Tx Inc dcoc Cal. */
		mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
		mac_reg &= 0x890400FF;
		mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
	}
}

static void
mt76x0_phy_set_chan_bbp_params(struct mt76x02_dev *dev, u16 rf_bw_band)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
		const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
		const struct mt76_reg_pair *pair = &item->reg_pair;

		if ((rf_bw_band & item->bw_band) != rf_bw_band)
			continue;

		if (pair->reg == MT_BBP(AGC, 8)) {
			u32 val = pair->value;
			u8 gain;

			gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
			gain -= dev->cal.rx.lna_gain * 2;
			val &= ~MT_BBP_AGC_GAIN;
			val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
			mt76_wr(dev, pair->reg, val);
		} else {
			mt76_wr(dev, pair->reg, pair->value);
		}
	}
}

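/* Apply the antenna configuration from the EEPROM: dual vs. single
 * antenna operation, antenna diversity and the band served by a single
 * antenna.
 */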
static void mt76x0_phy_ant_select(struct mt76x02_dev *dev)
{
	u16 ee_ant = mt76x02_eeprom_get(dev, MT_EE_ANTENNA);
	u16 nic_conf2 = mt76x02_eeprom_get(dev, MT_EE_NIC_CONF_2);
	u32 wlan, coex3, cmb;
	bool ant_div;

	wlan = mt76_rr(dev, MT_WLAN_FUN_CTRL);
	cmb = mt76_rr(dev, MT_CMB_CTRL);
	coex3 = mt76_rr(dev, MT_COEXCFG3);

	cmb   &= ~(BIT(14) | BIT(12));
	wlan  &= ~(BIT(6) | BIT(5));
	coex3 &= ~GENMASK(5, 2);

	if (ee_ant & MT_EE_ANTENNA_DUAL) {
		/* dual antenna mode */
		ant_div = !(nic_conf2 & MT_EE_NIC_CONF_2_ANT_OPT) &&
			  (nic_conf2 & MT_EE_NIC_CONF_2_ANT_DIV);
		if (ant_div)
			cmb |= BIT(12);
		else
			coex3 |= BIT(4);
		coex3 |= BIT(3);
		if (dev->mt76.cap.has_2ghz)
			wlan |= BIT(6);
	} else {
		/* single antenna mode */
		if (dev->mt76.cap.has_5ghz) {
			coex3 |= BIT(3) | BIT(4);
		} else {
			wlan |= BIT(6);
			coex3 |= BIT(1);
		}
	}

	if (is_mt7630(dev))
		cmb |= BIT(14) | BIT(11);

	mt76_wr(dev, MT_WLAN_FUN_CTRL, wlan);
	mt76_wr(dev, MT_CMB_CTRL, cmb);
	mt76_clear(dev, MT_COEXCFG0, BIT(2));
	mt76_wr(dev, MT_COEXCFG3, coex3);
}

static void
mt76x0_phy_bbp_set_bw(struct mt76x02_dev *dev, enum nl80211_chan_width width)
{
	enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4 };
	int bw;

	switch (width) {
	default:
	case NL80211_CHAN_WIDTH_20_NOHT:
	case NL80211_CHAN_WIDTH_20:
		bw = BW_20;
		break;
	case NL80211_CHAN_WIDTH_40:
		bw = BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		bw = BW_80;
		break;
	case NL80211_CHAN_WIDTH_10:
		bw = BW_10;
		break;
	case NL80211_CHAN_WIDTH_80P80:
	case NL80211_CHAN_WIDTH_160:
	case NL80211_CHAN_WIDTH_5:
		/* TODO error */
		return;
	}

	mt76x02_mcu_function_select(dev, BW_SETTING, bw);
}

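/* Measure the TSSI DC offset: bypass the ADDA path, reset the BBP,
 * enable TX fed from DAC0, latch the reading into dev->cal.tssi_dc and
 * then restore normal operation.
 */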
static void mt76x0_phy_tssi_dc_calibrate(struct mt76x02_dev *dev)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 val;

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_clear(dev, MT_RF(0, 67), 0xf);

	/* bypass ADDA control */
	mt76_wr(dev, MT_RF_SETTING_0, 0x60002237);
	mt76_wr(dev, MT_RF_BYPASS_0, 0xffffffff);

	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	/* enable TX with DAC0 input */
	mt76_wr(dev, MT_BBP(TXBE, 6), BIT(31));

	mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200);
	dev->cal.tssi_dc = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* stop bypass ADDA */
	mt76_wr(dev, MT_RF_BYPASS_0, 0);
	/* stop TX */
	mt76_wr(dev, MT_BBP(TXBE, 6), 0);
	/* bbp sw reset */
	mt76_set(dev, MT_BBP(CORE, 4), BIT(0));
	usleep_range(500, 1000);
	mt76_clear(dev, MT_BBP(CORE, 4), BIT(0));

	if (chan->band == NL80211_BAND_5GHZ)
		mt76x0_rf_rmw(dev, MT_RF(0, 67), 0xf, 0x4);
}

static int
mt76x0_phy_tssi_adc_calibrate(struct mt76x02_dev *dev, s16 *ltssi,
			      u8 *info)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	u32 val;

	val = (chan->band == NL80211_BAND_5GHZ) ? 0x80055 : 0x80050;
	mt76_wr(dev, MT_BBP(CORE, 34), val);

	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		return -ETIMEDOUT;
	}

	*ltssi = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
	if (chan->band == NL80211_BAND_5GHZ)
		*ltssi += 128;

	/* set packet info#1 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80041);
	info[0] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#2 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80042);
	info[1] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	/* set packet info#3 mode */
	mt76_wr(dev, MT_BBP(CORE, 34), 0x80043);
	info[2] = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;

	return 0;
}

static u8 mt76x0_phy_get_rf_pa_mode(struct mt76x02_dev *dev,
				    int index, u8 tx_rate)
{
	u32 val, reg;

	reg = (index == 1) ? MT_RF_PA_MODE_CFG1 : MT_RF_PA_MODE_CFG0;
	val = mt76_rr(dev, reg);
	return (val & (3 << (tx_rate * 2))) >> (tx_rate * 2);
}

static int
mt76x0_phy_get_target_power(struct mt76x02_dev *dev, u8 tx_mode,
			    u8 *info, s8 *target_power,
			    s8 *target_pa_power)
{
	u8 tx_rate, cur_power;

	cur_power = mt76_rr(dev, MT_TX_ALC_CFG_0) & MT_TX_ALC_CFG_0_CH_INIT_0;
	switch (tx_mode) {
	case 0:
		/* cck rates */
		tx_rate = (info[0] & 0x60) >> 5;
		if (tx_rate > 3)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.cck[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, tx_rate);
		break;
	case 1: {
		u8 index;

		/* ofdm rates */
		tx_rate = (info[0] & 0xf0) >> 4;
		switch (tx_rate) {
		case 0xb:
			index = 0;
			break;
		case 0xf:
			index = 1;
			break;
		case 0xa:
			index = 2;
			break;
		case 0xe:
			index = 3;
			break;
		case 0x9:
			index = 4;
			break;
		case 0xd:
			index = 5;
			break;
		case 0x8:
			index = 6;
			break;
		case 0xc:
			index = 7;
			break;
		default:
			return -EINVAL;
		}

		*target_power = cur_power + dev->mt76.rate_power.ofdm[index];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 0, index + 4);
		break;
	}
	case 4:
		/* vht rates */
		tx_rate = info[1] & 0xf;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.vht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	default:
		/* ht rates */
		tx_rate = info[1] & 0x7f;
		if (tx_rate > 9)
			return -EINVAL;

		*target_power = cur_power + dev->mt76.rate_power.ht[tx_rate];
		*target_pa_power = mt76x0_phy_get_rf_pa_mode(dev, 1, tx_rate);
		break;
	}

	return 0;
}

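/* Convert a linear TSSI reading to a logarithmic value using a
 * piecewise-linear log2 approximation; the result is roughly
 * 20 * log10(val) in fixed point (appears to be 1/32 dB steps).
 */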
static s16 mt76x0_phy_lin2db(u16 val)
{
	u32 mantissa = val << 4;
	int ret, data;
	s16 exp = -4;

	while (mantissa < BIT(15)) {
		mantissa <<= 1;
		if (--exp < -20)
			return -10000;
	}
	while (mantissa > 0xffff) {
		mantissa >>= 1;
		if (++exp > 20)
			return -10000;
	}

	/* s(15,0) */
	if (mantissa <= 47104)
		data = mantissa + (mantissa >> 3) + (mantissa >> 4) - 38400;
	else
		data = mantissa - (mantissa >> 3) - (mantissa >> 6) - 23040;
	data = max_t(int, 0, data);

	ret = ((15 + exp) << 15) + data;
	ret = (ret << 2) + (ret << 1) + (ret >> 6) + (ret >> 7);
	return ret >> 10;
}

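/* Compute the TX ALC temperature compensation delta: compare the
 * measured TSSI (converted to dB with the DC offset removed) against
 * the target power using the per-band slope and offset from the
 * EEPROM, then clamp the result to the [-32, 31] range of
 * MT_TX_ALC_CFG_1_TEMP_COMP.
 */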
static int
mt76x0_phy_get_delta_power(struct mt76x02_dev *dev, u8 tx_mode,
			   s8 target_power, s8 target_pa_power,
			   s16 ltssi)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	int tssi_target = target_power << 12, tssi_slope;
	int tssi_offset, tssi_db, ret;
	u32 data;
	u16 val;

	if (chan->band == NL80211_BAND_5GHZ) {
		u8 bound[7];
		int i, err;

		err = mt76x02_eeprom_copy(dev, MT_EE_TSSI_BOUND1, bound,
					  sizeof(bound));
		if (err < 0)
			return err;

		for (i = 0; i < ARRAY_SIZE(bound); i++) {
			if (chan->hw_value <= bound[i] || !bound[i])
				break;
		}
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_5G + i * 2);

		tssi_offset = val >> 8;
		if ((tssi_offset >= 64 && tssi_offset <= 127) ||
		    (tssi_offset & BIT(7)))
			tssi_offset -= BIT(8);
	} else {
		val = mt76x02_eeprom_get(dev, MT_EE_TSSI_SLOPE_2G);

		tssi_offset = val >> 8;
		if (tssi_offset & BIT(7))
			tssi_offset -= BIT(8);
	}
	tssi_slope = val & 0xff;

	switch (target_pa_power) {
	case 1:
		if (chan->band == NL80211_BAND_2GHZ)
			tssi_target += 29491; /* 3.6 * 8192 */
		/* fall through */
	case 0:
		break;
	default:
		tssi_target += 4424; /* 0.54 * 8192 */
		break;
	}

	if (!tx_mode) {
		data = mt76_rr(dev, MT_BBP(CORE, 1));
		if (is_mt7630(dev) && mt76_is_mmio(dev)) {
			int offset;

			/* 2.3 * 8192 or 1.5 * 8192 */
			offset = (data & BIT(5)) ? 18841 : 12288;
			tssi_target += offset;
		} else if (data & BIT(5)) {
			/* 0.8 * 8192 */
			tssi_target += 6554;
		}
	}

	data = mt76_rr(dev, MT_BBP(TXBE, 4));
	switch (data & 0x3) {
	case 1:
		tssi_target -= 49152; /* -6db * 8192 */
		break;
	case 2:
		tssi_target -= 98304; /* -12db * 8192 */
		break;
	case 3:
		tssi_target += 49152; /* 6db * 8192 */
		break;
	default:
		break;
	}

	tssi_db = mt76x0_phy_lin2db(ltssi - dev->cal.tssi_dc) * tssi_slope;
	if (chan->band == NL80211_BAND_5GHZ) {
		tssi_db += ((tssi_offset - 50) << 10); /* offset s4.3 */
		tssi_target -= tssi_db;
		if (ltssi > 254 && tssi_target > 0) {
			/* upper saturate */
			tssi_target = 0;
		}
	} else {
		tssi_db += (tssi_offset << 9); /* offset s3.4 */
		tssi_target -= tssi_db;
		/* upper-lower saturate */
		if ((ltssi > 126 && tssi_target > 0) ||
		    ((ltssi - dev->cal.tssi_dc) < 1 && tssi_target < 0)) {
			tssi_target = 0;
		}
	}

	if ((dev->cal.tssi_target ^ tssi_target) < 0 &&
	    dev->cal.tssi_target > -4096 && dev->cal.tssi_target < 4096 &&
	    tssi_target > -4096 && tssi_target < 4096) {
		if ((tssi_target < 0 &&
		     tssi_target + dev->cal.tssi_target > 0) ||
		    (tssi_target > 0 &&
		     tssi_target + dev->cal.tssi_target <= 0))
			tssi_target = 0;
		else
			dev->cal.tssi_target = tssi_target;
	} else {
		dev->cal.tssi_target = tssi_target;
	}

	/* round the compensation value to the nearest compensation code */
	if (tssi_target > 0)
		tssi_target += 2048;
	else
		tssi_target -= 2048;
	tssi_target >>= 12;

	ret = mt76_get_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP);
	if (ret & BIT(5))
		ret -= BIT(6);
	ret += tssi_target;

	ret = min_t(int, 31, ret);
	return max_t(int, -32, ret);
}

static void mt76x0_phy_tssi_calibrate(struct mt76x02_dev *dev)
{
	s8 target_power, target_pa_power;
	u8 tssi_info[3], tx_mode;
	s16 ltssi;
	s8 val;

	if (mt76x0_phy_tssi_adc_calibrate(dev, &ltssi, tssi_info) < 0)
		return;

	tx_mode = tssi_info[0] & 0x7;
	if (mt76x0_phy_get_target_power(dev, tx_mode, tssi_info,
					&target_power, &target_pa_power) < 0)
		return;

	val = mt76x0_phy_get_delta_power(dev, tx_mode, target_power,
					 target_pa_power, ltssi);
	mt76_rmw_field(dev, MT_TX_ALC_CFG_1, MT_TX_ALC_CFG_1_TEMP_COMP, val);
}

void mt76x0_phy_set_txpower(struct mt76x02_dev *dev)
{
	struct mt76_rate_power *t = &dev->mt76.rate_power;
	s8 info;

	mt76x0_get_tx_power_per_rate(dev, dev->mt76.chandef.chan, t);
	mt76x0_get_power_info(dev, dev->mt76.chandef.chan, &info);

	mt76x02_add_rate_power_offset(t, info);
	mt76x02_limit_rate_power(t, dev->mt76.txpower_conf);
	dev->mt76.txpower_cur = mt76x02_get_max_rate_power(t);
	mt76x02_add_rate_power_offset(t, -info);

	dev->target_power = info;
	mt76x02_phy_set_txpower(dev, info, info);
}

void mt76x0_phy_calibrate(struct mt76x02_dev *dev, bool power_on)
{
	struct ieee80211_channel *chan = dev->mt76.chandef.chan;
	int is_5ghz = (chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
	u32 val, tx_alc, reg_val;

	if (is_mt7630(dev))
		return;

	if (power_on) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_R, 0);
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO, chan->hw_value);
		usleep_range(10, 20);

		if (mt76x0_tssi_enabled(dev)) {
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_RX);
			mt76x0_phy_tssi_dc_calibrate(dev);
			mt76_wr(dev, MT_MAC_SYS_CTRL,
				MT_MAC_SYS_CTRL_ENABLE_TX |
				MT_MAC_SYS_CTRL_ENABLE_RX);
		}
	}

	tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
	mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
	usleep_range(500, 700);

	reg_val = mt76_rr(dev, MT_BBP(IBI, 9));
	mt76_wr(dev, MT_BBP(IBI, 9), 0xffffff7e);

	if (is_5ghz) {
		if (chan->hw_value < 100)
			val = 0x701;
		else if (chan->hw_value < 140)
			val = 0x801;
		else
			val = 0x901;
	} else {
		val = 0x600;
	}

	mt76x02_mcu_calibrate(dev, MCU_CAL_FULL, val);
	msleep(350);
	mt76x02_mcu_calibrate(dev, MCU_CAL_LC, is_5ghz);
	usleep_range(15000, 20000);

	mt76_wr(dev, MT_BBP(IBI, 9), reg_val);
	mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
	mt76x02_mcu_calibrate(dev, MCU_CAL_RXDCOC, 1);
}
EXPORT_SYMBOL_GPL(mt76x0_phy_calibrate);

int mt76x0_phy_set_channel(struct mt76x02_dev *dev,
			   struct cfg80211_chan_def *chandef)
{
	u32 ext_cca_chan[4] = {
		[0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
		[1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
		[2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
		[3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
		      FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
	};
	bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
	int ch_group_index, freq, freq1;
	u8 channel;
	u32 val;
	u16 rf_bw_band;

	freq = chandef->chan->center_freq;
	freq1 = chandef->center_freq1;
	channel = chandef->chan->hw_value;
	rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
	dev->mt76.chandef = *chandef;

	switch (chandef->width) {
	case NL80211_CHAN_WIDTH_40:
		if (freq1 > freq)
			ch_group_index = 0;
		else
			ch_group_index = 1;
		channel += 2 - ch_group_index * 4;
		rf_bw_band |= RF_BW_40;
		break;
	case NL80211_CHAN_WIDTH_80:
		ch_group_index = (freq - freq1 + 30) / 20;
		if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
			ch_group_index = 0;
		channel += 6 - ch_group_index * 4;
		rf_bw_band |= RF_BW_80;
		break;
	default:
		ch_group_index = 0;
		rf_bw_band |= RF_BW_20;
		break;
	}

	if (mt76_is_usb(dev)) {
		mt76x0_phy_bbp_set_bw(dev, chandef->width);
	} else {
		if (chandef->width == NL80211_CHAN_WIDTH_80 ||
		    chandef->width == NL80211_CHAN_WIDTH_40)
			val = 0x201;
		else
			val = 0x601;
		mt76_wr(dev, MT_TX_SW_CFG0, val);
	}
	mt76x02_phy_set_bw(dev, chandef->width, ch_group_index);
	mt76x02_phy_set_band(dev, chandef->chan->band,
			     ch_group_index & 1);

	mt76_rmw(dev, MT_EXT_CCA_CFG,
		 (MT_EXT_CCA_CFG_CCA0 |
		  MT_EXT_CCA_CFG_CCA1 |
		  MT_EXT_CCA_CFG_CCA2 |
		  MT_EXT_CCA_CFG_CCA3 |
		  MT_EXT_CCA_CFG_CCA_MASK),
		 ext_cca_chan[ch_group_index]);

	mt76x0_phy_set_band(dev, chandef->chan->band);
	mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);

	/* set Japan Tx filter at channel 14 */
	if (channel == 14)
		mt76_set(dev, MT_BBP(CORE, 1), 0x20);
	else
		mt76_clear(dev, MT_BBP(CORE, 1), 0x20);

	mt76x0_read_rx_gain(dev);
	mt76x0_phy_set_chan_bbp_params(dev, rf_bw_band);

	/* enable vco */
	mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7));
	if (scan)
		return 0;

	mt76x02_init_agc_gain(dev);
	mt76x0_phy_calibrate(dev, false);
	mt76x0_phy_set_txpower(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     MT_CALIBRATE_INTERVAL);

	return 0;
}

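/* Read the on-die temperature sensor through the BBP and retrigger VCO
 * calibration (drift above 20 degrees) or a full calibration (drift
 * above 30 degrees) when the temperature has moved too far from the
 * last calibration point.
 */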
static void mt76x0_phy_temp_sensor(struct mt76x02_dev *dev)
{
	u8 rf_b7_73, rf_b0_66, rf_b0_67;
	s8 val;

	rf_b7_73 = mt76x0_rf_rr(dev, MT_RF(7, 73));
	rf_b0_66 = mt76x0_rf_rr(dev, MT_RF(0, 66));
	rf_b0_67 = mt76x0_rf_rr(dev, MT_RF(0, 67));

	mt76x0_rf_wr(dev, MT_RF(7, 73), 0x02);
	mt76x0_rf_wr(dev, MT_RF(0, 66), 0x23);
	mt76x0_rf_wr(dev, MT_RF(0, 67), 0x01);

	mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
	if (!mt76_poll_msec(dev, MT_BBP(CORE, 34), BIT(4), 0, 200)) {
		mt76_clear(dev, MT_BBP(CORE, 34), BIT(4));
		goto done;
	}

	val = mt76_rr(dev, MT_BBP(CORE, 35));
	val = (35 * (val - dev->cal.rx.temp_offset)) / 10 + 25;

	if (abs(val - dev->cal.temp_vco) > 20) {
		mt76x02_mcu_calibrate(dev, MCU_CAL_VCO,
				      dev->mt76.chandef.chan->hw_value);
		dev->cal.temp_vco = val;
	}
	if (abs(val - dev->cal.temp) > 30) {
		mt76x0_phy_calibrate(dev, false);
		dev->cal.temp = val;
	}

done:
	mt76x0_rf_wr(dev, MT_RF(7, 73), rf_b7_73);
	mt76x0_rf_wr(dev, MT_RF(0, 66), rf_b0_66);
	mt76x0_rf_wr(dev, MT_RF(0, 67), rf_b0_67);
}

static void mt76x0_phy_set_gain_val(struct mt76x02_dev *dev)
{
	u8 gain = dev->cal.agc_gain_cur[0] - dev->cal.agc_gain_adjust;

	mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, gain);

	if ((dev->mt76.chandef.chan->flags & IEEE80211_CHAN_RADAR) &&
	    !is_mt7630(dev))
		mt76x02_phy_dfs_adjust_agc(dev);
}

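/* Periodic AGC tuning: derive a low-gain level from the minimum
 * average RSSI and reprogram the VGA/AGC gain when the sensitivity
 * thresholds are crossed.
 */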
static void
mt76x0_phy_update_channel_gain(struct mt76x02_dev *dev)
{
	bool gain_change;
	u8 gain_delta;
	int low_gain;

	dev->cal.avg_rssi_all = mt76_get_min_avg_rssi(&dev->mt76);
	if (!dev->cal.avg_rssi_all)
		dev->cal.avg_rssi_all = -75;

	low_gain = (dev->cal.avg_rssi_all > mt76x02_get_rssi_gain_thresh(dev)) +
		   (dev->cal.avg_rssi_all > mt76x02_get_low_rssi_gain_thresh(dev));

	gain_change = dev->cal.low_gain < 0 ||
		      (dev->cal.low_gain & 2) ^ (low_gain & 2);
	dev->cal.low_gain = low_gain;

	if (!gain_change) {
		if (mt76x02_phy_adjust_vga_gain(dev))
			mt76x0_phy_set_gain_val(dev);
		return;
	}

	dev->cal.agc_gain_adjust = (low_gain == 2) ? 0 : 10;
	gain_delta = (low_gain == 2) ? 10 : 0;

	dev->cal.agc_gain_cur[0] = dev->cal.agc_gain_init[0] - gain_delta;
	mt76x0_phy_set_gain_val(dev);

	/* clear false CCA counters */
	mt76_rr(dev, MT_RX_STAT_1);
}

static void mt76x0_phy_calibration_work(struct work_struct *work)
{
	struct mt76x02_dev *dev = container_of(work, struct mt76x02_dev,
					       cal_work.work);

	mt76x0_phy_update_channel_gain(dev);
	if (mt76x0_tssi_enabled(dev))
		mt76x0_phy_tssi_calibrate(dev);
	else
		mt76x0_phy_temp_sensor(dev);

	ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
				     4 * MT_CALIBRATE_INTERVAL);
}

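/* Write an RF init table, overriding the few entries whose value
 * depends on the chip variant (MT7630/MT7610E) and the bus type.
 */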
static void mt76x0_rf_patch_reg_array(struct mt76x02_dev *dev,
				      const struct mt76_reg_pair *rp, int len)
{
	int i;

	for (i = 0; i < len; i++) {
		u32 reg = rp[i].reg;
		u8 val = rp[i].value;

		switch (reg) {
		case MT_RF(0, 3):
			if (mt76_is_mmio(dev)) {
				if (is_mt7630(dev))
					val = 0x70;
				else
					val = 0x63;
			} else {
				val = 0x73;
			}
			break;
		case MT_RF(0, 21):
			if (is_mt7610e(dev))
				val = 0x10;
			else
				val = 0x12;
			break;
		case MT_RF(5, 2):
			if (is_mt7630(dev))
				val = 0x1d;
			else if (is_mt7610e(dev))
				val = 0x00;
			else
				val = 0x0c;
			break;
		default:
			break;
		}
		mt76x0_rf_wr(dev, reg, val);
	}
}

static void mt76x0_phy_rf_init(struct mt76x02_dev *dev)
{
	int i;
	u8 val;

	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_central_tab,
				  ARRAY_SIZE(mt76x0_rf_central_tab));
	mt76x0_rf_patch_reg_array(dev, mt76x0_rf_2g_channel_0_tab,
				  ARRAY_SIZE(mt76x0_rf_2g_channel_0_tab));
	RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
	RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
		const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];

		if (item->bw_band == RF_BW_20)
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
		else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
			mt76x0_rf_wr(dev, item->rf_bank_reg, item->value);
	}

	for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
		if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
			mt76x0_rf_wr(dev,
				     mt76x0_rf_band_switch_tab[i].rf_bank_reg,
				     mt76x0_rf_band_switch_tab[i].value);
		}
	}

	/* Frequency calibration:
	 * E1: B0.R22<6:0>: xo_cxo<6:0>
	 * E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
	 */
	mt76x0_rf_wr(dev, MT_RF(0, 22),
		     min_t(u8, dev->cal.rx.freq_offset, 0xbf));
	val = mt76x0_rf_rr(dev, MT_RF(0, 22));

	/* Reset procedure DAC during power-up:
	 * - set B0.R73<7>
	 * - clear B0.R73<7>
	 * - set B0.R73<7>
	 */
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_clear(dev, MT_RF(0, 73), BIT(7));
	mt76x0_rf_set(dev, MT_RF(0, 73), BIT(7));

	/* vcocal_en: initiate VCO calibration (reset after completion) */
	mt76x0_rf_set(dev, MT_RF(0, 4), 0x80);
}

void mt76x0_phy_init(struct mt76x02_dev *dev)
{
	INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibration_work);

	mt76x0_phy_ant_select(dev);
	mt76x0_phy_rf_init(dev);
	mt76x02_phy_set_rxpath(dev);
	mt76x02_phy_set_txdac(dev);
}