xref: /openbmc/linux/drivers/net/wireless/ath/ath9k/hw.c (revision e8e0929d)
1 /*
2  * Copyright (c) 2008-2009 Atheros Communications Inc.
3  *
4  * Permission to use, copy, modify, and/or distribute this software for any
5  * purpose with or without fee is hereby granted, provided that the above
6  * copyright notice and this permission notice appear in all copies.
7  *
8  * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9  * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10  * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11  * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12  * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13  * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14  * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15  */
16 
17 #include <linux/io.h>
18 #include <asm/unaligned.h>
19 #include <linux/pci.h>
20 
21 #include "ath9k.h"
22 #include "initvals.h"
23 
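/*
 * MAC clock rates in MHz (clock ticks per microsecond) for CCK and for
 * OFDM in each band. The clock/microsecond conversion helpers below divide
 * or multiply by these values; the HT40 variants additionally halve or
 * double the result to account for the doubled clock in HT40 mode.
 */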
24 #define ATH9K_CLOCK_RATE_CCK		22
25 #define ATH9K_CLOCK_RATE_5GHZ_OFDM	40
26 #define ATH9K_CLOCK_RATE_2GHZ_OFDM	44
27 
28 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
29 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
30 			      enum ath9k_ht_macmode macmode);
31 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
32 			      struct ar5416_eeprom_def *pEepData,
33 			      u32 reg, u32 value);
34 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
35 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36 
37 /********************/
38 /* Helper Functions */
39 /********************/
40 
41 static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
42 {
43 	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
44 
45 	if (!ah->curchan) /* should really check for CCK instead */
46 		return clks / ATH9K_CLOCK_RATE_CCK;
47 	if (conf->channel->band == IEEE80211_BAND_2GHZ)
48 		return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
49 
50 	return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
51 }
52 
53 static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
54 {
55 	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
56 
57 	if (conf_is_ht40(conf))
58 		return ath9k_hw_mac_usec(ah, clks) / 2;
59 	else
60 		return ath9k_hw_mac_usec(ah, clks);
61 }
62 
63 static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
64 {
65 	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
66 
67 	if (!ah->curchan) /* should really check for CCK instead */
68 		return usecs * ATH9K_CLOCK_RATE_CCK;
69 	if (conf->channel->band == IEEE80211_BAND_2GHZ)
70 		return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
71 	return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
72 }
73 
74 static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
75 {
76 	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
77 
78 	if (conf_is_ht40(conf))
79 		return ath9k_hw_mac_clks(ah, usecs) * 2;
80 	else
81 		return ath9k_hw_mac_clks(ah, usecs);
82 }
83 
84 /*
85  * Reads and writes both share the same lock. We do this to serialize
86  * reads and writes on Atheros 802.11n PCI devices only. This is required
87  * because the FIFO on these devices can only safely accept two outstanding
88  * requests; beyond that the device misbehaves. Serializing the reads/writes
89  * prevents this from happening.
90  */
91 
92 void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val)
93 {
94 	if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
95 		unsigned long flags;
96 		spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
97 		iowrite32(val, ah->ah_sc->mem + reg_offset);
98 		spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
99 	} else
100 		iowrite32(val, ah->ah_sc->mem + reg_offset);
101 }
102 
103 unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset)
104 {
105 	u32 val;
106 	if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
107 		unsigned long flags;
108 		spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
109 		val = ioread32(ah->ah_sc->mem + reg_offset);
110 		spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
111 	} else
112 		val = ioread32(ah->ah_sc->mem + reg_offset);
113 	return val;
114 }
115 
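/*
 * Poll 'reg' every AH_TIME_QUANTUM microseconds, for at most 'timeout'
 * microseconds, until (value & mask) == val. Returns true on success and
 * false (with a debug message) on timeout. Used e.g. to wait for the RTC
 * status to report "on" during a power-on reset (see
 * ath9k_hw_set_reset_power_on() below).
 */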
116 bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
117 {
118 	int i;
119 
120 	BUG_ON(timeout < AH_TIME_QUANTUM);
121 
122 	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
123 		if ((REG_READ(ah, reg) & mask) == val)
124 			return true;
125 
126 		udelay(AH_TIME_QUANTUM);
127 	}
128 
129 	DPRINTF(ah->ah_sc, ATH_DBG_ANY,
130 		"timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
131 		timeout, reg, REG_READ(ah, reg), mask, val);
132 
133 	return false;
134 }
135 
136 u32 ath9k_hw_reverse_bits(u32 val, u32 n)
137 {
138 	u32 retval;
139 	int i;
140 
141 	for (i = 0, retval = 0; i < n; i++) {
142 		retval = (retval << 1) | (val & 1);
143 		val >>= 1;
144 	}
145 	return retval;
146 }
147 
148 bool ath9k_get_channel_edges(struct ath_hw *ah,
149 			     u16 flags, u16 *low,
150 			     u16 *high)
151 {
152 	struct ath9k_hw_capabilities *pCap = &ah->caps;
153 
154 	if (flags & CHANNEL_5GHZ) {
155 		*low = pCap->low_5ghz_chan;
156 		*high = pCap->high_5ghz_chan;
157 		return true;
158 	}
159 	if ((flags & CHANNEL_2GHZ)) {
160 		*low = pCap->low_2ghz_chan;
161 		*high = pCap->high_2ghz_chan;
162 		return true;
163 	}
164 	return false;
165 }
166 
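/*
 * Compute the transmit duration (in microseconds) of a frame of 'frameLen'
 * bytes at rate index 'rateix', including SIFS, preamble/PLCP and data
 * symbols, for CCK and OFDM (with half- and quarter-rate channel variants).
 * Returns 0 for an unknown PHY type or a zero-kbps rate.
 *
 * Rough worked example (assuming the usual 802.11a timing constants:
 * 22 PLCP bits, 4 us symbols, 20 us preamble, 16 us SIFS): a 1500-byte
 * frame at 54 Mbps needs DIV_ROUND_UP(22 + 1500 * 8, 216) = 56 symbols,
 * giving roughly 16 + 20 + 56 * 4 = 260 us.
 */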
167 u16 ath9k_hw_computetxtime(struct ath_hw *ah,
168 			   const struct ath_rate_table *rates,
169 			   u32 frameLen, u16 rateix,
170 			   bool shortPreamble)
171 {
172 	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
173 	u32 kbps;
174 
175 	kbps = rates->info[rateix].ratekbps;
176 
177 	if (kbps == 0)
178 		return 0;
179 
180 	switch (rates->info[rateix].phy) {
181 	case WLAN_RC_PHY_CCK:
182 		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
183 		if (shortPreamble && rates->info[rateix].short_preamble)
184 			phyTime >>= 1;
185 		numBits = frameLen << 3;
186 		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
187 		break;
188 	case WLAN_RC_PHY_OFDM:
189 		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
190 			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
191 			numBits = OFDM_PLCP_BITS + (frameLen << 3);
192 			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
193 			txTime = OFDM_SIFS_TIME_QUARTER
194 				+ OFDM_PREAMBLE_TIME_QUARTER
195 				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
196 		} else if (ah->curchan &&
197 			   IS_CHAN_HALF_RATE(ah->curchan)) {
198 			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
199 			numBits = OFDM_PLCP_BITS + (frameLen << 3);
200 			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
201 			txTime = OFDM_SIFS_TIME_HALF
202 				+ OFDM_PREAMBLE_TIME_HALF
203 				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
204 		} else {
205 			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
206 			numBits = OFDM_PLCP_BITS + (frameLen << 3);
207 			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
208 			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
209 				+ (numSymbols * OFDM_SYMBOL_TIME);
210 		}
211 		break;
212 	default:
213 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
214 			"Unknown phy %u (rate ix %u)\n",
215 			rates->info[rateix].phy, rateix);
216 		txTime = 0;
217 		break;
218 	}
219 
220 	return txTime;
221 }
222 
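/*
 * Fill in the synthesizer, control and extension channel centre frequencies
 * for 'chan'. For non-HT40 channels all three are simply the channel
 * frequency; for HT40 the synthesizer centre is offset by
 * HT40_CHANNEL_CENTER_SHIFT from the control channel, with the extension
 * channel on the other side.
 */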
223 void ath9k_hw_get_channel_centers(struct ath_hw *ah,
224 				  struct ath9k_channel *chan,
225 				  struct chan_centers *centers)
226 {
227 	int8_t extoff;
228 
229 	if (!IS_CHAN_HT40(chan)) {
230 		centers->ctl_center = centers->ext_center =
231 			centers->synth_center = chan->channel;
232 		return;
233 	}
234 
235 	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
236 	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
237 		centers->synth_center =
238 			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
239 		extoff = 1;
240 	} else {
241 		centers->synth_center =
242 			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
243 		extoff = -1;
244 	}
245 
246 	centers->ctl_center =
247 		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
248 	centers->ext_center =
249 		centers->synth_center + (extoff *
250 			 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ?
251 			  HT40_CHANNEL_CENTER_SHIFT : 15));
252 }
253 
254 /******************/
255 /* Chip Revisions */
256 /******************/
257 
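/*
 * Read the MAC version and revision from AR_SREV. Newer parts report an ID
 * of 0xFF in the legacy field and use the extended SREV layout, which also
 * tells us whether the device is PCI-Express.
 */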
258 static void ath9k_hw_read_revisions(struct ath_hw *ah)
259 {
260 	u32 val;
261 
262 	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
263 
264 	if (val == 0xFF) {
265 		val = REG_READ(ah, AR_SREV);
266 		ah->hw_version.macVersion =
267 			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
268 		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
269 		ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
270 	} else {
271 		if (!AR_SREV_9100(ah))
272 			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
273 
274 		ah->hw_version.macRev = val & AR_SREV_REVISION;
275 
276 		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
277 			ah->is_pciexpress = true;
278 	}
279 }
280 
281 static int ath9k_hw_get_radiorev(struct ath_hw *ah)
282 {
283 	u32 val;
284 	int i;
285 
286 	REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
287 
288 	for (i = 0; i < 8; i++)
289 		REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
290 	val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
291 	val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
292 
293 	return ath9k_hw_reverse_bits(val, 8);
294 }
295 
296 /************************************/
297 /* HW Attach, Detach, Init Routines */
298 /************************************/
299 
300 static void ath9k_hw_disablepcie(struct ath_hw *ah)
301 {
302 	if (AR_SREV_9100(ah))
303 		return;
304 
305 	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
306 	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
307 	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
308 	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
309 	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
310 	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
311 	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
312 	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
313 	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
314 
315 	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
316 }
317 
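/*
 * Basic register read/write sanity test: walk a set of patterns through two
 * scratch registers (AR_STA_ID0 and a PHY register) and verify that each
 * value reads back as written, restoring the original contents afterwards.
 */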
318 static bool ath9k_hw_chip_test(struct ath_hw *ah)
319 {
320 	u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
321 	u32 regHold[2];
322 	u32 patternData[4] = { 0x55555555,
323 			       0xaaaaaaaa,
324 			       0x66666666,
325 			       0x99999999 };
326 	int i, j;
327 
328 	for (i = 0; i < 2; i++) {
329 		u32 addr = regAddr[i];
330 		u32 wrData, rdData;
331 
332 		regHold[i] = REG_READ(ah, addr);
333 		for (j = 0; j < 0x100; j++) {
334 			wrData = (j << 16) | j;
335 			REG_WRITE(ah, addr, wrData);
336 			rdData = REG_READ(ah, addr);
337 			if (rdData != wrData) {
338 				DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
339 					"address test failed "
340 					"addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
341 					addr, wrData, rdData);
342 				return false;
343 			}
344 		}
345 		for (j = 0; j < 4; j++) {
346 			wrData = patternData[j];
347 			REG_WRITE(ah, addr, wrData);
348 			rdData = REG_READ(ah, addr);
349 			if (wrData != rdData) {
350 				DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
351 					"address test failed "
352 					"addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
353 					addr, wrData, rdData);
354 				return false;
355 			}
356 		}
357 		REG_WRITE(ah, regAddr[i], regHold[i]);
358 	}
359 	udelay(100);
360 
361 	return true;
362 }
363 
364 static const char *ath9k_hw_devname(u16 devid)
365 {
366 	switch (devid) {
367 	case AR5416_DEVID_PCI:
368 		return "Atheros 5416";
369 	case AR5416_DEVID_PCIE:
370 		return "Atheros 5418";
371 	case AR9160_DEVID_PCI:
372 		return "Atheros 9160";
373 	case AR5416_AR9100_DEVID:
374 		return "Atheros 9100";
375 	case AR9280_DEVID_PCI:
376 	case AR9280_DEVID_PCIE:
377 		return "Atheros 9280";
378 	case AR9285_DEVID_PCIE:
379 		return "Atheros 9285";
380 	case AR5416_DEVID_AR9287_PCI:
381 	case AR5416_DEVID_AR9287_PCIE:
382 		return "Atheros 9287";
383 	}
384 
385 	return NULL;
386 }
387 
388 static void ath9k_hw_init_config(struct ath_hw *ah)
389 {
390 	int i;
391 
392 	ah->config.dma_beacon_response_time = 2;
393 	ah->config.sw_beacon_response_time = 10;
394 	ah->config.additional_swba_backoff = 0;
395 	ah->config.ack_6mb = 0x0;
396 	ah->config.cwm_ignore_extcca = 0;
397 	ah->config.pcie_powersave_enable = 0;
398 	ah->config.pcie_clock_req = 0;
399 	ah->config.pcie_waen = 0;
400 	ah->config.analog_shiftreg = 1;
401 	ah->config.ht_enable = 1;
402 	ah->config.ofdm_trig_low = 200;
403 	ah->config.ofdm_trig_high = 500;
404 	ah->config.cck_trig_high = 200;
405 	ah->config.cck_trig_low = 100;
406 	ah->config.enable_ani = 1;
407 	ah->config.diversity_control = ATH9K_ANT_VARIABLE;
408 	ah->config.antenna_switch_swap = 0;
409 
410 	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
411 		ah->config.spurchans[i][0] = AR_NO_SPUR;
412 		ah->config.spurchans[i][1] = AR_NO_SPUR;
413 	}
414 
415 	ah->config.intr_mitigation = true;
416 
417 	/*
418 	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
419 	 * _and_ only on multiprocessor systems (SMP/HyperThreading).
420 	 * This means we use it for all AR5416 devices, and the few
421 	 * minor PCI AR9280 devices out there.
422 	 *
423 	 * Serialization is required because these devices do not handle
424 	 * two concurrent reads/writes well, due to the latency involved.
425 	 * While one read/write is still being processed by the hardware,
426 	 * another read/write can be issued on another CPU; when this
427 	 * happens, the hardware gets stuck. We prevent this by
428 	 * serializing reads and writes.
429 	 *
430 	 * This issue is not present on PCI-Express devices or pre-AR5416
431 	 * devices (legacy, 802.11abg).
432 	 */
433 	if (num_possible_cpus() > 1)
434 		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
435 }
436 
437 static void ath9k_hw_init_defaults(struct ath_hw *ah)
438 {
439 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
440 
441 	regulatory->country_code = CTRY_DEFAULT;
442 	regulatory->power_limit = MAX_RATE_POWER;
443 	regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
444 
445 	ah->hw_version.magic = AR5416_MAGIC;
446 	ah->hw_version.subvendorid = 0;
447 
448 	ah->ah_flags = 0;
449 	if (ah->hw_version.devid == AR5416_AR9100_DEVID)
450 		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
451 	if (!AR_SREV_9100(ah))
452 		ah->ah_flags = AH_USE_EEPROM;
453 
454 	ah->atim_window = 0;
455 	ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
456 	ah->beacon_interval = 100;
457 	ah->enable_32kHz_clock = DONT_USE_32KHZ;
458 	ah->slottime = (u32) -1;
459 	ah->acktimeout = (u32) -1;
460 	ah->ctstimeout = (u32) -1;
461 	ah->globaltxtimeout = (u32) -1;
462 
463 	ah->gbeacon_rate = 0;
464 
465 	ah->power_mode = ATH9K_PM_UNDEFINED;
466 }
467 
468 static int ath9k_hw_rfattach(struct ath_hw *ah)
469 {
470 	bool rfStatus = false;
471 	int ecode = 0;
472 
473 	rfStatus = ath9k_hw_init_rf(ah, &ecode);
474 	if (!rfStatus) {
475 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
476 			"RF setup failed, status: %u\n", ecode);
477 		return ecode;
478 	}
479 
480 	return 0;
481 }
482 
483 static int ath9k_hw_rf_claim(struct ath_hw *ah)
484 {
485 	u32 val;
486 
487 	REG_WRITE(ah, AR_PHY(0), 0x00000007);
488 
489 	val = ath9k_hw_get_radiorev(ah);
490 	switch (val & AR_RADIO_SREV_MAJOR) {
491 	case 0:
492 		val = AR_RAD5133_SREV_MAJOR;
493 		break;
494 	case AR_RAD5133_SREV_MAJOR:
495 	case AR_RAD5122_SREV_MAJOR:
496 	case AR_RAD2133_SREV_MAJOR:
497 	case AR_RAD2122_SREV_MAJOR:
498 		break;
499 	default:
500 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
501 			"Radio Chip Rev 0x%02X not supported\n",
502 			val & AR_RADIO_SREV_MAJOR);
503 		return -EOPNOTSUPP;
504 	}
505 
506 	ah->hw_version.analog5GhzRev = val;
507 
508 	return 0;
509 }
510 
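/*
 * Read the MAC address from the three EEPROM MAC words into ah->macaddr.
 * Reject the all-zeros and all-ones addresses, which typically indicate an
 * unprogrammed EEPROM.
 */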
511 static int ath9k_hw_init_macaddr(struct ath_hw *ah)
512 {
513 	u32 sum;
514 	int i;
515 	u16 eeval;
516 
517 	sum = 0;
518 	for (i = 0; i < 3; i++) {
519 		eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
520 		sum += eeval;
521 		ah->macaddr[2 * i] = eeval >> 8;
522 		ah->macaddr[2 * i + 1] = eeval & 0xff;
523 	}
524 	if (sum == 0 || sum == 0xffff * 3)
525 		return -EADDRNOTAVAIL;
526 
527 	return 0;
528 }
529 
530 static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
531 {
532 	u32 rxgain_type;
533 
534 	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
535 		rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
536 
537 		if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
538 			INIT_INI_ARRAY(&ah->iniModesRxGain,
539 			ar9280Modes_backoff_13db_rxgain_9280_2,
540 			ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
541 		else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
542 			INIT_INI_ARRAY(&ah->iniModesRxGain,
543 			ar9280Modes_backoff_23db_rxgain_9280_2,
544 			ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
545 		else
546 			INIT_INI_ARRAY(&ah->iniModesRxGain,
547 			ar9280Modes_original_rxgain_9280_2,
548 			ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
549 	} else {
550 		INIT_INI_ARRAY(&ah->iniModesRxGain,
551 			ar9280Modes_original_rxgain_9280_2,
552 			ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
553 	}
554 }
555 
556 static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
557 {
558 	u32 txgain_type;
559 
560 	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
561 		txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
562 
563 		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
564 			INIT_INI_ARRAY(&ah->iniModesTxGain,
565 			ar9280Modes_high_power_tx_gain_9280_2,
566 			ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
567 		else
568 			INIT_INI_ARRAY(&ah->iniModesTxGain,
569 			ar9280Modes_original_tx_gain_9280_2,
570 			ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
571 	} else {
572 		INIT_INI_ARRAY(&ah->iniModesTxGain,
573 		ar9280Modes_original_tx_gain_9280_2,
574 		ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
575 	}
576 }
577 
578 static int ath9k_hw_post_init(struct ath_hw *ah)
579 {
580 	int ecode;
581 
582 	if (!ath9k_hw_chip_test(ah))
583 		return -ENODEV;
584 
585 	ecode = ath9k_hw_rf_claim(ah);
586 	if (ecode != 0)
587 		return ecode;
588 
589 	ecode = ath9k_hw_eeprom_init(ah);
590 	if (ecode != 0)
591 		return ecode;
592 
593 	DPRINTF(ah->ah_sc, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n",
594 		ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah));
595 
596 	ecode = ath9k_hw_rfattach(ah);
597 	if (ecode != 0)
598 		return ecode;
599 
600 	if (!AR_SREV_9100(ah)) {
601 		ath9k_hw_ani_setup(ah);
602 		ath9k_hw_ani_init(ah);
603 	}
604 
605 	return 0;
606 }
607 
608 static bool ath9k_hw_devid_supported(u16 devid)
609 {
610 	switch (devid) {
611 	case AR5416_DEVID_PCI:
612 	case AR5416_DEVID_PCIE:
613 	case AR5416_AR9100_DEVID:
614 	case AR9160_DEVID_PCI:
615 	case AR9280_DEVID_PCI:
616 	case AR9280_DEVID_PCIE:
617 	case AR9285_DEVID_PCIE:
618 	case AR5416_DEVID_AR9287_PCI:
619 	case AR5416_DEVID_AR9287_PCIE:
620 		return true;
621 	default:
622 		break;
623 	}
624 	return false;
625 }
626 
627 static bool ath9k_hw_macversion_supported(u32 macversion)
628 {
629 	switch (macversion) {
630 	case AR_SREV_VERSION_5416_PCI:
631 	case AR_SREV_VERSION_5416_PCIE:
632 	case AR_SREV_VERSION_9160:
633 	case AR_SREV_VERSION_9100:
634 	case AR_SREV_VERSION_9280:
635 	case AR_SREV_VERSION_9285:
636 	case AR_SREV_VERSION_9287:
637 		return true;
638 	/* Not yet */
639 	case AR_SREV_VERSION_9271:
640 	default:
641 		break;
642 	}
643 	return false;
644 }
645 
646 static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
647 {
648 	if (AR_SREV_9160_10_OR_LATER(ah)) {
649 		if (AR_SREV_9280_10_OR_LATER(ah)) {
650 			ah->iq_caldata.calData = &iq_cal_single_sample;
651 			ah->adcgain_caldata.calData =
652 				&adc_gain_cal_single_sample;
653 			ah->adcdc_caldata.calData =
654 				&adc_dc_cal_single_sample;
655 			ah->adcdc_calinitdata.calData =
656 				&adc_init_dc_cal;
657 		} else {
658 			ah->iq_caldata.calData = &iq_cal_multi_sample;
659 			ah->adcgain_caldata.calData =
660 				&adc_gain_cal_multi_sample;
661 			ah->adcdc_caldata.calData =
662 				&adc_dc_cal_multi_sample;
663 			ah->adcdc_calinitdata.calData =
664 				&adc_init_dc_cal;
665 		}
666 		ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
667 	}
668 }
669 
670 static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
671 {
672 	if (AR_SREV_9271(ah)) {
673 		INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
674 			       ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
675 		INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
676 			       ARRAY_SIZE(ar9271Common_9271_1_0), 2);
677 		return;
678 	}
679 
680 	if (AR_SREV_9287_11_OR_LATER(ah)) {
681 		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
682 				ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
683 		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
684 				ARRAY_SIZE(ar9287Common_9287_1_1), 2);
685 		if (ah->config.pcie_clock_req)
686 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
687 			ar9287PciePhy_clkreq_off_L1_9287_1_1,
688 			ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
689 		else
690 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
691 			ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
692 			ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
693 					2);
694 	} else if (AR_SREV_9287_10_OR_LATER(ah)) {
695 		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
696 				ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
697 		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
698 				ARRAY_SIZE(ar9287Common_9287_1_0), 2);
699 
700 		if (ah->config.pcie_clock_req)
701 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
702 			ar9287PciePhy_clkreq_off_L1_9287_1_0,
703 			ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
704 		else
705 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
706 			ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
707 			ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
708 				  2);
709 	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
710 
711 
712 		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
713 			       ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
714 		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
715 			       ARRAY_SIZE(ar9285Common_9285_1_2), 2);
716 
717 		if (ah->config.pcie_clock_req) {
718 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
719 			ar9285PciePhy_clkreq_off_L1_9285_1_2,
720 			ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
721 		} else {
722 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
723 			ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
724 			ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
725 				  2);
726 		}
727 	} else if (AR_SREV_9285_10_OR_LATER(ah)) {
728 		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
729 			       ARRAY_SIZE(ar9285Modes_9285), 6);
730 		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
731 			       ARRAY_SIZE(ar9285Common_9285), 2);
732 
733 		if (ah->config.pcie_clock_req) {
734 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
735 			ar9285PciePhy_clkreq_off_L1_9285,
736 			ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
737 		} else {
738 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
739 			ar9285PciePhy_clkreq_always_on_L1_9285,
740 			ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
741 		}
742 	} else if (AR_SREV_9280_20_OR_LATER(ah)) {
743 		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
744 			       ARRAY_SIZE(ar9280Modes_9280_2), 6);
745 		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
746 			       ARRAY_SIZE(ar9280Common_9280_2), 2);
747 
748 		if (ah->config.pcie_clock_req) {
749 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
750 			       ar9280PciePhy_clkreq_off_L1_9280,
751 			       ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280), 2);
752 		} else {
753 			INIT_INI_ARRAY(&ah->iniPcieSerdes,
754 			       ar9280PciePhy_clkreq_always_on_L1_9280,
755 			       ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
756 		}
757 		INIT_INI_ARRAY(&ah->iniModesAdditional,
758 			       ar9280Modes_fast_clock_9280_2,
759 			       ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
760 	} else if (AR_SREV_9280_10_OR_LATER(ah)) {
761 		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
762 			       ARRAY_SIZE(ar9280Modes_9280), 6);
763 		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
764 			       ARRAY_SIZE(ar9280Common_9280), 2);
765 	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
766 		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
767 			       ARRAY_SIZE(ar5416Modes_9160), 6);
768 		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
769 			       ARRAY_SIZE(ar5416Common_9160), 2);
770 		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
771 			       ARRAY_SIZE(ar5416Bank0_9160), 2);
772 		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
773 			       ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
774 		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
775 			       ARRAY_SIZE(ar5416Bank1_9160), 2);
776 		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
777 			       ARRAY_SIZE(ar5416Bank2_9160), 2);
778 		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
779 			       ARRAY_SIZE(ar5416Bank3_9160), 3);
780 		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
781 			       ARRAY_SIZE(ar5416Bank6_9160), 3);
782 		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
783 			       ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
784 		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
785 			       ARRAY_SIZE(ar5416Bank7_9160), 2);
786 		if (AR_SREV_9160_11(ah)) {
787 			INIT_INI_ARRAY(&ah->iniAddac,
788 				       ar5416Addac_91601_1,
789 				       ARRAY_SIZE(ar5416Addac_91601_1), 2);
790 		} else {
791 			INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
792 				       ARRAY_SIZE(ar5416Addac_9160), 2);
793 		}
794 	} else if (AR_SREV_9100_OR_LATER(ah)) {
795 		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
796 			       ARRAY_SIZE(ar5416Modes_9100), 6);
797 		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
798 			       ARRAY_SIZE(ar5416Common_9100), 2);
799 		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
800 			       ARRAY_SIZE(ar5416Bank0_9100), 2);
801 		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
802 			       ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
803 		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
804 			       ARRAY_SIZE(ar5416Bank1_9100), 2);
805 		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
806 			       ARRAY_SIZE(ar5416Bank2_9100), 2);
807 		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
808 			       ARRAY_SIZE(ar5416Bank3_9100), 3);
809 		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
810 			       ARRAY_SIZE(ar5416Bank6_9100), 3);
811 		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
812 			       ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
813 		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
814 			       ARRAY_SIZE(ar5416Bank7_9100), 2);
815 		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
816 			       ARRAY_SIZE(ar5416Addac_9100), 2);
817 	} else {
818 		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
819 			       ARRAY_SIZE(ar5416Modes), 6);
820 		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
821 			       ARRAY_SIZE(ar5416Common), 2);
822 		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
823 			       ARRAY_SIZE(ar5416Bank0), 2);
824 		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
825 			       ARRAY_SIZE(ar5416BB_RfGain), 3);
826 		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
827 			       ARRAY_SIZE(ar5416Bank1), 2);
828 		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
829 			       ARRAY_SIZE(ar5416Bank2), 2);
830 		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
831 			       ARRAY_SIZE(ar5416Bank3), 3);
832 		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
833 			       ARRAY_SIZE(ar5416Bank6), 3);
834 		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
835 			       ARRAY_SIZE(ar5416Bank6TPC), 3);
836 		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
837 			       ARRAY_SIZE(ar5416Bank7), 2);
838 		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
839 			       ARRAY_SIZE(ar5416Addac), 2);
840 	}
841 }
842 
843 static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
844 {
845 	if (AR_SREV_9287_11_OR_LATER(ah))
846 		INIT_INI_ARRAY(&ah->iniModesRxGain,
847 		ar9287Modes_rx_gain_9287_1_1,
848 		ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
849 	else if (AR_SREV_9287_10(ah))
850 		INIT_INI_ARRAY(&ah->iniModesRxGain,
851 		ar9287Modes_rx_gain_9287_1_0,
852 		ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
853 	else if (AR_SREV_9280_20(ah))
854 		ath9k_hw_init_rxgain_ini(ah);
855 
856 	if (AR_SREV_9287_11_OR_LATER(ah)) {
857 		INIT_INI_ARRAY(&ah->iniModesTxGain,
858 		ar9287Modes_tx_gain_9287_1_1,
859 		ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
860 	} else if (AR_SREV_9287_10(ah)) {
861 		INIT_INI_ARRAY(&ah->iniModesTxGain,
862 		ar9287Modes_tx_gain_9287_1_0,
863 		ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
864 	} else if (AR_SREV_9280_20(ah)) {
865 		ath9k_hw_init_txgain_ini(ah);
866 	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
867 		u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
868 
869 		/* txgain table */
870 		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
871 			INIT_INI_ARRAY(&ah->iniModesTxGain,
872 			ar9285Modes_high_power_tx_gain_9285_1_2,
873 			ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
874 		} else {
875 			INIT_INI_ARRAY(&ah->iniModesTxGain,
876 			ar9285Modes_original_tx_gain_9285_1_2,
877 			ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
878 		}
879 
880 	}
881 }
882 
883 static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
884 {
885 	u32 i, j;
886 
887 	if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
888 	    test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
889 
890 		/* EEPROM Fixup */
891 		for (i = 0; i < ah->iniModes.ia_rows; i++) {
892 			u32 reg = INI_RA(&ah->iniModes, i, 0);
893 
894 			for (j = 1; j < ah->iniModes.ia_columns; j++) {
895 				u32 val = INI_RA(&ah->iniModes, i, j);
896 
897 				INI_RA(&ah->iniModes, i, j) =
898 					ath9k_hw_ini_fixup(ah,
899 							   &ah->eeprom.def,
900 							   reg, val);
901 			}
902 		}
903 	}
904 }
905 
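/*
 * Top-level hardware initialization: check that the device and MAC revision
 * are supported, reset and wake up the chip, choose the register
 * serialization mode, load the INI register tables, attach the RF and
 * EEPROM layers, fill in the capability structure, and program the MAC
 * address and TX trigger level.
 */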
906 int ath9k_hw_init(struct ath_hw *ah)
907 {
908 	int r = 0;
909 
910 	if (!ath9k_hw_devid_supported(ah->hw_version.devid))
911 		return -EOPNOTSUPP;
912 
913 	ath9k_hw_init_defaults(ah);
914 	ath9k_hw_init_config(ah);
915 
916 	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
917 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't reset chip\n");
918 		return -EIO;
919 	}
920 
921 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
922 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
923 		return -EIO;
924 	}
925 
926 	if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
927 		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
928 		    (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
929 			ah->config.serialize_regmode =
930 				SER_REG_MODE_ON;
931 		} else {
932 			ah->config.serialize_regmode =
933 				SER_REG_MODE_OFF;
934 		}
935 	}
936 
937 	DPRINTF(ah->ah_sc, ATH_DBG_RESET, "serialize_regmode is %d\n",
938 		ah->config.serialize_regmode);
939 
940 	if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
941 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
942 			"Mac Chip Rev 0x%02x.%x is not supported by "
943 			"this driver\n", ah->hw_version.macVersion,
944 			ah->hw_version.macRev);
945 		return -EOPNOTSUPP;
946 	}
947 
948 	if (AR_SREV_9100(ah)) {
949 		ah->iq_caldata.calData = &iq_cal_multi_sample;
950 		ah->supp_cals = IQ_MISMATCH_CAL;
951 		ah->is_pciexpress = false;
952 	}
953 
954 	if (AR_SREV_9271(ah))
955 		ah->is_pciexpress = false;
956 
957 	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
958 
959 	ath9k_hw_init_cal_settings(ah);
960 
961 	ah->ani_function = ATH9K_ANI_ALL;
962 	if (AR_SREV_9280_10_OR_LATER(ah))
963 		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
964 
965 	ath9k_hw_init_mode_regs(ah);
966 
967 	if (ah->is_pciexpress)
968 		ath9k_hw_configpcipowersave(ah, 0, 0);
969 	else
970 		ath9k_hw_disablepcie(ah);
971 
972 	r = ath9k_hw_post_init(ah);
973 	if (r)
974 		return r;
975 
976 	ath9k_hw_init_mode_gain_regs(ah);
977 	ath9k_hw_fill_cap_info(ah);
978 	ath9k_hw_init_11a_eeprom_fix(ah);
979 
980 	r = ath9k_hw_init_macaddr(ah);
981 	if (r) {
982 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
983 			"Failed to initialize MAC address\n");
984 		return r;
985 	}
986 
987 	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
988 		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
989 	else
990 		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
991 
992 	ath9k_init_nfcal_hist_buffer(ah);
993 
994 	return 0;
995 }
996 
997 static void ath9k_hw_init_bb(struct ath_hw *ah,
998 			     struct ath9k_channel *chan)
999 {
1000 	u32 synthDelay;
1001 
1002 	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1003 	if (IS_CHAN_B(chan))
1004 		synthDelay = (4 * synthDelay) / 22;
1005 	else
1006 		synthDelay /= 10;
1007 
1008 	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1009 
1010 	udelay(synthDelay + BASE_ACTIVATE_DELAY);
1011 }
1012 
1013 static void ath9k_hw_init_qos(struct ath_hw *ah)
1014 {
1015 	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1016 	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1017 
1018 	REG_WRITE(ah, AR_QOS_NO_ACK,
1019 		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1020 		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1021 		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1022 
1023 	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1024 	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1025 	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1026 	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1027 	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
1028 }
1029 
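/*
 * Program the RTC PLL for the target channel: select the reference divider,
 * the clock selection for half/quarter-rate channels and a band-specific
 * divider (with fixed values on AR9100), then force the derived sleep clock
 * after the PLL has settled.
 */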
1030 static void ath9k_hw_init_pll(struct ath_hw *ah,
1031 			      struct ath9k_channel *chan)
1032 {
1033 	u32 pll;
1034 
1035 	if (AR_SREV_9100(ah)) {
1036 		if (chan && IS_CHAN_5GHZ(chan))
1037 			pll = 0x1450;
1038 		else
1039 			pll = 0x1458;
1040 	} else {
1041 		if (AR_SREV_9280_10_OR_LATER(ah)) {
1042 			pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1043 
1044 			if (chan && IS_CHAN_HALF_RATE(chan))
1045 				pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1046 			else if (chan && IS_CHAN_QUARTER_RATE(chan))
1047 				pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1048 
1049 			if (chan && IS_CHAN_5GHZ(chan)) {
1050 				pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1051 
1052 
1053 				if (AR_SREV_9280_20(ah)) {
1054 					if (((chan->channel % 20) == 0)
1055 					    || ((chan->channel % 10) == 0))
1056 						pll = 0x2850;
1057 					else
1058 						pll = 0x142c;
1059 				}
1060 			} else {
1061 				pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1062 			}
1063 
1064 		} else if (AR_SREV_9160_10_OR_LATER(ah)) {
1065 
1066 			pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1067 
1068 			if (chan && IS_CHAN_HALF_RATE(chan))
1069 				pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1070 			else if (chan && IS_CHAN_QUARTER_RATE(chan))
1071 				pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1072 
1073 			if (chan && IS_CHAN_5GHZ(chan))
1074 				pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1075 			else
1076 				pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1077 		} else {
1078 			pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1079 
1080 			if (chan && IS_CHAN_HALF_RATE(chan))
1081 				pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1082 			else if (chan && IS_CHAN_QUARTER_RATE(chan))
1083 				pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1084 
1085 			if (chan && IS_CHAN_5GHZ(chan))
1086 				pll |= SM(0xa, AR_RTC_PLL_DIV);
1087 			else
1088 				pll |= SM(0xb, AR_RTC_PLL_DIV);
1089 		}
1090 	}
1091 	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1092 
1093 	udelay(RTC_PLL_SETTLE_DELAY);
1094 
1095 	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1096 }
1097 
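/*
 * Program the RX and calibration chain masks and the self-generated frame
 * TX mask. Note that the rx_chainmask switch below relies on intentional
 * fall-through: 0x5 enables the analog chain swap and then falls through,
 * and 0x3 on MACs newer than AR9160 falls through to the common chainmask
 * write.
 */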
1098 static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1099 {
1100 	int rx_chainmask, tx_chainmask;
1101 
1102 	rx_chainmask = ah->rxchainmask;
1103 	tx_chainmask = ah->txchainmask;
1104 
1105 	switch (rx_chainmask) {
1106 	case 0x5:
1107 		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1108 			    AR_PHY_SWAP_ALT_CHAIN);
1109 	case 0x3:
1110 		if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) {
1111 			REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
1112 			REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
1113 			break;
1114 		}
1115 	case 0x1:
1116 	case 0x2:
1117 	case 0x7:
1118 		REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1119 		REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1120 		break;
1121 	default:
1122 		break;
1123 	}
1124 
1125 	REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1126 	if (tx_chainmask == 0x5) {
1127 		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1128 			    AR_PHY_SWAP_ALT_CHAIN);
1129 	}
1130 	if (AR_SREV_9100(ah))
1131 		REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1132 			  REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1133 }
1134 
1135 static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1136 					  enum nl80211_iftype opmode)
1137 {
1138 	ah->mask_reg = AR_IMR_TXERR |
1139 		AR_IMR_TXURN |
1140 		AR_IMR_RXERR |
1141 		AR_IMR_RXORN |
1142 		AR_IMR_BCNMISC;
1143 
1144 	if (ah->config.intr_mitigation)
1145 		ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1146 	else
1147 		ah->mask_reg |= AR_IMR_RXOK;
1148 
1149 	ah->mask_reg |= AR_IMR_TXOK;
1150 
1151 	if (opmode == NL80211_IFTYPE_AP)
1152 		ah->mask_reg |= AR_IMR_MIB;
1153 
1154 	REG_WRITE(ah, AR_IMR, ah->mask_reg);
1155 	REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1156 
1157 	if (!AR_SREV_9100(ah)) {
1158 		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1159 		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1160 		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1161 	}
1162 }
1163 
1164 static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1165 {
1166 	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1167 		DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad ack timeout %u\n", us);
1168 		ah->acktimeout = (u32) -1;
1169 		return false;
1170 	} else {
1171 		REG_RMW_FIELD(ah, AR_TIME_OUT,
1172 			      AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1173 		ah->acktimeout = us;
1174 		return true;
1175 	}
1176 }
1177 
1178 static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1179 {
1180 	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1181 		DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad cts timeout %u\n", us);
1182 		ah->ctstimeout = (u32) -1;
1183 		return false;
1184 	} else {
1185 		REG_RMW_FIELD(ah, AR_TIME_OUT,
1186 			      AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1187 		ah->ctstimeout = us;
1188 		return true;
1189 	}
1190 }
1191 
1192 static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1193 {
1194 	if (tu > 0xFFFF) {
1195 		DPRINTF(ah->ah_sc, ATH_DBG_XMIT,
1196 			"bad global tx timeout %u\n", tu);
1197 		ah->globaltxtimeout = (u32) -1;
1198 		return false;
1199 	} else {
1200 		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1201 		ah->globaltxtimeout = tu;
1202 		return true;
1203 	}
1204 }
1205 
1206 static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1207 {
1208 	DPRINTF(ah->ah_sc, ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1209 		ah->misc_mode);
1210 
1211 	if (ah->misc_mode != 0)
1212 		REG_WRITE(ah, AR_PCU_MISC,
1213 			  REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1214 	if (ah->slottime != (u32) -1)
1215 		ath9k_hw_setslottime(ah, ah->slottime);
1216 	if (ah->acktimeout != (u32) -1)
1217 		ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1218 	if (ah->ctstimeout != (u32) -1)
1219 		ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1220 	if (ah->globaltxtimeout != (u32) -1)
1221 		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1222 }
1223 
1224 const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1225 {
1226 	return vendorid == ATHEROS_VENDOR_ID ?
1227 		ath9k_hw_devname(devid) : NULL;
1228 }
1229 
1230 void ath9k_hw_detach(struct ath_hw *ah)
1231 {
1232 	if (!AR_SREV_9100(ah))
1233 		ath9k_hw_ani_disable(ah);
1234 
1235 	ath9k_hw_rf_free(ah);
1236 	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1237 	kfree(ah);
1238 	ah = NULL;
1239 }
1240 
1241 /*******/
1242 /* INI */
1243 /*******/
1244 
1245 static void ath9k_hw_override_ini(struct ath_hw *ah,
1246 				  struct ath9k_channel *chan)
1247 {
1248 	u32 val;
1249 
1250 	if (AR_SREV_9271(ah)) {
1251 		/*
1252 		 * Enable spectral scan as a workaround for the stuck-beacon
1253 		 * issue on AR9271 1.0. The stuck-beacon issue is not seen on
1254 		 * AR9271 1.1.
1255 		 */
1256 		if (AR_SREV_9271_10(ah)) {
1257 			val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
1258 			REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
1259 		}
1260 		else if (AR_SREV_9271_11(ah))
1261 			/*
1262 			 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1263 			 * present on AR9271 1.1
1264 			 */
1265 			REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1266 		return;
1267 	}
1268 
1269 	/*
1270 	 * Set RX_ABORT and RX_DIS, and clear them only after
1271 	 * RXE is set for the MAC. This prevents frames with corrupted
1272 	 * descriptor status.
1273 	 */
1274 	REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1275 
1276 	if (AR_SREV_9280_10_OR_LATER(ah)) {
1277 		val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1278 			       (~AR_PCU_MISC_MODE2_HWWAR1);
1279 
1280 		if (AR_SREV_9287_10_OR_LATER(ah))
1281 			val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1282 
1283 		REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1284 	}
1285 
1286 	if (!AR_SREV_5416_20_OR_LATER(ah) ||
1287 	    AR_SREV_9280_10_OR_LATER(ah))
1288 		return;
1289 	/*
1290 	 * Disable BB clock gating
1291 	 * Necessary to avoid issues on AR5416 2.0
1292 	 */
1293 	REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1294 }
1295 
1296 static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1297 			      struct ar5416_eeprom_def *pEepData,
1298 			      u32 reg, u32 value)
1299 {
1300 	struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1301 
1302 	switch (ah->hw_version.devid) {
1303 	case AR9280_DEVID_PCI:
1304 		if (reg == 0x7894) {
1305 			DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1306 				"ini VAL: %x  EEPROM: %x\n", value,
1307 				(pBase->version & 0xff));
1308 
1309 			if ((pBase->version & 0xff) > 0x0a) {
1310 				DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1311 					"PWDCLKIND: %d\n",
1312 					pBase->pwdclkind);
1313 				value &= ~AR_AN_TOP2_PWDCLKIND;
1314 				value |= AR_AN_TOP2_PWDCLKIND &
1315 					(pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1316 			} else {
1317 				DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1318 					"PWDCLKIND Earlier Rev\n");
1319 			}
1320 
1321 			DPRINTF(ah->ah_sc, ATH_DBG_EEPROM,
1322 				"final ini VAL: %x\n", value);
1323 		}
1324 		break;
1325 	}
1326 
1327 	return value;
1328 }
1329 
1330 static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1331 			      struct ar5416_eeprom_def *pEepData,
1332 			      u32 reg, u32 value)
1333 {
1334 	if (ah->eep_map == EEP_MAP_4KBITS)
1335 		return value;
1336 	else
1337 		return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1338 }
1339 
1340 static void ath9k_olc_init(struct ath_hw *ah)
1341 {
1342 	u32 i;
1343 
1344 	if (OLC_FOR_AR9287_10_LATER) {
1345 		REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1346 				AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1347 		ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1348 				AR9287_AN_TXPC0_TXPCMODE,
1349 				AR9287_AN_TXPC0_TXPCMODE_S,
1350 				AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1351 		udelay(100);
1352 	} else {
1353 		for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1354 			ah->originalGain[i] =
1355 				MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1356 						AR_PHY_TX_GAIN);
1357 		ah->PDADCdelta = 0;
1358 	}
1359 }
1360 
1361 static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1362 			      struct ath9k_channel *chan)
1363 {
1364 	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1365 
1366 	if (IS_CHAN_B(chan))
1367 		ctl |= CTL_11B;
1368 	else if (IS_CHAN_G(chan))
1369 		ctl |= CTL_11G;
1370 	else
1371 		ctl |= CTL_11A;
1372 
1373 	return ctl;
1374 }
1375 
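/*
 * Write the INI register tables for the target channel. modesIndex selects
 * the column of the mode-dependent tables (1: 11a/HT20, 2: 5 GHz HT40,
 * 3: 2 GHz HT40, 4: 11g/11b/HT20) and freqIndex selects the band column
 * (1: 5 GHz, 2: 2 GHz). After the ADDAC, mode, gain and common tables are
 * written, the per-chip overrides, chain masks, TX power and RF registers
 * are applied.
 */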
1376 static int ath9k_hw_process_ini(struct ath_hw *ah,
1377 				struct ath9k_channel *chan,
1378 				enum ath9k_ht_macmode macmode)
1379 {
1380 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1381 	int i, regWrites = 0;
1382 	struct ieee80211_channel *channel = chan->chan;
1383 	u32 modesIndex, freqIndex;
1384 
1385 	switch (chan->chanmode) {
1386 	case CHANNEL_A:
1387 	case CHANNEL_A_HT20:
1388 		modesIndex = 1;
1389 		freqIndex = 1;
1390 		break;
1391 	case CHANNEL_A_HT40PLUS:
1392 	case CHANNEL_A_HT40MINUS:
1393 		modesIndex = 2;
1394 		freqIndex = 1;
1395 		break;
1396 	case CHANNEL_G:
1397 	case CHANNEL_G_HT20:
1398 	case CHANNEL_B:
1399 		modesIndex = 4;
1400 		freqIndex = 2;
1401 		break;
1402 	case CHANNEL_G_HT40PLUS:
1403 	case CHANNEL_G_HT40MINUS:
1404 		modesIndex = 3;
1405 		freqIndex = 2;
1406 		break;
1407 
1408 	default:
1409 		return -EINVAL;
1410 	}
1411 
1412 	REG_WRITE(ah, AR_PHY(0), 0x00000007);
1413 	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1414 	ah->eep_ops->set_addac(ah, chan);
1415 
1416 	if (AR_SREV_5416_22_OR_LATER(ah)) {
1417 		REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1418 	} else {
1419 		struct ar5416IniArray temp;
1420 		u32 addacSize =
1421 			sizeof(u32) * ah->iniAddac.ia_rows *
1422 			ah->iniAddac.ia_columns;
1423 
1424 		memcpy(ah->addac5416_21,
1425 		       ah->iniAddac.ia_array, addacSize);
1426 
1427 		(ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1428 
1429 		temp.ia_array = ah->addac5416_21;
1430 		temp.ia_columns = ah->iniAddac.ia_columns;
1431 		temp.ia_rows = ah->iniAddac.ia_rows;
1432 		REG_WRITE_ARRAY(&temp, 1, regWrites);
1433 	}
1434 
1435 	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1436 
1437 	for (i = 0; i < ah->iniModes.ia_rows; i++) {
1438 		u32 reg = INI_RA(&ah->iniModes, i, 0);
1439 		u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1440 
1441 		REG_WRITE(ah, reg, val);
1442 
1443 		if (reg >= 0x7800 && reg < 0x78a0
1444 		    && ah->config.analog_shiftreg) {
1445 			udelay(100);
1446 		}
1447 
1448 		DO_DELAY(regWrites);
1449 	}
1450 
1451 	if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1452 		REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1453 
1454 	if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1455 	    AR_SREV_9287_10_OR_LATER(ah))
1456 		REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1457 
1458 	for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1459 		u32 reg = INI_RA(&ah->iniCommon, i, 0);
1460 		u32 val = INI_RA(&ah->iniCommon, i, 1);
1461 
1462 		REG_WRITE(ah, reg, val);
1463 
1464 		if (reg >= 0x7800 && reg < 0x78a0
1465 		    && ah->config.analog_shiftreg) {
1466 			udelay(100);
1467 		}
1468 
1469 		DO_DELAY(regWrites);
1470 	}
1471 
1472 	ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1473 
1474 	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1475 		REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1476 				regWrites);
1477 	}
1478 
1479 	ath9k_hw_override_ini(ah, chan);
1480 	ath9k_hw_set_regs(ah, chan, macmode);
1481 	ath9k_hw_init_chain_masks(ah);
1482 
1483 	if (OLC_FOR_AR9280_20_LATER)
1484 		ath9k_olc_init(ah);
1485 
1486 	ah->eep_ops->set_txpower(ah, chan,
1487 				 ath9k_regd_get_ctl(regulatory, chan),
1488 				 channel->max_antenna_gain * 2,
1489 				 channel->max_power * 2,
1490 				 min((u32) MAX_RATE_POWER,
1491 				 (u32) regulatory->power_limit));
1492 
1493 	if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1494 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1495 			"ar5416SetRfRegs failed\n");
1496 		return -EIO;
1497 	}
1498 
1499 	return 0;
1500 }
1501 
1502 /****************************************/
1503 /* Reset and Channel Switching Routines */
1504 /****************************************/
1505 
1506 static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1507 {
1508 	u32 rfMode = 0;
1509 
1510 	if (chan == NULL)
1511 		return;
1512 
1513 	rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1514 		? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1515 
1516 	if (!AR_SREV_9280_10_OR_LATER(ah))
1517 		rfMode |= (IS_CHAN_5GHZ(chan)) ?
1518 			AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1519 
1520 	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1521 		rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1522 
1523 	REG_WRITE(ah, AR_PHY_MODE, rfMode);
1524 }
1525 
1526 static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1527 {
1528 	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1529 }
1530 
1531 static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1532 {
1533 	u32 regval;
1534 
1535 	/*
1536 	 * Set AHB_MODE not to do cacheline prefetches.
1537 	 */
1538 	regval = REG_READ(ah, AR_AHB_MODE);
1539 	REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1540 
1541 	/*
1542 	 * let mac dma reads be in 128 byte chunks
1543 	 */
1544 	regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1545 	REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1546 
1547 	/*
1548 	 * Restore TX Trigger Level to its pre-reset value.
1549 	 * The initial value depends on whether aggregation is enabled, and is
1550 	 * adjusted whenever underruns are detected.
1551 	 */
1552 	REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1553 
1554 	/*
1555 	 * let mac dma writes be in 128 byte chunks
1556 	 */
1557 	regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1558 	REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1559 
1560 	/*
1561 	 * Setup receive FIFO threshold to hold off TX activities
1562 	 */
1563 	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1564 
1565 	/*
1566 	 * reduce the number of usable entries in PCU TXBUF to avoid
1567 	 * wrap around issues.
1568 	 */
1569 	if (AR_SREV_9285(ah)) {
1570 		/* For AR9285 the number of FIFOs is reduced by half,
1571 		 * so set the usable tx buf size to half as well to
1572 		 * avoid data/delimiter underruns.
1573 		 */
1574 		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1575 			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1576 	} else if (!AR_SREV_9271(ah)) {
1577 		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1578 			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1579 	}
1580 }
1581 
1582 static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1583 {
1584 	u32 val;
1585 
1586 	val = REG_READ(ah, AR_STA_ID1);
1587 	val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1588 	switch (opmode) {
1589 	case NL80211_IFTYPE_AP:
1590 		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1591 			  | AR_STA_ID1_KSRCH_MODE);
1592 		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1593 		break;
1594 	case NL80211_IFTYPE_ADHOC:
1595 	case NL80211_IFTYPE_MESH_POINT:
1596 		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1597 			  | AR_STA_ID1_KSRCH_MODE);
1598 		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1599 		break;
1600 	case NL80211_IFTYPE_STATION:
1601 	case NL80211_IFTYPE_MONITOR:
1602 		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1603 		break;
1604 	}
1605 }
1606 
1607 static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1608 						 u32 coef_scaled,
1609 						 u32 *coef_mantissa,
1610 						 u32 *coef_exponent)
1611 {
1612 	u32 coef_exp, coef_man;
1613 
1614 	for (coef_exp = 31; coef_exp > 0; coef_exp--)
1615 		if ((coef_scaled >> coef_exp) & 0x1)
1616 			break;
1617 
1618 	coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1619 
1620 	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1621 
1622 	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1623 	*coef_exponent = coef_exp - 16;
1624 }
1625 
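/*
 * Program the OFDM delta-slope coefficients. clockMhzScaled is 100 MHz in
 * fixed point (0x64000000 == 100 << 24), halved or quartered for half- and
 * quarter-rate channels; dividing by the synthesizer centre frequency gives
 * the scaled coefficient, which is split into a mantissa/exponent pair for
 * AR_PHY_TIMING3. A second pair, scaled by 9/10, is written to the
 * AR_PHY_HALFGI fields.
 */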
1626 static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1627 				     struct ath9k_channel *chan)
1628 {
1629 	u32 coef_scaled, ds_coef_exp, ds_coef_man;
1630 	u32 clockMhzScaled = 0x64000000;
1631 	struct chan_centers centers;
1632 
1633 	if (IS_CHAN_HALF_RATE(chan))
1634 		clockMhzScaled = clockMhzScaled >> 1;
1635 	else if (IS_CHAN_QUARTER_RATE(chan))
1636 		clockMhzScaled = clockMhzScaled >> 2;
1637 
1638 	ath9k_hw_get_channel_centers(ah, chan, &centers);
1639 	coef_scaled = clockMhzScaled / centers.synth_center;
1640 
1641 	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1642 				      &ds_coef_exp);
1643 
1644 	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1645 		      AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1646 	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1647 		      AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1648 
1649 	coef_scaled = (9 * coef_scaled) / 10;
1650 
1651 	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1652 				      &ds_coef_exp);
1653 
1654 	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1655 		      AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1656 	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1657 		      AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
1658 }
1659 
1660 static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1661 {
1662 	u32 rst_flags;
1663 	u32 tmpReg;
1664 
1665 	if (AR_SREV_9100(ah)) {
1666 		u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1667 		val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1668 		val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1669 		REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1670 		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1671 	}
1672 
1673 	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1674 		  AR_RTC_FORCE_WAKE_ON_INT);
1675 
1676 	if (AR_SREV_9100(ah)) {
1677 		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1678 			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1679 	} else {
1680 		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1681 		if (tmpReg &
1682 		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
1683 		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1684 			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1685 			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1686 		} else {
1687 			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1688 		}
1689 
1690 		rst_flags = AR_RTC_RC_MAC_WARM;
1691 		if (type == ATH9K_RESET_COLD)
1692 			rst_flags |= AR_RTC_RC_MAC_COLD;
1693 	}
1694 
1695 	REG_WRITE(ah, AR_RTC_RC, rst_flags);
1696 	udelay(50);
1697 
1698 	REG_WRITE(ah, AR_RTC_RC, 0);
1699 	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1700 		DPRINTF(ah->ah_sc, ATH_DBG_RESET,
1701 			"RTC stuck in MAC reset\n");
1702 		return false;
1703 	}
1704 
1705 	if (!AR_SREV_9100(ah))
1706 		REG_WRITE(ah, AR_RC, 0);
1707 
1708 	ath9k_hw_init_pll(ah, NULL);
1709 
1710 	if (AR_SREV_9100(ah))
1711 		udelay(50);
1712 
1713 	return true;
1714 }
1715 
1716 static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1717 {
1718 	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1719 		  AR_RTC_FORCE_WAKE_ON_INT);
1720 
1721 	if (!AR_SREV_9100(ah))
1722 		REG_WRITE(ah, AR_RC, AR_RC_AHB);
1723 
1724 	REG_WRITE(ah, AR_RTC_RESET, 0);
1725 	udelay(2);
1726 
1727 	if (!AR_SREV_9100(ah))
1728 		REG_WRITE(ah, AR_RC, 0);
1729 
1730 	REG_WRITE(ah, AR_RTC_RESET, 1);
1731 
1732 	if (!ath9k_hw_wait(ah,
1733 			   AR_RTC_STATUS,
1734 			   AR_RTC_STATUS_M,
1735 			   AR_RTC_STATUS_ON,
1736 			   AH_WAIT_TIMEOUT)) {
1737 		DPRINTF(ah->ah_sc, ATH_DBG_RESET, "RTC not waking up\n");
1738 		return false;
1739 	}
1740 
1741 	ath9k_hw_read_revisions(ah);
1742 
1743 	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1744 }
1745 
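/*
 * Reset entry point: ATH9K_RESET_POWER_ON performs a full RTC power-on
 * reset (re-reading the chip revisions) followed by a warm reset, while
 * ATH9K_RESET_WARM/COLD go straight to the MAC reset path.
 */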
1746 static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1747 {
1748 	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1749 		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1750 
1751 	switch (type) {
1752 	case ATH9K_RESET_POWER_ON:
1753 		return ath9k_hw_set_reset_power_on(ah);
1754 	case ATH9K_RESET_WARM:
1755 	case ATH9K_RESET_COLD:
1756 		return ath9k_hw_set_reset(ah, type);
1757 	default:
1758 		return false;
1759 	}
1760 }
1761 
1762 static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
1763 			      enum ath9k_ht_macmode macmode)
1764 {
1765 	u32 phymode;
1766 	u32 enableDacFifo = 0;
1767 
1768 	if (AR_SREV_9285_10_OR_LATER(ah))
1769 		enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1770 					 AR_PHY_FC_ENABLE_DAC_FIFO);
1771 
1772 	phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1773 		| AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1774 
1775 	if (IS_CHAN_HT40(chan)) {
1776 		phymode |= AR_PHY_FC_DYN2040_EN;
1777 
1778 		if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1779 		    (chan->chanmode == CHANNEL_G_HT40PLUS))
1780 			phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1781 
1782 		if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25)
1783 			phymode |= AR_PHY_FC_DYN2040_EXT_CH;
1784 	}
1785 	REG_WRITE(ah, AR_PHY_TURBO, phymode);
1786 
1787 	ath9k_hw_set11nmac2040(ah, macmode);
1788 
1789 	REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1790 	REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1791 }
1792 
1793 static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1794 				struct ath9k_channel *chan)
1795 {
1796 	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1797 		if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1798 			return false;
1799 	} else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1800 		return false;
1801 
1802 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1803 		return false;
1804 
1805 	ah->chip_fullsleep = false;
1806 	ath9k_hw_init_pll(ah, chan);
1807 	ath9k_hw_set_rfmode(ah, chan);
1808 
1809 	return true;
1810 }
1811 
1812 static bool ath9k_hw_channel_change(struct ath_hw *ah,
1813 				    struct ath9k_channel *chan,
1814 				    enum ath9k_ht_macmode macmode)
1815 {
1816 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1817 	struct ieee80211_channel *channel = chan->chan;
1818 	u32 synthDelay, qnum;
1819 
1820 	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1821 		if (ath9k_hw_numtxpending(ah, qnum)) {
1822 			DPRINTF(ah->ah_sc, ATH_DBG_QUEUE,
1823 				"Transmit frames pending on queue %d\n", qnum);
1824 			return false;
1825 		}
1826 	}
1827 
1828 	REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1829 	if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1830 			   AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1831 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1832 			"Could not kill baseband RX\n");
1833 		return false;
1834 	}
1835 
1836 	ath9k_hw_set_regs(ah, chan, macmode);
1837 
1838 	if (AR_SREV_9280_10_OR_LATER(ah)) {
1839 		ath9k_hw_ar9280_set_channel(ah, chan);
1840 	} else {
1841 		if (!(ath9k_hw_set_channel(ah, chan))) {
1842 			DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
1843 				"Failed to set channel\n");
1844 			return false;
1845 		}
1846 	}
1847 
1848 	ah->eep_ops->set_txpower(ah, chan,
1849 			     ath9k_regd_get_ctl(regulatory, chan),
1850 			     channel->max_antenna_gain * 2,
1851 			     channel->max_power * 2,
1852 			     min((u32) MAX_RATE_POWER,
1853 			     (u32) regulatory->power_limit));
1854 
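	/*
	 * Give the frequency synthesizer time to settle before releasing
	 * the RF bus request below.
	 */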
1855 	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1856 	if (IS_CHAN_B(chan))
1857 		synthDelay = (4 * synthDelay) / 22;
1858 	else
1859 		synthDelay /= 10;
1860 
1861 	udelay(synthDelay + BASE_ACTIVATE_DELAY);
1862 
1863 	REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1864 
1865 	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1866 		ath9k_hw_set_delta_slope(ah, chan);
1867 
1868 	if (AR_SREV_9280_10_OR_LATER(ah))
1869 		ath9k_hw_9280_spur_mitigate(ah, chan);
1870 	else
1871 		ath9k_hw_spur_mitigate(ah, chan);
1872 
1873 	if (!chan->oneTimeCalsDone)
1874 		chan->oneTimeCalsDone = true;
1875 
1876 	return true;
1877 }
1878 
1879 static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1880 {
1881 	int bb_spur = AR_NO_SPUR;
1882 	int freq;
1883 	int bin, cur_bin;
1884 	int bb_spur_off, spur_subchannel_sd;
1885 	int spur_freq_sd;
1886 	int spur_delta_phase;
1887 	int denominator;
1888 	int upper, lower, cur_vit_mask;
1889 	int tmp, newVal;
1890 	int i;
1891 	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1892 			  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1893 	};
1894 	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1895 			 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1896 	};
1897 	int inc[4] = { 0, 100, 0, 0 };
1898 	struct chan_centers centers;
1899 
1900 	int8_t mask_m[123];
1901 	int8_t mask_p[123];
1902 	int8_t mask_amt;
1903 	int tmp_mask;
1904 	int cur_bb_spur;
1905 	bool is2GHz = IS_CHAN_2GHZ(chan);
1906 
1907 	memset(&mask_m, 0, sizeof(int8_t) * 123);
1908 	memset(&mask_p, 0, sizeof(int8_t) * 123);
1909 
1910 	ath9k_hw_get_channel_centers(ah, chan, &centers);
1911 	freq = centers.synth_center;
1912 
1913 	ah->config.spurmode = SPUR_ENABLE_EEPROM;
1914 	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1915 		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1916 
1917 		if (is2GHz)
1918 			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1919 		else
1920 			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1921 
1922 		if (AR_NO_SPUR == cur_bb_spur)
1923 			break;
1924 		cur_bb_spur = cur_bb_spur - freq;
1925 
1926 		if (IS_CHAN_HT40(chan)) {
1927 			if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1928 			    (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1929 				bb_spur = cur_bb_spur;
1930 				break;
1931 			}
1932 		} else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1933 			   (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1934 			bb_spur = cur_bb_spur;
1935 			break;
1936 		}
1937 	}
1938 
1939 	if (AR_NO_SPUR == bb_spur) {
1940 		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1941 			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1942 		return;
1943 	} else {
1944 		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1945 			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1946 	}
1947 
1948 	bin = bb_spur * 320;
1949 
1950 	tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1951 
1952 	newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1953 			AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1954 			AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1955 			AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1956 	REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1957 
1958 	newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1959 		  AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1960 		  AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1961 		  AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1962 		  SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1963 	REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1964 
1965 	if (IS_CHAN_HT40(chan)) {
1966 		if (bb_spur < 0) {
1967 			spur_subchannel_sd = 1;
1968 			bb_spur_off = bb_spur + 10;
1969 		} else {
1970 			spur_subchannel_sd = 0;
1971 			bb_spur_off = bb_spur - 10;
1972 		}
1973 	} else {
1974 		spur_subchannel_sd = 0;
1975 		bb_spur_off = bb_spur;
1976 	}
1977 
1978 	if (IS_CHAN_HT40(chan))
1979 		spur_delta_phase =
1980 			((bb_spur * 262144) /
1981 			 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1982 	else
1983 		spur_delta_phase =
1984 			((bb_spur * 524288) /
1985 			 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1986 
1987 	denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1988 	spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1989 
1990 	newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1991 		  SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1992 		  SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1993 	REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1994 
1995 	newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1996 	REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1997 
1998 	cur_bin = -6000;
1999 	upper = bin + 100;
2000 	lower = bin - 100;
2001 
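	/*
	 * Set the pilot and channel mask bits for every bin that falls
	 * within +/- 100 of the spur bin.
	 */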
2002 	for (i = 0; i < 4; i++) {
2003 		int pilot_mask = 0;
2004 		int chan_mask = 0;
2005 		int bp = 0;
2006 		for (bp = 0; bp < 30; bp++) {
2007 			if ((cur_bin > lower) && (cur_bin < upper)) {
2008 				pilot_mask = pilot_mask | 0x1 << bp;
2009 				chan_mask = chan_mask | 0x1 << bp;
2010 			}
2011 			cur_bin += 100;
2012 		}
2013 		cur_bin += inc[i];
2014 		REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2015 		REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2016 	}
2017 
2018 	cur_vit_mask = 6100;
2019 	upper = bin + 120;
2020 	lower = bin - 120;
2021 
2022 	for (i = 0; i < 123; i++) {
2023 		if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2024 
2025 			/* workaround for gcc bug #37014 */
2026 			volatile int tmp_v = abs(cur_vit_mask - bin);
2027 
2028 			if (tmp_v < 75)
2029 				mask_amt = 1;
2030 			else
2031 				mask_amt = 0;
2032 			if (cur_vit_mask < 0)
2033 				mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2034 			else
2035 				mask_p[cur_vit_mask / 100] = mask_amt;
2036 		}
2037 		cur_vit_mask -= 100;
2038 	}
2039 
2040 	tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2041 		| (mask_m[48] << 26) | (mask_m[49] << 24)
2042 		| (mask_m[50] << 22) | (mask_m[51] << 20)
2043 		| (mask_m[52] << 18) | (mask_m[53] << 16)
2044 		| (mask_m[54] << 14) | (mask_m[55] << 12)
2045 		| (mask_m[56] << 10) | (mask_m[57] << 8)
2046 		| (mask_m[58] << 6) | (mask_m[59] << 4)
2047 		| (mask_m[60] << 2) | (mask_m[61] << 0);
2048 	REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2049 	REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2050 
2051 	tmp_mask = (mask_m[31] << 28)
2052 		| (mask_m[32] << 26) | (mask_m[33] << 24)
2053 		| (mask_m[34] << 22) | (mask_m[35] << 20)
2054 		| (mask_m[36] << 18) | (mask_m[37] << 16)
2055 		| (mask_m[48] << 14) | (mask_m[39] << 12)
2056 		| (mask_m[40] << 10) | (mask_m[41] << 8)
2057 		| (mask_m[42] << 6) | (mask_m[43] << 4)
2058 		| (mask_m[44] << 2) | (mask_m[45] << 0);
2059 	REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2060 	REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2061 
2062 	tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2063 		| (mask_m[18] << 26) | (mask_m[18] << 24)
2064 		| (mask_m[20] << 22) | (mask_m[20] << 20)
2065 		| (mask_m[22] << 18) | (mask_m[22] << 16)
2066 		| (mask_m[24] << 14) | (mask_m[24] << 12)
2067 		| (mask_m[25] << 10) | (mask_m[26] << 8)
2068 		| (mask_m[27] << 6) | (mask_m[28] << 4)
2069 		| (mask_m[29] << 2) | (mask_m[30] << 0);
2070 	REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2071 	REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2072 
2073 	tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2074 		| (mask_m[2] << 26) | (mask_m[3] << 24)
2075 		| (mask_m[4] << 22) | (mask_m[5] << 20)
2076 		| (mask_m[6] << 18) | (mask_m[7] << 16)
2077 		| (mask_m[8] << 14) | (mask_m[9] << 12)
2078 		| (mask_m[10] << 10) | (mask_m[11] << 8)
2079 		| (mask_m[12] << 6) | (mask_m[13] << 4)
2080 		| (mask_m[14] << 2) | (mask_m[15] << 0);
2081 	REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2082 	REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2083 
2084 	tmp_mask = (mask_p[15] << 28)
2085 		| (mask_p[14] << 26) | (mask_p[13] << 24)
2086 		| (mask_p[12] << 22) | (mask_p[11] << 20)
2087 		| (mask_p[10] << 18) | (mask_p[9] << 16)
2088 		| (mask_p[8] << 14) | (mask_p[7] << 12)
2089 		| (mask_p[6] << 10) | (mask_p[5] << 8)
2090 		| (mask_p[4] << 6) | (mask_p[3] << 4)
2091 		| (mask_p[2] << 2) | (mask_p[1] << 0);
2092 	REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2093 	REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2094 
2095 	tmp_mask = (mask_p[30] << 28)
2096 		| (mask_p[29] << 26) | (mask_p[28] << 24)
2097 		| (mask_p[27] << 22) | (mask_p[26] << 20)
2098 		| (mask_p[25] << 18) | (mask_p[24] << 16)
2099 		| (mask_p[23] << 14) | (mask_p[22] << 12)
2100 		| (mask_p[21] << 10) | (mask_p[20] << 8)
2101 		| (mask_p[19] << 6) | (mask_p[18] << 4)
2102 		| (mask_p[17] << 2) | (mask_p[16] << 0);
2103 	REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2104 	REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2105 
2106 	tmp_mask = (mask_p[45] << 28)
2107 		| (mask_p[44] << 26) | (mask_p[43] << 24)
2108 		| (mask_p[42] << 22) | (mask_p[41] << 20)
2109 		| (mask_p[40] << 18) | (mask_p[39] << 16)
2110 		| (mask_p[38] << 14) | (mask_p[37] << 12)
2111 		| (mask_p[36] << 10) | (mask_p[35] << 8)
2112 		| (mask_p[34] << 6) | (mask_p[33] << 4)
2113 		| (mask_p[32] << 2) | (mask_p[31] << 0);
2114 	REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2115 	REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2116 
2117 	tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2118 		| (mask_p[59] << 26) | (mask_p[58] << 24)
2119 		| (mask_p[57] << 22) | (mask_p[56] << 20)
2120 		| (mask_p[55] << 18) | (mask_p[54] << 16)
2121 		| (mask_p[53] << 14) | (mask_p[52] << 12)
2122 		| (mask_p[51] << 10) | (mask_p[50] << 8)
2123 		| (mask_p[49] << 6) | (mask_p[48] << 4)
2124 		| (mask_p[47] << 2) | (mask_p[46] << 0);
2125 	REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2126 	REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2127 }
2128 
2129 static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2130 {
2131 	int bb_spur = AR_NO_SPUR;
2132 	int bin, cur_bin;
2133 	int spur_freq_sd;
2134 	int spur_delta_phase;
2135 	int denominator;
2136 	int upper, lower, cur_vit_mask;
2137 	int tmp, new;
2138 	int i;
2139 	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2140 			  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2141 	};
2142 	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2143 			 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2144 	};
2145 	int inc[4] = { 0, 100, 0, 0 };
2146 
2147 	int8_t mask_m[123];
2148 	int8_t mask_p[123];
2149 	int8_t mask_amt;
2150 	int tmp_mask;
2151 	int cur_bb_spur;
2152 	bool is2GHz = IS_CHAN_2GHZ(chan);
2153 
2154 	memset(&mask_m, 0, sizeof(int8_t) * 123);
2155 	memset(&mask_p, 0, sizeof(int8_t) * 123);
2156 
2157 	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2158 		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2159 		if (AR_NO_SPUR == cur_bb_spur)
2160 			break;
2161 		cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2162 		if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2163 			bb_spur = cur_bb_spur;
2164 			break;
2165 		}
2166 	}
2167 
2168 	if (AR_NO_SPUR == bb_spur)
2169 		return;
2170 
2171 	bin = bb_spur * 32;
2172 
2173 	tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2174 	new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2175 		     AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2176 		     AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2177 		     AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2178 
2179 	REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2180 
2181 	new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2182 	       AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2183 	       AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2184 	       AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2185 	       SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2186 	REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2187 
2188 	spur_delta_phase = ((bb_spur * 524288) / 100) &
2189 		AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2190 
2191 	denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2192 	spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2193 
2194 	new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2195 	       SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2196 	       SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2197 	REG_WRITE(ah, AR_PHY_TIMING11, new);
2198 
2199 	cur_bin = -6000;
2200 	upper = bin + 100;
2201 	lower = bin - 100;
2202 
2203 	for (i = 0; i < 4; i++) {
2204 		int pilot_mask = 0;
2205 		int chan_mask = 0;
2206 		int bp = 0;
2207 		for (bp = 0; bp < 30; bp++) {
2208 			if ((cur_bin > lower) && (cur_bin < upper)) {
2209 				pilot_mask = pilot_mask | 0x1 << bp;
2210 				chan_mask = chan_mask | 0x1 << bp;
2211 			}
2212 			cur_bin += 100;
2213 		}
2214 		cur_bin += inc[i];
2215 		REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2216 		REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2217 	}
2218 
2219 	cur_vit_mask = 6100;
2220 	upper = bin + 120;
2221 	lower = bin - 120;
2222 
2223 	for (i = 0; i < 123; i++) {
2224 		if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2225 
2226 			/* workaround for gcc bug #37014 */
2227 			volatile int tmp_v = abs(cur_vit_mask - bin);
2228 
2229 			if (tmp_v < 75)
2230 				mask_amt = 1;
2231 			else
2232 				mask_amt = 0;
2233 			if (cur_vit_mask < 0)
2234 				mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2235 			else
2236 				mask_p[cur_vit_mask / 100] = mask_amt;
2237 		}
2238 		cur_vit_mask -= 100;
2239 	}
2240 
2241 	tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2242 		| (mask_m[48] << 26) | (mask_m[49] << 24)
2243 		| (mask_m[50] << 22) | (mask_m[51] << 20)
2244 		| (mask_m[52] << 18) | (mask_m[53] << 16)
2245 		| (mask_m[54] << 14) | (mask_m[55] << 12)
2246 		| (mask_m[56] << 10) | (mask_m[57] << 8)
2247 		| (mask_m[58] << 6) | (mask_m[59] << 4)
2248 		| (mask_m[60] << 2) | (mask_m[61] << 0);
2249 	REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2250 	REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2251 
2252 	tmp_mask = (mask_m[31] << 28)
2253 		| (mask_m[32] << 26) | (mask_m[33] << 24)
2254 		| (mask_m[34] << 22) | (mask_m[35] << 20)
2255 		| (mask_m[36] << 18) | (mask_m[37] << 16)
2256 		| (mask_m[48] << 14) | (mask_m[39] << 12)
2257 		| (mask_m[40] << 10) | (mask_m[41] << 8)
2258 		| (mask_m[42] << 6) | (mask_m[43] << 4)
2259 		| (mask_m[44] << 2) | (mask_m[45] << 0);
2260 	REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2261 	REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2262 
2263 	tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2264 		| (mask_m[18] << 26) | (mask_m[18] << 24)
2265 		| (mask_m[20] << 22) | (mask_m[20] << 20)
2266 		| (mask_m[22] << 18) | (mask_m[22] << 16)
2267 		| (mask_m[24] << 14) | (mask_m[24] << 12)
2268 		| (mask_m[25] << 10) | (mask_m[26] << 8)
2269 		| (mask_m[27] << 6) | (mask_m[28] << 4)
2270 		| (mask_m[29] << 2) | (mask_m[30] << 0);
2271 	REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2272 	REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2273 
2274 	tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2275 		| (mask_m[2] << 26) | (mask_m[3] << 24)
2276 		| (mask_m[4] << 22) | (mask_m[5] << 20)
2277 		| (mask_m[6] << 18) | (mask_m[7] << 16)
2278 		| (mask_m[8] << 14) | (mask_m[9] << 12)
2279 		| (mask_m[10] << 10) | (mask_m[11] << 8)
2280 		| (mask_m[12] << 6) | (mask_m[13] << 4)
2281 		| (mask_m[14] << 2) | (mask_m[15] << 0);
2282 	REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2283 	REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2284 
2285 	tmp_mask = (mask_p[15] << 28)
2286 		| (mask_p[14] << 26) | (mask_p[13] << 24)
2287 		| (mask_p[12] << 22) | (mask_p[11] << 20)
2288 		| (mask_p[10] << 18) | (mask_p[9] << 16)
2289 		| (mask_p[8] << 14) | (mask_p[7] << 12)
2290 		| (mask_p[6] << 10) | (mask_p[5] << 8)
2291 		| (mask_p[4] << 6) | (mask_p[3] << 4)
2292 		| (mask_p[2] << 2) | (mask_p[1] << 0);
2293 	REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2294 	REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2295 
2296 	tmp_mask = (mask_p[30] << 28)
2297 		| (mask_p[29] << 26) | (mask_p[28] << 24)
2298 		| (mask_p[27] << 22) | (mask_p[26] << 20)
2299 		| (mask_p[25] << 18) | (mask_p[24] << 16)
2300 		| (mask_p[23] << 14) | (mask_p[22] << 12)
2301 		| (mask_p[21] << 10) | (mask_p[20] << 8)
2302 		| (mask_p[19] << 6) | (mask_p[18] << 4)
2303 		| (mask_p[17] << 2) | (mask_p[16] << 0);
2304 	REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2305 	REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2306 
2307 	tmp_mask = (mask_p[45] << 28)
2308 		| (mask_p[44] << 26) | (mask_p[43] << 24)
2309 		| (mask_p[42] << 22) | (mask_p[41] << 20)
2310 		| (mask_p[40] << 18) | (mask_p[39] << 16)
2311 		| (mask_p[38] << 14) | (mask_p[37] << 12)
2312 		| (mask_p[36] << 10) | (mask_p[35] << 8)
2313 		| (mask_p[34] << 6) | (mask_p[33] << 4)
2314 		| (mask_p[32] << 2) | (mask_p[31] << 0);
2315 	REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2316 	REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2317 
2318 	tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2319 		| (mask_p[59] << 26) | (mask_p[58] << 24)
2320 		| (mask_p[57] << 22) | (mask_p[56] << 20)
2321 		| (mask_p[55] << 18) | (mask_p[54] << 16)
2322 		| (mask_p[53] << 14) | (mask_p[52] << 12)
2323 		| (mask_p[51] << 10) | (mask_p[50] << 8)
2324 		| (mask_p[49] << 6) | (mask_p[48] << 4)
2325 		| (mask_p[47] << 2) | (mask_p[46] << 0);
2326 	REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2327 	REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2328 }
2329 
2330 static void ath9k_enable_rfkill(struct ath_hw *ah)
2331 {
2332 	REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2333 		    AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2334 
2335 	REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2336 		    AR_GPIO_INPUT_MUX2_RFSILENT);
2337 
2338 	ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2339 	REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2340 }
2341 
2342 int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2343 		    bool bChannelChange)
2344 {
2345 	u32 saveLedState;
2346 	struct ath_softc *sc = ah->ah_sc;
2347 	struct ath9k_channel *curchan = ah->curchan;
2348 	u32 saveDefAntenna;
2349 	u32 macStaId1;
2350 	u64 tsf = 0;
2351 	int i, rx_chainmask, r;
2352 
2353 	ah->extprotspacing = sc->ht_extprotspacing;
2354 	ah->txchainmask = sc->tx_chainmask;
2355 	ah->rxchainmask = sc->rx_chainmask;
2356 
2357 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2358 		return -EIO;
2359 
2360 	if (curchan && !ah->chip_fullsleep)
2361 		ath9k_hw_getnf(ah, curchan);
2362 
2363 	if (bChannelChange &&
2364 	    !ah->chip_fullsleep &&
2365 	    (ah->curchan != NULL) &&
2366 	    (chan->channel != ah->curchan->channel) &&
2367 	    ((chan->channelFlags & CHANNEL_ALL) ==
2368 	     (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2369 	     !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2370 	     IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2371 
2372 		if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) {
2373 			ath9k_hw_loadnf(ah, ah->curchan);
2374 			ath9k_hw_start_nfcal(ah);
2375 			return 0;
2376 		}
2377 	}
2378 
2379 	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2380 	if (saveDefAntenna == 0)
2381 		saveDefAntenna = 1;
2382 
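	/* Preserve the 11B base-rate setting so it survives the reset. */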
2383 	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2384 
2385 	/* For chips on which RTC reset is done, save TSF before it gets cleared */
2386 	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2387 		tsf = ath9k_hw_gettsf64(ah);
2388 
2389 	saveLedState = REG_READ(ah, AR_CFG_LED) &
2390 		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2391 		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2392 
2393 	ath9k_hw_mark_phy_inactive(ah);
2394 
2395 	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2396 		REG_WRITE(ah,
2397 			  AR9271_RESET_POWER_DOWN_CONTROL,
2398 			  AR9271_RADIO_RF_RST);
2399 		udelay(50);
2400 	}
2401 
2402 	if (!ath9k_hw_chip_reset(ah, chan)) {
2403 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Chip reset failed\n");
2404 		return -EINVAL;
2405 	}
2406 
2407 	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2408 		ah->htc_reset_init = false;
2409 		REG_WRITE(ah,
2410 			  AR9271_RESET_POWER_DOWN_CONTROL,
2411 			  AR9271_GATE_MAC_CTL);
2412 		udelay(50);
2413 	}
2414 
2415 	/* Restore TSF */
2416 	if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2417 		ath9k_hw_settsf64(ah, tsf);
2418 
2419 	if (AR_SREV_9280_10_OR_LATER(ah))
2420 		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2421 
2422 	if (AR_SREV_9287_12_OR_LATER(ah)) {
2423 		/* Enable ASYNC FIFO */
2424 		REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2425 				AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2426 		REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2427 		REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2428 				AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2429 		REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2430 				AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2431 	}
2432 	r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width);
2433 	if (r)
2434 		return r;
2435 
2436 	/* Setup MFP options for CCMP */
2437 	if (AR_SREV_9280_20_OR_LATER(ah)) {
2438 		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2439 		 * frames when constructing CCMP AAD. */
2440 		REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2441 			      0xc7ff);
2442 		ah->sw_mgmt_crypto = false;
2443 	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
2444 		/* Disable hardware crypto for management frames */
2445 		REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2446 			    AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2447 		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2448 			    AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2449 		ah->sw_mgmt_crypto = true;
2450 	} else
2451 		ah->sw_mgmt_crypto = true;
2452 
2453 	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2454 		ath9k_hw_set_delta_slope(ah, chan);
2455 
2456 	if (AR_SREV_9280_10_OR_LATER(ah))
2457 		ath9k_hw_9280_spur_mitigate(ah, chan);
2458 	else
2459 		ath9k_hw_spur_mitigate(ah, chan);
2460 
2461 	ah->eep_ops->set_board_values(ah, chan);
2462 
2463 	ath9k_hw_decrease_chain_power(ah, chan);
2464 
2465 	REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr));
2466 	REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4)
2467 		  | macStaId1
2468 		  | AR_STA_ID1_RTS_USE_DEF
2469 		  | (ah->config.
2470 		     ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2471 		  | ah->sta_id1_defaults);
2472 	ath9k_hw_set_operating_mode(ah, ah->opmode);
2473 
2474 	REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
2475 	REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
2476 
2477 	REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2478 
2479 	REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
2480 	REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
2481 		  ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2482 
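	/* Acknowledge any interrupts left pending from before the reset. */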
2483 	REG_WRITE(ah, AR_ISR, ~0);
2484 
2485 	REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2486 
2487 	if (AR_SREV_9280_10_OR_LATER(ah))
2488 		ath9k_hw_ar9280_set_channel(ah, chan);
2489 	else
2490 		if (!(ath9k_hw_set_channel(ah, chan)))
2491 			return -EIO;
2492 
2493 	for (i = 0; i < AR_NUM_DCU; i++)
2494 		REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2495 
2496 	ah->intr_txqs = 0;
2497 	for (i = 0; i < ah->caps.total_queues; i++)
2498 		ath9k_hw_resettxqueue(ah, i);
2499 
2500 	ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2501 	ath9k_hw_init_qos(ah);
2502 
2503 	if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2504 		ath9k_enable_rfkill(ah);
2505 
2506 	ath9k_hw_init_user_settings(ah);
2507 
2508 	if (AR_SREV_9287_12_OR_LATER(ah)) {
2509 		REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2510 			  AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2511 		REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2512 			  AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2513 		REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2514 			  AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2515 
2516 		REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2517 		REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2518 
2519 		REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2520 			    AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2521 		REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2522 			      AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2523 	}
2524 	if (AR_SREV_9287_12_OR_LATER(ah)) {
2525 		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2526 				AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2527 	}
2528 
2529 	REG_WRITE(ah, AR_STA_ID1,
2530 		  REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2531 
2532 	ath9k_hw_set_dma(ah);
2533 
2534 	REG_WRITE(ah, AR_OBS, 8);
2535 
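	/*
	 * RX interrupt mitigation: coalesce per-frame RX interrupts with the
	 * "first"/"last" packet timers instead of firing on every frame.
	 */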
2536 	if (ah->config.intr_mitigation) {
2537 		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2538 		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2539 	}
2540 
2541 	ath9k_hw_init_bb(ah, chan);
2542 
2543 	if (!ath9k_hw_init_cal(ah, chan))
2544 		return -EIO;
2545 
2546 	rx_chainmask = ah->rxchainmask;
2547 	if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2548 		REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2549 		REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2550 	}
2551 
2552 	REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2553 
2554 	/*
2555 	 * For big endian systems turn on swapping for descriptors
2556 	 */
2557 	if (AR_SREV_9100(ah)) {
2558 		u32 mask;
2559 		mask = REG_READ(ah, AR_CFG);
2560 		if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2561 			DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2562 				"CFG Byte Swap Set 0x%x\n", mask);
2563 		} else {
2564 			mask =
2565 				INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2566 			REG_WRITE(ah, AR_CFG, mask);
2567 			DPRINTF(ah->ah_sc, ATH_DBG_RESET,
2568 				"Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2569 		}
2570 	} else {
2571 		/* Configure AR9271 target WLAN */
2572 		if (AR_SREV_9271(ah))
2573 			REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2574 #ifdef __BIG_ENDIAN
2575 		else
2576 			REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2577 #endif
2578 	}
2579 
2580 	if (ah->ah_sc->sc_flags & SC_OP_BTCOEX_ENABLED)
2581 		ath9k_hw_btcoex_enable(ah);
2582 
2583 	return 0;
2584 }
2585 
2586 /************************/
2587 /* Key Cache Management */
2588 /************************/
2589 
2590 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2591 {
2592 	u32 keyType;
2593 
2594 	if (entry >= ah->caps.keycache_size) {
2595 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2596 			"keychache entry %u out of range\n", entry);
2597 		return false;
2598 	}
2599 
2600 	keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2601 
2602 	REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2603 	REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2604 	REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2605 	REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2606 	REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2607 	REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2608 	REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2609 	REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2610 
2611 	if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2612 		u16 micentry = entry + 64;
2613 
2614 		REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2615 		REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2616 		REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2617 		REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2618 
2619 	}
2620 
2621 	return true;
2622 }
2623 
2624 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2625 {
2626 	u32 macHi, macLo;
2627 
2628 	if (entry >= ah->caps.keycache_size) {
2629 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2630 			"keychache entry %u out of range\n", entry);
2631 		return false;
2632 	}
2633 
2634 	if (mac != NULL) {
2635 		macHi = (mac[5] << 8) | mac[4];
2636 		macLo = (mac[3] << 24) |
2637 			(mac[2] << 16) |
2638 			(mac[1] << 8) |
2639 			mac[0];
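		/*
		 * The key cache MAC registers hold the address shifted right
		 * by one bit; the individual/group bit is dropped.
		 */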
2640 		macLo >>= 1;
2641 		macLo |= (macHi & 1) << 31;
2642 		macHi >>= 1;
2643 	} else {
2644 		macLo = macHi = 0;
2645 	}
2646 	REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2647 	REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2648 
2649 	return true;
2650 }
2651 
2652 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2653 				 const struct ath9k_keyval *k,
2654 				 const u8 *mac)
2655 {
2656 	const struct ath9k_hw_capabilities *pCap = &ah->caps;
2657 	u32 key0, key1, key2, key3, key4;
2658 	u32 keyType;
2659 
2660 	if (entry >= pCap->keycache_size) {
2661 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2662 			"keycache entry %u out of range\n", entry);
2663 		return false;
2664 	}
2665 
2666 	switch (k->kv_type) {
2667 	case ATH9K_CIPHER_AES_OCB:
2668 		keyType = AR_KEYTABLE_TYPE_AES;
2669 		break;
2670 	case ATH9K_CIPHER_AES_CCM:
2671 		if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2672 			DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2673 				"AES-CCM not supported by mac rev 0x%x\n",
2674 				ah->hw_version.macRev);
2675 			return false;
2676 		}
2677 		keyType = AR_KEYTABLE_TYPE_CCM;
2678 		break;
2679 	case ATH9K_CIPHER_TKIP:
2680 		keyType = AR_KEYTABLE_TYPE_TKIP;
2681 		if (ATH9K_IS_MIC_ENABLED(ah)
2682 		    && entry + 64 >= pCap->keycache_size) {
2683 			DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2684 				"entry %u inappropriate for TKIP\n", entry);
2685 			return false;
2686 		}
2687 		break;
2688 	case ATH9K_CIPHER_WEP:
2689 		if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2690 			DPRINTF(ah->ah_sc, ATH_DBG_ANY,
2691 				"WEP key length %u too small\n", k->kv_len);
2692 			return false;
2693 		}
2694 		if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2695 			keyType = AR_KEYTABLE_TYPE_40;
2696 		else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2697 			keyType = AR_KEYTABLE_TYPE_104;
2698 		else
2699 			keyType = AR_KEYTABLE_TYPE_128;
2700 		break;
2701 	case ATH9K_CIPHER_CLR:
2702 		keyType = AR_KEYTABLE_TYPE_CLR;
2703 		break;
2704 	default:
2705 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2706 			"cipher %u not supported\n", k->kv_type);
2707 		return false;
2708 	}
2709 
2710 	key0 = get_unaligned_le32(k->kv_val + 0);
2711 	key1 = get_unaligned_le16(k->kv_val + 4);
2712 	key2 = get_unaligned_le32(k->kv_val + 6);
2713 	key3 = get_unaligned_le16(k->kv_val + 10);
2714 	key4 = get_unaligned_le32(k->kv_val + 12);
2715 	if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2716 		key4 &= 0xff;
2717 
2718 	/*
2719 	 * Note: Key cache registers access special memory area that requires
2720 	 * two 32-bit writes to actually update the values in the internal
2721 	 * memory. Consequently, the exact order and pairs used here must be
2722 	 * maintained.
2723 	 */
2724 
2725 	if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2726 		u16 micentry = entry + 64;
2727 
2728 		/*
2729 		 * Write inverted key[47:0] first to avoid Michael MIC errors
2730 		 * on frames that could be sent or received at the same time.
2731 		 * The correct key will be written in the end once everything
2732 		 * else is ready.
2733 		 */
2734 		REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2735 		REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2736 
2737 		/* Write key[95:48] */
2738 		REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2739 		REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2740 
2741 		/* Write key[127:96] and key type */
2742 		REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2743 		REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2744 
2745 		/* Write MAC address for the entry */
2746 		(void) ath9k_hw_keysetmac(ah, entry, mac);
2747 
2748 		if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2749 			/*
2750 			 * TKIP uses two key cache entries:
2751 			 * Michael MIC TX/RX keys in the same key cache entry
2752 			 * (idx = main index + 64):
2753 			 * key0 [31:0] = RX key [31:0]
2754 			 * key1 [15:0] = TX key [31:16]
2755 			 * key1 [31:16] = reserved
2756 			 * key2 [31:0] = RX key [63:32]
2757 			 * key3 [15:0] = TX key [15:0]
2758 			 * key3 [31:16] = reserved
2759 			 * key4 [31:0] = TX key [63:32]
2760 			 */
2761 			u32 mic0, mic1, mic2, mic3, mic4;
2762 
2763 			mic0 = get_unaligned_le32(k->kv_mic + 0);
2764 			mic2 = get_unaligned_le32(k->kv_mic + 4);
2765 			mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2766 			mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2767 			mic4 = get_unaligned_le32(k->kv_txmic + 4);
2768 
2769 			/* Write RX[31:0] and TX[31:16] */
2770 			REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2771 			REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2772 
2773 			/* Write RX[63:32] and TX[15:0] */
2774 			REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2775 			REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2776 
2777 			/* Write TX[63:32] and keyType(reserved) */
2778 			REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2779 			REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2780 				  AR_KEYTABLE_TYPE_CLR);
2781 
2782 		} else {
2783 			/*
2784 			 * TKIP uses four key cache entries (two for group
2785 			 * keys):
2786 			 * Michael MIC TX/RX keys are in different key cache
2787 			 * entries (idx = main index + 64 for TX and
2788 			 * main index + 32 + 96 for RX):
2789 			 * key0 [31:0] = TX/RX MIC key [31:0]
2790 			 * key1 [31:0] = reserved
2791 			 * key2 [31:0] = TX/RX MIC key [63:32]
2792 			 * key3 [31:0] = reserved
2793 			 * key4 [31:0] = reserved
2794 			 *
2795 			 * Upper layer code will call this function separately
2796 			 * for TX and RX keys when these registers offsets are
2797 			 * used.
2798 			 */
2799 			u32 mic0, mic2;
2800 
2801 			mic0 = get_unaligned_le32(k->kv_mic + 0);
2802 			mic2 = get_unaligned_le32(k->kv_mic + 4);
2803 
2804 			/* Write MIC key[31:0] */
2805 			REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2806 			REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2807 
2808 			/* Write MIC key[63:32] */
2809 			REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2810 			REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2811 
2812 			/* Write TX[63:32] and keyType(reserved) */
2813 			REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2814 			REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2815 				  AR_KEYTABLE_TYPE_CLR);
2816 		}
2817 
2818 		/* MAC address registers are reserved for the MIC entry */
2819 		REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2820 		REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2821 
2822 		/*
2823 		 * Write the correct (un-inverted) key[47:0] last to enable
2824 		 * TKIP now that all other registers are set with correct
2825 		 * values.
2826 		 */
2827 		REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2828 		REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2829 	} else {
2830 		/* Write key[47:0] */
2831 		REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2832 		REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2833 
2834 		/* Write key[95:48] */
2835 		REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2836 		REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2837 
2838 		/* Write key[127:96] and key type */
2839 		REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2840 		REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2841 
2842 		/* Write MAC address for the entry */
2843 		(void) ath9k_hw_keysetmac(ah, entry, mac);
2844 	}
2845 
2846 	return true;
2847 }
2848 
2849 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2850 {
2851 	if (entry < ah->caps.keycache_size) {
2852 		u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2853 		if (val & AR_KEYTABLE_VALID)
2854 			return true;
2855 	}
2856 	return false;
2857 }
2858 
2859 /******************************/
2860 /* Power Management (Chipset) */
2861 /******************************/
2862 
2863 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2864 {
2865 	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2866 	if (setChip) {
2867 		REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2868 			    AR_RTC_FORCE_WAKE_EN);
2869 		if (!AR_SREV_9100(ah))
2870 			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2871 
2872 		REG_CLR_BIT(ah, (AR_RTC_RESET),
2873 			    AR_RTC_RESET_EN);
2874 	}
2875 }
2876 
2877 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2878 {
2879 	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2880 	if (setChip) {
2881 		struct ath9k_hw_capabilities *pCap = &ah->caps;
2882 
2883 		if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2884 			REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2885 				  AR_RTC_FORCE_WAKE_ON_INT);
2886 		} else {
2887 			REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2888 				    AR_RTC_FORCE_WAKE_EN);
2889 		}
2890 	}
2891 }
2892 
2893 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2894 {
2895 	u32 val;
2896 	int i;
2897 
2898 	if (setChip) {
2899 		if ((REG_READ(ah, AR_RTC_STATUS) &
2900 		     AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2901 			if (ath9k_hw_set_reset_reg(ah,
2902 					   ATH9K_RESET_POWER_ON) != true) {
2903 				return false;
2904 			}
2905 		}
2906 		if (AR_SREV_9100(ah))
2907 			REG_SET_BIT(ah, AR_RTC_RESET,
2908 				    AR_RTC_RESET_EN);
2909 
2910 		REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2911 			    AR_RTC_FORCE_WAKE_EN);
2912 		udelay(50);
2913 
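		/* Poll the RTC status every 50us until it reports ON. */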
2914 		for (i = POWER_UP_TIME / 50; i > 0; i--) {
2915 			val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2916 			if (val == AR_RTC_STATUS_ON)
2917 				break;
2918 			udelay(50);
2919 			REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2920 				    AR_RTC_FORCE_WAKE_EN);
2921 		}
2922 		if (i == 0) {
2923 			DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2924 				"Failed to wakeup in %uus\n", POWER_UP_TIME / 20);
2925 			return false;
2926 		}
2927 	}
2928 
2929 	REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2930 
2931 	return true;
2932 }
2933 
2934 static bool ath9k_hw_setpower_nolock(struct ath_hw *ah,
2935 				     enum ath9k_power_mode mode)
2936 {
2937 	int status = true, setChip = true;
2938 	static const char *modes[] = {
2939 		"AWAKE",
2940 		"FULL-SLEEP",
2941 		"NETWORK SLEEP",
2942 		"UNDEFINED"
2943 	};
2944 
2945 	if (ah->power_mode == mode)
2946 		return status;
2947 
2948 	DPRINTF(ah->ah_sc, ATH_DBG_RESET, "%s -> %s\n",
2949 		modes[ah->power_mode], modes[mode]);
2950 
2951 	switch (mode) {
2952 	case ATH9K_PM_AWAKE:
2953 		status = ath9k_hw_set_power_awake(ah, setChip);
2954 		break;
2955 	case ATH9K_PM_FULL_SLEEP:
2956 		ath9k_set_power_sleep(ah, setChip);
2957 		ah->chip_fullsleep = true;
2958 		break;
2959 	case ATH9K_PM_NETWORK_SLEEP:
2960 		ath9k_set_power_network_sleep(ah, setChip);
2961 		break;
2962 	default:
2963 		DPRINTF(ah->ah_sc, ATH_DBG_FATAL,
2964 			"Unknown power mode %u\n", mode);
2965 		return false;
2966 	}
2967 	ah->power_mode = mode;
2968 
2969 	return status;
2970 }
2971 
2972 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2973 {
2974 	unsigned long flags;
2975 	bool ret;
2976 
2977 	spin_lock_irqsave(&ah->ah_sc->sc_pm_lock, flags);
2978 	ret = ath9k_hw_setpower_nolock(ah, mode);
2979 	spin_unlock_irqrestore(&ah->ah_sc->sc_pm_lock, flags);
2980 
2981 	return ret;
2982 }
2983 
2984 void ath9k_ps_wakeup(struct ath_softc *sc)
2985 {
2986 	unsigned long flags;
2987 
2988 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
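	/* Only the first wakeup reference actually powers the chip up. */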
2989 	if (++sc->ps_usecount != 1)
2990 		goto unlock;
2991 
2992 	ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_AWAKE);
2993 
2994  unlock:
2995 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
2996 }
2997 
2998 void ath9k_ps_restore(struct ath_softc *sc)
2999 {
3000 	unsigned long flags;
3001 
3002 	spin_lock_irqsave(&sc->sc_pm_lock, flags);
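	/* Only the last reference may let the chip drop back to network sleep. */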
3003 	if (--sc->ps_usecount != 0)
3004 		goto unlock;
3005 
3006 	if (sc->ps_enabled &&
3007 	    !(sc->sc_flags & (SC_OP_WAIT_FOR_BEACON |
3008 			      SC_OP_WAIT_FOR_CAB |
3009 			      SC_OP_WAIT_FOR_PSPOLL_DATA |
3010 			      SC_OP_WAIT_FOR_TX_ACK)))
3011 		ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_NETWORK_SLEEP);
3012 
3013  unlock:
3014 	spin_unlock_irqrestore(&sc->sc_pm_lock, flags);
3015 }
3016 
3017 /*
3018  * Helper for ASPM support.
3019  *
3020  * Disable PLL when in L0s as well as receiver clock when in L1.
3021  * This power saving option must be enabled through the SerDes.
3022  *
3023  * Programming the SerDes must go through the same 288 bit serial shift
3024  * register as the other analog registers.  Hence the 9 writes.
3025  */
3026 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
3027 {
3028 	u8 i;
3029 	u32 val;
3030 
3031 	if (!ah->is_pciexpress)
3032 		return;
3033 
3034 	/* Do not touch SerDes registers */
3035 	if (ah->config.pcie_powersave_enable == 2)
3036 		return;
3037 
3038 	/* Nothing to do on restore for 11N */
3039 	if (!restore) {
3040 		if (AR_SREV_9280_20_OR_LATER(ah)) {
3041 			/*
3042 			 * AR9280 2.0 or later chips use SerDes values from the
3043 			 * initvals.h initialized depending on chipset during
3044 			 * ath9k_hw_init()
3045 			 */
3046 			for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
3047 				REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3048 					  INI_RA(&ah->iniPcieSerdes, i, 1));
3049 			}
3050 		} else if (AR_SREV_9280(ah) &&
3051 			   (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3052 			REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3053 			REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3054 
3055 			/* RX shut off when elecidle is asserted */
3056 			REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3057 			REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3058 			REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3059 
3060 			/* Shut off CLKREQ active in L1 */
3061 			if (ah->config.pcie_clock_req)
3062 				REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3063 			else
3064 				REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3065 
3066 			REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3067 			REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3068 			REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3069 
3070 			/* Load the new settings */
3071 			REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3072 
3073 		} else {
3074 			REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3075 			REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3076 
3077 			/* RX shut off when elecidle is asserted */
3078 			REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3079 			REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3080 			REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3081 
3082 			/*
3083 			 * Ignore ah->ah_config.pcie_clock_req setting for
3084 			 * pre-AR9280 11n
3085 			 */
3086 			REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3087 
3088 			REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3089 			REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3090 			REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3091 
3092 			/* Load the new settings */
3093 			REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3094 		}
3095 
3096 		udelay(1000);
3097 
3098 		/* set bit 19 to allow forcing of pcie core into L1 state */
3099 		REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3100 
3101 		/* Several PCIe workarounds to ensure proper behaviour */
3102 		if (ah->config.pcie_waen) {
3103 			val = ah->config.pcie_waen;
3104 			if (!power_off)
3105 				val &= (~AR_WA_D3_L1_DISABLE);
3106 		} else {
3107 			if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3108 			    AR_SREV_9287(ah)) {
3109 				val = AR9285_WA_DEFAULT;
3110 				if (!power_off)
3111 					val &= (~AR_WA_D3_L1_DISABLE);
3112 			} else if (AR_SREV_9280(ah)) {
3113 				/*
3114 				 * On AR9280 chips bit 22 of 0x4004 needs to be
3115 				 * set otherwise card may disappear.
3116 				 */
3117 				val = AR9280_WA_DEFAULT;
3118 				if (!power_off)
3119 					val &= (~AR_WA_D3_L1_DISABLE);
3120 			} else
3121 				val = AR_WA_DEFAULT;
3122 		}
3123 
3124 		REG_WRITE(ah, AR_WA, val);
3125 	}
3126 
3127 	if (power_off) {
3128 		/*
3129 		 * Set PCIe workaround bits
3130 		 * bit 14 in WA register (disable L1) should only
3131 		 * be set when device enters D3 and be cleared
3132 		 * when device comes back to D0.
3133 		 */
3134 		if (ah->config.pcie_waen) {
3135 			if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
3136 				REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3137 		} else {
3138 			if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3139 			      AR_SREV_9287(ah)) &&
3140 			     (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
3141 			    (AR_SREV_9280(ah) &&
3142 			     (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
3143 				REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3144 			}
3145 		}
3146 	}
3147 }
3148 
3149 /**********************/
3150 /* Interrupt Handling */
3151 /**********************/
3152 
3153 bool ath9k_hw_intrpend(struct ath_hw *ah)
3154 {
3155 	u32 host_isr;
3156 
3157 	if (AR_SREV_9100(ah))
3158 		return true;
3159 
3160 	host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3161 	if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3162 		return true;
3163 
3164 	host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3165 	if ((host_isr & AR_INTR_SYNC_DEFAULT)
3166 	    && (host_isr != AR_INTR_SPURIOUS))
3167 		return true;
3168 
3169 	return false;
3170 }
3171 
3172 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3173 {
3174 	u32 isr = 0;
3175 	u32 mask2 = 0;
3176 	struct ath9k_hw_capabilities *pCap = &ah->caps;
3177 	u32 sync_cause = 0;
3178 	bool fatal_int = false;
3179 
3180 	if (!AR_SREV_9100(ah)) {
3181 		if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3182 			if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3183 			    == AR_RTC_STATUS_ON) {
3184 				isr = REG_READ(ah, AR_ISR);
3185 			}
3186 		}
3187 
3188 		sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3189 			AR_INTR_SYNC_DEFAULT;
3190 
3191 		*masked = 0;
3192 
3193 		if (!isr && !sync_cause)
3194 			return false;
3195 	} else {
3196 		*masked = 0;
3197 		isr = REG_READ(ah, AR_ISR);
3198 	}
3199 
3200 	if (isr) {
3201 		if (isr & AR_ISR_BCNMISC) {
3202 			u32 isr2;
3203 			isr2 = REG_READ(ah, AR_ISR_S2);
3204 			if (isr2 & AR_ISR_S2_TIM)
3205 				mask2 |= ATH9K_INT_TIM;
3206 			if (isr2 & AR_ISR_S2_DTIM)
3207 				mask2 |= ATH9K_INT_DTIM;
3208 			if (isr2 & AR_ISR_S2_DTIMSYNC)
3209 				mask2 |= ATH9K_INT_DTIMSYNC;
3210 			if (isr2 & (AR_ISR_S2_CABEND))
3211 				mask2 |= ATH9K_INT_CABEND;
3212 			if (isr2 & AR_ISR_S2_GTT)
3213 				mask2 |= ATH9K_INT_GTT;
3214 			if (isr2 & AR_ISR_S2_CST)
3215 				mask2 |= ATH9K_INT_CST;
3216 			if (isr2 & AR_ISR_S2_TSFOOR)
3217 				mask2 |= ATH9K_INT_TSFOOR;
3218 		}
3219 
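		/* AR_ISR_RAC is the read-and-clear shadow of the primary ISR. */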
3220 		isr = REG_READ(ah, AR_ISR_RAC);
3221 		if (isr == 0xffffffff) {
3222 			*masked = 0;
3223 			return false;
3224 		}
3225 
3226 		*masked = isr & ATH9K_INT_COMMON;
3227 
3228 		if (ah->config.intr_mitigation) {
3229 			if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3230 				*masked |= ATH9K_INT_RX;
3231 		}
3232 
3233 		if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3234 			*masked |= ATH9K_INT_RX;
3235 		if (isr &
3236 		    (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3237 		     AR_ISR_TXEOL)) {
3238 			u32 s0_s, s1_s;
3239 
3240 			*masked |= ATH9K_INT_TX;
3241 
3242 			s0_s = REG_READ(ah, AR_ISR_S0_S);
3243 			ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3244 			ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3245 
3246 			s1_s = REG_READ(ah, AR_ISR_S1_S);
3247 			ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3248 			ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3249 		}
3250 
3251 		if (isr & AR_ISR_RXORN) {
3252 			DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3253 				"receive FIFO overrun interrupt\n");
3254 		}
3255 
3256 		if (!AR_SREV_9100(ah)) {
3257 			if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3258 				u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3259 				if (isr5 & AR_ISR_S5_TIM_TIMER)
3260 					*masked |= ATH9K_INT_TIM_TIMER;
3261 			}
3262 		}
3263 
3264 		*masked |= mask2;
3265 	}
3266 
3267 	if (AR_SREV_9100(ah))
3268 		return true;
3269 
3270 	if (isr & AR_ISR_GENTMR) {
3271 		u32 s5_s;
3272 
3273 		s5_s = REG_READ(ah, AR_ISR_S5_S);
3274 		if (isr & AR_ISR_GENTMR) {
3275 			ah->intr_gen_timer_trigger =
3276 				MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3277 
3278 			ah->intr_gen_timer_thresh =
3279 				MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3280 
3281 			if (ah->intr_gen_timer_trigger)
3282 				*masked |= ATH9K_INT_GENTIMER;
3283 
3284 		}
3285 	}
3286 
3287 	if (sync_cause) {
3288 		fatal_int =
3289 			(sync_cause &
3290 			 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3291 			? true : false;
3292 
3293 		if (fatal_int) {
3294 			if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3295 				DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3296 					"received PCI FATAL interrupt\n");
3297 			}
3298 			if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3299 				DPRINTF(ah->ah_sc, ATH_DBG_ANY,
3300 					"received PCI PERR interrupt\n");
3301 			}
3302 			*masked |= ATH9K_INT_FATAL;
3303 		}
3304 		if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3305 			DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3306 				"AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3307 			REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3308 			REG_WRITE(ah, AR_RC, 0);
3309 			*masked |= ATH9K_INT_FATAL;
3310 		}
3311 		if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3312 			DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT,
3313 				"AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3314 		}
3315 
3316 		REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3317 		(void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3318 	}
3319 
3320 	return true;
3321 }
3322 
3323 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3324 {
3325 	u32 omask = ah->mask_reg;
3326 	u32 mask, mask2;
3327 	struct ath9k_hw_capabilities *pCap = &ah->caps;
3328 
3329 	DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3330 
3331 	if (omask & ATH9K_INT_GLOBAL) {
3332 		DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "disable IER\n");
3333 		REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3334 		(void) REG_READ(ah, AR_IER);
3335 		if (!AR_SREV_9100(ah)) {
3336 			REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3337 			(void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3338 
3339 			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3340 			(void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3341 		}
3342 	}
3343 
3344 	mask = ints & ATH9K_INT_COMMON;
3345 	mask2 = 0;
3346 
3347 	if (ints & ATH9K_INT_TX) {
3348 		if (ah->txok_interrupt_mask)
3349 			mask |= AR_IMR_TXOK;
3350 		if (ah->txdesc_interrupt_mask)
3351 			mask |= AR_IMR_TXDESC;
3352 		if (ah->txerr_interrupt_mask)
3353 			mask |= AR_IMR_TXERR;
3354 		if (ah->txeol_interrupt_mask)
3355 			mask |= AR_IMR_TXEOL;
3356 	}
3357 	if (ints & ATH9K_INT_RX) {
3358 		mask |= AR_IMR_RXERR;
3359 		if (ah->config.intr_mitigation)
3360 			mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3361 		else
3362 			mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3363 		if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3364 			mask |= AR_IMR_GENTMR;
3365 	}
3366 
3367 	if (ints & (ATH9K_INT_BMISC)) {
3368 		mask |= AR_IMR_BCNMISC;
3369 		if (ints & ATH9K_INT_TIM)
3370 			mask2 |= AR_IMR_S2_TIM;
3371 		if (ints & ATH9K_INT_DTIM)
3372 			mask2 |= AR_IMR_S2_DTIM;
3373 		if (ints & ATH9K_INT_DTIMSYNC)
3374 			mask2 |= AR_IMR_S2_DTIMSYNC;
3375 		if (ints & ATH9K_INT_CABEND)
3376 			mask2 |= AR_IMR_S2_CABEND;
3377 		if (ints & ATH9K_INT_TSFOOR)
3378 			mask2 |= AR_IMR_S2_TSFOOR;
3379 	}
3380 
3381 	if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3382 		mask |= AR_IMR_BCNMISC;
3383 		if (ints & ATH9K_INT_GTT)
3384 			mask2 |= AR_IMR_S2_GTT;
3385 		if (ints & ATH9K_INT_CST)
3386 			mask2 |= AR_IMR_S2_CST;
3387 	}
3388 
3389 	DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3390 	REG_WRITE(ah, AR_IMR, mask);
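	/* Update only the beacon/timer related bits of AR_IMR_S2. */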
3391 	mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3392 					   AR_IMR_S2_DTIM |
3393 					   AR_IMR_S2_DTIMSYNC |
3394 					   AR_IMR_S2_CABEND |
3395 					   AR_IMR_S2_CABTO |
3396 					   AR_IMR_S2_TSFOOR |
3397 					   AR_IMR_S2_GTT | AR_IMR_S2_CST);
3398 	REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3399 	ah->mask_reg = ints;
3400 
3401 	if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3402 		if (ints & ATH9K_INT_TIM_TIMER)
3403 			REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3404 		else
3405 			REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3406 	}
3407 
3408 	if (ints & ATH9K_INT_GLOBAL) {
3409 		DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "enable IER\n");
3410 		REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3411 		if (!AR_SREV_9100(ah)) {
3412 			REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3413 				  AR_INTR_MAC_IRQ);
3414 			REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3415 
3416 
3417 			REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3418 				  AR_INTR_SYNC_DEFAULT);
3419 			REG_WRITE(ah, AR_INTR_SYNC_MASK,
3420 				  AR_INTR_SYNC_DEFAULT);
3421 		}
3422 		DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3423 			 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3424 	}
3425 
3426 	return omask;
3427 }
3428 
3429 /*******************/
3430 /* Beacon Handling */
3431 /*******************/
3432 
3433 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3434 {
3435 	int flags = 0;
3436 
3437 	ah->beacon_interval = beacon_period;
3438 
3439 	switch (ah->opmode) {
3440 	case NL80211_IFTYPE_STATION:
3441 	case NL80211_IFTYPE_MONITOR:
3442 		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3443 		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3444 		REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3445 		flags |= AR_TBTT_TIMER_EN;
3446 		break;
3447 	case NL80211_IFTYPE_ADHOC:
3448 	case NL80211_IFTYPE_MESH_POINT:
3449 		REG_SET_BIT(ah, AR_TXCFG,
3450 			    AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3451 		REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3452 			  TU_TO_USEC(next_beacon +
3453 				     (ah->atim_window ? ah->
3454 				      atim_window : 1)));
3455 		flags |= AR_NDP_TIMER_EN;
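		/* fall through: IBSS/mesh also needs the AP beacon timers */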
3456 	case NL80211_IFTYPE_AP:
3457 		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3458 		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3459 			  TU_TO_USEC(next_beacon -
3460 				     ah->config.
3461 				     dma_beacon_response_time));
3462 		REG_WRITE(ah, AR_NEXT_SWBA,
3463 			  TU_TO_USEC(next_beacon -
3464 				     ah->config.
3465 				     sw_beacon_response_time));
3466 		flags |=
3467 			AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3468 		break;
3469 	default:
3470 		DPRINTF(ah->ah_sc, ATH_DBG_BEACON,
3471 			"%s: unsupported opmode: %d\n",
3472 			__func__, ah->opmode);
3473 		return;
3474 		break;
3475 	}
3476 
3477 	REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3478 	REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3479 	REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3480 	REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3481 
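	/* beacon_period may carry flag bits; strip ENA and honor RESET_TSF. */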
3482 	beacon_period &= ~ATH9K_BEACON_ENA;
3483 	if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3484 		beacon_period &= ~ATH9K_BEACON_RESET_TSF;
3485 		ath9k_hw_reset_tsf(ah);
3486 	}
3487 
3488 	REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3489 }
3490 
3491 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3492 				    const struct ath9k_beacon_state *bs)
3493 {
3494 	u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3495 	struct ath9k_hw_capabilities *pCap = &ah->caps;
3496 
3497 	REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3498 
3499 	REG_WRITE(ah, AR_BEACON_PERIOD,
3500 		  TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3501 	REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3502 		  TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3503 
3504 	REG_RMW_FIELD(ah, AR_RSSI_THR,
3505 		      AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3506 
3507 	beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3508 
3509 	if (bs->bs_sleepduration > beaconintval)
3510 		beaconintval = bs->bs_sleepduration;
3511 
3512 	dtimperiod = bs->bs_dtimperiod;
3513 	if (bs->bs_sleepduration > dtimperiod)
3514 		dtimperiod = bs->bs_sleepduration;
3515 
3516 	if (beaconintval == dtimperiod)
3517 		nextTbtt = bs->bs_nextdtim;
3518 	else
3519 		nextTbtt = bs->bs_nexttbtt;
3520 
3521 	DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3522 	DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3523 	DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3524 	DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3525 
3526 	REG_WRITE(ah, AR_NEXT_DTIM,
3527 		  TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3528 	REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3529 
3530 	REG_WRITE(ah, AR_SLEEP1,
3531 		  SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3532 		  | AR_SLEEP1_ASSUME_DTIM);
3533 
3534 	if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3535 		beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3536 	else
3537 		beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3538 
3539 	REG_WRITE(ah, AR_SLEEP2,
3540 		  SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3541 
3542 	REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3543 	REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3544 
3545 	REG_SET_BIT(ah, AR_TIMER_MODE,
3546 		    AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3547 		    AR_DTIM_TIMER_EN);
3548 
3549 	/* TSF Out of Range Threshold */
3550 	REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3551 }
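
/*
 * Illustrative sketch, not part of the driver: how a caller could fill a
 * struct ath9k_beacon_state before handing it to
 * ath9k_hw_set_sta_beacon_timers().  The interval, DTIM period and
 * threshold values below are invented for the example; real values come
 * from the association state.
 */
static void __maybe_unused example_sta_beacon_setup(struct ath_hw *ah,
						    u32 nexttbtt, u32 nextdtim)
{
	struct ath9k_beacon_state bs;

	memset(&bs, 0, sizeof(bs));
	bs.bs_nexttbtt = nexttbtt;		/* next TBTT, in TU */
	bs.bs_nextdtim = nextdtim;		/* next DTIM, in TU */
	bs.bs_intval = 100;			/* beacon interval, in TU */
	bs.bs_dtimperiod = 100;			/* DTIM on every beacon here */
	bs.bs_sleepduration = 100;		/* max sleep between wakeups */
	bs.bs_bmissthreshold = 10;		/* beacon-miss threshold */
	bs.bs_tsfoor_threshold = 0x4240;	/* example TSF OOR threshold */

	ath9k_hw_set_sta_beacon_timers(ah, &bs);
}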
3552 
3553 /*******************/
3554 /* HW Capabilities */
3555 /*******************/
3556 
3557 void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3558 {
3559 	struct ath9k_hw_capabilities *pCap = &ah->caps;
3560 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3561 	struct ath_btcoex_info *btcoex_info = &ah->ah_sc->btcoex_info;
3562 
3563 	u16 capField = 0, eeval;
3564 
3565 	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3566 	regulatory->current_rd = eeval;
3567 
3568 	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3569 	if (AR_SREV_9285_10_OR_LATER(ah))
3570 		eeval |= AR9285_RDEXT_DEFAULT;
3571 	regulatory->current_rd_ext = eeval;
3572 
3573 	capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3574 
3575 	if (ah->opmode != NL80211_IFTYPE_AP &&
3576 	    ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3577 		if (regulatory->current_rd == 0x64 ||
3578 		    regulatory->current_rd == 0x65)
3579 			regulatory->current_rd += 5;
3580 		else if (regulatory->current_rd == 0x41)
3581 			regulatory->current_rd = 0x43;
3582 		DPRINTF(ah->ah_sc, ATH_DBG_REGULATORY,
3583 			"regdomain mapped to 0x%x\n", regulatory->current_rd);
3584 	}
3585 
3586 	eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3587 	bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3588 
3589 	if (eeval & AR5416_OPFLAGS_11A) {
3590 		set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3591 		if (ah->config.ht_enable) {
3592 			if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3593 				set_bit(ATH9K_MODE_11NA_HT20,
3594 					pCap->wireless_modes);
3595 			if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3596 				set_bit(ATH9K_MODE_11NA_HT40PLUS,
3597 					pCap->wireless_modes);
3598 				set_bit(ATH9K_MODE_11NA_HT40MINUS,
3599 					pCap->wireless_modes);
3600 			}
3601 		}
3602 	}
3603 
3604 	if (eeval & AR5416_OPFLAGS_11G) {
3605 		set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3606 		if (ah->config.ht_enable) {
3607 			if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3608 				set_bit(ATH9K_MODE_11NG_HT20,
3609 					pCap->wireless_modes);
3610 			if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3611 				set_bit(ATH9K_MODE_11NG_HT40PLUS,
3612 					pCap->wireless_modes);
3613 				set_bit(ATH9K_MODE_11NG_HT40MINUS,
3614 					pCap->wireless_modes);
3615 			}
3616 		}
3617 	}
3618 
3619 	pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3620 	/*
3621 	 * For AR9271 we will temporarily use the rx chainmask as read from
3622 	 * the EEPROM.
3623 	 */
3624 	if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3625 	    !(eeval & AR5416_OPFLAGS_11A) &&
3626 	    !(AR_SREV_9271(ah)))
3627 		/* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3628 		pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3629 	else
3630 		/* Use rx_chainmask from EEPROM. */
3631 		pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3632 
3633 	if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3634 		ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3635 
3636 	pCap->low_2ghz_chan = 2312;
3637 	pCap->high_2ghz_chan = 2732;
3638 
3639 	pCap->low_5ghz_chan = 4920;
3640 	pCap->high_5ghz_chan = 6100;
3641 
3642 	pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3643 	pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3644 	pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3645 
3646 	pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3647 	pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3648 	pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3649 
3650 	if (ah->config.ht_enable)
3651 		pCap->hw_caps |= ATH9K_HW_CAP_HT;
3652 	else
3653 		pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3654 
3655 	pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3656 	pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3657 	pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3658 	pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3659 
3660 	if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3661 		pCap->total_queues =
3662 			MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3663 	else
3664 		pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3665 
3666 	if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3667 		pCap->keycache_size =
3668 			1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3669 	else
3670 		pCap->keycache_size = AR_KEYTABLE_SIZE;
3671 
3672 	pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3673 	pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3674 
3675 	if (AR_SREV_9285_10_OR_LATER(ah))
3676 		pCap->num_gpio_pins = AR9285_NUM_GPIO;
3677 	else if (AR_SREV_9280_10_OR_LATER(ah))
3678 		pCap->num_gpio_pins = AR928X_NUM_GPIO;
3679 	else
3680 		pCap->num_gpio_pins = AR_NUM_GPIO;
3681 
3682 	if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3683 		pCap->hw_caps |= ATH9K_HW_CAP_CST;
3684 		pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3685 	} else {
3686 		pCap->rts_aggr_limit = (8 * 1024);
3687 	}
3688 
3689 	pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3690 
3691 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3692 	ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3693 	if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3694 		ah->rfkill_gpio =
3695 			MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3696 		ah->rfkill_polarity =
3697 			MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3698 
3699 		pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3700 	}
3701 #endif
3702 
3703 	pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3704 
3705 	if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3706 		pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3707 	else
3708 		pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3709 
3710 	if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3711 		pCap->reg_cap =
3712 			AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3713 			AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3714 			AR_EEPROM_EEREGCAP_EN_KK_U2 |
3715 			AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3716 	} else {
3717 		pCap->reg_cap =
3718 			AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3719 			AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3720 	}
3721 
3722 	pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3723 
3724 	pCap->num_antcfg_5ghz =
3725 		ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3726 	pCap->num_antcfg_2ghz =
3727 		ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3728 
3729 	if (AR_SREV_9280_10_OR_LATER(ah) &&
3730 	    ath_btcoex_supported(ah->hw_version.subsysid)) {
3731 		btcoex_info->btactive_gpio = ATH_BTACTIVE_GPIO;
3732 		btcoex_info->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3733 
3734 		if (AR_SREV_9285(ah)) {
3735 			btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_3WIRE;
3736 			btcoex_info->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3737 		} else {
3738 			btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_2WIRE;
3739 		}
3740 	} else {
3741 		btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_NONE;
3742 	}
3743 }
3744 
3745 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3746 			    u32 capability, u32 *result)
3747 {
3748 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3749 	switch (type) {
3750 	case ATH9K_CAP_CIPHER:
3751 		switch (capability) {
3752 		case ATH9K_CIPHER_AES_CCM:
3753 		case ATH9K_CIPHER_AES_OCB:
3754 		case ATH9K_CIPHER_TKIP:
3755 		case ATH9K_CIPHER_WEP:
3756 		case ATH9K_CIPHER_MIC:
3757 		case ATH9K_CIPHER_CLR:
3758 			return true;
3759 		default:
3760 			return false;
3761 		}
3762 	case ATH9K_CAP_TKIP_MIC:
3763 		switch (capability) {
3764 		case 0:
3765 			return true;
3766 		case 1:
3767 			return (ah->sta_id1_defaults &
3768 				AR_STA_ID1_CRPT_MIC_ENABLE) ? true : false;
3770 		}
3771 	case ATH9K_CAP_TKIP_SPLIT:
3772 		return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3773 			false : true;
3774 	case ATH9K_CAP_DIVERSITY:
3775 		return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3776 			AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3777 			true : false;
3778 	case ATH9K_CAP_MCAST_KEYSRCH:
3779 		switch (capability) {
3780 		case 0:
3781 			return true;
3782 		case 1:
3783 			if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3784 				return false;
3785 			} else {
3786 				return (ah->sta_id1_defaults &
3787 					AR_STA_ID1_MCAST_KSRCH) ? true :
3788 					false;
3789 			}
3790 		}
3791 		return false;
3792 	case ATH9K_CAP_TXPOW:
3793 		switch (capability) {
3794 		case 0:
3795 			return 0;
3796 		case 1:
3797 			*result = regulatory->power_limit;
3798 			return 0;
3799 		case 2:
3800 			*result = regulatory->max_power_level;
3801 			return 0;
3802 		case 3:
3803 			*result = regulatory->tp_scale;
3804 			return 0;
3805 		}
3806 		return false;
3807 	case ATH9K_CAP_DS:
3808 		return (AR_SREV_9280_20_OR_LATER(ah) &&
3809 			(ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3810 			? false : true;
3811 	default:
3812 		return false;
3813 	}
3814 }
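
/*
 * Illustrative sketch, not part of the driver: querying the current
 * regulatory TX power limit through ath9k_hw_getcapability().  For
 * ATH9K_CAP_TXPOW the interesting value comes back through *result
 * (ath9k generally keeps these values in half-dBm units).
 */
static u32 __maybe_unused example_get_power_limit(struct ath_hw *ah)
{
	u32 power_limit = 0;

	/* capability argument 1 selects regulatory->power_limit */
	ath9k_hw_getcapability(ah, ATH9K_CAP_TXPOW, 1, &power_limit);

	return power_limit;
}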
3815 
3816 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3817 			    u32 capability, u32 setting, int *status)
3818 {
3819 	u32 v;
3820 
3821 	switch (type) {
3822 	case ATH9K_CAP_TKIP_MIC:
3823 		if (setting)
3824 			ah->sta_id1_defaults |=
3825 				AR_STA_ID1_CRPT_MIC_ENABLE;
3826 		else
3827 			ah->sta_id1_defaults &=
3828 				~AR_STA_ID1_CRPT_MIC_ENABLE;
3829 		return true;
3830 	case ATH9K_CAP_DIVERSITY:
3831 		v = REG_READ(ah, AR_PHY_CCK_DETECT);
3832 		if (setting)
3833 			v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3834 		else
3835 			v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3836 		REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3837 		return true;
3838 	case ATH9K_CAP_MCAST_KEYSRCH:
3839 		if (setting)
3840 			ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3841 		else
3842 			ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3843 		return true;
3844 	default:
3845 		return false;
3846 	}
3847 }
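
/*
 * Illustrative sketch, not part of the driver: enabling or disabling the
 * baseband's fast antenna diversity with ath9k_hw_setcapability().  The
 * capability and status arguments are not used for ATH9K_CAP_DIVERSITY,
 * so 0 and NULL are passed.
 */
static void __maybe_unused example_set_diversity(struct ath_hw *ah, bool on)
{
	ath9k_hw_setcapability(ah, ATH9K_CAP_DIVERSITY, 0, on, NULL);
}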
3848 
3849 /****************************/
3850 /* GPIO / RFKILL / Antennae */
3851 /****************************/
3852 
3853 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3854 					 u32 gpio, u32 type)
3855 {
3856 	int addr;
3857 	u32 gpio_shift, tmp;
3858 
3859 	if (gpio > 11)
3860 		addr = AR_GPIO_OUTPUT_MUX3;
3861 	else if (gpio > 5)
3862 		addr = AR_GPIO_OUTPUT_MUX2;
3863 	else
3864 		addr = AR_GPIO_OUTPUT_MUX1;
3865 
3866 	gpio_shift = (gpio % 6) * 5;
3867 
3868 	if (AR_SREV_9280_20_OR_LATER(ah)
3869 	    || (addr != AR_GPIO_OUTPUT_MUX1)) {
3870 		REG_RMW(ah, addr, (type << gpio_shift),
3871 			(0x1f << gpio_shift));
3872 	} else {
3873 		tmp = REG_READ(ah, addr);
3874 		tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3875 		tmp &= ~(0x1f << gpio_shift);
3876 		tmp |= (type << gpio_shift);
3877 		REG_WRITE(ah, addr, tmp);
3878 	}
3879 }
3880 
3881 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3882 {
3883 	u32 gpio_shift;
3884 
3885 	ASSERT(gpio < ah->caps.num_gpio_pins);
3886 
3887 	gpio_shift = gpio << 1;
3888 
3889 	REG_RMW(ah,
3890 		AR_GPIO_OE_OUT,
3891 		(AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3892 		(AR_GPIO_OE_OUT_DRV << gpio_shift));
3893 }
3894 
3895 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3896 {
3897 #define MS_REG_READ(x, y) \
3898 	(MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3899 
3900 	if (gpio >= ah->caps.num_gpio_pins)
3901 		return 0xffffffff;
3902 
3903 	if (AR_SREV_9287_10_OR_LATER(ah))
3904 		return MS_REG_READ(AR9287, gpio) != 0;
3905 	else if (AR_SREV_9285_10_OR_LATER(ah))
3906 		return MS_REG_READ(AR9285, gpio) != 0;
3907 	else if (AR_SREV_9280_10_OR_LATER(ah))
3908 		return MS_REG_READ(AR928X, gpio) != 0;
3909 	else
3910 		return MS_REG_READ(AR, gpio) != 0;
3911 }
3912 
3913 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3914 			 u32 ah_signal_type)
3915 {
3916 	u32 gpio_shift;
3917 
3918 	ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3919 
3920 	gpio_shift = 2 * gpio;
3921 
3922 	REG_RMW(ah,
3923 		AR_GPIO_OE_OUT,
3924 		(AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3925 		(AR_GPIO_OE_OUT_DRV << gpio_shift));
3926 }
3927 
3928 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3929 {
3930 	REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3931 		AR_GPIO_BIT(gpio));
3932 }
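
/*
 * Illustrative sketch, not part of the driver: driving a GPIO line (e.g.
 * an LED) as a register-controlled output.  The pin number is whatever
 * the board wires up; AR_GPIO_OUTPUT_MUX_AS_OUTPUT selects plain software
 * control in the output mux.
 */
static void __maybe_unused example_gpio_led(struct ath_hw *ah, u32 pin, bool on)
{
	ath9k_hw_cfg_output(ah, pin, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);

	/* many reference designs wire the LED active-low; adjust as needed */
	ath9k_hw_set_gpio(ah, pin, on ? 0 : 1);
}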
3933 
3934 u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3935 {
3936 	return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3937 }
3938 
3939 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3940 {
3941 	REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3942 }
3943 
3944 bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3945 			       enum ath9k_ant_setting settings,
3946 			       struct ath9k_channel *chan,
3947 			       u8 *tx_chainmask,
3948 			       u8 *rx_chainmask,
3949 			       u8 *antenna_cfgd)
3950 {
3951 	static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3952 
3953 	if (AR_SREV_9280(ah)) {
3954 		if (!tx_chainmask_cfg) {
3956 			tx_chainmask_cfg = *tx_chainmask;
3957 			rx_chainmask_cfg = *rx_chainmask;
3958 		}
3959 
3960 		switch (settings) {
3961 		case ATH9K_ANT_FIXED_A:
3962 			*tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3963 			*rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3964 			*antenna_cfgd = true;
3965 			break;
3966 		case ATH9K_ANT_FIXED_B:
3967 			if (ah->caps.tx_chainmask >
3968 			    ATH9K_ANTENNA1_CHAINMASK) {
3969 				*tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3970 			}
3971 			*rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3972 			*antenna_cfgd = true;
3973 			break;
3974 		case ATH9K_ANT_VARIABLE:
3975 			*tx_chainmask = tx_chainmask_cfg;
3976 			*rx_chainmask = rx_chainmask_cfg;
3977 			*antenna_cfgd = true;
3978 			break;
3979 		default:
3980 			break;
3981 		}
3982 	} else {
3983 		ah->config.diversity_control = settings;
3984 	}
3985 
3986 	return true;
3987 }
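
/*
 * Illustrative sketch, not part of the driver: forcing antenna A on an
 * AR9280 via ath9k_hw_setantennaswitch().  The chainmasks are passed by
 * reference so the routine can overwrite them with the fixed-antenna
 * configuration.
 */
static void __maybe_unused example_force_antenna_a(struct ath_hw *ah,
						   struct ath9k_channel *chan)
{
	u8 tx_chainmask = ah->caps.tx_chainmask;
	u8 rx_chainmask = ah->caps.rx_chainmask;
	u8 antenna_cfgd = false;

	ath9k_hw_setantennaswitch(ah, ATH9K_ANT_FIXED_A, chan,
				  &tx_chainmask, &rx_chainmask,
				  &antenna_cfgd);
}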
3988 
3989 /*********************/
3990 /* General Operation */
3991 /*********************/
3992 
3993 u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
3994 {
3995 	u32 bits = REG_READ(ah, AR_RX_FILTER);
3996 	u32 phybits = REG_READ(ah, AR_PHY_ERR);
3997 
3998 	if (phybits & AR_PHY_ERR_RADAR)
3999 		bits |= ATH9K_RX_FILTER_PHYRADAR;
4000 	if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
4001 		bits |= ATH9K_RX_FILTER_PHYERR;
4002 
4003 	return bits;
4004 }
4005 
4006 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
4007 {
4008 	u32 phybits;
4009 
4010 	REG_WRITE(ah, AR_RX_FILTER, bits);
4011 
4012 	phybits = 0;
4013 	if (bits & ATH9K_RX_FILTER_PHYRADAR)
4014 		phybits |= AR_PHY_ERR_RADAR;
4015 	if (bits & ATH9K_RX_FILTER_PHYERR)
4016 		phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
4017 	REG_WRITE(ah, AR_PHY_ERR, phybits);
4018 
4019 	if (phybits)
4020 		REG_WRITE(ah, AR_RXCFG,
4021 			  REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
4022 	else
4023 		REG_WRITE(ah, AR_RXCFG,
4024 			  REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
4025 }
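
/*
 * Illustrative sketch, not part of the driver: adding radar PHY errors to
 * the current RX filter.  ath9k_hw_setrxfilter() programs both AR_RX_FILTER
 * and the matching AR_PHY_ERR bits, and turns on zero-length DMA for the
 * reported PHY error frames.
 */
static void __maybe_unused example_enable_radar_reports(struct ath_hw *ah)
{
	u32 rfilt = ath9k_hw_getrxfilter(ah);

	ath9k_hw_setrxfilter(ah, rfilt | ATH9K_RX_FILTER_PHYRADAR);
}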
4026 
4027 bool ath9k_hw_phy_disable(struct ath_hw *ah)
4028 {
4029 	return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM);
4030 }
4031 
4032 bool ath9k_hw_disable(struct ath_hw *ah)
4033 {
4034 	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
4035 		return false;
4036 
4037 	return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD);
4038 }
4039 
4040 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
4041 {
4042 	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
4043 	struct ath9k_channel *chan = ah->curchan;
4044 	struct ieee80211_channel *channel = chan->chan;
4045 
4046 	regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
4047 
4048 	ah->eep_ops->set_txpower(ah, chan,
4049 				 ath9k_regd_get_ctl(regulatory, chan),
4050 				 channel->max_antenna_gain * 2,
4051 				 channel->max_power * 2,
4052 				 min((u32) MAX_RATE_POWER,
4053 				 (u32) regulatory->power_limit));
4054 }
4055 
4056 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
4057 {
4058 	memcpy(ah->macaddr, mac, ETH_ALEN);
4059 }
4060 
4061 void ath9k_hw_setopmode(struct ath_hw *ah)
4062 {
4063 	ath9k_hw_set_operating_mode(ah, ah->opmode);
4064 }
4065 
4066 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4067 {
4068 	REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4069 	REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4070 }
4071 
4072 void ath9k_hw_setbssidmask(struct ath_softc *sc)
4073 {
4074 	REG_WRITE(sc->sc_ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask));
4075 	REG_WRITE(sc->sc_ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4));
4076 }
4077 
4078 void ath9k_hw_write_associd(struct ath_softc *sc)
4079 {
4080 	REG_WRITE(sc->sc_ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid));
4081 	REG_WRITE(sc->sc_ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) |
4082 		  ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4083 }
4084 
4085 u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4086 {
4087 	u64 tsf;
4088 
4089 	tsf = REG_READ(ah, AR_TSF_U32);
4090 	tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4091 
4092 	return tsf;
4093 }
4094 
4095 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4096 {
4097 	REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4098 	REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4099 }
4100 
4101 void ath9k_hw_reset_tsf(struct ath_hw *ah)
4102 {
4103 	ath9k_ps_wakeup(ah->ah_sc);
4104 	if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4105 			   AH_TSF_WRITE_TIMEOUT))
4106 		DPRINTF(ah->ah_sc, ATH_DBG_RESET,
4107 			"AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4108 
4109 	REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4110 	ath9k_ps_restore(ah->ah_sc);
4111 }
4112 
4113 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4114 {
4115 	if (setting)
4116 		ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4117 	else
4118 		ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4119 }
4120 
4121 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4122 {
4123 	if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4124 		DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad slot time %u\n", us);
4125 		ah->slottime = (u32) -1;
4126 		return false;
4127 	} else {
4128 		REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4129 		ah->slottime = us;
4130 		return true;
4131 	}
4132 }
4133 
4134 void ath9k_hw_set11nmac2040(struct ath_hw *ah, enum ath9k_ht_macmode mode)
4135 {
4136 	u32 macmode;
4137 
4138 	if (mode == ATH9K_HT_MACMODE_2040 &&
4139 	    !ah->config.cwm_ignore_extcca)
4140 		macmode = AR_2040_JOINED_RX_CLEAR;
4141 	else
4142 		macmode = 0;
4143 
4144 	REG_WRITE(ah, AR_2040_MODE, macmode);
4145 }
4146 
4147 /* HW Generic timers configuration */
4148 
4149 static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
4150 {
4151 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4152 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4153 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4154 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4155 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4156 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4157 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4158 	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4159 	{AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
4160 	{AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
4161 				AR_NDP2_TIMER_MODE, 0x0002},
4162 	{AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
4163 				AR_NDP2_TIMER_MODE, 0x0004},
4164 	{AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
4165 				AR_NDP2_TIMER_MODE, 0x0008},
4166 	{AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
4167 				AR_NDP2_TIMER_MODE, 0x0010},
4168 	{AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
4169 				AR_NDP2_TIMER_MODE, 0x0020},
4170 	{AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
4171 				AR_NDP2_TIMER_MODE, 0x0040},
4172 	{AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
4173 				AR_NDP2_TIMER_MODE, 0x0080}
4174 };
4175 
4176 /* HW generic timer primitives */
4177 
4178 /* compute and clear index of rightmost 1 */
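/*
 * b & (0 - b) isolates the lowest set bit; multiplying that power of two
 * by the De Bruijn constant and shifting right by 27 yields a unique
 * 5-bit value, which gen_timer_index[] (populated elsewhere with the
 * matching De Bruijn sequence) maps back to the bit position.  For
 * example, with *mask = 0x6 the function clears bit 1 and returns 1.
 */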
4179 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
4180 {
4181 	u32 b;
4182 
4183 	b = *mask;
4184 	b &= (0-b);
4185 	*mask &= ~b;
4186 	b *= debruijn32;
4187 	b >>= 27;
4188 
4189 	return timer_table->gen_timer_index[b];
4190 }
4191 
4192 u32 ath9k_hw_gettsf32(struct ath_hw *ah)
4193 {
4194 	return REG_READ(ah, AR_TSF_L32);
4195 }
4196 
4197 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
4198 					  void (*trigger)(void *),
4199 					  void (*overflow)(void *),
4200 					  void *arg,
4201 					  u8 timer_index)
4202 {
4203 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4204 	struct ath_gen_timer *timer;
4205 
4206 	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
4207 
4208 	if (timer == NULL) {
4209 		printk(KERN_DEBUG "Failed to allocate memory "
4210 		       "for hw timer[%d]\n", timer_index);
4211 		return NULL;
4212 	}
4213 
4214 	/* allocate a hardware generic timer slot */
4215 	timer_table->timers[timer_index] = timer;
4216 	timer->index = timer_index;
4217 	timer->trigger = trigger;
4218 	timer->overflow = overflow;
4219 	timer->arg = arg;
4220 
4221 	return timer;
4222 }
4223 
4224 void ath_gen_timer_start(struct ath_hw *ah,
4225 			 struct ath_gen_timer *timer,
4226 			 u32 timer_next, u32 timer_period)
4227 {
4228 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4229 	u32 tsf;
4230 
4231 	BUG_ON(!timer_period);
4232 
4233 	set_bit(timer->index, &timer_table->timer_mask.timer_bits);
4234 
4235 	tsf = ath9k_hw_gettsf32(ah);
4236 
4237 	DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER, "current tsf %x period %x "
4238 		"timer_next %x\n", tsf, timer_period, timer_next);
4239 
4240 	/*
4241 	 * Pull timer_next forward if the current TSF already passed it
4242 	 * because of software latency
4243 	 */
4244 	if (timer_next < tsf)
4245 		timer_next = tsf + timer_period;
4246 
4247 	/*
4248 	 * Program generic timer registers
4249 	 */
4250 	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
4251 		 timer_next);
4252 	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
4253 		  timer_period);
4254 	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4255 		    gen_tmr_configuration[timer->index].mode_mask);
4256 
4257 	/* Enable both trigger and thresh interrupt masks */
4258 	REG_SET_BIT(ah, AR_IMR_S5,
4259 		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4260 		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4261 
4262 	if ((ah->ah_sc->imask & ATH9K_INT_GENTIMER) == 0) {
4263 		ath9k_hw_set_interrupts(ah, 0);
4264 		ah->ah_sc->imask |= ATH9K_INT_GENTIMER;
4265 		ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
4266 	}
4267 }
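
/*
 * Illustrative sketch, not part of the driver: typical use of the generic
 * timer API.  The callback and the one-second period are invented for the
 * example; timer_next and timer_period are in TSF microseconds, and the
 * slot index must be one of the NDP timers handled above.
 */
static void example_gen_timer_cb(void *arg)
{
	/* runs from ath_gen_timer_isr() when the timer fires or overflows */
}

static __maybe_unused struct ath_gen_timer *
example_setup_gen_timer(struct ath_hw *ah)
{
	struct ath_gen_timer *timer;

	timer = ath_gen_timer_alloc(ah, example_gen_timer_cb,
				    example_gen_timer_cb, ah,
				    AR_FIRST_NDP_TIMER);
	if (!timer)
		return NULL;

	/* first shot one second from now, then every second */
	ath_gen_timer_start(ah, timer, ath9k_hw_gettsf32(ah) + 1000000,
			    1000000);

	return timer;
}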
4268 
4269 void ath_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
4270 {
4271 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4272 
4273 	if ((timer->index < AR_FIRST_NDP_TIMER) ||
4274 		(timer->index >= ATH_MAX_GEN_TIMER)) {
4275 		return;
4276 	}
4277 
4278 	/* Clear generic timer enable bits. */
4279 	REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4280 			gen_tmr_configuration[timer->index].mode_mask);
4281 
4282 	/* Disable both trigger and thresh interrupt masks */
4283 	REG_CLR_BIT(ah, AR_IMR_S5,
4284 		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4285 		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4286 
4287 	clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
4288 
4289 	/* if no timer is enabled, turn off interrupt mask */
4290 	if (timer_table->timer_mask.val == 0) {
4291 		ath9k_hw_set_interrupts(ah, 0);
4292 		ah->ah_sc->imask &= ~ATH9K_INT_GENTIMER;
4293 		ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
4294 	}
4295 }
4296 
4297 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
4298 {
4299 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4300 
4301 	/* free the hardware generic timer slot */
4302 	timer_table->timers[timer->index] = NULL;
4303 	kfree(timer);
4304 }
4305 
4306 /*
4307  * Generic Timer Interrupts handling
4308  */
4309 void ath_gen_timer_isr(struct ath_hw *ah)
4310 {
4311 	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4312 	struct ath_gen_timer *timer;
4313 	u32 trigger_mask, thresh_mask, index;
4314 
4315 	/* get hardware generic timer interrupt status */
4316 	trigger_mask = ah->intr_gen_timer_trigger;
4317 	thresh_mask = ah->intr_gen_timer_thresh;
4318 	trigger_mask &= timer_table->timer_mask.val;
4319 	thresh_mask &= timer_table->timer_mask.val;
4320 
4321 	trigger_mask &= ~thresh_mask;
4322 
4323 	while (thresh_mask) {
4324 		index = rightmost_index(timer_table, &thresh_mask);
4325 		timer = timer_table->timers[index];
4326 		BUG_ON(!timer);
4327 		DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER,
4328 			"TSF overflow for Gen timer %d\n", index);
4329 		timer->overflow(timer->arg);
4330 	}
4331 
4332 	while (trigger_mask) {
4333 		index = rightmost_index(timer_table, &trigger_mask);
4334 		timer = timer_table->timers[index];
4335 		BUG_ON(!timer);
4336 		DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER,
4337 			"Gen timer[%d] trigger\n", index);
4338 		timer->trigger(timer->arg);
4339 	}
4340 }
4341 
4342 /*
4343  * Primitive to disable ASPM
4344  */
4345 void ath_pcie_aspm_disable(struct ath_softc *sc)
4346 {
4347 	struct pci_dev *pdev = to_pci_dev(sc->dev);
4348 	u8 aspm;
4349 
4350 	pci_read_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, &aspm);
4351 	aspm &= ~(ATH_PCIE_CAP_LINK_L0S | ATH_PCIE_CAP_LINK_L1);
4352 	pci_write_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, aspm);
4353 }
4354