/*
 * Copyright (c) 2008-2009 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include <linux/io.h>
#include <asm/unaligned.h>

#include "hw.h"
#include "rc.h"
#include "initvals.h"

#define ATH9K_CLOCK_RATE_CCK		22
#define ATH9K_CLOCK_RATE_5GHZ_OFDM	40
#define ATH9K_CLOCK_RATE_2GHZ_OFDM	44

static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
			      struct ar5416_eeprom_def *pEepData,
			      u32 reg, u32 value);

MODULE_AUTHOR("Atheros Communications");
MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
MODULE_LICENSE("Dual BSD/GPL");

static int __init ath9k_init(void)
{
	return 0;
}
module_init(ath9k_init);

static void __exit ath9k_exit(void)
{
	return;
}
module_exit(ath9k_exit);

/********************/
/* Helper Functions */
/********************/

static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (!ah->curchan) /* should really check for CCK instead */
		return clks / ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;

	return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
}

static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (conf_is_ht40(conf))
		return ath9k_hw_mac_usec(ah, clks) / 2;
	else
		return ath9k_hw_mac_usec(ah, clks);
}

static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (!ah->curchan) /* should really check for CCK instead */
		return usecs * ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
	return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
}

static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (conf_is_ht40(conf))
		return ath9k_hw_mac_clks(ah, usecs) * 2;
	else
		return ath9k_hw_mac_clks(ah, usecs);
}
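/*
 * Note on units: per the defines above, the MAC clock runs at 44 MHz for
 * 2 GHz OFDM, 40 MHz for 5 GHz OFDM and 22 MHz for CCK, and twice as
 * fast in HT40.  So, for example, 10 usec on a 2 GHz HT20 channel is
 * 10 * 44 = 440 MAC clocks, and 880 clocks when operating in HT40.
 */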
bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
{
	int i;

	BUG_ON(timeout < AH_TIME_QUANTUM);

	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
		if ((REG_READ(ah, reg) & mask) == val)
			return true;

		udelay(AH_TIME_QUANTUM);
	}

	ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
		  "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
		  timeout, reg, REG_READ(ah, reg), mask, val);

	return false;
}
EXPORT_SYMBOL(ath9k_hw_wait);

u32 ath9k_hw_reverse_bits(u32 val, u32 n)
{
	u32 retval;
	int i;

	for (i = 0, retval = 0; i < n; i++) {
		retval = (retval << 1) | (val & 1);
		val >>= 1;
	}
	return retval;
}

bool ath9k_get_channel_edges(struct ath_hw *ah,
			     u16 flags, u16 *low,
			     u16 *high)
{
	struct ath9k_hw_capabilities *pCap = &ah->caps;

	if (flags & CHANNEL_5GHZ) {
		*low = pCap->low_5ghz_chan;
		*high = pCap->high_5ghz_chan;
		return true;
	}
	if ((flags & CHANNEL_2GHZ)) {
		*low = pCap->low_2ghz_chan;
		*high = pCap->high_2ghz_chan;
		return true;
	}
	return false;
}

u16 ath9k_hw_computetxtime(struct ath_hw *ah,
			   u8 phy, int kbps,
			   u32 frameLen, u16 rateix,
			   bool shortPreamble)
{
	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;

	if (kbps == 0)
		return 0;

	switch (phy) {
	case WLAN_RC_PHY_CCK:
		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
		if (shortPreamble)
			phyTime >>= 1;
		numBits = frameLen << 3;
		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
		break;
	case WLAN_RC_PHY_OFDM:
		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME_QUARTER
				+ OFDM_PREAMBLE_TIME_QUARTER
				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
		} else if (ah->curchan &&
			   IS_CHAN_HALF_RATE(ah->curchan)) {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME_HALF +
				OFDM_PREAMBLE_TIME_HALF
				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
		} else {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
				+ (numSymbols * OFDM_SYMBOL_TIME);
		}
		break;
	default:
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "Unknown phy %u (rate ix %u)\n", phy, rateix);
		txTime = 0;
		break;
	}

	return txTime;
}
EXPORT_SYMBOL(ath9k_hw_computetxtime);

void ath9k_hw_get_channel_centers(struct ath_hw *ah,
				  struct ath9k_channel *chan,
				  struct chan_centers *centers)
{
	int8_t extoff;

	if (!IS_CHAN_HT40(chan)) {
		centers->ctl_center = centers->ext_center =
			centers->synth_center = chan->channel;
		return;
	}

	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
		centers->synth_center =
			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
		extoff = 1;
	} else {
		centers->synth_center =
			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
		extoff = -1;
	}

	centers->ctl_center =
		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
	/* 25 MHz spacing is supported by hw but not on upper layers */
	centers->ext_center =
		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
}
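/*
 * Example of the HT40 center math above: with the control channel at
 * 2412 MHz and HT40_CHANNEL_CENTER_SHIFT being 10 (MHz), an HT40+
 * channel is synthesized at 2422 MHz with the extension channel
 * centered at 2432 MHz; for HT40- the two offsets are mirrored.
 */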
/******************/
/* Chip Revisions */
/******************/

static void ath9k_hw_read_revisions(struct ath_hw *ah)
{
	u32 val;

	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;

	if (val == 0xFF) {
		val = REG_READ(ah, AR_SREV);
		ah->hw_version.macVersion =
			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
		ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
	} else {
		if (!AR_SREV_9100(ah))
			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);

		ah->hw_version.macRev = val & AR_SREV_REVISION;

		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
			ah->is_pciexpress = true;
	}
}

static int ath9k_hw_get_radiorev(struct ath_hw *ah)
{
	u32 val;
	int i;

	REG_WRITE(ah, AR_PHY(0x36), 0x00007058);

	for (i = 0; i < 8; i++)
		REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
	val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
	val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);

	return ath9k_hw_reverse_bits(val, 8);
}

/************************************/
/* HW Attach, Detach, Init Routines */
/************************************/

static void ath9k_hw_disablepcie(struct ath_hw *ah)
{
	if (AR_SREV_9100(ah))
		return;

	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);

	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
}

static bool ath9k_hw_chip_test(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
	u32 regHold[2];
	u32 patternData[4] = { 0x55555555,
			       0xaaaaaaaa,
			       0x66666666,
			       0x99999999 };
	int i, j;

	for (i = 0; i < 2; i++) {
		u32 addr = regAddr[i];
		u32 wrData, rdData;

		regHold[i] = REG_READ(ah, addr);
		for (j = 0; j < 0x100; j++) {
			wrData = (j << 16) | j;
			REG_WRITE(ah, addr, wrData);
			rdData = REG_READ(ah, addr);
			if (rdData != wrData) {
				ath_print(common, ATH_DBG_FATAL,
					  "address test failed "
					  "addr: 0x%08x - wr:0x%08x != "
					  "rd:0x%08x\n",
					  addr, wrData, rdData);
				return false;
			}
		}
		for (j = 0; j < 4; j++) {
			wrData = patternData[j];
			REG_WRITE(ah, addr, wrData);
			rdData = REG_READ(ah, addr);
			if (wrData != rdData) {
				ath_print(common, ATH_DBG_FATAL,
					  "address test failed "
					  "addr: 0x%08x - wr:0x%08x != "
					  "rd:0x%08x\n",
					  addr, wrData, rdData);
				return false;
			}
		}
		REG_WRITE(ah, regAddr[i], regHold[i]);
	}
	udelay(100);

	return true;
}

static const char *ath9k_hw_devname(u16 devid)
{
	switch (devid) {
	case AR5416_DEVID_PCI:
		return "Atheros 5416";
	case AR5416_DEVID_PCIE:
		return "Atheros 5418";
	case AR9160_DEVID_PCI:
		return "Atheros 9160";
	case AR5416_AR9100_DEVID:
		return "Atheros 9100";
	case AR9280_DEVID_PCI:
	case AR9280_DEVID_PCIE:
		return "Atheros 9280";
	case AR9285_DEVID_PCIE:
		return "Atheros 9285";
	case AR5416_DEVID_AR9287_PCI:
	case AR5416_DEVID_AR9287_PCIE:
		return "Atheros 9287";
	}

	return NULL;
}
static void ath9k_hw_init_config(struct ath_hw *ah)
{
	int i;

	ah->config.dma_beacon_response_time = 2;
	ah->config.sw_beacon_response_time = 10;
	ah->config.additional_swba_backoff = 0;
	ah->config.ack_6mb = 0x0;
	ah->config.cwm_ignore_extcca = 0;
	ah->config.pcie_powersave_enable = 0;
	ah->config.pcie_clock_req = 0;
	ah->config.pcie_waen = 0;
	ah->config.analog_shiftreg = 1;
	ah->config.ht_enable = 1;
	ah->config.ofdm_trig_low = 200;
	ah->config.ofdm_trig_high = 500;
	ah->config.cck_trig_high = 200;
	ah->config.cck_trig_low = 100;
	ah->config.enable_ani = 1;

	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
		ah->config.spurchans[i][0] = AR_NO_SPUR;
		ah->config.spurchans[i][1] = AR_NO_SPUR;
	}

	ah->config.intr_mitigation = true;

	/*
	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
	 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
	 * This means we use it for all AR5416 devices, and the few
	 * minor PCI AR9280 devices out there.
	 *
	 * Serialization is required because these devices do not handle
	 * well the case of two concurrent reads/writes due to the latency
	 * involved. During one read/write another read/write can be issued
	 * on another CPU while the previous read/write may still be working
	 * on our hardware; if we hit this case the hardware gets stuck in a
	 * loop. We prevent this by serializing reads and writes.
	 *
	 * This issue is not present on PCI-Express devices or pre-AR5416
	 * devices (legacy, 802.11abg).
	 */
	if (num_possible_cpus() > 1)
		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
}
EXPORT_SYMBOL(ath9k_hw_init);

static void ath9k_hw_init_defaults(struct ath_hw *ah)
{
	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);

	regulatory->country_code = CTRY_DEFAULT;
	regulatory->power_limit = MAX_RATE_POWER;
	regulatory->tp_scale = ATH9K_TP_SCALE_MAX;

	ah->hw_version.magic = AR5416_MAGIC;
	ah->hw_version.subvendorid = 0;

	ah->ah_flags = 0;
	if (ah->hw_version.devid == AR5416_AR9100_DEVID)
		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
	if (!AR_SREV_9100(ah))
		ah->ah_flags = AH_USE_EEPROM;

	ah->atim_window = 0;
	ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
	ah->beacon_interval = 100;
	ah->enable_32kHz_clock = DONT_USE_32KHZ;
	ah->slottime = (u32) -1;
	ah->acktimeout = (u32) -1;
	ah->ctstimeout = (u32) -1;
	ah->globaltxtimeout = (u32) -1;
	ah->power_mode = ATH9K_PM_UNDEFINED;
}

static int ath9k_hw_rf_claim(struct ath_hw *ah)
{
	u32 val;

	REG_WRITE(ah, AR_PHY(0), 0x00000007);

	val = ath9k_hw_get_radiorev(ah);
	switch (val & AR_RADIO_SREV_MAJOR) {
	case 0:
		val = AR_RAD5133_SREV_MAJOR;
		break;
	case AR_RAD5133_SREV_MAJOR:
	case AR_RAD5122_SREV_MAJOR:
	case AR_RAD2133_SREV_MAJOR:
	case AR_RAD2122_SREV_MAJOR:
		break;
	default:
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "Radio Chip Rev 0x%02X not supported\n",
			  val & AR_RADIO_SREV_MAJOR);
		return -EOPNOTSUPP;
	}

	ah->hw_version.analog5GhzRev = val;

	return 0;
}

static int ath9k_hw_init_macaddr(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 sum;
	int i;
	u16 eeval;

	sum = 0;
	for (i = 0; i < 3; i++) {
		eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
		sum += eeval;
		common->macaddr[2 * i] = eeval >> 8;
		common->macaddr[2 * i + 1] = eeval & 0xff;
	}
	if (sum == 0 || sum == 0xffff * 3)
		return -EADDRNOTAVAIL;

	return 0;
}
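/*
 * Note: the EEPROM stores the MAC address as three 16-bit words with
 * the most significant byte first in each word, which is why the loop
 * above splits every word into two bytes.  An address of all zeros or
 * all ones fails the sum check and is reported as -EADDRNOTAVAIL.
 */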
static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
{
	u32 rxgain_type;

	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
		rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);

		if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
			INIT_INI_ARRAY(&ah->iniModesRxGain,
			ar9280Modes_backoff_13db_rxgain_9280_2,
			ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
		else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
			INIT_INI_ARRAY(&ah->iniModesRxGain,
			ar9280Modes_backoff_23db_rxgain_9280_2,
			ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
		else
			INIT_INI_ARRAY(&ah->iniModesRxGain,
			ar9280Modes_original_rxgain_9280_2,
			ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
	} else {
		INIT_INI_ARRAY(&ah->iniModesRxGain,
			ar9280Modes_original_rxgain_9280_2,
			ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
	}
}

static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
{
	u32 txgain_type;

	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
		txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);

		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
			INIT_INI_ARRAY(&ah->iniModesTxGain,
			ar9280Modes_high_power_tx_gain_9280_2,
			ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
		else
			INIT_INI_ARRAY(&ah->iniModesTxGain,
			ar9280Modes_original_tx_gain_9280_2,
			ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
	} else {
		INIT_INI_ARRAY(&ah->iniModesTxGain,
			ar9280Modes_original_tx_gain_9280_2,
			ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
	}
}

static int ath9k_hw_post_init(struct ath_hw *ah)
{
	int ecode;

	if (!ath9k_hw_chip_test(ah))
		return -ENODEV;

	ecode = ath9k_hw_rf_claim(ah);
	if (ecode != 0)
		return ecode;

	ecode = ath9k_hw_eeprom_init(ah);
	if (ecode != 0)
		return ecode;

	ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG,
		  "Eeprom VER: %d, REV: %d\n",
		  ah->eep_ops->get_eeprom_ver(ah),
		  ah->eep_ops->get_eeprom_rev(ah));

	if (!AR_SREV_9280_10_OR_LATER(ah)) {
		ecode = ath9k_hw_rf_alloc_ext_banks(ah);
		if (ecode) {
			ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
				  "Failed allocating banks for "
				  "external radio\n");
			return ecode;
		}
	}

	if (!AR_SREV_9100(ah)) {
		ath9k_hw_ani_setup(ah);
		ath9k_hw_ani_init(ah);
	}

	return 0;
}

static bool ath9k_hw_devid_supported(u16 devid)
{
	switch (devid) {
	case AR5416_DEVID_PCI:
	case AR5416_DEVID_PCIE:
	case AR5416_AR9100_DEVID:
	case AR9160_DEVID_PCI:
	case AR9280_DEVID_PCI:
	case AR9280_DEVID_PCIE:
	case AR9285_DEVID_PCIE:
	case AR5416_DEVID_AR9287_PCI:
	case AR5416_DEVID_AR9287_PCIE:
	case AR9271_USB:
		return true;
	default:
		break;
	}
	return false;
}

static bool ath9k_hw_macversion_supported(u32 macversion)
{
	switch (macversion) {
	case AR_SREV_VERSION_5416_PCI:
	case AR_SREV_VERSION_5416_PCIE:
	case AR_SREV_VERSION_9160:
	case AR_SREV_VERSION_9100:
	case AR_SREV_VERSION_9280:
	case AR_SREV_VERSION_9285:
	case AR_SREV_VERSION_9287:
	case AR_SREV_VERSION_9271:
		return true;
	default:
		break;
	}
	return false;
}
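/*
 * Summary: ath9k_hw_init_cal_settings() below selects the calibration
 * tables for chips that support the full set (AR9160 and later): ADC
 * gain, ADC DC offset and IQ mismatch.  AR9280 and later use the
 * single-sample variants, older parts the multi-sample ones.
 */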
static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
{
	if (AR_SREV_9160_10_OR_LATER(ah)) {
		if (AR_SREV_9280_10_OR_LATER(ah)) {
			ah->iq_caldata.calData = &iq_cal_single_sample;
			ah->adcgain_caldata.calData =
				&adc_gain_cal_single_sample;
			ah->adcdc_caldata.calData =
				&adc_dc_cal_single_sample;
			ah->adcdc_calinitdata.calData =
				&adc_init_dc_cal;
		} else {
			ah->iq_caldata.calData = &iq_cal_multi_sample;
			ah->adcgain_caldata.calData =
				&adc_gain_cal_multi_sample;
			ah->adcdc_caldata.calData =
				&adc_dc_cal_multi_sample;
			ah->adcdc_calinitdata.calData =
				&adc_init_dc_cal;
		}
		ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
	}
}

static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
{
	if (AR_SREV_9271(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271,
			       ARRAY_SIZE(ar9271Modes_9271), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271,
			       ARRAY_SIZE(ar9271Common_9271), 2);
		INIT_INI_ARRAY(&ah->iniModes_9271_1_0_only,
			       ar9271Modes_9271_1_0_only,
			       ARRAY_SIZE(ar9271Modes_9271_1_0_only), 6);
		return;
	}

	if (AR_SREV_9287_11_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
			       ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
			       ARRAY_SIZE(ar9287Common_9287_1_1), 2);
		if (ah->config.pcie_clock_req)
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9287PciePhy_clkreq_off_L1_9287_1_1,
			ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
		else
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
			ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
			2);
	} else if (AR_SREV_9287_10_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
			       ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
			       ARRAY_SIZE(ar9287Common_9287_1_0), 2);

		if (ah->config.pcie_clock_req)
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9287PciePhy_clkreq_off_L1_9287_1_0,
			ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
		else
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
			ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
			2);
	} else if (AR_SREV_9285_12_OR_LATER(ah)) {

		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
			       ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
			       ARRAY_SIZE(ar9285Common_9285_1_2), 2);

		if (ah->config.pcie_clock_req) {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9285PciePhy_clkreq_off_L1_9285_1_2,
			ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
		} else {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
			ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
			2);
		}
	} else if (AR_SREV_9285_10_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
			       ARRAY_SIZE(ar9285Modes_9285), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
			       ARRAY_SIZE(ar9285Common_9285), 2);

		if (ah->config.pcie_clock_req) {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9285PciePhy_clkreq_off_L1_9285,
			ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
		} else {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9285PciePhy_clkreq_always_on_L1_9285,
			ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
		}
	} else if (AR_SREV_9280_20_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
			       ARRAY_SIZE(ar9280Modes_9280_2), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
			       ARRAY_SIZE(ar9280Common_9280_2), 2);

		if (ah->config.pcie_clock_req) {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9280PciePhy_clkreq_off_L1_9280,
			ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280), 2);
		} else {
			INIT_INI_ARRAY(&ah->iniPcieSerdes,
			ar9280PciePhy_clkreq_always_on_L1_9280,
			ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
		}
		INIT_INI_ARRAY(&ah->iniModesAdditional,
			       ar9280Modes_fast_clock_9280_2,
			       ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
	} else if (AR_SREV_9280_10_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
			       ARRAY_SIZE(ar9280Modes_9280), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
			       ARRAY_SIZE(ar9280Common_9280), 2);
	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
			       ARRAY_SIZE(ar5416Modes_9160), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
			       ARRAY_SIZE(ar5416Common_9160), 2);
		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
			       ARRAY_SIZE(ar5416Bank0_9160), 2);
		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
			       ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
			       ARRAY_SIZE(ar5416Bank1_9160), 2);
		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
			       ARRAY_SIZE(ar5416Bank2_9160), 2);
		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
			       ARRAY_SIZE(ar5416Bank3_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
			       ARRAY_SIZE(ar5416Bank6_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
			       ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
			       ARRAY_SIZE(ar5416Bank7_9160), 2);
		if (AR_SREV_9160_11(ah)) {
			INIT_INI_ARRAY(&ah->iniAddac,
				       ar5416Addac_91601_1,
				       ARRAY_SIZE(ar5416Addac_91601_1), 2);
		} else {
			INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
				       ARRAY_SIZE(ar5416Addac_9160), 2);
		}
	} else if (AR_SREV_9100_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
			       ARRAY_SIZE(ar5416Modes_9100), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
			       ARRAY_SIZE(ar5416Common_9100), 2);
		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
			       ARRAY_SIZE(ar5416Bank0_9100), 2);
		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
			       ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
			       ARRAY_SIZE(ar5416Bank1_9100), 2);
		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
			       ARRAY_SIZE(ar5416Bank2_9100), 2);
		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
			       ARRAY_SIZE(ar5416Bank3_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
			       ARRAY_SIZE(ar5416Bank6_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
			       ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
			       ARRAY_SIZE(ar5416Bank7_9100), 2);
		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
			       ARRAY_SIZE(ar5416Addac_9100), 2);
	} else {
		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
			       ARRAY_SIZE(ar5416Modes), 6);
		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
			       ARRAY_SIZE(ar5416Common), 2);
		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
			       ARRAY_SIZE(ar5416Bank0), 2);
		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
			       ARRAY_SIZE(ar5416BB_RfGain), 3);
		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
			       ARRAY_SIZE(ar5416Bank1), 2);
		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
			       ARRAY_SIZE(ar5416Bank2), 2);
		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
			       ARRAY_SIZE(ar5416Bank3), 3);
		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
			       ARRAY_SIZE(ar5416Bank6), 3);
		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
			       ARRAY_SIZE(ar5416Bank6TPC), 3);
		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
			       ARRAY_SIZE(ar5416Bank7), 2);
		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
			       ARRAY_SIZE(ar5416Addac), 2);
	}
}

static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
{
	if (AR_SREV_9287_11_OR_LATER(ah))
		INIT_INI_ARRAY(&ah->iniModesRxGain,
		ar9287Modes_rx_gain_9287_1_1,
		ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
	else if (AR_SREV_9287_10(ah))
		INIT_INI_ARRAY(&ah->iniModesRxGain,
		ar9287Modes_rx_gain_9287_1_0,
		ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
	else if (AR_SREV_9280_20(ah))
		ath9k_hw_init_rxgain_ini(ah);

	if (AR_SREV_9287_11_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniModesTxGain,
		ar9287Modes_tx_gain_9287_1_1,
		ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
	} else if (AR_SREV_9287_10(ah)) {
		INIT_INI_ARRAY(&ah->iniModesTxGain,
		ar9287Modes_tx_gain_9287_1_0,
		ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
	} else if (AR_SREV_9280_20(ah)) {
		ath9k_hw_init_txgain_ini(ah);
	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
		u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);

		/* txgain table */
		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
			INIT_INI_ARRAY(&ah->iniModesTxGain,
			ar9285Modes_high_power_tx_gain_9285_1_2,
			ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
		} else {
			INIT_INI_ARRAY(&ah->iniModesTxGain,
			ar9285Modes_original_tx_gain_9285_1_2,
			ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
		}

	}
}

static void ath9k_hw_init_eeprom_fix(struct ath_hw *ah)
{
	u32 i, j;

	if (ah->hw_version.devid == AR9280_DEVID_PCI) {

		/* EEPROM Fixup */
		for (i = 0; i < ah->iniModes.ia_rows; i++) {
			u32 reg = INI_RA(&ah->iniModes, i, 0);

			for (j = 1; j < ah->iniModes.ia_columns; j++) {
				u32 val = INI_RA(&ah->iniModes, i, j);

				INI_RA(&ah->iniModes, i, j) =
					ath9k_hw_ini_fixup(ah,
							   &ah->eeprom.def,
							   reg, val);
			}
		}
	}
}
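/*
 * Rough outline of ath9k_hw_init() below: set defaults and config,
 * power-on reset and wake the chip, validate the device and MAC
 * revision, select the INI register tables, configure PCIe power save,
 * run post-init (chip test, RF claim, EEPROM, ANI) and finally fill in
 * capabilities, EEPROM fixups and the MAC address.
 */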
int ath9k_hw_init(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	int r = 0;

	if (!ath9k_hw_devid_supported(ah->hw_version.devid)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Unsupported device ID: 0x%0x\n",
			  ah->hw_version.devid);
		return -EOPNOTSUPP;
	}

	ath9k_hw_init_defaults(ah);
	ath9k_hw_init_config(ah);

	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Couldn't reset chip\n");
		return -EIO;
	}

	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
		ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
		return -EIO;
	}

	if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
		    (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
			ah->config.serialize_regmode =
				SER_REG_MODE_ON;
		} else {
			ah->config.serialize_regmode =
				SER_REG_MODE_OFF;
		}
	}

	ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n",
		  ah->config.serialize_regmode);

	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD >> 1;
	else
		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD;

	if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Mac Chip Rev 0x%02x.%x is not supported by "
			  "this driver\n", ah->hw_version.macVersion,
			  ah->hw_version.macRev);
		return -EOPNOTSUPP;
	}

	if (AR_SREV_9100(ah)) {
		ah->iq_caldata.calData = &iq_cal_multi_sample;
		ah->supp_cals = IQ_MISMATCH_CAL;
		ah->is_pciexpress = false;
	}

	if (AR_SREV_9271(ah))
		ah->is_pciexpress = false;

	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);

	ath9k_hw_init_cal_settings(ah);

	ah->ani_function = ATH9K_ANI_ALL;
	if (AR_SREV_9280_10_OR_LATER(ah)) {
		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
		ah->ath9k_hw_rf_set_freq = &ath9k_hw_ar9280_set_channel;
		ah->ath9k_hw_spur_mitigate_freq = &ath9k_hw_9280_spur_mitigate;
	} else {
		ah->ath9k_hw_rf_set_freq = &ath9k_hw_set_channel;
		ah->ath9k_hw_spur_mitigate_freq = &ath9k_hw_spur_mitigate;
	}

	ath9k_hw_init_mode_regs(ah);

	if (ah->is_pciexpress)
		ath9k_hw_configpcipowersave(ah, 0, 0);
	else
		ath9k_hw_disablepcie(ah);

	/* Support for Japan ch.14 (2484) spread */
	if (AR_SREV_9287_11_OR_LATER(ah)) {
		INIT_INI_ARRAY(&ah->iniCckfirNormal,
		       ar9287Common_normal_cck_fir_coeff_92871_1,
		       ARRAY_SIZE(ar9287Common_normal_cck_fir_coeff_92871_1), 2);
		INIT_INI_ARRAY(&ah->iniCckfirJapan2484,
		       ar9287Common_japan_2484_cck_fir_coeff_92871_1,
		       ARRAY_SIZE(ar9287Common_japan_2484_cck_fir_coeff_92871_1), 2);
	}

	r = ath9k_hw_post_init(ah);
	if (r)
		return r;

	ath9k_hw_init_mode_gain_regs(ah);
	r = ath9k_hw_fill_cap_info(ah);
	if (r)
		return r;

	ath9k_hw_init_eeprom_fix(ah);

	r = ath9k_hw_init_macaddr(ah);
	if (r) {
		ath_print(common, ATH_DBG_FATAL,
			  "Failed to initialize MAC address\n");
		return r;
	}

	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
	else
		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);

	ath9k_init_nfcal_hist_buffer(ah);

	common->state = ATH_HW_INITIALIZED;

	return 0;
}

static void ath9k_hw_init_bb(struct ath_hw *ah,
			     struct ath9k_channel *chan)
{
	u32 synthDelay;

	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
	if (IS_CHAN_B(chan))
		synthDelay = (4 * synthDelay) / 22;
	else
		synthDelay /= 10;

	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);

	udelay(synthDelay + BASE_ACTIVATE_DELAY);
}

static void ath9k_hw_init_qos(struct ath_hw *ah)
{
	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);

	REG_WRITE(ah, AR_QOS_NO_ACK,
		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));

	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
}

static void ath9k_hw_change_target_baud(struct ath_hw *ah, u32 freq, u32 baud)
{
	u32 lcr;
	u32 baud_divider = freq * 1000 * 1000 / 16 / baud;

	lcr = REG_READ(ah, 0x5100c);
	lcr |= 0x80;

	REG_WRITE(ah, 0x5100c, lcr);
	REG_WRITE(ah, 0x51004, (baud_divider >> 8));
	REG_WRITE(ah, 0x51000, (baud_divider & 0xff));

	lcr &= ~0x80;
	REG_WRITE(ah, 0x5100c, lcr);
}
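/*
 * ath9k_hw_change_target_baud() above programs a standard 16x UART
 * divisor: divisor = core_clock_hz / 16 / baud.  Assuming the AR9271
 * values passed in from ath9k_hw_init_pll() below are a 117 MHz core
 * clock and a 19200 baud target, the divisor works out to
 * 117000000 / 16 / 19200 ~= 380.
 */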
static void ath9k_hw_init_pll(struct ath_hw *ah,
			      struct ath9k_channel *chan)
{
	u32 pll;

	if (AR_SREV_9100(ah)) {
		if (chan && IS_CHAN_5GHZ(chan))
			pll = 0x1450;
		else
			pll = 0x1458;
	} else {
		if (AR_SREV_9280_10_OR_LATER(ah)) {
			pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);

			if (chan && IS_CHAN_HALF_RATE(chan))
				pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
			else if (chan && IS_CHAN_QUARTER_RATE(chan))
				pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);

			if (chan && IS_CHAN_5GHZ(chan)) {
				pll |= SM(0x28, AR_RTC_9160_PLL_DIV);

				if (AR_SREV_9280_20(ah)) {
					if (((chan->channel % 20) == 0)
					    || ((chan->channel % 10) == 0))
						pll = 0x2850;
					else
						pll = 0x142c;
				}
			} else {
				pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
			}

		} else if (AR_SREV_9160_10_OR_LATER(ah)) {

			pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);

			if (chan && IS_CHAN_HALF_RATE(chan))
				pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
			else if (chan && IS_CHAN_QUARTER_RATE(chan))
				pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);

			if (chan && IS_CHAN_5GHZ(chan))
				pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
			else
				pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
		} else {
			pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;

			if (chan && IS_CHAN_HALF_RATE(chan))
				pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
			else if (chan && IS_CHAN_QUARTER_RATE(chan))
				pll |= SM(0x2, AR_RTC_PLL_CLKSEL);

			if (chan && IS_CHAN_5GHZ(chan))
				pll |= SM(0xa, AR_RTC_PLL_DIV);
			else
				pll |= SM(0xb, AR_RTC_PLL_DIV);
		}
	}
	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);

	/* Switch the core clock for ar9271 to 117Mhz */
	if (AR_SREV_9271(ah)) {
		if ((pll == 0x142c) || (pll == 0x2850)) {
			udelay(500);
			/* set CLKOBS to output AHB clock */
			REG_WRITE(ah, 0x7020, 0xe);
			/*
			 * 0x304: 117Mhz, ahb_ratio: 1x1
			 * 0x306: 40Mhz, ahb_ratio: 1x1
			 */
			REG_WRITE(ah, 0x50040, 0x304);
			/*
			 * Adjust the baud divisor to keep the targeted
			 * baud rate based on the core clock in use.
			 */
			ath9k_hw_change_target_baud(ah, AR9271_CORE_CLOCK,
						    AR9271_TARGET_BAUD_RATE);
		}
	}

	udelay(RTC_PLL_SETTLE_DELAY);

	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
}

static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
{
	int rx_chainmask, tx_chainmask;

	rx_chainmask = ah->rxchainmask;
	tx_chainmask = ah->txchainmask;

	switch (rx_chainmask) {
	case 0x5:
		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
			    AR_PHY_SWAP_ALT_CHAIN);
		/* fall through */
	case 0x3:
		if (ah->hw_version.macVersion == AR_SREV_REVISION_5416_10) {
			REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
			REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
			break;
		}
		/* fall through */
	case 0x1:
	case 0x2:
	case 0x7:
		REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
		REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
		break;
	default:
		break;
	}

	REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
	if (tx_chainmask == 0x5) {
		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
			    AR_PHY_SWAP_ALT_CHAIN);
	}
	if (AR_SREV_9100(ah))
		REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
			  REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
}

static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
					  enum nl80211_iftype opmode)
{
	ah->mask_reg = AR_IMR_TXERR |
		AR_IMR_TXURN |
		AR_IMR_RXERR |
		AR_IMR_RXORN |
		AR_IMR_BCNMISC;

	if (ah->config.intr_mitigation)
		ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
	else
		ah->mask_reg |= AR_IMR_RXOK;

	ah->mask_reg |= AR_IMR_TXOK;

	if (opmode == NL80211_IFTYPE_AP)
		ah->mask_reg |= AR_IMR_MIB;

	REG_WRITE(ah, AR_IMR, ah->mask_reg);
	REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);

	if (!AR_SREV_9100(ah)) {
		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
	}
}

static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
{
	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
			  "bad ack timeout %u\n", us);
		ah->acktimeout = (u32) -1;
		return false;
	} else {
		REG_RMW_FIELD(ah, AR_TIME_OUT,
			      AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
		ah->acktimeout = us;
		return true;
	}
}

static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
{
	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
			  "bad cts timeout %u\n", us);
		ah->ctstimeout = (u32) -1;
		return false;
	} else {
		REG_RMW_FIELD(ah, AR_TIME_OUT,
			      AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
		ah->ctstimeout = us;
		return true;
	}
}

static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
{
	if (tu > 0xFFFF) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT,
			  "bad global tx timeout %u\n", tu);
		ah->globaltxtimeout = (u32) -1;
		return false;
	} else {
		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
		ah->globaltxtimeout = tu;
		return true;
	}
}
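/*
 * ath9k_hw_init_user_settings() below re-applies the cached settings
 * (misc mode, slot time, ACK/CTS timeouts, global TX timeout) after a
 * chip reset: the registers are wiped by the reset while the cached
 * values in struct ath_hw survive.
 */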
static void ath9k_hw_init_user_settings(struct ath_hw *ah)
{
	ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
		  ah->misc_mode);

	if (ah->misc_mode != 0)
		REG_WRITE(ah, AR_PCU_MISC,
			  REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
	if (ah->slottime != (u32) -1)
		ath9k_hw_setslottime(ah, ah->slottime);
	if (ah->acktimeout != (u32) -1)
		ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
	if (ah->ctstimeout != (u32) -1)
		ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
	if (ah->globaltxtimeout != (u32) -1)
		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
}

const char *ath9k_hw_probe(u16 vendorid, u16 devid)
{
	return vendorid == ATHEROS_VENDOR_ID ?
		ath9k_hw_devname(devid) : NULL;
}

void ath9k_hw_detach(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);

	if (common->state <= ATH_HW_INITIALIZED)
		goto free_hw;

	if (!AR_SREV_9100(ah))
		ath9k_hw_ani_disable(ah);

	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);

free_hw:
	if (!AR_SREV_9280_10_OR_LATER(ah))
		ath9k_hw_rf_free_ext_banks(ah);
	kfree(ah);
	ah = NULL;
}
EXPORT_SYMBOL(ath9k_hw_detach);

/*******/
/* INI */
/*******/

static void ath9k_hw_override_ini(struct ath_hw *ah,
				  struct ath9k_channel *chan)
{
	u32 val;

	if (AR_SREV_9271(ah)) {
		/*
		 * Enable spectral scan as a workaround for stuck beacons
		 * on AR9271 1.0. The stuck-beacon issue is not seen on
		 * AR9271 1.1.
		 */
		if (AR_SREV_9271_10(ah)) {
			val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) |
			      AR_PHY_SPECTRAL_SCAN_ENABLE;
			REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
		}
		else if (AR_SREV_9271_11(ah))
			/*
			 * change AR_PHY_RF_CTL3 setting to fix MAC issue
			 * present on AR9271 1.1
			 */
			REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
		return;
	}

	/*
	 * Set RX_ABORT and RX_DIS, and clear them only after RXE is set
	 * for the MAC. This prevents frames with corrupted descriptor
	 * status.
	 */
	REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));

	if (AR_SREV_9280_10_OR_LATER(ah)) {
		val = REG_READ(ah, AR_PCU_MISC_MODE2) &
		      (~AR_PCU_MISC_MODE2_HWWAR1);

		if (AR_SREV_9287_10_OR_LATER(ah))
			val = val & (~AR_PCU_MISC_MODE2_HWWAR2);

		REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
	}

	if (!AR_SREV_5416_20_OR_LATER(ah) ||
	    AR_SREV_9280_10_OR_LATER(ah))
		return;
	/*
	 * Disable BB clock gating
	 * Necessary to avoid issues on AR5416 2.0
	 */
	REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
}

static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
				  struct ar5416_eeprom_def *pEepData,
				  u32 reg, u32 value)
{
	struct base_eep_header *pBase = &(pEepData->baseEepHeader);
	struct ath_common *common = ath9k_hw_common(ah);

	switch (ah->hw_version.devid) {
	case AR9280_DEVID_PCI:
		if (reg == 0x7894) {
			ath_print(common, ATH_DBG_EEPROM,
				  "ini VAL: %x EEPROM: %x\n", value,
				  (pBase->version & 0xff));

			if ((pBase->version & 0xff) > 0x0a) {
				ath_print(common, ATH_DBG_EEPROM,
					  "PWDCLKIND: %d\n",
					  pBase->pwdclkind);
				value &= ~AR_AN_TOP2_PWDCLKIND;
				value |= AR_AN_TOP2_PWDCLKIND &
					(pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
			} else {
				ath_print(common, ATH_DBG_EEPROM,
					  "PWDCLKIND Earlier Rev\n");
			}

			ath_print(common, ATH_DBG_EEPROM,
				  "final ini VAL: %x\n", value);
		}
		break;
	}

	return value;
}

static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
			      struct ar5416_eeprom_def *pEepData,
			      u32 reg, u32 value)
{
	if (ah->eep_map == EEP_MAP_4KBITS)
		return value;
	else
		return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
}

static void ath9k_olc_init(struct ath_hw *ah)
{
	u32 i;

	if (OLC_FOR_AR9287_10_LATER) {
		REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
			    AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
		ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
					  AR9287_AN_TXPC0_TXPCMODE,
					  AR9287_AN_TXPC0_TXPCMODE_S,
					  AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
		udelay(100);
	} else {
		for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
			ah->originalGain[i] =
				MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
				   AR_PHY_TX_GAIN);
		ah->PDADCdelta = 0;
	}
}

static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
			      struct ath9k_channel *chan)
{
	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);

	if (IS_CHAN_B(chan))
		ctl |= CTL_11B;
	else if (IS_CHAN_G(chan))
		ctl |= CTL_11G;
	else
		ctl |= CTL_11A;

	return ctl;
}
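/*
 * In ath9k_hw_process_ini() below, modesIndex selects the column of
 * the mode-dependent INI tables (1: 5 GHz legacy/HT20, 2: 5 GHz HT40,
 * 3: 2 GHz HT40, 4: 2 GHz legacy/HT20/11b) and freqIndex selects the
 * band column (1: 5 GHz, 2: 2 GHz).
 */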
static int ath9k_hw_process_ini(struct ath_hw *ah,
				struct ath9k_channel *chan)
{
	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
	int i, regWrites = 0;
	struct ieee80211_channel *channel = chan->chan;
	u32 modesIndex, freqIndex;

	switch (chan->chanmode) {
	case CHANNEL_A:
	case CHANNEL_A_HT20:
		modesIndex = 1;
		freqIndex = 1;
		break;
	case CHANNEL_A_HT40PLUS:
	case CHANNEL_A_HT40MINUS:
		modesIndex = 2;
		freqIndex = 1;
		break;
	case CHANNEL_G:
	case CHANNEL_G_HT20:
	case CHANNEL_B:
		modesIndex = 4;
		freqIndex = 2;
		break;
	case CHANNEL_G_HT40PLUS:
	case CHANNEL_G_HT40MINUS:
		modesIndex = 3;
		freqIndex = 2;
		break;

	default:
		return -EINVAL;
	}

	REG_WRITE(ah, AR_PHY(0), 0x00000007);
	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
	ah->eep_ops->set_addac(ah, chan);

	if (AR_SREV_5416_22_OR_LATER(ah)) {
		REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
	} else {
		struct ar5416IniArray temp;
		u32 addacSize =
			sizeof(u32) * ah->iniAddac.ia_rows *
			ah->iniAddac.ia_columns;

		memcpy(ah->addac5416_21,
		       ah->iniAddac.ia_array, addacSize);

		(ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;

		temp.ia_array = ah->addac5416_21;
		temp.ia_columns = ah->iniAddac.ia_columns;
		temp.ia_rows = ah->iniAddac.ia_rows;
		REG_WRITE_ARRAY(&temp, 1, regWrites);
	}

	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);

	for (i = 0; i < ah->iniModes.ia_rows; i++) {
		u32 reg = INI_RA(&ah->iniModes, i, 0);
		u32 val = INI_RA(&ah->iniModes, i, modesIndex);

		REG_WRITE(ah, reg, val);

		if (reg >= 0x7800 && reg < 0x78a0
		    && ah->config.analog_shiftreg) {
			udelay(100);
		}

		DO_DELAY(regWrites);
	}

	if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
		REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);

	if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
	    AR_SREV_9287_10_OR_LATER(ah))
		REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);

	for (i = 0; i < ah->iniCommon.ia_rows; i++) {
		u32 reg = INI_RA(&ah->iniCommon, i, 0);
		u32 val = INI_RA(&ah->iniCommon, i, 1);

		REG_WRITE(ah, reg, val);

		if (reg >= 0x7800 && reg < 0x78a0
		    && ah->config.analog_shiftreg) {
			udelay(100);
		}

		DO_DELAY(regWrites);
	}

	ath9k_hw_write_regs(ah, freqIndex, regWrites);

	if (AR_SREV_9271_10(ah))
		REG_WRITE_ARRAY(&ah->iniModes_9271_1_0_only,
				modesIndex, regWrites);

	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
		REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
				regWrites);
	}

	ath9k_hw_override_ini(ah, chan);
	ath9k_hw_set_regs(ah, chan);
	ath9k_hw_init_chain_masks(ah);

	if (OLC_FOR_AR9280_20_LATER)
		ath9k_olc_init(ah);

	ah->eep_ops->set_txpower(ah, chan,
				 ath9k_regd_get_ctl(regulatory, chan),
				 channel->max_antenna_gain * 2,
				 channel->max_power * 2,
				 min((u32) MAX_RATE_POWER,
				     (u32) regulatory->power_limit));

	if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "ar5416SetRfRegs failed\n");
		return -EIO;
	}

	return 0;
}

/****************************************/
/* Reset and Channel Switching Routines */
/****************************************/

static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
{
	u32 rfMode = 0;

	if (chan == NULL)
		return;

	rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
		? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;

	if (!AR_SREV_9280_10_OR_LATER(ah))
		rfMode |= (IS_CHAN_5GHZ(chan)) ?
			AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;

	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
		rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);

	REG_WRITE(ah, AR_PHY_MODE, rfMode);
}

static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
{
	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
}

static inline void ath9k_hw_set_dma(struct ath_hw *ah)
{
	u32 regval;

	/*
	 * set AHB_MODE not to do cacheline prefetches
	 */
	regval = REG_READ(ah, AR_AHB_MODE);
	REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);

	/*
	 * let mac dma reads be in 128 byte chunks
	 */
	regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
	REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);

	/*
	 * Restore TX Trigger Level to its pre-reset value.
	 * The initial value depends on whether aggregation is enabled, and is
	 * adjusted whenever underruns are detected.
	 */
	REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);

	/*
	 * let mac dma writes be in 128 byte chunks
	 */
	regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
	REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);

	/*
	 * Setup receive FIFO threshold to hold off TX activities
	 */
	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);

	/*
	 * reduce the number of usable entries in PCU TXBUF to avoid
	 * wrap around issues.
	 */
	if (AR_SREV_9285(ah)) {
		/* For AR9285 the number of Fifos are reduced to half.
		 * So set the usable tx buf size also to half to
		 * avoid data/delimiter underruns
		 */
		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
	} else if (!AR_SREV_9271(ah)) {
		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
	}
}

static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
{
	u32 val;

	val = REG_READ(ah, AR_STA_ID1);
	val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
	switch (opmode) {
	case NL80211_IFTYPE_AP:
		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
			  | AR_STA_ID1_KSRCH_MODE);
		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
		break;
	case NL80211_IFTYPE_ADHOC:
	case NL80211_IFTYPE_MESH_POINT:
		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
			  | AR_STA_ID1_KSRCH_MODE);
		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
		break;
	case NL80211_IFTYPE_STATION:
	case NL80211_IFTYPE_MONITOR:
		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
		break;
	}
}

static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
						 u32 coef_scaled,
						 u32 *coef_mantissa,
						 u32 *coef_exponent)
{
	u32 coef_exp, coef_man;

	for (coef_exp = 31; coef_exp > 0; coef_exp--)
		if ((coef_scaled >> coef_exp) & 0x1)
			break;

	coef_exp = 14 - (coef_exp - COEF_SCALE_S);

	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));

	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
	*coef_exponent = coef_exp - 16;
}
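/*
 * Note on the delta-slope math below: clockMhzScaled is 100 MHz in
 * fixed point (0x64 << 24, assuming COEF_SCALE_S is 24), so coef_scaled
 * becomes 100 / synth_center_MHz in the same fixed-point format.  The
 * helper above splits that into the mantissa/exponent pair the PHY
 * timing registers expect; the half-GI coefficient is 9/10 of it.
 */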
static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
				     struct ath9k_channel *chan)
{
	u32 coef_scaled, ds_coef_exp, ds_coef_man;
	u32 clockMhzScaled = 0x64000000;
	struct chan_centers centers;

	if (IS_CHAN_HALF_RATE(chan))
		clockMhzScaled = clockMhzScaled >> 1;
	else if (IS_CHAN_QUARTER_RATE(chan))
		clockMhzScaled = clockMhzScaled >> 2;

	ath9k_hw_get_channel_centers(ah, chan, &centers);
	coef_scaled = clockMhzScaled / centers.synth_center;

	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
				      &ds_coef_exp);

	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
		      AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
		      AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);

	coef_scaled = (9 * coef_scaled) / 10;

	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
				      &ds_coef_exp);

	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
		      AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
		      AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
}

static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
{
	u32 rst_flags;
	u32 tmpReg;

	if (AR_SREV_9100(ah)) {
		u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
		val &= ~AR_RTC_DERIVED_CLK_PERIOD;
		val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
		REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
	}

	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
		  AR_RTC_FORCE_WAKE_ON_INT);

	if (AR_SREV_9100(ah)) {
		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
	} else {
		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
		if (tmpReg &
		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
		} else {
			REG_WRITE(ah, AR_RC, AR_RC_AHB);
		}

		rst_flags = AR_RTC_RC_MAC_WARM;
		if (type == ATH9K_RESET_COLD)
			rst_flags |= AR_RTC_RC_MAC_COLD;
	}

	REG_WRITE(ah, AR_RTC_RC, rst_flags);
	udelay(50);

	REG_WRITE(ah, AR_RTC_RC, 0);
	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
			  "RTC stuck in MAC reset\n");
		return false;
	}

	if (!AR_SREV_9100(ah))
		REG_WRITE(ah, AR_RC, 0);

	if (AR_SREV_9100(ah))
		udelay(50);

	return true;
}

static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
{
	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
		  AR_RTC_FORCE_WAKE_ON_INT);

	if (!AR_SREV_9100(ah))
		REG_WRITE(ah, AR_RC, AR_RC_AHB);

	REG_WRITE(ah, AR_RTC_RESET, 0);
	udelay(2);

	if (!AR_SREV_9100(ah))
		REG_WRITE(ah, AR_RC, 0);

	REG_WRITE(ah, AR_RTC_RESET, 1);

	if (!ath9k_hw_wait(ah,
			   AR_RTC_STATUS,
			   AR_RTC_STATUS_M,
			   AR_RTC_STATUS_ON,
			   AH_WAIT_TIMEOUT)) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_RESET,
			  "RTC not waking up\n");
		return false;
	}

	ath9k_hw_read_revisions(ah);

	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
}

static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
{
	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);

	switch (type) {
	case ATH9K_RESET_POWER_ON:
		return ath9k_hw_set_reset_power_on(ah);
	case ATH9K_RESET_WARM:
	case ATH9K_RESET_COLD:
		return ath9k_hw_set_reset(ah, type);
	default:
		return false;
	}
}
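/*
 * ath9k_hw_set_regs() below programs the PHY turbo register for the
 * current channel: HT is enabled unconditionally, and on HT40 channels
 * dynamic 20/40 operation is turned on with the primary channel placed
 * according to HT40+/HT40- mode.
 */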
static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan)
{
	u32 phymode;
	u32 enableDacFifo = 0;

	if (AR_SREV_9285_10_OR_LATER(ah))
		enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
				 AR_PHY_FC_ENABLE_DAC_FIFO);

	phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
		| AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;

	if (IS_CHAN_HT40(chan)) {
		phymode |= AR_PHY_FC_DYN2040_EN;

		if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
		    (chan->chanmode == CHANNEL_G_HT40PLUS))
			phymode |= AR_PHY_FC_DYN2040_PRI_CH;

	}
	REG_WRITE(ah, AR_PHY_TURBO, phymode);

	ath9k_hw_set11nmac2040(ah);

	REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
	REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
}

static bool ath9k_hw_chip_reset(struct ath_hw *ah,
				struct ath9k_channel *chan)
{
	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
		if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
			return false;
	} else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
		return false;

	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
		return false;

	ah->chip_fullsleep = false;
	ath9k_hw_init_pll(ah, chan);
	ath9k_hw_set_rfmode(ah, chan);

	return true;
}

static bool ath9k_hw_channel_change(struct ath_hw *ah,
				    struct ath9k_channel *chan)
{
	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
	struct ath_common *common = ath9k_hw_common(ah);
	struct ieee80211_channel *channel = chan->chan;
	u32 synthDelay, qnum;
	int r;

	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
		if (ath9k_hw_numtxpending(ah, qnum)) {
			ath_print(common, ATH_DBG_QUEUE,
				  "Transmit frames pending on "
				  "queue %d\n", qnum);
			return false;
		}
	}

	REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
	if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
			   AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Could not kill baseband RX\n");
		return false;
	}

	ath9k_hw_set_regs(ah, chan);

	r = ah->ath9k_hw_rf_set_freq(ah, chan);
	if (r) {
		ath_print(common, ATH_DBG_FATAL,
			  "Failed to set channel\n");
		return false;
	}

	ah->eep_ops->set_txpower(ah, chan,
				 ath9k_regd_get_ctl(regulatory, chan),
				 channel->max_antenna_gain * 2,
				 channel->max_power * 2,
				 min((u32) MAX_RATE_POWER,
				     (u32) regulatory->power_limit));

	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
	if (IS_CHAN_B(chan))
		synthDelay = (4 * synthDelay) / 22;
	else
		synthDelay /= 10;

	udelay(synthDelay + BASE_ACTIVATE_DELAY);

	REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);

	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
		ath9k_hw_set_delta_slope(ah, chan);

	ah->ath9k_hw_spur_mitigate_freq(ah, chan);

	if (!chan->oneTimeCalsDone)
		chan->oneTimeCalsDone = true;

	return true;
}

static void ath9k_enable_rfkill(struct ath_hw *ah)
{
	REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
		    AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);

	REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
		    AR_GPIO_INPUT_MUX2_RFSILENT);

	ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
	REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
}

int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
		   bool bChannelChange)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 saveLedState;
	struct ath9k_channel *curchan = ah->curchan;
	u32 saveDefAntenna;
	u32 macStaId1;
	u64 tsf = 0;
	int i, rx_chainmask, r;

	ah->txchainmask = common->tx_chainmask;
	ah->rxchainmask = common->rx_chainmask;

	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
		return -EIO;

	if (curchan && !ah->chip_fullsleep)
		ath9k_hw_getnf(ah, curchan);

	if (bChannelChange &&
	    (ah->chip_fullsleep != true) &&
	    (ah->curchan != NULL) &&
	    (chan->channel != ah->curchan->channel) &&
	    ((chan->channelFlags & CHANNEL_ALL) ==
	     (ah->curchan->channelFlags & CHANNEL_ALL)) &&
	    !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
	      IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {

		if (ath9k_hw_channel_change(ah, chan)) {
			ath9k_hw_loadnf(ah, ah->curchan);
			ath9k_hw_start_nfcal(ah);
			return 0;
		}
	}

	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
	if (saveDefAntenna == 0)
		saveDefAntenna = 1;

	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;

	/* For chips on which RTC reset is done, save TSF before it gets cleared */
	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
		tsf = ath9k_hw_gettsf64(ah);

	saveLedState = REG_READ(ah, AR_CFG_LED) &
		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);

	ath9k_hw_mark_phy_inactive(ah);

	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
		REG_WRITE(ah,
			  AR9271_RESET_POWER_DOWN_CONTROL,
			  AR9271_RADIO_RF_RST);
		udelay(50);
	}

	if (!ath9k_hw_chip_reset(ah, chan)) {
		ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n");
		return -EINVAL;
	}

	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
		ah->htc_reset_init = false;
		REG_WRITE(ah,
			  AR9271_RESET_POWER_DOWN_CONTROL,
			  AR9271_GATE_MAC_CTL);
		udelay(50);
	}

	/* Restore TSF */
	if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
		ath9k_hw_settsf64(ah, tsf);

	if (AR_SREV_9280_10_OR_LATER(ah))
		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);

	if (AR_SREV_9287_12_OR_LATER(ah)) {
		/* Enable ASYNC FIFO */
		REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
			    AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
		REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
		REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
			    AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
		REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
			    AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
	}
	r = ath9k_hw_process_ini(ah, chan);
	if (r)
		return r;

	/* Setup MFP options for CCMP */
	if (AR_SREV_9280_20_OR_LATER(ah)) {
		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
		 * frames when constructing CCMP AAD. */
*/ 2038 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT, 2039 0xc7ff); 2040 ah->sw_mgmt_crypto = false; 2041 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 2042 /* Disable hardware crypto for management frames */ 2043 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2, 2044 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE); 2045 REG_SET_BIT(ah, AR_PCU_MISC_MODE2, 2046 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT); 2047 ah->sw_mgmt_crypto = true; 2048 } else 2049 ah->sw_mgmt_crypto = true; 2050 2051 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan)) 2052 ath9k_hw_set_delta_slope(ah, chan); 2053 2054 ah->ath9k_hw_spur_mitigate_freq(ah, chan); 2055 ah->eep_ops->set_board_values(ah, chan); 2056 2057 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr)); 2058 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4) 2059 | macStaId1 2060 | AR_STA_ID1_RTS_USE_DEF 2061 | (ah->config. 2062 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0) 2063 | ah->sta_id1_defaults); 2064 ath9k_hw_set_operating_mode(ah, ah->opmode); 2065 2066 ath_hw_setbssidmask(common); 2067 2068 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna); 2069 2070 ath9k_hw_write_associd(ah); 2071 2072 REG_WRITE(ah, AR_ISR, ~0); 2073 2074 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR); 2075 2076 r = ah->ath9k_hw_rf_set_freq(ah, chan); 2077 if (r) 2078 return r; 2079 2080 for (i = 0; i < AR_NUM_DCU; i++) 2081 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i); 2082 2083 ah->intr_txqs = 0; 2084 for (i = 0; i < ah->caps.total_queues; i++) 2085 ath9k_hw_resettxqueue(ah, i); 2086 2087 ath9k_hw_init_interrupt_masks(ah, ah->opmode); 2088 ath9k_hw_init_qos(ah); 2089 2090 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT) 2091 ath9k_enable_rfkill(ah); 2092 2093 ath9k_hw_init_user_settings(ah); 2094 2095 if (AR_SREV_9287_12_OR_LATER(ah)) { 2096 REG_WRITE(ah, AR_D_GBL_IFS_SIFS, 2097 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR); 2098 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, 2099 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR); 2100 REG_WRITE(ah, AR_D_GBL_IFS_EIFS, 2101 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR); 2102 2103 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR); 2104 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR); 2105 2106 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER, 2107 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768); 2108 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN, 2109 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL); 2110 } 2111 if (AR_SREV_9287_12_OR_LATER(ah)) { 2112 REG_SET_BIT(ah, AR_PCU_MISC_MODE2, 2113 AR_PCU_MISC_MODE2_ENABLE_AGGWEP); 2114 } 2115 2116 REG_WRITE(ah, AR_STA_ID1, 2117 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM); 2118 2119 ath9k_hw_set_dma(ah); 2120 2121 REG_WRITE(ah, AR_OBS, 8); 2122 2123 if (ah->config.intr_mitigation) { 2124 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500); 2125 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000); 2126 } 2127 2128 ath9k_hw_init_bb(ah, chan); 2129 2130 if (!ath9k_hw_init_cal(ah, chan)) 2131 return -EIO; 2132 2133 rx_chainmask = ah->rxchainmask; 2134 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) { 2135 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask); 2136 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask); 2137 } 2138 2139 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ); 2140 2141 /* 2142 * For big endian systems turn on swapping for descriptors 2143 */ 2144 if (AR_SREV_9100(ah)) { 2145 u32 mask; 2146 mask = REG_READ(ah, AR_CFG); 2147 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) { 2148 ath_print(common, ATH_DBG_RESET, 2149 "CFG Byte Swap Set 0x%x\n", mask); 2150 } else { 2151 mask = 2152 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB; 
2153 REG_WRITE(ah, AR_CFG, mask); 2154 ath_print(common, ATH_DBG_RESET, 2155 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG)); 2156 } 2157 } else { 2158 /* Configure AR9271 target WLAN */ 2159 if (AR_SREV_9271(ah)) 2160 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB); 2161 #ifdef __BIG_ENDIAN 2162 else 2163 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD); 2164 #endif 2165 } 2166 2167 if (ah->btcoex_hw.enabled) 2168 ath9k_hw_btcoex_enable(ah); 2169 2170 return 0; 2171 } 2172 EXPORT_SYMBOL(ath9k_hw_reset); 2173 2174 /************************/ 2175 /* Key Cache Management */ 2176 /************************/ 2177 2178 bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry) 2179 { 2180 u32 keyType; 2181 2182 if (entry >= ah->caps.keycache_size) { 2183 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 2184 "keychache entry %u out of range\n", entry); 2185 return false; 2186 } 2187 2188 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry)); 2189 2190 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0); 2191 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0); 2192 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0); 2193 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0); 2194 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0); 2195 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR); 2196 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0); 2197 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0); 2198 2199 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) { 2200 u16 micentry = entry + 64; 2201 2202 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0); 2203 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0); 2204 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0); 2205 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0); 2206 2207 } 2208 2209 return true; 2210 } 2211 EXPORT_SYMBOL(ath9k_hw_keyreset); 2212 2213 bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac) 2214 { 2215 u32 macHi, macLo; 2216 2217 if (entry >= ah->caps.keycache_size) { 2218 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 2219 "keychache entry %u out of range\n", entry); 2220 return false; 2221 } 2222 2223 if (mac != NULL) { 2224 macHi = (mac[5] << 8) | mac[4]; 2225 macLo = (mac[3] << 24) | 2226 (mac[2] << 16) | 2227 (mac[1] << 8) | 2228 mac[0]; 2229 macLo >>= 1; 2230 macLo |= (macHi & 1) << 31; 2231 macHi >>= 1; 2232 } else { 2233 macLo = macHi = 0; 2234 } 2235 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo); 2236 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID); 2237 2238 return true; 2239 } 2240 EXPORT_SYMBOL(ath9k_hw_keysetmac); 2241 2242 bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry, 2243 const struct ath9k_keyval *k, 2244 const u8 *mac) 2245 { 2246 const struct ath9k_hw_capabilities *pCap = &ah->caps; 2247 struct ath_common *common = ath9k_hw_common(ah); 2248 u32 key0, key1, key2, key3, key4; 2249 u32 keyType; 2250 2251 if (entry >= pCap->keycache_size) { 2252 ath_print(common, ATH_DBG_FATAL, 2253 "keycache entry %u out of range\n", entry); 2254 return false; 2255 } 2256 2257 switch (k->kv_type) { 2258 case ATH9K_CIPHER_AES_OCB: 2259 keyType = AR_KEYTABLE_TYPE_AES; 2260 break; 2261 case ATH9K_CIPHER_AES_CCM: 2262 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) { 2263 ath_print(common, ATH_DBG_ANY, 2264 "AES-CCM not supported by mac rev 0x%x\n", 2265 ah->hw_version.macRev); 2266 return false; 2267 } 2268 keyType = AR_KEYTABLE_TYPE_CCM; 2269 break; 2270 case ATH9K_CIPHER_TKIP: 2271 keyType = AR_KEYTABLE_TYPE_TKIP; 2272 if (ATH9K_IS_MIC_ENABLED(ah) 2273 && entry + 64 >= pCap->keycache_size) { 2274 ath_print(common, ATH_DBG_ANY, 2275 "entry %u inappropriate for 
TKIP\n", entry); 2276 return false; 2277 } 2278 break; 2279 case ATH9K_CIPHER_WEP: 2280 if (k->kv_len < WLAN_KEY_LEN_WEP40) { 2281 ath_print(common, ATH_DBG_ANY, 2282 "WEP key length %u too small\n", k->kv_len); 2283 return false; 2284 } 2285 if (k->kv_len <= WLAN_KEY_LEN_WEP40) 2286 keyType = AR_KEYTABLE_TYPE_40; 2287 else if (k->kv_len <= WLAN_KEY_LEN_WEP104) 2288 keyType = AR_KEYTABLE_TYPE_104; 2289 else 2290 keyType = AR_KEYTABLE_TYPE_128; 2291 break; 2292 case ATH9K_CIPHER_CLR: 2293 keyType = AR_KEYTABLE_TYPE_CLR; 2294 break; 2295 default: 2296 ath_print(common, ATH_DBG_FATAL, 2297 "cipher %u not supported\n", k->kv_type); 2298 return false; 2299 } 2300 2301 key0 = get_unaligned_le32(k->kv_val + 0); 2302 key1 = get_unaligned_le16(k->kv_val + 4); 2303 key2 = get_unaligned_le32(k->kv_val + 6); 2304 key3 = get_unaligned_le16(k->kv_val + 10); 2305 key4 = get_unaligned_le32(k->kv_val + 12); 2306 if (k->kv_len <= WLAN_KEY_LEN_WEP104) 2307 key4 &= 0xff; 2308 2309 /* 2310 * Note: Key cache registers access special memory area that requires 2311 * two 32-bit writes to actually update the values in the internal 2312 * memory. Consequently, the exact order and pairs used here must be 2313 * maintained. 2314 */ 2315 2316 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) { 2317 u16 micentry = entry + 64; 2318 2319 /* 2320 * Write inverted key[47:0] first to avoid Michael MIC errors 2321 * on frames that could be sent or received at the same time. 2322 * The correct key will be written in the end once everything 2323 * else is ready. 2324 */ 2325 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0); 2326 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1); 2327 2328 /* Write key[95:48] */ 2329 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2); 2330 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3); 2331 2332 /* Write key[127:96] and key type */ 2333 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4); 2334 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType); 2335 2336 /* Write MAC address for the entry */ 2337 (void) ath9k_hw_keysetmac(ah, entry, mac); 2338 2339 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) { 2340 /* 2341 * TKIP uses two key cache entries: 2342 * Michael MIC TX/RX keys in the same key cache entry 2343 * (idx = main index + 64): 2344 * key0 [31:0] = RX key [31:0] 2345 * key1 [15:0] = TX key [31:16] 2346 * key1 [31:16] = reserved 2347 * key2 [31:0] = RX key [63:32] 2348 * key3 [15:0] = TX key [15:0] 2349 * key3 [31:16] = reserved 2350 * key4 [31:0] = TX key [63:32] 2351 */ 2352 u32 mic0, mic1, mic2, mic3, mic4; 2353 2354 mic0 = get_unaligned_le32(k->kv_mic + 0); 2355 mic2 = get_unaligned_le32(k->kv_mic + 4); 2356 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff; 2357 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff; 2358 mic4 = get_unaligned_le32(k->kv_txmic + 4); 2359 2360 /* Write RX[31:0] and TX[31:16] */ 2361 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0); 2362 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1); 2363 2364 /* Write RX[63:32] and TX[15:0] */ 2365 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2); 2366 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3); 2367 2368 /* Write TX[63:32] and keyType(reserved) */ 2369 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4); 2370 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry), 2371 AR_KEYTABLE_TYPE_CLR); 2372 2373 } else { 2374 /* 2375 * TKIP uses four key cache entries (two for group 2376 * keys): 2377 * Michael MIC TX/RX keys are in different key cache 2378 * entries (idx = main index + 64 for TX and 2379 * main index + 32 + 96 for RX): 2380 * key0 [31:0] 
= TX/RX MIC key [31:0] 2381 * key1 [31:0] = reserved 2382 * key2 [31:0] = TX/RX MIC key [63:32] 2383 * key3 [31:0] = reserved 2384 * key4 [31:0] = reserved 2385 * 2386 * Upper layer code will call this function separately 2387 * for TX and RX keys when these registers offsets are 2388 * used. 2389 */ 2390 u32 mic0, mic2; 2391 2392 mic0 = get_unaligned_le32(k->kv_mic + 0); 2393 mic2 = get_unaligned_le32(k->kv_mic + 4); 2394 2395 /* Write MIC key[31:0] */ 2396 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0); 2397 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0); 2398 2399 /* Write MIC key[63:32] */ 2400 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2); 2401 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0); 2402 2403 /* Write TX[63:32] and keyType(reserved) */ 2404 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0); 2405 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry), 2406 AR_KEYTABLE_TYPE_CLR); 2407 } 2408 2409 /* MAC address registers are reserved for the MIC entry */ 2410 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0); 2411 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0); 2412 2413 /* 2414 * Write the correct (un-inverted) key[47:0] last to enable 2415 * TKIP now that all other registers are set with correct 2416 * values. 2417 */ 2418 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0); 2419 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1); 2420 } else { 2421 /* Write key[47:0] */ 2422 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0); 2423 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1); 2424 2425 /* Write key[95:48] */ 2426 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2); 2427 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3); 2428 2429 /* Write key[127:96] and key type */ 2430 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4); 2431 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType); 2432 2433 /* Write MAC address for the entry */ 2434 (void) ath9k_hw_keysetmac(ah, entry, mac); 2435 } 2436 2437 return true; 2438 } 2439 EXPORT_SYMBOL(ath9k_hw_set_keycache_entry); 2440 2441 bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry) 2442 { 2443 if (entry < ah->caps.keycache_size) { 2444 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry)); 2445 if (val & AR_KEYTABLE_VALID) 2446 return true; 2447 } 2448 return false; 2449 } 2450 EXPORT_SYMBOL(ath9k_hw_keyisvalid); 2451 2452 /******************************/ 2453 /* Power Management (Chipset) */ 2454 /******************************/ 2455 2456 static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip) 2457 { 2458 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2459 if (setChip) { 2460 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, 2461 AR_RTC_FORCE_WAKE_EN); 2462 if (!AR_SREV_9100(ah)) 2463 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF); 2464 2465 if(!AR_SREV_5416(ah)) 2466 REG_CLR_BIT(ah, (AR_RTC_RESET), 2467 AR_RTC_RESET_EN); 2468 } 2469 } 2470 2471 static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip) 2472 { 2473 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2474 if (setChip) { 2475 struct ath9k_hw_capabilities *pCap = &ah->caps; 2476 2477 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 2478 REG_WRITE(ah, AR_RTC_FORCE_WAKE, 2479 AR_RTC_FORCE_WAKE_ON_INT); 2480 } else { 2481 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, 2482 AR_RTC_FORCE_WAKE_EN); 2483 } 2484 } 2485 } 2486 2487 static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip) 2488 { 2489 u32 val; 2490 int i; 2491 2492 if (setChip) { 2493 if ((REG_READ(ah, AR_RTC_STATUS) & 2494 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) { 2495 if (ath9k_hw_set_reset_reg(ah, 2496 ATH9K_RESET_POWER_ON) != true) { 2497 return false; 2498 } 2499 
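			/*
			 * The RTC was found in SHUTDOWN, so a power-on
			 * reset was issued above; reprogram the PLL
			 * before forcing the chip awake below.
			 */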
ath9k_hw_init_pll(ah, NULL); 2500 } 2501 if (AR_SREV_9100(ah)) 2502 REG_SET_BIT(ah, AR_RTC_RESET, 2503 AR_RTC_RESET_EN); 2504 2505 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE, 2506 AR_RTC_FORCE_WAKE_EN); 2507 udelay(50); 2508 2509 for (i = POWER_UP_TIME / 50; i > 0; i--) { 2510 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M; 2511 if (val == AR_RTC_STATUS_ON) 2512 break; 2513 udelay(50); 2514 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE, 2515 AR_RTC_FORCE_WAKE_EN); 2516 } 2517 if (i == 0) { 2518 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 2519 "Failed to wakeup in %uus\n", 2520 POWER_UP_TIME / 20); 2521 return false; 2522 } 2523 } 2524 2525 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2526 2527 return true; 2528 } 2529 2530 bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode) 2531 { 2532 struct ath_common *common = ath9k_hw_common(ah); 2533 int status = true, setChip = true; 2534 static const char *modes[] = { 2535 "AWAKE", 2536 "FULL-SLEEP", 2537 "NETWORK SLEEP", 2538 "UNDEFINED" 2539 }; 2540 2541 if (ah->power_mode == mode) 2542 return status; 2543 2544 ath_print(common, ATH_DBG_RESET, "%s -> %s\n", 2545 modes[ah->power_mode], modes[mode]); 2546 2547 switch (mode) { 2548 case ATH9K_PM_AWAKE: 2549 status = ath9k_hw_set_power_awake(ah, setChip); 2550 break; 2551 case ATH9K_PM_FULL_SLEEP: 2552 ath9k_set_power_sleep(ah, setChip); 2553 ah->chip_fullsleep = true; 2554 break; 2555 case ATH9K_PM_NETWORK_SLEEP: 2556 ath9k_set_power_network_sleep(ah, setChip); 2557 break; 2558 default: 2559 ath_print(common, ATH_DBG_FATAL, 2560 "Unknown power mode %u\n", mode); 2561 return false; 2562 } 2563 ah->power_mode = mode; 2564 2565 return status; 2566 } 2567 EXPORT_SYMBOL(ath9k_hw_setpower); 2568 2569 /* 2570 * Helper for ASPM support. 2571 * 2572 * Disable PLL when in L0s as well as receiver clock when in L1. 2573 * This power saving option must be enabled through the SerDes. 2574 * 2575 * Programming the SerDes must go through the same 288 bit serial shift 2576 * register as the other analog registers. Hence the 9 writes. 
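 * (9 writes x 32 bits = 288 bits, i.e. the full width of the shift
 * register.)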
2577 */ 2578 void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off) 2579 { 2580 u8 i; 2581 u32 val; 2582 2583 if (ah->is_pciexpress != true) 2584 return; 2585 2586 /* Do not touch SerDes registers */ 2587 if (ah->config.pcie_powersave_enable == 2) 2588 return; 2589 2590 /* Nothing to do on restore for 11N */ 2591 if (!restore) { 2592 if (AR_SREV_9280_20_OR_LATER(ah)) { 2593 /* 2594 * AR9280 2.0 or later chips use SerDes values from the 2595 * initvals.h initialized depending on chipset during 2596 * ath9k_hw_init() 2597 */ 2598 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) { 2599 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0), 2600 INI_RA(&ah->iniPcieSerdes, i, 1)); 2601 } 2602 } else if (AR_SREV_9280(ah) && 2603 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) { 2604 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00); 2605 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 2606 2607 /* RX shut off when elecidle is asserted */ 2608 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019); 2609 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820); 2610 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560); 2611 2612 /* Shut off CLKREQ active in L1 */ 2613 if (ah->config.pcie_clock_req) 2614 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc); 2615 else 2616 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd); 2617 2618 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 2619 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 2620 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007); 2621 2622 /* Load the new settings */ 2623 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 2624 2625 } else { 2626 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00); 2627 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 2628 2629 /* RX shut off when elecidle is asserted */ 2630 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039); 2631 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824); 2632 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579); 2633 2634 /* 2635 * Ignore ah->ah_config.pcie_clock_req setting for 2636 * pre-AR9280 11n 2637 */ 2638 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff); 2639 2640 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 2641 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 2642 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007); 2643 2644 /* Load the new settings */ 2645 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 2646 } 2647 2648 udelay(1000); 2649 2650 /* set bit 19 to allow forcing of pcie core into L1 state */ 2651 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA); 2652 2653 /* Several PCIe massages to ensure proper behaviour */ 2654 if (ah->config.pcie_waen) { 2655 val = ah->config.pcie_waen; 2656 if (!power_off) 2657 val &= (~AR_WA_D3_L1_DISABLE); 2658 } else { 2659 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) || 2660 AR_SREV_9287(ah)) { 2661 val = AR9285_WA_DEFAULT; 2662 if (!power_off) 2663 val &= (~AR_WA_D3_L1_DISABLE); 2664 } else if (AR_SREV_9280(ah)) { 2665 /* 2666 * On AR9280 chips bit 22 of 0x4004 needs to be 2667 * set otherwise card may disappear. 2668 */ 2669 val = AR9280_WA_DEFAULT; 2670 if (!power_off) 2671 val &= (~AR_WA_D3_L1_DISABLE); 2672 } else 2673 val = AR_WA_DEFAULT; 2674 } 2675 2676 REG_WRITE(ah, AR_WA, val); 2677 } 2678 2679 if (power_off) { 2680 /* 2681 * Set PCIe workaround bits 2682 * bit 14 in WA register (disable L1) should only 2683 * be set when device enters D3 and be cleared 2684 * when device comes back to D0. 
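		 * AR_WA_D3_L1_DISABLE is that bit; the logic below only
		 * sets it when the user-supplied pcie_waen value, or the
		 * chip's default workaround word, has it enabled.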
2685 */ 2686 if (ah->config.pcie_waen) { 2687 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE) 2688 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE); 2689 } else { 2690 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) || 2691 AR_SREV_9287(ah)) && 2692 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) || 2693 (AR_SREV_9280(ah) && 2694 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) { 2695 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE); 2696 } 2697 } 2698 } 2699 } 2700 EXPORT_SYMBOL(ath9k_hw_configpcipowersave); 2701 2702 /**********************/ 2703 /* Interrupt Handling */ 2704 /**********************/ 2705 2706 bool ath9k_hw_intrpend(struct ath_hw *ah) 2707 { 2708 u32 host_isr; 2709 2710 if (AR_SREV_9100(ah)) 2711 return true; 2712 2713 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE); 2714 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS)) 2715 return true; 2716 2717 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE); 2718 if ((host_isr & AR_INTR_SYNC_DEFAULT) 2719 && (host_isr != AR_INTR_SPURIOUS)) 2720 return true; 2721 2722 return false; 2723 } 2724 EXPORT_SYMBOL(ath9k_hw_intrpend); 2725 2726 bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked) 2727 { 2728 u32 isr = 0; 2729 u32 mask2 = 0; 2730 struct ath9k_hw_capabilities *pCap = &ah->caps; 2731 u32 sync_cause = 0; 2732 bool fatal_int = false; 2733 struct ath_common *common = ath9k_hw_common(ah); 2734 2735 if (!AR_SREV_9100(ah)) { 2736 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) { 2737 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M) 2738 == AR_RTC_STATUS_ON) { 2739 isr = REG_READ(ah, AR_ISR); 2740 } 2741 } 2742 2743 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) & 2744 AR_INTR_SYNC_DEFAULT; 2745 2746 *masked = 0; 2747 2748 if (!isr && !sync_cause) 2749 return false; 2750 } else { 2751 *masked = 0; 2752 isr = REG_READ(ah, AR_ISR); 2753 } 2754 2755 if (isr) { 2756 if (isr & AR_ISR_BCNMISC) { 2757 u32 isr2; 2758 isr2 = REG_READ(ah, AR_ISR_S2); 2759 if (isr2 & AR_ISR_S2_TIM) 2760 mask2 |= ATH9K_INT_TIM; 2761 if (isr2 & AR_ISR_S2_DTIM) 2762 mask2 |= ATH9K_INT_DTIM; 2763 if (isr2 & AR_ISR_S2_DTIMSYNC) 2764 mask2 |= ATH9K_INT_DTIMSYNC; 2765 if (isr2 & (AR_ISR_S2_CABEND)) 2766 mask2 |= ATH9K_INT_CABEND; 2767 if (isr2 & AR_ISR_S2_GTT) 2768 mask2 |= ATH9K_INT_GTT; 2769 if (isr2 & AR_ISR_S2_CST) 2770 mask2 |= ATH9K_INT_CST; 2771 if (isr2 & AR_ISR_S2_TSFOOR) 2772 mask2 |= ATH9K_INT_TSFOOR; 2773 } 2774 2775 isr = REG_READ(ah, AR_ISR_RAC); 2776 if (isr == 0xffffffff) { 2777 *masked = 0; 2778 return false; 2779 } 2780 2781 *masked = isr & ATH9K_INT_COMMON; 2782 2783 if (ah->config.intr_mitigation) { 2784 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM)) 2785 *masked |= ATH9K_INT_RX; 2786 } 2787 2788 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR)) 2789 *masked |= ATH9K_INT_RX; 2790 if (isr & 2791 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR | 2792 AR_ISR_TXEOL)) { 2793 u32 s0_s, s1_s; 2794 2795 *masked |= ATH9K_INT_TX; 2796 2797 s0_s = REG_READ(ah, AR_ISR_S0_S); 2798 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK); 2799 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC); 2800 2801 s1_s = REG_READ(ah, AR_ISR_S1_S); 2802 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR); 2803 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL); 2804 } 2805 2806 if (isr & AR_ISR_RXORN) { 2807 ath_print(common, ATH_DBG_INTERRUPT, 2808 "receive FIFO overrun interrupt\n"); 2809 } 2810 2811 if (!AR_SREV_9100(ah)) { 2812 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 2813 u32 isr5 = REG_READ(ah, AR_ISR_S5_S); 2814 if (isr5 & AR_ISR_S5_TIM_TIMER) 2815 *masked |= ATH9K_INT_TIM_TIMER; 2816 } 
2817 } 2818 2819 *masked |= mask2; 2820 } 2821 2822 if (AR_SREV_9100(ah)) 2823 return true; 2824 2825 if (isr & AR_ISR_GENTMR) { 2826 u32 s5_s; 2827 2828 s5_s = REG_READ(ah, AR_ISR_S5_S); 2829 if (isr & AR_ISR_GENTMR) { 2830 ah->intr_gen_timer_trigger = 2831 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG); 2832 2833 ah->intr_gen_timer_thresh = 2834 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH); 2835 2836 if (ah->intr_gen_timer_trigger) 2837 *masked |= ATH9K_INT_GENTIMER; 2838 2839 } 2840 } 2841 2842 if (sync_cause) { 2843 fatal_int = 2844 (sync_cause & 2845 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) 2846 ? true : false; 2847 2848 if (fatal_int) { 2849 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) { 2850 ath_print(common, ATH_DBG_ANY, 2851 "received PCI FATAL interrupt\n"); 2852 } 2853 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) { 2854 ath_print(common, ATH_DBG_ANY, 2855 "received PCI PERR interrupt\n"); 2856 } 2857 *masked |= ATH9K_INT_FATAL; 2858 } 2859 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) { 2860 ath_print(common, ATH_DBG_INTERRUPT, 2861 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n"); 2862 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF); 2863 REG_WRITE(ah, AR_RC, 0); 2864 *masked |= ATH9K_INT_FATAL; 2865 } 2866 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) { 2867 ath_print(common, ATH_DBG_INTERRUPT, 2868 "AR_INTR_SYNC_LOCAL_TIMEOUT\n"); 2869 } 2870 2871 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause); 2872 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR); 2873 } 2874 2875 return true; 2876 } 2877 EXPORT_SYMBOL(ath9k_hw_getisr); 2878 2879 enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints) 2880 { 2881 u32 omask = ah->mask_reg; 2882 u32 mask, mask2; 2883 struct ath9k_hw_capabilities *pCap = &ah->caps; 2884 struct ath_common *common = ath9k_hw_common(ah); 2885 2886 ath_print(common, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints); 2887 2888 if (omask & ATH9K_INT_GLOBAL) { 2889 ath_print(common, ATH_DBG_INTERRUPT, "disable IER\n"); 2890 REG_WRITE(ah, AR_IER, AR_IER_DISABLE); 2891 (void) REG_READ(ah, AR_IER); 2892 if (!AR_SREV_9100(ah)) { 2893 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0); 2894 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE); 2895 2896 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0); 2897 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE); 2898 } 2899 } 2900 2901 mask = ints & ATH9K_INT_COMMON; 2902 mask2 = 0; 2903 2904 if (ints & ATH9K_INT_TX) { 2905 if (ah->txok_interrupt_mask) 2906 mask |= AR_IMR_TXOK; 2907 if (ah->txdesc_interrupt_mask) 2908 mask |= AR_IMR_TXDESC; 2909 if (ah->txerr_interrupt_mask) 2910 mask |= AR_IMR_TXERR; 2911 if (ah->txeol_interrupt_mask) 2912 mask |= AR_IMR_TXEOL; 2913 } 2914 if (ints & ATH9K_INT_RX) { 2915 mask |= AR_IMR_RXERR; 2916 if (ah->config.intr_mitigation) 2917 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM; 2918 else 2919 mask |= AR_IMR_RXOK | AR_IMR_RXDESC; 2920 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) 2921 mask |= AR_IMR_GENTMR; 2922 } 2923 2924 if (ints & (ATH9K_INT_BMISC)) { 2925 mask |= AR_IMR_BCNMISC; 2926 if (ints & ATH9K_INT_TIM) 2927 mask2 |= AR_IMR_S2_TIM; 2928 if (ints & ATH9K_INT_DTIM) 2929 mask2 |= AR_IMR_S2_DTIM; 2930 if (ints & ATH9K_INT_DTIMSYNC) 2931 mask2 |= AR_IMR_S2_DTIMSYNC; 2932 if (ints & ATH9K_INT_CABEND) 2933 mask2 |= AR_IMR_S2_CABEND; 2934 if (ints & ATH9K_INT_TSFOOR) 2935 mask2 |= AR_IMR_S2_TSFOOR; 2936 } 2937 2938 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) { 2939 mask |= AR_IMR_BCNMISC; 2940 if (ints & ATH9K_INT_GTT) 2941 mask2 |= AR_IMR_S2_GTT; 2942 if (ints & ATH9K_INT_CST) 2943 mask2 |= AR_IMR_S2_CST; 2944 } 2945 2946 ath_print(common, 
ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask); 2947 REG_WRITE(ah, AR_IMR, mask); 2948 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM | 2949 AR_IMR_S2_DTIM | 2950 AR_IMR_S2_DTIMSYNC | 2951 AR_IMR_S2_CABEND | 2952 AR_IMR_S2_CABTO | 2953 AR_IMR_S2_TSFOOR | 2954 AR_IMR_S2_GTT | AR_IMR_S2_CST); 2955 REG_WRITE(ah, AR_IMR_S2, mask | mask2); 2956 ah->mask_reg = ints; 2957 2958 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 2959 if (ints & ATH9K_INT_TIM_TIMER) 2960 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER); 2961 else 2962 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER); 2963 } 2964 2965 if (ints & ATH9K_INT_GLOBAL) { 2966 ath_print(common, ATH_DBG_INTERRUPT, "enable IER\n"); 2967 REG_WRITE(ah, AR_IER, AR_IER_ENABLE); 2968 if (!AR_SREV_9100(ah)) { 2969 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 2970 AR_INTR_MAC_IRQ); 2971 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ); 2972 2973 2974 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 2975 AR_INTR_SYNC_DEFAULT); 2976 REG_WRITE(ah, AR_INTR_SYNC_MASK, 2977 AR_INTR_SYNC_DEFAULT); 2978 } 2979 ath_print(common, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n", 2980 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER)); 2981 } 2982 2983 return omask; 2984 } 2985 EXPORT_SYMBOL(ath9k_hw_set_interrupts); 2986 2987 /*******************/ 2988 /* Beacon Handling */ 2989 /*******************/ 2990 2991 void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period) 2992 { 2993 int flags = 0; 2994 2995 ah->beacon_interval = beacon_period; 2996 2997 switch (ah->opmode) { 2998 case NL80211_IFTYPE_STATION: 2999 case NL80211_IFTYPE_MONITOR: 3000 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon)); 3001 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff); 3002 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff); 3003 flags |= AR_TBTT_TIMER_EN; 3004 break; 3005 case NL80211_IFTYPE_ADHOC: 3006 case NL80211_IFTYPE_MESH_POINT: 3007 REG_SET_BIT(ah, AR_TXCFG, 3008 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY); 3009 REG_WRITE(ah, AR_NEXT_NDP_TIMER, 3010 TU_TO_USEC(next_beacon + 3011 (ah->atim_window ? ah-> 3012 atim_window : 1))); 3013 flags |= AR_NDP_TIMER_EN; 3014 case NL80211_IFTYPE_AP: 3015 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon)); 3016 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 3017 TU_TO_USEC(next_beacon - 3018 ah->config. 3019 dma_beacon_response_time)); 3020 REG_WRITE(ah, AR_NEXT_SWBA, 3021 TU_TO_USEC(next_beacon - 3022 ah->config. 
3023 sw_beacon_response_time)); 3024 flags |= 3025 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN; 3026 break; 3027 default: 3028 ath_print(ath9k_hw_common(ah), ATH_DBG_BEACON, 3029 "%s: unsupported opmode: %d\n", 3030 __func__, ah->opmode); 3031 return; 3032 break; 3033 } 3034 3035 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period)); 3036 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period)); 3037 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period)); 3038 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period)); 3039 3040 beacon_period &= ~ATH9K_BEACON_ENA; 3041 if (beacon_period & ATH9K_BEACON_RESET_TSF) { 3042 ath9k_hw_reset_tsf(ah); 3043 } 3044 3045 REG_SET_BIT(ah, AR_TIMER_MODE, flags); 3046 } 3047 EXPORT_SYMBOL(ath9k_hw_beaconinit); 3048 3049 void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah, 3050 const struct ath9k_beacon_state *bs) 3051 { 3052 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout; 3053 struct ath9k_hw_capabilities *pCap = &ah->caps; 3054 struct ath_common *common = ath9k_hw_common(ah); 3055 3056 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt)); 3057 3058 REG_WRITE(ah, AR_BEACON_PERIOD, 3059 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD)); 3060 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, 3061 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD)); 3062 3063 REG_RMW_FIELD(ah, AR_RSSI_THR, 3064 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold); 3065 3066 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD; 3067 3068 if (bs->bs_sleepduration > beaconintval) 3069 beaconintval = bs->bs_sleepduration; 3070 3071 dtimperiod = bs->bs_dtimperiod; 3072 if (bs->bs_sleepduration > dtimperiod) 3073 dtimperiod = bs->bs_sleepduration; 3074 3075 if (beaconintval == dtimperiod) 3076 nextTbtt = bs->bs_nextdtim; 3077 else 3078 nextTbtt = bs->bs_nexttbtt; 3079 3080 ath_print(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim); 3081 ath_print(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt); 3082 ath_print(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval); 3083 ath_print(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod); 3084 3085 REG_WRITE(ah, AR_NEXT_DTIM, 3086 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP)); 3087 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP)); 3088 3089 REG_WRITE(ah, AR_SLEEP1, 3090 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT) 3091 | AR_SLEEP1_ASSUME_DTIM); 3092 3093 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP) 3094 beacontimeout = (BEACON_TIMEOUT_VAL << 3); 3095 else 3096 beacontimeout = MIN_BEACON_TIMEOUT_VAL; 3097 3098 REG_WRITE(ah, AR_SLEEP2, 3099 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT)); 3100 3101 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval)); 3102 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod)); 3103 3104 REG_SET_BIT(ah, AR_TIMER_MODE, 3105 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN | 3106 AR_DTIM_TIMER_EN); 3107 3108 /* TSF Out of Range Threshold */ 3109 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold); 3110 } 3111 EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers); 3112 3113 /*******************/ 3114 /* HW Capabilities */ 3115 /*******************/ 3116 3117 int ath9k_hw_fill_cap_info(struct ath_hw *ah) 3118 { 3119 struct ath9k_hw_capabilities *pCap = &ah->caps; 3120 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3121 struct ath_common *common = ath9k_hw_common(ah); 3122 struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw; 3123 3124 u16 capField = 0, eeval; 3125 3126 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0); 3127 regulatory->current_rd = eeval; 3128 3129 eeval = 
ah->eep_ops->get_eeprom(ah, EEP_REG_1); 3130 if (AR_SREV_9285_10_OR_LATER(ah)) 3131 eeval |= AR9285_RDEXT_DEFAULT; 3132 regulatory->current_rd_ext = eeval; 3133 3134 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP); 3135 3136 if (ah->opmode != NL80211_IFTYPE_AP && 3137 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) { 3138 if (regulatory->current_rd == 0x64 || 3139 regulatory->current_rd == 0x65) 3140 regulatory->current_rd += 5; 3141 else if (regulatory->current_rd == 0x41) 3142 regulatory->current_rd = 0x43; 3143 ath_print(common, ATH_DBG_REGULATORY, 3144 "regdomain mapped to 0x%x\n", regulatory->current_rd); 3145 } 3146 3147 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE); 3148 if ((eeval & (AR5416_OPFLAGS_11G | AR5416_OPFLAGS_11A)) == 0) { 3149 ath_print(common, ATH_DBG_FATAL, 3150 "no band has been marked as supported in EEPROM.\n"); 3151 return -EINVAL; 3152 } 3153 3154 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX); 3155 3156 if (eeval & AR5416_OPFLAGS_11A) { 3157 set_bit(ATH9K_MODE_11A, pCap->wireless_modes); 3158 if (ah->config.ht_enable) { 3159 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20)) 3160 set_bit(ATH9K_MODE_11NA_HT20, 3161 pCap->wireless_modes); 3162 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) { 3163 set_bit(ATH9K_MODE_11NA_HT40PLUS, 3164 pCap->wireless_modes); 3165 set_bit(ATH9K_MODE_11NA_HT40MINUS, 3166 pCap->wireless_modes); 3167 } 3168 } 3169 } 3170 3171 if (eeval & AR5416_OPFLAGS_11G) { 3172 set_bit(ATH9K_MODE_11G, pCap->wireless_modes); 3173 if (ah->config.ht_enable) { 3174 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20)) 3175 set_bit(ATH9K_MODE_11NG_HT20, 3176 pCap->wireless_modes); 3177 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) { 3178 set_bit(ATH9K_MODE_11NG_HT40PLUS, 3179 pCap->wireless_modes); 3180 set_bit(ATH9K_MODE_11NG_HT40MINUS, 3181 pCap->wireless_modes); 3182 } 3183 } 3184 } 3185 3186 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK); 3187 /* 3188 * For AR9271 we will temporarilly uses the rx chainmax as read from 3189 * the EEPROM. 3190 */ 3191 if ((ah->hw_version.devid == AR5416_DEVID_PCI) && 3192 !(eeval & AR5416_OPFLAGS_11A) && 3193 !(AR_SREV_9271(ah))) 3194 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */ 3195 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7; 3196 else 3197 /* Use rx_chainmask from EEPROM. 
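	 * Each bit of the chainmask enables one RX chain, so the 0x7
	 * above selects all three chains while 0x5 selects chains 0
	 * and 2 only.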
*/ 3198 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK); 3199 3200 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0))) 3201 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA; 3202 3203 pCap->low_2ghz_chan = 2312; 3204 pCap->high_2ghz_chan = 2732; 3205 3206 pCap->low_5ghz_chan = 4920; 3207 pCap->high_5ghz_chan = 6100; 3208 3209 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP; 3210 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP; 3211 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM; 3212 3213 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP; 3214 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP; 3215 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM; 3216 3217 if (ah->config.ht_enable) 3218 pCap->hw_caps |= ATH9K_HW_CAP_HT; 3219 else 3220 pCap->hw_caps &= ~ATH9K_HW_CAP_HT; 3221 3222 pCap->hw_caps |= ATH9K_HW_CAP_GTT; 3223 pCap->hw_caps |= ATH9K_HW_CAP_VEOL; 3224 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK; 3225 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH; 3226 3227 if (capField & AR_EEPROM_EEPCAP_MAXQCU) 3228 pCap->total_queues = 3229 MS(capField, AR_EEPROM_EEPCAP_MAXQCU); 3230 else 3231 pCap->total_queues = ATH9K_NUM_TX_QUEUES; 3232 3233 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES) 3234 pCap->keycache_size = 3235 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES); 3236 else 3237 pCap->keycache_size = AR_KEYTABLE_SIZE; 3238 3239 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC; 3240 3241 if (AR_SREV_9285(ah) || AR_SREV_9271(ah)) 3242 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD >> 1; 3243 else 3244 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD; 3245 3246 if (AR_SREV_9285_10_OR_LATER(ah)) 3247 pCap->num_gpio_pins = AR9285_NUM_GPIO; 3248 else if (AR_SREV_9280_10_OR_LATER(ah)) 3249 pCap->num_gpio_pins = AR928X_NUM_GPIO; 3250 else 3251 pCap->num_gpio_pins = AR_NUM_GPIO; 3252 3253 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) { 3254 pCap->hw_caps |= ATH9K_HW_CAP_CST; 3255 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX; 3256 } else { 3257 pCap->rts_aggr_limit = (8 * 1024); 3258 } 3259 3260 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM; 3261 3262 #if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE) 3263 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT); 3264 if (ah->rfsilent & EEP_RFSILENT_ENABLED) { 3265 ah->rfkill_gpio = 3266 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL); 3267 ah->rfkill_polarity = 3268 MS(ah->rfsilent, EEP_RFSILENT_POLARITY); 3269 3270 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT; 3271 } 3272 #endif 3273 3274 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP; 3275 3276 if (AR_SREV_9280(ah) || AR_SREV_9285(ah)) 3277 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS; 3278 else 3279 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS; 3280 3281 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) { 3282 pCap->reg_cap = 3283 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A | 3284 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN | 3285 AR_EEPROM_EEREGCAP_EN_KK_U2 | 3286 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND; 3287 } else { 3288 pCap->reg_cap = 3289 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A | 3290 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN; 3291 } 3292 3293 /* Advertise midband for AR5416 with FCC midband set in eeprom */ 3294 if (regulatory->current_rd_ext & (1 << REG_EXT_FCC_MIDBAND) && 3295 AR_SREV_5416(ah)) 3296 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND; 3297 3298 pCap->num_antcfg_5ghz = 3299 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ); 3300 pCap->num_antcfg_2ghz = 3301 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ); 3302 3303 if (AR_SREV_9280_10_OR_LATER(ah) && 3304 ath9k_hw_btcoex_supported(ah)) { 3305 btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO; 
3306 btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO; 3307 3308 if (AR_SREV_9285(ah)) { 3309 btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE; 3310 btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO; 3311 } else { 3312 btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE; 3313 } 3314 } else { 3315 btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE; 3316 } 3317 3318 return 0; 3319 } 3320 3321 bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type, 3322 u32 capability, u32 *result) 3323 { 3324 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3325 switch (type) { 3326 case ATH9K_CAP_CIPHER: 3327 switch (capability) { 3328 case ATH9K_CIPHER_AES_CCM: 3329 case ATH9K_CIPHER_AES_OCB: 3330 case ATH9K_CIPHER_TKIP: 3331 case ATH9K_CIPHER_WEP: 3332 case ATH9K_CIPHER_MIC: 3333 case ATH9K_CIPHER_CLR: 3334 return true; 3335 default: 3336 return false; 3337 } 3338 case ATH9K_CAP_TKIP_MIC: 3339 switch (capability) { 3340 case 0: 3341 return true; 3342 case 1: 3343 return (ah->sta_id1_defaults & 3344 AR_STA_ID1_CRPT_MIC_ENABLE) ? true : 3345 false; 3346 } 3347 case ATH9K_CAP_TKIP_SPLIT: 3348 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ? 3349 false : true; 3350 case ATH9K_CAP_DIVERSITY: 3351 return (REG_READ(ah, AR_PHY_CCK_DETECT) & 3352 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ? 3353 true : false; 3354 case ATH9K_CAP_MCAST_KEYSRCH: 3355 switch (capability) { 3356 case 0: 3357 return true; 3358 case 1: 3359 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) { 3360 return false; 3361 } else { 3362 return (ah->sta_id1_defaults & 3363 AR_STA_ID1_MCAST_KSRCH) ? true : 3364 false; 3365 } 3366 } 3367 return false; 3368 case ATH9K_CAP_TXPOW: 3369 switch (capability) { 3370 case 0: 3371 return 0; 3372 case 1: 3373 *result = regulatory->power_limit; 3374 return 0; 3375 case 2: 3376 *result = regulatory->max_power_level; 3377 return 0; 3378 case 3: 3379 *result = regulatory->tp_scale; 3380 return 0; 3381 } 3382 return false; 3383 case ATH9K_CAP_DS: 3384 return (AR_SREV_9280_20_OR_LATER(ah) && 3385 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1)) 3386 ? 
false : true; 3387 default: 3388 return false; 3389 } 3390 } 3391 EXPORT_SYMBOL(ath9k_hw_getcapability); 3392 3393 bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type, 3394 u32 capability, u32 setting, int *status) 3395 { 3396 u32 v; 3397 3398 switch (type) { 3399 case ATH9K_CAP_TKIP_MIC: 3400 if (setting) 3401 ah->sta_id1_defaults |= 3402 AR_STA_ID1_CRPT_MIC_ENABLE; 3403 else 3404 ah->sta_id1_defaults &= 3405 ~AR_STA_ID1_CRPT_MIC_ENABLE; 3406 return true; 3407 case ATH9K_CAP_DIVERSITY: 3408 v = REG_READ(ah, AR_PHY_CCK_DETECT); 3409 if (setting) 3410 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV; 3411 else 3412 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV; 3413 REG_WRITE(ah, AR_PHY_CCK_DETECT, v); 3414 return true; 3415 case ATH9K_CAP_MCAST_KEYSRCH: 3416 if (setting) 3417 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH; 3418 else 3419 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH; 3420 return true; 3421 default: 3422 return false; 3423 } 3424 } 3425 EXPORT_SYMBOL(ath9k_hw_setcapability); 3426 3427 /****************************/ 3428 /* GPIO / RFKILL / Antennae */ 3429 /****************************/ 3430 3431 static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah, 3432 u32 gpio, u32 type) 3433 { 3434 int addr; 3435 u32 gpio_shift, tmp; 3436 3437 if (gpio > 11) 3438 addr = AR_GPIO_OUTPUT_MUX3; 3439 else if (gpio > 5) 3440 addr = AR_GPIO_OUTPUT_MUX2; 3441 else 3442 addr = AR_GPIO_OUTPUT_MUX1; 3443 3444 gpio_shift = (gpio % 6) * 5; 3445 3446 if (AR_SREV_9280_20_OR_LATER(ah) 3447 || (addr != AR_GPIO_OUTPUT_MUX1)) { 3448 REG_RMW(ah, addr, (type << gpio_shift), 3449 (0x1f << gpio_shift)); 3450 } else { 3451 tmp = REG_READ(ah, addr); 3452 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0); 3453 tmp &= ~(0x1f << gpio_shift); 3454 tmp |= (type << gpio_shift); 3455 REG_WRITE(ah, addr, tmp); 3456 } 3457 } 3458 3459 void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio) 3460 { 3461 u32 gpio_shift; 3462 3463 BUG_ON(gpio >= ah->caps.num_gpio_pins); 3464 3465 gpio_shift = gpio << 1; 3466 3467 REG_RMW(ah, 3468 AR_GPIO_OE_OUT, 3469 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift), 3470 (AR_GPIO_OE_OUT_DRV << gpio_shift)); 3471 } 3472 EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input); 3473 3474 u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio) 3475 { 3476 #define MS_REG_READ(x, y) \ 3477 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y))) 3478 3479 if (gpio >= ah->caps.num_gpio_pins) 3480 return 0xffffffff; 3481 3482 if (AR_SREV_9287_10_OR_LATER(ah)) 3483 return MS_REG_READ(AR9287, gpio) != 0; 3484 else if (AR_SREV_9285_10_OR_LATER(ah)) 3485 return MS_REG_READ(AR9285, gpio) != 0; 3486 else if (AR_SREV_9280_10_OR_LATER(ah)) 3487 return MS_REG_READ(AR928X, gpio) != 0; 3488 else 3489 return MS_REG_READ(AR, gpio) != 0; 3490 } 3491 EXPORT_SYMBOL(ath9k_hw_gpio_get); 3492 3493 void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio, 3494 u32 ah_signal_type) 3495 { 3496 u32 gpio_shift; 3497 3498 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type); 3499 3500 gpio_shift = 2 * gpio; 3501 3502 REG_RMW(ah, 3503 AR_GPIO_OE_OUT, 3504 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift), 3505 (AR_GPIO_OE_OUT_DRV << gpio_shift)); 3506 } 3507 EXPORT_SYMBOL(ath9k_hw_cfg_output); 3508 3509 void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val) 3510 { 3511 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio), 3512 AR_GPIO_BIT(gpio)); 3513 } 3514 EXPORT_SYMBOL(ath9k_hw_set_gpio); 3515 3516 u32 ath9k_hw_getdefantenna(struct ath_hw *ah) 3517 { 3518 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7; 3519 } 3520 
EXPORT_SYMBOL(ath9k_hw_getdefantenna); 3521 3522 void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna) 3523 { 3524 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7)); 3525 } 3526 EXPORT_SYMBOL(ath9k_hw_setantenna); 3527 3528 /*********************/ 3529 /* General Operation */ 3530 /*********************/ 3531 3532 u32 ath9k_hw_getrxfilter(struct ath_hw *ah) 3533 { 3534 u32 bits = REG_READ(ah, AR_RX_FILTER); 3535 u32 phybits = REG_READ(ah, AR_PHY_ERR); 3536 3537 if (phybits & AR_PHY_ERR_RADAR) 3538 bits |= ATH9K_RX_FILTER_PHYRADAR; 3539 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING)) 3540 bits |= ATH9K_RX_FILTER_PHYERR; 3541 3542 return bits; 3543 } 3544 EXPORT_SYMBOL(ath9k_hw_getrxfilter); 3545 3546 void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits) 3547 { 3548 u32 phybits; 3549 3550 REG_WRITE(ah, AR_RX_FILTER, bits); 3551 3552 phybits = 0; 3553 if (bits & ATH9K_RX_FILTER_PHYRADAR) 3554 phybits |= AR_PHY_ERR_RADAR; 3555 if (bits & ATH9K_RX_FILTER_PHYERR) 3556 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING; 3557 REG_WRITE(ah, AR_PHY_ERR, phybits); 3558 3559 if (phybits) 3560 REG_WRITE(ah, AR_RXCFG, 3561 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA); 3562 else 3563 REG_WRITE(ah, AR_RXCFG, 3564 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA); 3565 } 3566 EXPORT_SYMBOL(ath9k_hw_setrxfilter); 3567 3568 bool ath9k_hw_phy_disable(struct ath_hw *ah) 3569 { 3570 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM)) 3571 return false; 3572 3573 ath9k_hw_init_pll(ah, NULL); 3574 return true; 3575 } 3576 EXPORT_SYMBOL(ath9k_hw_phy_disable); 3577 3578 bool ath9k_hw_disable(struct ath_hw *ah) 3579 { 3580 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 3581 return false; 3582 3583 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD)) 3584 return false; 3585 3586 ath9k_hw_init_pll(ah, NULL); 3587 return true; 3588 } 3589 EXPORT_SYMBOL(ath9k_hw_disable); 3590 3591 void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit) 3592 { 3593 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3594 struct ath9k_channel *chan = ah->curchan; 3595 struct ieee80211_channel *channel = chan->chan; 3596 3597 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER); 3598 3599 ah->eep_ops->set_txpower(ah, chan, 3600 ath9k_regd_get_ctl(regulatory, chan), 3601 channel->max_antenna_gain * 2, 3602 channel->max_power * 2, 3603 min((u32) MAX_RATE_POWER, 3604 (u32) regulatory->power_limit)); 3605 } 3606 EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit); 3607 3608 void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac) 3609 { 3610 memcpy(ath9k_hw_common(ah)->macaddr, mac, ETH_ALEN); 3611 } 3612 EXPORT_SYMBOL(ath9k_hw_setmac); 3613 3614 void ath9k_hw_setopmode(struct ath_hw *ah) 3615 { 3616 ath9k_hw_set_operating_mode(ah, ah->opmode); 3617 } 3618 EXPORT_SYMBOL(ath9k_hw_setopmode); 3619 3620 void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1) 3621 { 3622 REG_WRITE(ah, AR_MCAST_FIL0, filter0); 3623 REG_WRITE(ah, AR_MCAST_FIL1, filter1); 3624 } 3625 EXPORT_SYMBOL(ath9k_hw_setmcastfilter); 3626 3627 void ath9k_hw_write_associd(struct ath_hw *ah) 3628 { 3629 struct ath_common *common = ath9k_hw_common(ah); 3630 3631 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid)); 3632 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) | 3633 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S)); 3634 } 3635 EXPORT_SYMBOL(ath9k_hw_write_associd); 3636 3637 u64 ath9k_hw_gettsf64(struct ath_hw *ah) 3638 { 3639 u64 tsf; 3640 3641 tsf = REG_READ(ah, AR_TSF_U32); 3642 tsf = (tsf << 32) | 
REG_READ(ah, AR_TSF_L32); 3643 3644 return tsf; 3645 } 3646 EXPORT_SYMBOL(ath9k_hw_gettsf64); 3647 3648 void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64) 3649 { 3650 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff); 3651 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff); 3652 } 3653 EXPORT_SYMBOL(ath9k_hw_settsf64); 3654 3655 void ath9k_hw_reset_tsf(struct ath_hw *ah) 3656 { 3657 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0, 3658 AH_TSF_WRITE_TIMEOUT)) 3659 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 3660 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n"); 3661 3662 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE); 3663 } 3664 EXPORT_SYMBOL(ath9k_hw_reset_tsf); 3665 3666 void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting) 3667 { 3668 if (setting) 3669 ah->misc_mode |= AR_PCU_TX_ADD_TSF; 3670 else 3671 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF; 3672 } 3673 EXPORT_SYMBOL(ath9k_hw_set_tsfadjust); 3674 3675 /* 3676 * Extend 15-bit time stamp from rx descriptor to 3677 * a full 64-bit TSF using the current h/w TSF. 3678 */ 3679 u64 ath9k_hw_extend_tsf(struct ath_hw *ah, u32 rstamp) 3680 { 3681 u64 tsf; 3682 3683 tsf = ath9k_hw_gettsf64(ah); 3684 if ((tsf & 0x7fff) < rstamp) 3685 tsf -= 0x8000; 3686 return (tsf & ~0x7fff) | rstamp; 3687 } 3688 EXPORT_SYMBOL(ath9k_hw_extend_tsf); 3689 3690 bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us) 3691 { 3692 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) { 3693 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 3694 "bad slot time %u\n", us); 3695 ah->slottime = (u32) -1; 3696 return false; 3697 } else { 3698 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us)); 3699 ah->slottime = us; 3700 return true; 3701 } 3702 } 3703 EXPORT_SYMBOL(ath9k_hw_setslottime); 3704 3705 void ath9k_hw_set11nmac2040(struct ath_hw *ah) 3706 { 3707 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf; 3708 u32 macmode; 3709 3710 if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca) 3711 macmode = AR_2040_JOINED_RX_CLEAR; 3712 else 3713 macmode = 0; 3714 3715 REG_WRITE(ah, AR_2040_MODE, macmode); 3716 } 3717 3718 /* HW Generic timers configuration */ 3719 3720 static const struct ath_gen_timer_configuration gen_tmr_configuration[] = 3721 { 3722 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3723 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3724 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3725 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3726 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3727 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3728 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3729 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 3730 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001}, 3731 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4, 3732 AR_NDP2_TIMER_MODE, 0x0002}, 3733 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4, 3734 AR_NDP2_TIMER_MODE, 0x0004}, 3735 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4, 3736 AR_NDP2_TIMER_MODE, 0x0008}, 3737 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4, 3738 AR_NDP2_TIMER_MODE, 0x0010}, 3739 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4, 3740 AR_NDP2_TIMER_MODE, 0x0020}, 3741 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4, 3742 AR_NDP2_TIMER_MODE, 0x0040}, 3743 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4, 3744 AR_NDP2_TIMER_MODE, 0x0080} 3745 }; 3746 3747 /* HW generic timer primitives */ 3748 3749 /* compute and clear index of rightmost 
1 */ 3750 static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask) 3751 { 3752 u32 b; 3753 3754 b = *mask; 3755 b &= (0-b); 3756 *mask &= ~b; 3757 b *= debruijn32; 3758 b >>= 27; 3759 3760 return timer_table->gen_timer_index[b]; 3761 } 3762 3763 u32 ath9k_hw_gettsf32(struct ath_hw *ah) 3764 { 3765 return REG_READ(ah, AR_TSF_L32); 3766 } 3767 EXPORT_SYMBOL(ath9k_hw_gettsf32); 3768 3769 struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah, 3770 void (*trigger)(void *), 3771 void (*overflow)(void *), 3772 void *arg, 3773 u8 timer_index) 3774 { 3775 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 3776 struct ath_gen_timer *timer; 3777 3778 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL); 3779 3780 if (timer == NULL) { 3781 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 3782 "Failed to allocate memory" 3783 "for hw timer[%d]\n", timer_index); 3784 return NULL; 3785 } 3786 3787 /* allocate a hardware generic timer slot */ 3788 timer_table->timers[timer_index] = timer; 3789 timer->index = timer_index; 3790 timer->trigger = trigger; 3791 timer->overflow = overflow; 3792 timer->arg = arg; 3793 3794 return timer; 3795 } 3796 EXPORT_SYMBOL(ath_gen_timer_alloc); 3797 3798 void ath9k_hw_gen_timer_start(struct ath_hw *ah, 3799 struct ath_gen_timer *timer, 3800 u32 timer_next, 3801 u32 timer_period) 3802 { 3803 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 3804 u32 tsf; 3805 3806 BUG_ON(!timer_period); 3807 3808 set_bit(timer->index, &timer_table->timer_mask.timer_bits); 3809 3810 tsf = ath9k_hw_gettsf32(ah); 3811 3812 ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER, 3813 "curent tsf %x period %x" 3814 "timer_next %x\n", tsf, timer_period, timer_next); 3815 3816 /* 3817 * Pull timer_next forward if the current TSF already passed it 3818 * because of software latency 3819 */ 3820 if (timer_next < tsf) 3821 timer_next = tsf + timer_period; 3822 3823 /* 3824 * Program generic timer registers 3825 */ 3826 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr, 3827 timer_next); 3828 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr, 3829 timer_period); 3830 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr, 3831 gen_tmr_configuration[timer->index].mode_mask); 3832 3833 /* Enable both trigger and thresh interrupt masks */ 3834 REG_SET_BIT(ah, AR_IMR_S5, 3835 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) | 3836 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG))); 3837 } 3838 EXPORT_SYMBOL(ath9k_hw_gen_timer_start); 3839 3840 void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer) 3841 { 3842 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 3843 3844 if ((timer->index < AR_FIRST_NDP_TIMER) || 3845 (timer->index >= ATH_MAX_GEN_TIMER)) { 3846 return; 3847 } 3848 3849 /* Clear generic timer enable bits. 
*/ 3850 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr, 3851 gen_tmr_configuration[timer->index].mode_mask); 3852 3853 /* Disable both trigger and thresh interrupt masks */ 3854 REG_CLR_BIT(ah, AR_IMR_S5, 3855 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) | 3856 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG))); 3857 3858 clear_bit(timer->index, &timer_table->timer_mask.timer_bits); 3859 } 3860 EXPORT_SYMBOL(ath9k_hw_gen_timer_stop); 3861 3862 void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer) 3863 { 3864 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 3865 3866 /* free the hardware generic timer slot */ 3867 timer_table->timers[timer->index] = NULL; 3868 kfree(timer); 3869 } 3870 EXPORT_SYMBOL(ath_gen_timer_free); 3871 3872 /* 3873 * Generic Timer Interrupts handling 3874 */ 3875 void ath_gen_timer_isr(struct ath_hw *ah) 3876 { 3877 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 3878 struct ath_gen_timer *timer; 3879 struct ath_common *common = ath9k_hw_common(ah); 3880 u32 trigger_mask, thresh_mask, index; 3881 3882 /* get hardware generic timer interrupt status */ 3883 trigger_mask = ah->intr_gen_timer_trigger; 3884 thresh_mask = ah->intr_gen_timer_thresh; 3885 trigger_mask &= timer_table->timer_mask.val; 3886 thresh_mask &= timer_table->timer_mask.val; 3887 3888 trigger_mask &= ~thresh_mask; 3889 3890 while (thresh_mask) { 3891 index = rightmost_index(timer_table, &thresh_mask); 3892 timer = timer_table->timers[index]; 3893 BUG_ON(!timer); 3894 ath_print(common, ATH_DBG_HWTIMER, 3895 "TSF overflow for Gen timer %d\n", index); 3896 timer->overflow(timer->arg); 3897 } 3898 3899 while (trigger_mask) { 3900 index = rightmost_index(timer_table, &trigger_mask); 3901 timer = timer_table->timers[index]; 3902 BUG_ON(!timer); 3903 ath_print(common, ATH_DBG_HWTIMER, 3904 "Gen timer[%d] trigger\n", index); 3905 timer->trigger(timer->arg); 3906 } 3907 } 3908 EXPORT_SYMBOL(ath_gen_timer_isr); 3909 3910 static struct { 3911 u32 version; 3912 const char * name; 3913 } ath_mac_bb_names[] = { 3914 /* Devices with external radios */ 3915 { AR_SREV_VERSION_5416_PCI, "5416" }, 3916 { AR_SREV_VERSION_5416_PCIE, "5418" }, 3917 { AR_SREV_VERSION_9100, "9100" }, 3918 { AR_SREV_VERSION_9160, "9160" }, 3919 /* Single-chip solutions */ 3920 { AR_SREV_VERSION_9280, "9280" }, 3921 { AR_SREV_VERSION_9285, "9285" }, 3922 { AR_SREV_VERSION_9287, "9287" }, 3923 { AR_SREV_VERSION_9271, "9271" }, 3924 }; 3925 3926 /* For devices with external radios */ 3927 static struct { 3928 u16 version; 3929 const char * name; 3930 } ath_rf_names[] = { 3931 { 0, "5133" }, 3932 { AR_RAD5133_SREV_MAJOR, "5133" }, 3933 { AR_RAD5122_SREV_MAJOR, "5122" }, 3934 { AR_RAD2133_SREV_MAJOR, "2133" }, 3935 { AR_RAD2122_SREV_MAJOR, "2122" } 3936 }; 3937 3938 /* 3939 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown. 3940 */ 3941 static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version) 3942 { 3943 int i; 3944 3945 for (i=0; i<ARRAY_SIZE(ath_mac_bb_names); i++) { 3946 if (ath_mac_bb_names[i].version == mac_bb_version) { 3947 return ath_mac_bb_names[i].name; 3948 } 3949 } 3950 3951 return "????"; 3952 } 3953 3954 /* 3955 * Return the RF name. "????" is returned if the RF is unknown. 3956 * Used for devices with external radios. 
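 * Single-chip designs (AR9280 and later) do not use this table; see
 * ath9k_hw_name() below.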
3957 */ 3958 static const char *ath9k_hw_rf_name(u16 rf_version) 3959 { 3960 int i; 3961 3962 for (i=0; i<ARRAY_SIZE(ath_rf_names); i++) { 3963 if (ath_rf_names[i].version == rf_version) { 3964 return ath_rf_names[i].name; 3965 } 3966 } 3967 3968 return "????"; 3969 } 3970 3971 void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len) 3972 { 3973 int used; 3974 3975 /* chipsets >= AR9280 are single-chip */ 3976 if (AR_SREV_9280_10_OR_LATER(ah)) { 3977 used = snprintf(hw_name, len, 3978 "Atheros AR%s Rev:%x", 3979 ath9k_hw_mac_bb_name(ah->hw_version.macVersion), 3980 ah->hw_version.macRev); 3981 } 3982 else { 3983 used = snprintf(hw_name, len, 3984 "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x", 3985 ath9k_hw_mac_bb_name(ah->hw_version.macVersion), 3986 ah->hw_version.macRev, 3987 ath9k_hw_rf_name((ah->hw_version.analog5GhzRev & 3988 AR_RADIO_SREV_MAJOR)), 3989 ah->hw_version.phyRev); 3990 } 3991 3992 hw_name[used] = '\0'; 3993 } 3994 EXPORT_SYMBOL(ath9k_hw_name); 3995