hw.c revision fe12946e66575677879941a14f75b70ca2d2962a
/*
 * Copyright (c) 2008-2009 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include <linux/io.h>
#include <asm/unaligned.h>
#include <linux/pci.h>

#include "ath9k.h"
#include "initvals.h"

static int btcoex_enable;
module_param(btcoex_enable, bool, 0);
MODULE_PARM_DESC(btcoex_enable, "Enable Bluetooth coexistence support");

#define ATH9K_CLOCK_RATE_CCK		22
#define ATH9K_CLOCK_RATE_5GHZ_OFDM	40
#define ATH9K_CLOCK_RATE_2GHZ_OFDM	44

static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
			      enum ath9k_ht_macmode macmode);
static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
			      struct ar5416_eeprom_def *pEepData,
			      u32 reg, u32 value);
static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);

/********************/
/* Helper Functions */
/********************/

static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
{
	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;

	if (!ah->curchan) /* should really check for CCK instead */
		return clks / ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;

	return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
}

static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
{
	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;

	if (conf_is_ht40(conf))
		return ath9k_hw_mac_usec(ah, clks) / 2;
	else
		return ath9k_hw_mac_usec(ah, clks);
}

static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
{
	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;

	if (!ah->curchan) /* should really check for CCK instead */
		return usecs * ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
	return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
}

static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
{
	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;

	if (conf_is_ht40(conf))
		return ath9k_hw_mac_clks(ah, usecs) * 2;
	else
		return ath9k_hw_mac_clks(ah, usecs);
}

/*
 * Read and write, they both share the same lock. We do this to serialize
 * reads and writes on Atheros 802.11n PCI devices only. This is required
 * as the FIFO on these devices can sanely accept only two requests; after
 * that the device goes bananas. Serializing the reads/writes prevents this
 * from happening.
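 *
 * The REG_READ()/REG_WRITE() macros used throughout this file are expected
 * to funnel into these two accessors, so serializing here covers every
 * register access the driver makes. Whether serialization is actually used
 * is decided later: ath9k_hw_init_config() requests SER_REG_MODE_AUTO on
 * multi-processor systems, and ath9k_hw_init() resolves that to
 * SER_REG_MODE_ON for AR5416 PCI and non-PCIe AR9280 parts.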
94 */ 95 96void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val) 97{ 98 if (ah->config.serialize_regmode == SER_REG_MODE_ON) { 99 unsigned long flags; 100 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags); 101 iowrite32(val, ah->ah_sc->mem + reg_offset); 102 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags); 103 } else 104 iowrite32(val, ah->ah_sc->mem + reg_offset); 105} 106 107unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset) 108{ 109 u32 val; 110 if (ah->config.serialize_regmode == SER_REG_MODE_ON) { 111 unsigned long flags; 112 spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags); 113 val = ioread32(ah->ah_sc->mem + reg_offset); 114 spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags); 115 } else 116 val = ioread32(ah->ah_sc->mem + reg_offset); 117 return val; 118} 119 120bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout) 121{ 122 int i; 123 124 BUG_ON(timeout < AH_TIME_QUANTUM); 125 126 for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) { 127 if ((REG_READ(ah, reg) & mask) == val) 128 return true; 129 130 udelay(AH_TIME_QUANTUM); 131 } 132 133 DPRINTF(ah->ah_sc, ATH_DBG_ANY, 134 "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n", 135 timeout, reg, REG_READ(ah, reg), mask, val); 136 137 return false; 138} 139 140u32 ath9k_hw_reverse_bits(u32 val, u32 n) 141{ 142 u32 retval; 143 int i; 144 145 for (i = 0, retval = 0; i < n; i++) { 146 retval = (retval << 1) | (val & 1); 147 val >>= 1; 148 } 149 return retval; 150} 151 152bool ath9k_get_channel_edges(struct ath_hw *ah, 153 u16 flags, u16 *low, 154 u16 *high) 155{ 156 struct ath9k_hw_capabilities *pCap = &ah->caps; 157 158 if (flags & CHANNEL_5GHZ) { 159 *low = pCap->low_5ghz_chan; 160 *high = pCap->high_5ghz_chan; 161 return true; 162 } 163 if ((flags & CHANNEL_2GHZ)) { 164 *low = pCap->low_2ghz_chan; 165 *high = pCap->high_2ghz_chan; 166 return true; 167 } 168 return false; 169} 170 171u16 ath9k_hw_computetxtime(struct ath_hw *ah, 172 const struct ath_rate_table *rates, 173 u32 frameLen, u16 rateix, 174 bool shortPreamble) 175{ 176 u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime; 177 u32 kbps; 178 179 kbps = rates->info[rateix].ratekbps; 180 181 if (kbps == 0) 182 return 0; 183 184 switch (rates->info[rateix].phy) { 185 case WLAN_RC_PHY_CCK: 186 phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS; 187 if (shortPreamble && rates->info[rateix].short_preamble) 188 phyTime >>= 1; 189 numBits = frameLen << 3; 190 txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps); 191 break; 192 case WLAN_RC_PHY_OFDM: 193 if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) { 194 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000; 195 numBits = OFDM_PLCP_BITS + (frameLen << 3); 196 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol); 197 txTime = OFDM_SIFS_TIME_QUARTER 198 + OFDM_PREAMBLE_TIME_QUARTER 199 + (numSymbols * OFDM_SYMBOL_TIME_QUARTER); 200 } else if (ah->curchan && 201 IS_CHAN_HALF_RATE(ah->curchan)) { 202 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000; 203 numBits = OFDM_PLCP_BITS + (frameLen << 3); 204 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol); 205 txTime = OFDM_SIFS_TIME_HALF + 206 OFDM_PREAMBLE_TIME_HALF 207 + (numSymbols * OFDM_SYMBOL_TIME_HALF); 208 } else { 209 bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000; 210 numBits = OFDM_PLCP_BITS + (frameLen << 3); 211 numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol); 212 txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME 213 + (numSymbols * OFDM_SYMBOL_TIME); 214 } 215 break; 216 default: 217 
DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 218 "Unknown phy %u (rate ix %u)\n", 219 rates->info[rateix].phy, rateix); 220 txTime = 0; 221 break; 222 } 223 224 return txTime; 225} 226 227void ath9k_hw_get_channel_centers(struct ath_hw *ah, 228 struct ath9k_channel *chan, 229 struct chan_centers *centers) 230{ 231 int8_t extoff; 232 233 if (!IS_CHAN_HT40(chan)) { 234 centers->ctl_center = centers->ext_center = 235 centers->synth_center = chan->channel; 236 return; 237 } 238 239 if ((chan->chanmode == CHANNEL_A_HT40PLUS) || 240 (chan->chanmode == CHANNEL_G_HT40PLUS)) { 241 centers->synth_center = 242 chan->channel + HT40_CHANNEL_CENTER_SHIFT; 243 extoff = 1; 244 } else { 245 centers->synth_center = 246 chan->channel - HT40_CHANNEL_CENTER_SHIFT; 247 extoff = -1; 248 } 249 250 centers->ctl_center = 251 centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT); 252 centers->ext_center = 253 centers->synth_center + (extoff * 254 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ? 255 HT40_CHANNEL_CENTER_SHIFT : 15)); 256} 257 258/******************/ 259/* Chip Revisions */ 260/******************/ 261 262static void ath9k_hw_read_revisions(struct ath_hw *ah) 263{ 264 u32 val; 265 266 val = REG_READ(ah, AR_SREV) & AR_SREV_ID; 267 268 if (val == 0xFF) { 269 val = REG_READ(ah, AR_SREV); 270 ah->hw_version.macVersion = 271 (val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S; 272 ah->hw_version.macRev = MS(val, AR_SREV_REVISION2); 273 ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1; 274 } else { 275 if (!AR_SREV_9100(ah)) 276 ah->hw_version.macVersion = MS(val, AR_SREV_VERSION); 277 278 ah->hw_version.macRev = val & AR_SREV_REVISION; 279 280 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE) 281 ah->is_pciexpress = true; 282 } 283} 284 285static int ath9k_hw_get_radiorev(struct ath_hw *ah) 286{ 287 u32 val; 288 int i; 289 290 REG_WRITE(ah, AR_PHY(0x36), 0x00007058); 291 292 for (i = 0; i < 8; i++) 293 REG_WRITE(ah, AR_PHY(0x20), 0x00010000); 294 val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff; 295 val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4); 296 297 return ath9k_hw_reverse_bits(val, 8); 298} 299 300/************************************/ 301/* HW Attach, Detach, Init Routines */ 302/************************************/ 303 304static void ath9k_hw_disablepcie(struct ath_hw *ah) 305{ 306 if (AR_SREV_9100(ah)) 307 return; 308 309 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00); 310 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 311 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029); 312 REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824); 313 REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579); 314 REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000); 315 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 316 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 317 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007); 318 319 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 320} 321 322static bool ath9k_hw_chip_test(struct ath_hw *ah) 323{ 324 u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) }; 325 u32 regHold[2]; 326 u32 patternData[4] = { 0x55555555, 327 0xaaaaaaaa, 328 0x66666666, 329 0x99999999 }; 330 int i, j; 331 332 for (i = 0; i < 2; i++) { 333 u32 addr = regAddr[i]; 334 u32 wrData, rdData; 335 336 regHold[i] = REG_READ(ah, addr); 337 for (j = 0; j < 0x100; j++) { 338 wrData = (j << 16) | j; 339 REG_WRITE(ah, addr, wrData); 340 rdData = REG_READ(ah, addr); 341 if (rdData != wrData) { 342 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 343 "address test failed " 344 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n", 345 addr, wrData, rdData); 346 return false; 347 } 348 } 349 for (j = 
0; j < 4; j++) { 350 wrData = patternData[j]; 351 REG_WRITE(ah, addr, wrData); 352 rdData = REG_READ(ah, addr); 353 if (wrData != rdData) { 354 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 355 "address test failed " 356 "addr: 0x%08x - wr:0x%08x != rd:0x%08x\n", 357 addr, wrData, rdData); 358 return false; 359 } 360 } 361 REG_WRITE(ah, regAddr[i], regHold[i]); 362 } 363 udelay(100); 364 365 return true; 366} 367 368static const char *ath9k_hw_devname(u16 devid) 369{ 370 switch (devid) { 371 case AR5416_DEVID_PCI: 372 return "Atheros 5416"; 373 case AR5416_DEVID_PCIE: 374 return "Atheros 5418"; 375 case AR9160_DEVID_PCI: 376 return "Atheros 9160"; 377 case AR5416_AR9100_DEVID: 378 return "Atheros 9100"; 379 case AR9280_DEVID_PCI: 380 case AR9280_DEVID_PCIE: 381 return "Atheros 9280"; 382 case AR9285_DEVID_PCIE: 383 return "Atheros 9285"; 384 case AR5416_DEVID_AR9287_PCI: 385 case AR5416_DEVID_AR9287_PCIE: 386 return "Atheros 9287"; 387 } 388 389 return NULL; 390} 391 392static void ath9k_hw_init_config(struct ath_hw *ah) 393{ 394 int i; 395 396 ah->config.dma_beacon_response_time = 2; 397 ah->config.sw_beacon_response_time = 10; 398 ah->config.additional_swba_backoff = 0; 399 ah->config.ack_6mb = 0x0; 400 ah->config.cwm_ignore_extcca = 0; 401 ah->config.pcie_powersave_enable = 0; 402 ah->config.pcie_clock_req = 0; 403 ah->config.pcie_waen = 0; 404 ah->config.analog_shiftreg = 1; 405 ah->config.ht_enable = 1; 406 ah->config.ofdm_trig_low = 200; 407 ah->config.ofdm_trig_high = 500; 408 ah->config.cck_trig_high = 200; 409 ah->config.cck_trig_low = 100; 410 ah->config.enable_ani = 1; 411 ah->config.diversity_control = ATH9K_ANT_VARIABLE; 412 ah->config.antenna_switch_swap = 0; 413 414 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) { 415 ah->config.spurchans[i][0] = AR_NO_SPUR; 416 ah->config.spurchans[i][1] = AR_NO_SPUR; 417 } 418 419 ah->config.intr_mitigation = true; 420 421 /* 422 * We need this for PCI devices only (Cardbus, PCI, miniPCI) 423 * _and_ if on non-uniprocessor systems (Multiprocessor/HT). 424 * This means we use it for all AR5416 devices, and the few 425 * minor PCI AR9280 devices out there. 426 * 427 * Serialization is required because these devices do not handle 428 * well the case of two concurrent reads/writes due to the latency 429 * involved. During one read/write another read/write can be issued 430 * on another CPU while the previous read/write may still be working 431 * on our hardware, if we hit this case the hardware poops in a loop. 432 * We prevent this by serializing reads and writes. 433 * 434 * This issue is not present on PCI-Express devices or pre-AR5416 435 * devices (legacy, 802.11abg). 
436 */ 437 if (num_possible_cpus() > 1) 438 ah->config.serialize_regmode = SER_REG_MODE_AUTO; 439} 440 441static void ath9k_hw_init_defaults(struct ath_hw *ah) 442{ 443 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 444 445 regulatory->country_code = CTRY_DEFAULT; 446 regulatory->power_limit = MAX_RATE_POWER; 447 regulatory->tp_scale = ATH9K_TP_SCALE_MAX; 448 449 ah->hw_version.magic = AR5416_MAGIC; 450 ah->hw_version.subvendorid = 0; 451 452 ah->ah_flags = 0; 453 if (ah->hw_version.devid == AR5416_AR9100_DEVID) 454 ah->hw_version.macVersion = AR_SREV_VERSION_9100; 455 if (!AR_SREV_9100(ah)) 456 ah->ah_flags = AH_USE_EEPROM; 457 458 ah->atim_window = 0; 459 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE; 460 ah->beacon_interval = 100; 461 ah->enable_32kHz_clock = DONT_USE_32KHZ; 462 ah->slottime = (u32) -1; 463 ah->acktimeout = (u32) -1; 464 ah->ctstimeout = (u32) -1; 465 ah->globaltxtimeout = (u32) -1; 466 467 ah->gbeacon_rate = 0; 468 469 ah->power_mode = ATH9K_PM_UNDEFINED; 470} 471 472static int ath9k_hw_rfattach(struct ath_hw *ah) 473{ 474 bool rfStatus = false; 475 int ecode = 0; 476 477 rfStatus = ath9k_hw_init_rf(ah, &ecode); 478 if (!rfStatus) { 479 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 480 "RF setup failed, status: %u\n", ecode); 481 return ecode; 482 } 483 484 return 0; 485} 486 487static int ath9k_hw_rf_claim(struct ath_hw *ah) 488{ 489 u32 val; 490 491 REG_WRITE(ah, AR_PHY(0), 0x00000007); 492 493 val = ath9k_hw_get_radiorev(ah); 494 switch (val & AR_RADIO_SREV_MAJOR) { 495 case 0: 496 val = AR_RAD5133_SREV_MAJOR; 497 break; 498 case AR_RAD5133_SREV_MAJOR: 499 case AR_RAD5122_SREV_MAJOR: 500 case AR_RAD2133_SREV_MAJOR: 501 case AR_RAD2122_SREV_MAJOR: 502 break; 503 default: 504 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 505 "Radio Chip Rev 0x%02X not supported\n", 506 val & AR_RADIO_SREV_MAJOR); 507 return -EOPNOTSUPP; 508 } 509 510 ah->hw_version.analog5GhzRev = val; 511 512 return 0; 513} 514 515static int ath9k_hw_init_macaddr(struct ath_hw *ah) 516{ 517 u32 sum; 518 int i; 519 u16 eeval; 520 521 sum = 0; 522 for (i = 0; i < 3; i++) { 523 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i)); 524 sum += eeval; 525 ah->macaddr[2 * i] = eeval >> 8; 526 ah->macaddr[2 * i + 1] = eeval & 0xff; 527 } 528 if (sum == 0 || sum == 0xffff * 3) 529 return -EADDRNOTAVAIL; 530 531 return 0; 532} 533 534static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah) 535{ 536 u32 rxgain_type; 537 538 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) { 539 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE); 540 541 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF) 542 INIT_INI_ARRAY(&ah->iniModesRxGain, 543 ar9280Modes_backoff_13db_rxgain_9280_2, 544 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6); 545 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF) 546 INIT_INI_ARRAY(&ah->iniModesRxGain, 547 ar9280Modes_backoff_23db_rxgain_9280_2, 548 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6); 549 else 550 INIT_INI_ARRAY(&ah->iniModesRxGain, 551 ar9280Modes_original_rxgain_9280_2, 552 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6); 553 } else { 554 INIT_INI_ARRAY(&ah->iniModesRxGain, 555 ar9280Modes_original_rxgain_9280_2, 556 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6); 557 } 558} 559 560static void ath9k_hw_init_txgain_ini(struct ath_hw *ah) 561{ 562 u32 txgain_type; 563 564 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) { 565 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE); 566 567 if (txgain_type 
== AR5416_EEP_TXGAIN_HIGH_POWER) 568 INIT_INI_ARRAY(&ah->iniModesTxGain, 569 ar9280Modes_high_power_tx_gain_9280_2, 570 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6); 571 else 572 INIT_INI_ARRAY(&ah->iniModesTxGain, 573 ar9280Modes_original_tx_gain_9280_2, 574 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6); 575 } else { 576 INIT_INI_ARRAY(&ah->iniModesTxGain, 577 ar9280Modes_original_tx_gain_9280_2, 578 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6); 579 } 580} 581 582static int ath9k_hw_post_init(struct ath_hw *ah) 583{ 584 int ecode; 585 586 if (!ath9k_hw_chip_test(ah)) 587 return -ENODEV; 588 589 ecode = ath9k_hw_rf_claim(ah); 590 if (ecode != 0) 591 return ecode; 592 593 ecode = ath9k_hw_eeprom_init(ah); 594 if (ecode != 0) 595 return ecode; 596 597 DPRINTF(ah->ah_sc, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n", 598 ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah)); 599 600 ecode = ath9k_hw_rfattach(ah); 601 if (ecode != 0) 602 return ecode; 603 604 if (!AR_SREV_9100(ah)) { 605 ath9k_hw_ani_setup(ah); 606 ath9k_hw_ani_init(ah); 607 } 608 609 return 0; 610} 611 612static bool ath9k_hw_devid_supported(u16 devid) 613{ 614 switch (devid) { 615 case AR5416_DEVID_PCI: 616 case AR5416_DEVID_PCIE: 617 case AR5416_AR9100_DEVID: 618 case AR9160_DEVID_PCI: 619 case AR9280_DEVID_PCI: 620 case AR9280_DEVID_PCIE: 621 case AR9285_DEVID_PCIE: 622 case AR5416_DEVID_AR9287_PCI: 623 case AR5416_DEVID_AR9287_PCIE: 624 return true; 625 default: 626 break; 627 } 628 return false; 629} 630 631static bool ath9k_hw_macversion_supported(u32 macversion) 632{ 633 switch (macversion) { 634 case AR_SREV_VERSION_5416_PCI: 635 case AR_SREV_VERSION_5416_PCIE: 636 case AR_SREV_VERSION_9160: 637 case AR_SREV_VERSION_9100: 638 case AR_SREV_VERSION_9280: 639 case AR_SREV_VERSION_9285: 640 case AR_SREV_VERSION_9287: 641 return true; 642 /* Not yet */ 643 case AR_SREV_VERSION_9271: 644 default: 645 break; 646 } 647 return false; 648} 649 650static void ath9k_hw_init_cal_settings(struct ath_hw *ah) 651{ 652 if (AR_SREV_9160_10_OR_LATER(ah)) { 653 if (AR_SREV_9280_10_OR_LATER(ah)) { 654 ah->iq_caldata.calData = &iq_cal_single_sample; 655 ah->adcgain_caldata.calData = 656 &adc_gain_cal_single_sample; 657 ah->adcdc_caldata.calData = 658 &adc_dc_cal_single_sample; 659 ah->adcdc_calinitdata.calData = 660 &adc_init_dc_cal; 661 } else { 662 ah->iq_caldata.calData = &iq_cal_multi_sample; 663 ah->adcgain_caldata.calData = 664 &adc_gain_cal_multi_sample; 665 ah->adcdc_caldata.calData = 666 &adc_dc_cal_multi_sample; 667 ah->adcdc_calinitdata.calData = 668 &adc_init_dc_cal; 669 } 670 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL; 671 } 672} 673 674static void ath9k_hw_init_mode_regs(struct ath_hw *ah) 675{ 676 if (AR_SREV_9271(ah)) { 677 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0, 678 ARRAY_SIZE(ar9271Modes_9271_1_0), 6); 679 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0, 680 ARRAY_SIZE(ar9271Common_9271_1_0), 2); 681 return; 682 } 683 684 if (AR_SREV_9287_11_OR_LATER(ah)) { 685 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1, 686 ARRAY_SIZE(ar9287Modes_9287_1_1), 6); 687 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1, 688 ARRAY_SIZE(ar9287Common_9287_1_1), 2); 689 if (ah->config.pcie_clock_req) 690 INIT_INI_ARRAY(&ah->iniPcieSerdes, 691 ar9287PciePhy_clkreq_off_L1_9287_1_1, 692 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2); 693 else 694 INIT_INI_ARRAY(&ah->iniPcieSerdes, 695 ar9287PciePhy_clkreq_always_on_L1_9287_1_1, 696 
ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1), 697 2); 698 } else if (AR_SREV_9287_10_OR_LATER(ah)) { 699 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0, 700 ARRAY_SIZE(ar9287Modes_9287_1_0), 6); 701 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0, 702 ARRAY_SIZE(ar9287Common_9287_1_0), 2); 703 704 if (ah->config.pcie_clock_req) 705 INIT_INI_ARRAY(&ah->iniPcieSerdes, 706 ar9287PciePhy_clkreq_off_L1_9287_1_0, 707 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2); 708 else 709 INIT_INI_ARRAY(&ah->iniPcieSerdes, 710 ar9287PciePhy_clkreq_always_on_L1_9287_1_0, 711 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0), 712 2); 713 } else if (AR_SREV_9285_12_OR_LATER(ah)) { 714 715 716 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2, 717 ARRAY_SIZE(ar9285Modes_9285_1_2), 6); 718 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2, 719 ARRAY_SIZE(ar9285Common_9285_1_2), 2); 720 721 if (ah->config.pcie_clock_req) { 722 INIT_INI_ARRAY(&ah->iniPcieSerdes, 723 ar9285PciePhy_clkreq_off_L1_9285_1_2, 724 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2); 725 } else { 726 INIT_INI_ARRAY(&ah->iniPcieSerdes, 727 ar9285PciePhy_clkreq_always_on_L1_9285_1_2, 728 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2), 729 2); 730 } 731 } else if (AR_SREV_9285_10_OR_LATER(ah)) { 732 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285, 733 ARRAY_SIZE(ar9285Modes_9285), 6); 734 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285, 735 ARRAY_SIZE(ar9285Common_9285), 2); 736 737 if (ah->config.pcie_clock_req) { 738 INIT_INI_ARRAY(&ah->iniPcieSerdes, 739 ar9285PciePhy_clkreq_off_L1_9285, 740 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2); 741 } else { 742 INIT_INI_ARRAY(&ah->iniPcieSerdes, 743 ar9285PciePhy_clkreq_always_on_L1_9285, 744 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2); 745 } 746 } else if (AR_SREV_9280_20_OR_LATER(ah)) { 747 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2, 748 ARRAY_SIZE(ar9280Modes_9280_2), 6); 749 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2, 750 ARRAY_SIZE(ar9280Common_9280_2), 2); 751 752 if (ah->config.pcie_clock_req) { 753 INIT_INI_ARRAY(&ah->iniPcieSerdes, 754 ar9280PciePhy_clkreq_off_L1_9280, 755 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2); 756 } else { 757 INIT_INI_ARRAY(&ah->iniPcieSerdes, 758 ar9280PciePhy_clkreq_always_on_L1_9280, 759 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2); 760 } 761 INIT_INI_ARRAY(&ah->iniModesAdditional, 762 ar9280Modes_fast_clock_9280_2, 763 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3); 764 } else if (AR_SREV_9280_10_OR_LATER(ah)) { 765 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280, 766 ARRAY_SIZE(ar9280Modes_9280), 6); 767 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280, 768 ARRAY_SIZE(ar9280Common_9280), 2); 769 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 770 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160, 771 ARRAY_SIZE(ar5416Modes_9160), 6); 772 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160, 773 ARRAY_SIZE(ar5416Common_9160), 2); 774 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160, 775 ARRAY_SIZE(ar5416Bank0_9160), 2); 776 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160, 777 ARRAY_SIZE(ar5416BB_RfGain_9160), 3); 778 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160, 779 ARRAY_SIZE(ar5416Bank1_9160), 2); 780 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160, 781 ARRAY_SIZE(ar5416Bank2_9160), 2); 782 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160, 783 ARRAY_SIZE(ar5416Bank3_9160), 3); 784 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160, 785 ARRAY_SIZE(ar5416Bank6_9160), 3); 786 
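		/*
		 * Note on the INIT_INI_ARRAY() calls in this function: each
		 * INI table is a rows x columns register array whose first
		 * column is the register address. Two-column tables carry a
		 * single value per register; the six-column "modes" tables
		 * carry one value per channel mode and are written using the
		 * modesIndex chosen in ath9k_hw_process_ini() further below.
		 */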
INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160, 787 ARRAY_SIZE(ar5416Bank6TPC_9160), 3); 788 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160, 789 ARRAY_SIZE(ar5416Bank7_9160), 2); 790 if (AR_SREV_9160_11(ah)) { 791 INIT_INI_ARRAY(&ah->iniAddac, 792 ar5416Addac_91601_1, 793 ARRAY_SIZE(ar5416Addac_91601_1), 2); 794 } else { 795 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160, 796 ARRAY_SIZE(ar5416Addac_9160), 2); 797 } 798 } else if (AR_SREV_9100_OR_LATER(ah)) { 799 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100, 800 ARRAY_SIZE(ar5416Modes_9100), 6); 801 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100, 802 ARRAY_SIZE(ar5416Common_9100), 2); 803 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100, 804 ARRAY_SIZE(ar5416Bank0_9100), 2); 805 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100, 806 ARRAY_SIZE(ar5416BB_RfGain_9100), 3); 807 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100, 808 ARRAY_SIZE(ar5416Bank1_9100), 2); 809 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100, 810 ARRAY_SIZE(ar5416Bank2_9100), 2); 811 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100, 812 ARRAY_SIZE(ar5416Bank3_9100), 3); 813 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100, 814 ARRAY_SIZE(ar5416Bank6_9100), 3); 815 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100, 816 ARRAY_SIZE(ar5416Bank6TPC_9100), 3); 817 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100, 818 ARRAY_SIZE(ar5416Bank7_9100), 2); 819 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100, 820 ARRAY_SIZE(ar5416Addac_9100), 2); 821 } else { 822 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes, 823 ARRAY_SIZE(ar5416Modes), 6); 824 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common, 825 ARRAY_SIZE(ar5416Common), 2); 826 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0, 827 ARRAY_SIZE(ar5416Bank0), 2); 828 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain, 829 ARRAY_SIZE(ar5416BB_RfGain), 3); 830 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1, 831 ARRAY_SIZE(ar5416Bank1), 2); 832 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2, 833 ARRAY_SIZE(ar5416Bank2), 2); 834 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3, 835 ARRAY_SIZE(ar5416Bank3), 3); 836 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6, 837 ARRAY_SIZE(ar5416Bank6), 3); 838 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC, 839 ARRAY_SIZE(ar5416Bank6TPC), 3); 840 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7, 841 ARRAY_SIZE(ar5416Bank7), 2); 842 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac, 843 ARRAY_SIZE(ar5416Addac), 2); 844 } 845} 846 847static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah) 848{ 849 if (AR_SREV_9287_11(ah)) 850 INIT_INI_ARRAY(&ah->iniModesRxGain, 851 ar9287Modes_rx_gain_9287_1_1, 852 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6); 853 else if (AR_SREV_9287_10(ah)) 854 INIT_INI_ARRAY(&ah->iniModesRxGain, 855 ar9287Modes_rx_gain_9287_1_0, 856 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6); 857 else if (AR_SREV_9280_20(ah)) 858 ath9k_hw_init_rxgain_ini(ah); 859 860 if (AR_SREV_9287_11(ah)) { 861 INIT_INI_ARRAY(&ah->iniModesTxGain, 862 ar9287Modes_tx_gain_9287_1_1, 863 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6); 864 } else if (AR_SREV_9287_10(ah)) { 865 INIT_INI_ARRAY(&ah->iniModesTxGain, 866 ar9287Modes_tx_gain_9287_1_0, 867 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6); 868 } else if (AR_SREV_9280_20(ah)) { 869 ath9k_hw_init_txgain_ini(ah); 870 } else if (AR_SREV_9285_12_OR_LATER(ah)) { 871 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE); 872 873 /* txgain table */ 874 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) { 875 INIT_INI_ARRAY(&ah->iniModesTxGain, 876 ar9285Modes_high_power_tx_gain_9285_1_2, 877 
ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6); 878 } else { 879 INIT_INI_ARRAY(&ah->iniModesTxGain, 880 ar9285Modes_original_tx_gain_9285_1_2, 881 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6); 882 } 883 884 } 885} 886 887static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah) 888{ 889 u32 i, j; 890 891 if ((ah->hw_version.devid == AR9280_DEVID_PCI) && 892 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) { 893 894 /* EEPROM Fixup */ 895 for (i = 0; i < ah->iniModes.ia_rows; i++) { 896 u32 reg = INI_RA(&ah->iniModes, i, 0); 897 898 for (j = 1; j < ah->iniModes.ia_columns; j++) { 899 u32 val = INI_RA(&ah->iniModes, i, j); 900 901 INI_RA(&ah->iniModes, i, j) = 902 ath9k_hw_ini_fixup(ah, 903 &ah->eeprom.def, 904 reg, val); 905 } 906 } 907 } 908} 909 910int ath9k_hw_init(struct ath_hw *ah) 911{ 912 int r = 0; 913 914 if (!ath9k_hw_devid_supported(ah->hw_version.devid)) 915 return -EOPNOTSUPP; 916 917 ath9k_hw_init_defaults(ah); 918 ath9k_hw_init_config(ah); 919 920 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) { 921 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't reset chip\n"); 922 return -EIO; 923 } 924 925 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) { 926 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Couldn't wakeup chip\n"); 927 return -EIO; 928 } 929 930 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) { 931 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI || 932 (AR_SREV_9280(ah) && !ah->is_pciexpress)) { 933 ah->config.serialize_regmode = 934 SER_REG_MODE_ON; 935 } else { 936 ah->config.serialize_regmode = 937 SER_REG_MODE_OFF; 938 } 939 } 940 941 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "serialize_regmode is %d\n", 942 ah->config.serialize_regmode); 943 944 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) { 945 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 946 "Mac Chip Rev 0x%02x.%x is not supported by " 947 "this driver\n", ah->hw_version.macVersion, 948 ah->hw_version.macRev); 949 return -EOPNOTSUPP; 950 } 951 952 if (AR_SREV_9100(ah)) { 953 ah->iq_caldata.calData = &iq_cal_multi_sample; 954 ah->supp_cals = IQ_MISMATCH_CAL; 955 ah->is_pciexpress = false; 956 } 957 958 if (AR_SREV_9271(ah)) 959 ah->is_pciexpress = false; 960 961 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID); 962 963 ath9k_hw_init_cal_settings(ah); 964 965 ah->ani_function = ATH9K_ANI_ALL; 966 if (AR_SREV_9280_10_OR_LATER(ah)) 967 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL; 968 969 ath9k_hw_init_mode_regs(ah); 970 971 if (ah->is_pciexpress) 972 ath9k_hw_configpcipowersave(ah, 0); 973 else 974 ath9k_hw_disablepcie(ah); 975 976 r = ath9k_hw_post_init(ah); 977 if (r) 978 return r; 979 980 ath9k_hw_init_mode_gain_regs(ah); 981 ath9k_hw_fill_cap_info(ah); 982 ath9k_hw_init_11a_eeprom_fix(ah); 983 984 r = ath9k_hw_init_macaddr(ah); 985 if (r) { 986 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 987 "Failed to initialize MAC address\n"); 988 return r; 989 } 990 991 if (AR_SREV_9285(ah) || AR_SREV_9271(ah)) 992 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S); 993 else 994 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S); 995 996 ath9k_init_nfcal_hist_buffer(ah); 997 998 return 0; 999} 1000 1001static void ath9k_hw_init_bb(struct ath_hw *ah, 1002 struct ath9k_channel *chan) 1003{ 1004 u32 synthDelay; 1005 1006 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY; 1007 if (IS_CHAN_B(chan)) 1008 synthDelay = (4 * synthDelay) / 22; 1009 else 1010 synthDelay /= 10; 1011 1012 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN); 1013 1014 udelay(synthDelay + BASE_ACTIVATE_DELAY); 1015} 1016 
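/*
 * Note on the delay scaling in ath9k_hw_init_bb() above: both branches work
 * out to (4 * synthDelay) / clock-rate, with the 22 MHz CCK clock used for
 * 11b channels and the 40 MHz OFDM clock otherwise (see the
 * ATH9K_CLOCK_RATE_* defines at the top of this file), so the
 * AR_PHY_RX_DELAY_DELAY field appears to count ticks of four baseband
 * clocks. A minimal sketch of that reading, written out once; the helper
 * name is illustrative only and not part of the driver:
 */
#if 0
static u32 example_synth_delay_to_us(u32 delay, bool is_cck)
{
	u32 clks_per_us = is_cck ? ATH9K_CLOCK_RATE_CCK		/* 22 */
				 : ATH9K_CLOCK_RATE_5GHZ_OFDM;	/* 40 */

	/* (4 * d) / 22 for CCK; (4 * d) / 40 == d / 10 for OFDM */
	return (4 * delay) / clks_per_us;
}
#endif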
1017static void ath9k_hw_init_qos(struct ath_hw *ah) 1018{ 1019 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa); 1020 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210); 1021 1022 REG_WRITE(ah, AR_QOS_NO_ACK, 1023 SM(2, AR_QOS_NO_ACK_TWO_BIT) | 1024 SM(5, AR_QOS_NO_ACK_BIT_OFF) | 1025 SM(0, AR_QOS_NO_ACK_BYTE_OFF)); 1026 1027 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL); 1028 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF); 1029 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF); 1030 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF); 1031 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF); 1032} 1033 1034static void ath9k_hw_init_pll(struct ath_hw *ah, 1035 struct ath9k_channel *chan) 1036{ 1037 u32 pll; 1038 1039 if (AR_SREV_9100(ah)) { 1040 if (chan && IS_CHAN_5GHZ(chan)) 1041 pll = 0x1450; 1042 else 1043 pll = 0x1458; 1044 } else { 1045 if (AR_SREV_9280_10_OR_LATER(ah)) { 1046 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV); 1047 1048 if (chan && IS_CHAN_HALF_RATE(chan)) 1049 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL); 1050 else if (chan && IS_CHAN_QUARTER_RATE(chan)) 1051 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL); 1052 1053 if (chan && IS_CHAN_5GHZ(chan)) { 1054 pll |= SM(0x28, AR_RTC_9160_PLL_DIV); 1055 1056 1057 if (AR_SREV_9280_20(ah)) { 1058 if (((chan->channel % 20) == 0) 1059 || ((chan->channel % 10) == 0)) 1060 pll = 0x2850; 1061 else 1062 pll = 0x142c; 1063 } 1064 } else { 1065 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV); 1066 } 1067 1068 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 1069 1070 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV); 1071 1072 if (chan && IS_CHAN_HALF_RATE(chan)) 1073 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL); 1074 else if (chan && IS_CHAN_QUARTER_RATE(chan)) 1075 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL); 1076 1077 if (chan && IS_CHAN_5GHZ(chan)) 1078 pll |= SM(0x50, AR_RTC_9160_PLL_DIV); 1079 else 1080 pll |= SM(0x58, AR_RTC_9160_PLL_DIV); 1081 } else { 1082 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2; 1083 1084 if (chan && IS_CHAN_HALF_RATE(chan)) 1085 pll |= SM(0x1, AR_RTC_PLL_CLKSEL); 1086 else if (chan && IS_CHAN_QUARTER_RATE(chan)) 1087 pll |= SM(0x2, AR_RTC_PLL_CLKSEL); 1088 1089 if (chan && IS_CHAN_5GHZ(chan)) 1090 pll |= SM(0xa, AR_RTC_PLL_DIV); 1091 else 1092 pll |= SM(0xb, AR_RTC_PLL_DIV); 1093 } 1094 } 1095 REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll); 1096 1097 udelay(RTC_PLL_SETTLE_DELAY); 1098 1099 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK); 1100} 1101 1102static void ath9k_hw_init_chain_masks(struct ath_hw *ah) 1103{ 1104 int rx_chainmask, tx_chainmask; 1105 1106 rx_chainmask = ah->rxchainmask; 1107 tx_chainmask = ah->txchainmask; 1108 1109 switch (rx_chainmask) { 1110 case 0x5: 1111 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP, 1112 AR_PHY_SWAP_ALT_CHAIN); 1113 case 0x3: 1114 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) { 1115 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7); 1116 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7); 1117 break; 1118 } 1119 case 0x1: 1120 case 0x2: 1121 case 0x7: 1122 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask); 1123 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask); 1124 break; 1125 default: 1126 break; 1127 } 1128 1129 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask); 1130 if (tx_chainmask == 0x5) { 1131 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP, 1132 AR_PHY_SWAP_ALT_CHAIN); 1133 } 1134 if (AR_SREV_9100(ah)) 1135 REG_WRITE(ah, AR_PHY_ANALOG_SWAP, 1136 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001); 1137} 1138 1139static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah, 1140 enum nl80211_iftype opmode) 1141{ 1142 ah->mask_reg = AR_IMR_TXERR | 1143 AR_IMR_TXURN | 1144 AR_IMR_RXERR | 1145 AR_IMR_RXORN | 1146 
AR_IMR_BCNMISC; 1147 1148 if (ah->config.intr_mitigation) 1149 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR; 1150 else 1151 ah->mask_reg |= AR_IMR_RXOK; 1152 1153 ah->mask_reg |= AR_IMR_TXOK; 1154 1155 if (opmode == NL80211_IFTYPE_AP) 1156 ah->mask_reg |= AR_IMR_MIB; 1157 1158 REG_WRITE(ah, AR_IMR, ah->mask_reg); 1159 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT); 1160 1161 if (!AR_SREV_9100(ah)) { 1162 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF); 1163 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT); 1164 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0); 1165 } 1166} 1167 1168static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us) 1169{ 1170 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) { 1171 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad ack timeout %u\n", us); 1172 ah->acktimeout = (u32) -1; 1173 return false; 1174 } else { 1175 REG_RMW_FIELD(ah, AR_TIME_OUT, 1176 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us)); 1177 ah->acktimeout = us; 1178 return true; 1179 } 1180} 1181 1182static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us) 1183{ 1184 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) { 1185 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad cts timeout %u\n", us); 1186 ah->ctstimeout = (u32) -1; 1187 return false; 1188 } else { 1189 REG_RMW_FIELD(ah, AR_TIME_OUT, 1190 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us)); 1191 ah->ctstimeout = us; 1192 return true; 1193 } 1194} 1195 1196static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu) 1197{ 1198 if (tu > 0xFFFF) { 1199 DPRINTF(ah->ah_sc, ATH_DBG_XMIT, 1200 "bad global tx timeout %u\n", tu); 1201 ah->globaltxtimeout = (u32) -1; 1202 return false; 1203 } else { 1204 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu); 1205 ah->globaltxtimeout = tu; 1206 return true; 1207 } 1208} 1209 1210static void ath9k_hw_init_user_settings(struct ath_hw *ah) 1211{ 1212 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "ah->misc_mode 0x%x\n", 1213 ah->misc_mode); 1214 1215 if (ah->misc_mode != 0) 1216 REG_WRITE(ah, AR_PCU_MISC, 1217 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode); 1218 if (ah->slottime != (u32) -1) 1219 ath9k_hw_setslottime(ah, ah->slottime); 1220 if (ah->acktimeout != (u32) -1) 1221 ath9k_hw_set_ack_timeout(ah, ah->acktimeout); 1222 if (ah->ctstimeout != (u32) -1) 1223 ath9k_hw_set_cts_timeout(ah, ah->ctstimeout); 1224 if (ah->globaltxtimeout != (u32) -1) 1225 ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout); 1226} 1227 1228const char *ath9k_hw_probe(u16 vendorid, u16 devid) 1229{ 1230 return vendorid == ATHEROS_VENDOR_ID ? 1231 ath9k_hw_devname(devid) : NULL; 1232} 1233 1234void ath9k_hw_detach(struct ath_hw *ah) 1235{ 1236 if (!AR_SREV_9100(ah)) 1237 ath9k_hw_ani_disable(ah); 1238 1239 ath9k_hw_rf_free(ah); 1240 ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP); 1241 kfree(ah); 1242 ah = NULL; 1243} 1244 1245/*******/ 1246/* INI */ 1247/*******/ 1248 1249static void ath9k_hw_override_ini(struct ath_hw *ah, 1250 struct ath9k_channel *chan) 1251{ 1252 u32 val; 1253 1254 if (AR_SREV_9271(ah)) { 1255 /* 1256 * Enable spectral scan to solution for issues with stuck 1257 * beacons on AR9271 1.0. 
The beacon stuck issue is not seeon on 1258 * AR9271 1.1 1259 */ 1260 if (AR_SREV_9271_10(ah)) { 1261 val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE; 1262 REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val); 1263 } 1264 else if (AR_SREV_9271_11(ah)) 1265 /* 1266 * change AR_PHY_RF_CTL3 setting to fix MAC issue 1267 * present on AR9271 1.1 1268 */ 1269 REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001); 1270 return; 1271 } 1272 1273 /* 1274 * Set the RX_ABORT and RX_DIS and clear if off only after 1275 * RXE is set for MAC. This prevents frames with corrupted 1276 * descriptor status. 1277 */ 1278 REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT)); 1279 1280 1281 if (!AR_SREV_5416_20_OR_LATER(ah) || 1282 AR_SREV_9280_10_OR_LATER(ah)) 1283 return; 1284 /* 1285 * Disable BB clock gating 1286 * Necessary to avoid issues on AR5416 2.0 1287 */ 1288 REG_WRITE(ah, 0x9800 + (651 << 2), 0x11); 1289} 1290 1291static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah, 1292 struct ar5416_eeprom_def *pEepData, 1293 u32 reg, u32 value) 1294{ 1295 struct base_eep_header *pBase = &(pEepData->baseEepHeader); 1296 1297 switch (ah->hw_version.devid) { 1298 case AR9280_DEVID_PCI: 1299 if (reg == 0x7894) { 1300 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM, 1301 "ini VAL: %x EEPROM: %x\n", value, 1302 (pBase->version & 0xff)); 1303 1304 if ((pBase->version & 0xff) > 0x0a) { 1305 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM, 1306 "PWDCLKIND: %d\n", 1307 pBase->pwdclkind); 1308 value &= ~AR_AN_TOP2_PWDCLKIND; 1309 value |= AR_AN_TOP2_PWDCLKIND & 1310 (pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S); 1311 } else { 1312 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM, 1313 "PWDCLKIND Earlier Rev\n"); 1314 } 1315 1316 DPRINTF(ah->ah_sc, ATH_DBG_EEPROM, 1317 "final ini VAL: %x\n", value); 1318 } 1319 break; 1320 } 1321 1322 return value; 1323} 1324 1325static u32 ath9k_hw_ini_fixup(struct ath_hw *ah, 1326 struct ar5416_eeprom_def *pEepData, 1327 u32 reg, u32 value) 1328{ 1329 if (ah->eep_map == EEP_MAP_4KBITS) 1330 return value; 1331 else 1332 return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value); 1333} 1334 1335static void ath9k_olc_init(struct ath_hw *ah) 1336{ 1337 u32 i; 1338 1339 if (OLC_FOR_AR9287_10_LATER) { 1340 REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9, 1341 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL); 1342 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0, 1343 AR9287_AN_TXPC0_TXPCMODE, 1344 AR9287_AN_TXPC0_TXPCMODE_S, 1345 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE); 1346 udelay(100); 1347 } else { 1348 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++) 1349 ah->originalGain[i] = 1350 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4), 1351 AR_PHY_TX_GAIN); 1352 ah->PDADCdelta = 0; 1353 } 1354} 1355 1356static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, 1357 struct ath9k_channel *chan) 1358{ 1359 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band); 1360 1361 if (IS_CHAN_B(chan)) 1362 ctl |= CTL_11B; 1363 else if (IS_CHAN_G(chan)) 1364 ctl |= CTL_11G; 1365 else 1366 ctl |= CTL_11A; 1367 1368 return ctl; 1369} 1370 1371static int ath9k_hw_process_ini(struct ath_hw *ah, 1372 struct ath9k_channel *chan, 1373 enum ath9k_ht_macmode macmode) 1374{ 1375 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 1376 int i, regWrites = 0; 1377 struct ieee80211_channel *channel = chan->chan; 1378 u32 modesIndex, freqIndex; 1379 1380 switch (chan->chanmode) { 1381 case CHANNEL_A: 1382 case CHANNEL_A_HT20: 1383 modesIndex = 1; 1384 freqIndex = 1; 1385 break; 1386 case CHANNEL_A_HT40PLUS: 1387 case CHANNEL_A_HT40MINUS: 1388 modesIndex = 2; 1389 freqIndex = 1; 1390 
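		/*
		 * For all cases here: modesIndex picks the value column of
		 * the mode-dependent INI tables written below (iniModes,
		 * iniModesRxGain, iniModesTxGain), while freqIndex selects
		 * the band for the RF register writes, 1 for 5 GHz and 2
		 * for 2 GHz.
		 */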
break; 1391 case CHANNEL_G: 1392 case CHANNEL_G_HT20: 1393 case CHANNEL_B: 1394 modesIndex = 4; 1395 freqIndex = 2; 1396 break; 1397 case CHANNEL_G_HT40PLUS: 1398 case CHANNEL_G_HT40MINUS: 1399 modesIndex = 3; 1400 freqIndex = 2; 1401 break; 1402 1403 default: 1404 return -EINVAL; 1405 } 1406 1407 REG_WRITE(ah, AR_PHY(0), 0x00000007); 1408 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO); 1409 ah->eep_ops->set_addac(ah, chan); 1410 1411 if (AR_SREV_5416_22_OR_LATER(ah)) { 1412 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites); 1413 } else { 1414 struct ar5416IniArray temp; 1415 u32 addacSize = 1416 sizeof(u32) * ah->iniAddac.ia_rows * 1417 ah->iniAddac.ia_columns; 1418 1419 memcpy(ah->addac5416_21, 1420 ah->iniAddac.ia_array, addacSize); 1421 1422 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0; 1423 1424 temp.ia_array = ah->addac5416_21; 1425 temp.ia_columns = ah->iniAddac.ia_columns; 1426 temp.ia_rows = ah->iniAddac.ia_rows; 1427 REG_WRITE_ARRAY(&temp, 1, regWrites); 1428 } 1429 1430 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC); 1431 1432 for (i = 0; i < ah->iniModes.ia_rows; i++) { 1433 u32 reg = INI_RA(&ah->iniModes, i, 0); 1434 u32 val = INI_RA(&ah->iniModes, i, modesIndex); 1435 1436 REG_WRITE(ah, reg, val); 1437 1438 if (reg >= 0x7800 && reg < 0x78a0 1439 && ah->config.analog_shiftreg) { 1440 udelay(100); 1441 } 1442 1443 DO_DELAY(regWrites); 1444 } 1445 1446 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah)) 1447 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites); 1448 1449 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) || 1450 AR_SREV_9287_10_OR_LATER(ah)) 1451 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites); 1452 1453 for (i = 0; i < ah->iniCommon.ia_rows; i++) { 1454 u32 reg = INI_RA(&ah->iniCommon, i, 0); 1455 u32 val = INI_RA(&ah->iniCommon, i, 1); 1456 1457 REG_WRITE(ah, reg, val); 1458 1459 if (reg >= 0x7800 && reg < 0x78a0 1460 && ah->config.analog_shiftreg) { 1461 udelay(100); 1462 } 1463 1464 DO_DELAY(regWrites); 1465 } 1466 1467 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites); 1468 1469 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) { 1470 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex, 1471 regWrites); 1472 } 1473 1474 ath9k_hw_override_ini(ah, chan); 1475 ath9k_hw_set_regs(ah, chan, macmode); 1476 ath9k_hw_init_chain_masks(ah); 1477 1478 if (OLC_FOR_AR9280_20_LATER) 1479 ath9k_olc_init(ah); 1480 1481 ah->eep_ops->set_txpower(ah, chan, 1482 ath9k_regd_get_ctl(regulatory, chan), 1483 channel->max_antenna_gain * 2, 1484 channel->max_power * 2, 1485 min((u32) MAX_RATE_POWER, 1486 (u32) regulatory->power_limit)); 1487 1488 if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) { 1489 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 1490 "ar5416SetRfRegs failed\n"); 1491 return -EIO; 1492 } 1493 1494 return 0; 1495} 1496 1497/****************************************/ 1498/* Reset and Channel Switching Routines */ 1499/****************************************/ 1500 1501static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan) 1502{ 1503 u32 rfMode = 0; 1504 1505 if (chan == NULL) 1506 return; 1507 1508 rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan)) 1509 ? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM; 1510 1511 if (!AR_SREV_9280_10_OR_LATER(ah)) 1512 rfMode |= (IS_CHAN_5GHZ(chan)) ? 
1513 AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ; 1514 1515 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) 1516 rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE); 1517 1518 REG_WRITE(ah, AR_PHY_MODE, rfMode); 1519} 1520 1521static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah) 1522{ 1523 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS); 1524} 1525 1526static inline void ath9k_hw_set_dma(struct ath_hw *ah) 1527{ 1528 u32 regval; 1529 1530 /* 1531 * set AHB_MODE not to do cacheline prefetches 1532 */ 1533 regval = REG_READ(ah, AR_AHB_MODE); 1534 REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN); 1535 1536 /* 1537 * let mac dma reads be in 128 byte chunks 1538 */ 1539 regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK; 1540 REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B); 1541 1542 /* 1543 * Restore TX Trigger Level to its pre-reset value. 1544 * The initial value depends on whether aggregation is enabled, and is 1545 * adjusted whenever underruns are detected. 1546 */ 1547 REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level); 1548 1549 /* 1550 * let mac dma writes be in 128 byte chunks 1551 */ 1552 regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK; 1553 REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B); 1554 1555 /* 1556 * Setup receive FIFO threshold to hold off TX activities 1557 */ 1558 REG_WRITE(ah, AR_RXFIFO_CFG, 0x200); 1559 1560 /* 1561 * reduce the number of usable entries in PCU TXBUF to avoid 1562 * wrap around issues. 1563 */ 1564 if (AR_SREV_9285(ah)) { 1565 /* For AR9285 the number of Fifos are reduced to half. 1566 * So set the usable tx buf size also to half to 1567 * avoid data/delimiter underruns 1568 */ 1569 REG_WRITE(ah, AR_PCU_TXBUF_CTRL, 1570 AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE); 1571 } else if (!AR_SREV_9271(ah)) { 1572 REG_WRITE(ah, AR_PCU_TXBUF_CTRL, 1573 AR_PCU_TXBUF_CTRL_USABLE_SIZE); 1574 } 1575} 1576 1577static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode) 1578{ 1579 u32 val; 1580 1581 val = REG_READ(ah, AR_STA_ID1); 1582 val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC); 1583 switch (opmode) { 1584 case NL80211_IFTYPE_AP: 1585 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP 1586 | AR_STA_ID1_KSRCH_MODE); 1587 REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION); 1588 break; 1589 case NL80211_IFTYPE_ADHOC: 1590 case NL80211_IFTYPE_MESH_POINT: 1591 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC 1592 | AR_STA_ID1_KSRCH_MODE); 1593 REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION); 1594 break; 1595 case NL80211_IFTYPE_STATION: 1596 case NL80211_IFTYPE_MONITOR: 1597 REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE); 1598 break; 1599 } 1600} 1601 1602static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah, 1603 u32 coef_scaled, 1604 u32 *coef_mantissa, 1605 u32 *coef_exponent) 1606{ 1607 u32 coef_exp, coef_man; 1608 1609 for (coef_exp = 31; coef_exp > 0; coef_exp--) 1610 if ((coef_scaled >> coef_exp) & 0x1) 1611 break; 1612 1613 coef_exp = 14 - (coef_exp - COEF_SCALE_S); 1614 1615 coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1)); 1616 1617 *coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp); 1618 *coef_exponent = coef_exp - 16; 1619} 1620 1621static void ath9k_hw_set_delta_slope(struct ath_hw *ah, 1622 struct ath9k_channel *chan) 1623{ 1624 u32 coef_scaled, ds_coef_exp, ds_coef_man; 1625 u32 clockMhzScaled = 0x64000000; 1626 struct chan_centers centers; 1627 1628 if (IS_CHAN_HALF_RATE(chan)) 1629 clockMhzScaled = clockMhzScaled >> 1; 1630 else if 
(IS_CHAN_QUARTER_RATE(chan)) 1631 clockMhzScaled = clockMhzScaled >> 2; 1632 1633 ath9k_hw_get_channel_centers(ah, chan, ¢ers); 1634 coef_scaled = clockMhzScaled / centers.synth_center; 1635 1636 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man, 1637 &ds_coef_exp); 1638 1639 REG_RMW_FIELD(ah, AR_PHY_TIMING3, 1640 AR_PHY_TIMING3_DSC_MAN, ds_coef_man); 1641 REG_RMW_FIELD(ah, AR_PHY_TIMING3, 1642 AR_PHY_TIMING3_DSC_EXP, ds_coef_exp); 1643 1644 coef_scaled = (9 * coef_scaled) / 10; 1645 1646 ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man, 1647 &ds_coef_exp); 1648 1649 REG_RMW_FIELD(ah, AR_PHY_HALFGI, 1650 AR_PHY_HALFGI_DSC_MAN, ds_coef_man); 1651 REG_RMW_FIELD(ah, AR_PHY_HALFGI, 1652 AR_PHY_HALFGI_DSC_EXP, ds_coef_exp); 1653} 1654 1655static bool ath9k_hw_set_reset(struct ath_hw *ah, int type) 1656{ 1657 u32 rst_flags; 1658 u32 tmpReg; 1659 1660 if (AR_SREV_9100(ah)) { 1661 u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK); 1662 val &= ~AR_RTC_DERIVED_CLK_PERIOD; 1663 val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD); 1664 REG_WRITE(ah, AR_RTC_DERIVED_CLK, val); 1665 (void)REG_READ(ah, AR_RTC_DERIVED_CLK); 1666 } 1667 1668 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN | 1669 AR_RTC_FORCE_WAKE_ON_INT); 1670 1671 if (AR_SREV_9100(ah)) { 1672 rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD | 1673 AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET; 1674 } else { 1675 tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE); 1676 if (tmpReg & 1677 (AR_INTR_SYNC_LOCAL_TIMEOUT | 1678 AR_INTR_SYNC_RADM_CPL_TIMEOUT)) { 1679 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0); 1680 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF); 1681 } else { 1682 REG_WRITE(ah, AR_RC, AR_RC_AHB); 1683 } 1684 1685 rst_flags = AR_RTC_RC_MAC_WARM; 1686 if (type == ATH9K_RESET_COLD) 1687 rst_flags |= AR_RTC_RC_MAC_COLD; 1688 } 1689 1690 REG_WRITE(ah, AR_RTC_RC, rst_flags); 1691 udelay(50); 1692 1693 REG_WRITE(ah, AR_RTC_RC, 0); 1694 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) { 1695 DPRINTF(ah->ah_sc, ATH_DBG_RESET, 1696 "RTC stuck in MAC reset\n"); 1697 return false; 1698 } 1699 1700 if (!AR_SREV_9100(ah)) 1701 REG_WRITE(ah, AR_RC, 0); 1702 1703 ath9k_hw_init_pll(ah, NULL); 1704 1705 if (AR_SREV_9100(ah)) 1706 udelay(50); 1707 1708 return true; 1709} 1710 1711static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah) 1712{ 1713 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN | 1714 AR_RTC_FORCE_WAKE_ON_INT); 1715 1716 if (!AR_SREV_9100(ah)) 1717 REG_WRITE(ah, AR_RC, AR_RC_AHB); 1718 1719 REG_WRITE(ah, AR_RTC_RESET, 0); 1720 udelay(2); 1721 1722 if (!AR_SREV_9100(ah)) 1723 REG_WRITE(ah, AR_RC, 0); 1724 1725 REG_WRITE(ah, AR_RTC_RESET, 1); 1726 1727 if (!ath9k_hw_wait(ah, 1728 AR_RTC_STATUS, 1729 AR_RTC_STATUS_M, 1730 AR_RTC_STATUS_ON, 1731 AH_WAIT_TIMEOUT)) { 1732 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "RTC not waking up\n"); 1733 return false; 1734 } 1735 1736 ath9k_hw_read_revisions(ah); 1737 1738 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM); 1739} 1740 1741static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type) 1742{ 1743 REG_WRITE(ah, AR_RTC_FORCE_WAKE, 1744 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT); 1745 1746 switch (type) { 1747 case ATH9K_RESET_POWER_ON: 1748 return ath9k_hw_set_reset_power_on(ah); 1749 case ATH9K_RESET_WARM: 1750 case ATH9K_RESET_COLD: 1751 return ath9k_hw_set_reset(ah, type); 1752 default: 1753 return false; 1754 } 1755} 1756 1757static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan, 1758 enum ath9k_ht_macmode macmode) 1759{ 1760 u32 phymode; 1761 u32 
enableDacFifo = 0; 1762 1763 if (AR_SREV_9285_10_OR_LATER(ah)) 1764 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) & 1765 AR_PHY_FC_ENABLE_DAC_FIFO); 1766 1767 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 1768 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo; 1769 1770 if (IS_CHAN_HT40(chan)) { 1771 phymode |= AR_PHY_FC_DYN2040_EN; 1772 1773 if ((chan->chanmode == CHANNEL_A_HT40PLUS) || 1774 (chan->chanmode == CHANNEL_G_HT40PLUS)) 1775 phymode |= AR_PHY_FC_DYN2040_PRI_CH; 1776 1777 if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25) 1778 phymode |= AR_PHY_FC_DYN2040_EXT_CH; 1779 } 1780 REG_WRITE(ah, AR_PHY_TURBO, phymode); 1781 1782 ath9k_hw_set11nmac2040(ah, macmode); 1783 1784 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S); 1785 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S); 1786} 1787 1788static bool ath9k_hw_chip_reset(struct ath_hw *ah, 1789 struct ath9k_channel *chan) 1790{ 1791 if (OLC_FOR_AR9280_20_LATER) { 1792 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) 1793 return false; 1794 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM)) 1795 return false; 1796 1797 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 1798 return false; 1799 1800 ah->chip_fullsleep = false; 1801 ath9k_hw_init_pll(ah, chan); 1802 ath9k_hw_set_rfmode(ah, chan); 1803 1804 return true; 1805} 1806 1807static bool ath9k_hw_channel_change(struct ath_hw *ah, 1808 struct ath9k_channel *chan, 1809 enum ath9k_ht_macmode macmode) 1810{ 1811 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 1812 struct ieee80211_channel *channel = chan->chan; 1813 u32 synthDelay, qnum; 1814 1815 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) { 1816 if (ath9k_hw_numtxpending(ah, qnum)) { 1817 DPRINTF(ah->ah_sc, ATH_DBG_QUEUE, 1818 "Transmit frames pending on queue %d\n", qnum); 1819 return false; 1820 } 1821 } 1822 1823 REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN); 1824 if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN, 1825 AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) { 1826 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 1827 "Could not kill baseband RX\n"); 1828 return false; 1829 } 1830 1831 ath9k_hw_set_regs(ah, chan, macmode); 1832 1833 if (AR_SREV_9280_10_OR_LATER(ah)) { 1834 ath9k_hw_ar9280_set_channel(ah, chan); 1835 } else { 1836 if (!(ath9k_hw_set_channel(ah, chan))) { 1837 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 1838 "Failed to set channel\n"); 1839 return false; 1840 } 1841 } 1842 1843 ah->eep_ops->set_txpower(ah, chan, 1844 ath9k_regd_get_ctl(regulatory, chan), 1845 channel->max_antenna_gain * 2, 1846 channel->max_power * 2, 1847 min((u32) MAX_RATE_POWER, 1848 (u32) regulatory->power_limit)); 1849 1850 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY; 1851 if (IS_CHAN_B(chan)) 1852 synthDelay = (4 * synthDelay) / 22; 1853 else 1854 synthDelay /= 10; 1855 1856 udelay(synthDelay + BASE_ACTIVATE_DELAY); 1857 1858 REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0); 1859 1860 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan)) 1861 ath9k_hw_set_delta_slope(ah, chan); 1862 1863 if (AR_SREV_9280_10_OR_LATER(ah)) 1864 ath9k_hw_9280_spur_mitigate(ah, chan); 1865 else 1866 ath9k_hw_spur_mitigate(ah, chan); 1867 1868 if (!chan->oneTimeCalsDone) 1869 chan->oneTimeCalsDone = true; 1870 1871 return true; 1872} 1873 1874static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan) 1875{ 1876 int bb_spur = AR_NO_SPUR; 1877 int freq; 1878 int bin, cur_bin; 1879 int bb_spur_off, spur_subchannel_sd; 1880 int spur_freq_sd; 1881 int spur_delta_phase; 1882 int denominator; 1883 int 
upper, lower, cur_vit_mask; 1884 int tmp, newVal; 1885 int i; 1886 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8, 1887 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60 1888 }; 1889 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10, 1890 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60 1891 }; 1892 int inc[4] = { 0, 100, 0, 0 }; 1893 struct chan_centers centers; 1894 1895 int8_t mask_m[123]; 1896 int8_t mask_p[123]; 1897 int8_t mask_amt; 1898 int tmp_mask; 1899 int cur_bb_spur; 1900 bool is2GHz = IS_CHAN_2GHZ(chan); 1901 1902 memset(&mask_m, 0, sizeof(int8_t) * 123); 1903 memset(&mask_p, 0, sizeof(int8_t) * 123); 1904 1905 ath9k_hw_get_channel_centers(ah, chan, ¢ers); 1906 freq = centers.synth_center; 1907 1908 ah->config.spurmode = SPUR_ENABLE_EEPROM; 1909 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) { 1910 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz); 1911 1912 if (is2GHz) 1913 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ; 1914 else 1915 cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ; 1916 1917 if (AR_NO_SPUR == cur_bb_spur) 1918 break; 1919 cur_bb_spur = cur_bb_spur - freq; 1920 1921 if (IS_CHAN_HT40(chan)) { 1922 if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) && 1923 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) { 1924 bb_spur = cur_bb_spur; 1925 break; 1926 } 1927 } else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) && 1928 (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) { 1929 bb_spur = cur_bb_spur; 1930 break; 1931 } 1932 } 1933 1934 if (AR_NO_SPUR == bb_spur) { 1935 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK, 1936 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX); 1937 return; 1938 } else { 1939 REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK, 1940 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX); 1941 } 1942 1943 bin = bb_spur * 320; 1944 1945 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0)); 1946 1947 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI | 1948 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER | 1949 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK | 1950 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK); 1951 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal); 1952 1953 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL | 1954 AR_PHY_SPUR_REG_ENABLE_MASK_PPM | 1955 AR_PHY_SPUR_REG_MASK_RATE_SELECT | 1956 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI | 1957 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH)); 1958 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal); 1959 1960 if (IS_CHAN_HT40(chan)) { 1961 if (bb_spur < 0) { 1962 spur_subchannel_sd = 1; 1963 bb_spur_off = bb_spur + 10; 1964 } else { 1965 spur_subchannel_sd = 0; 1966 bb_spur_off = bb_spur - 10; 1967 } 1968 } else { 1969 spur_subchannel_sd = 0; 1970 bb_spur_off = bb_spur; 1971 } 1972 1973 if (IS_CHAN_HT40(chan)) 1974 spur_delta_phase = 1975 ((bb_spur * 262144) / 1976 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE; 1977 else 1978 spur_delta_phase = 1979 ((bb_spur * 524288) / 1980 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE; 1981 1982 denominator = IS_CHAN_2GHZ(chan) ? 
44 : 40; 1983 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff; 1984 1985 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC | 1986 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) | 1987 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE)); 1988 REG_WRITE(ah, AR_PHY_TIMING11, newVal); 1989 1990 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S; 1991 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal); 1992 1993 cur_bin = -6000; 1994 upper = bin + 100; 1995 lower = bin - 100; 1996 1997 for (i = 0; i < 4; i++) { 1998 int pilot_mask = 0; 1999 int chan_mask = 0; 2000 int bp = 0; 2001 for (bp = 0; bp < 30; bp++) { 2002 if ((cur_bin > lower) && (cur_bin < upper)) { 2003 pilot_mask = pilot_mask | 0x1 << bp; 2004 chan_mask = chan_mask | 0x1 << bp; 2005 } 2006 cur_bin += 100; 2007 } 2008 cur_bin += inc[i]; 2009 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask); 2010 REG_WRITE(ah, chan_mask_reg[i], chan_mask); 2011 } 2012 2013 cur_vit_mask = 6100; 2014 upper = bin + 120; 2015 lower = bin - 120; 2016 2017 for (i = 0; i < 123; i++) { 2018 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) { 2019 2020 /* workaround for gcc bug #37014 */ 2021 volatile int tmp_v = abs(cur_vit_mask - bin); 2022 2023 if (tmp_v < 75) 2024 mask_amt = 1; 2025 else 2026 mask_amt = 0; 2027 if (cur_vit_mask < 0) 2028 mask_m[abs(cur_vit_mask / 100)] = mask_amt; 2029 else 2030 mask_p[cur_vit_mask / 100] = mask_amt; 2031 } 2032 cur_vit_mask -= 100; 2033 } 2034 2035 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28) 2036 | (mask_m[48] << 26) | (mask_m[49] << 24) 2037 | (mask_m[50] << 22) | (mask_m[51] << 20) 2038 | (mask_m[52] << 18) | (mask_m[53] << 16) 2039 | (mask_m[54] << 14) | (mask_m[55] << 12) 2040 | (mask_m[56] << 10) | (mask_m[57] << 8) 2041 | (mask_m[58] << 6) | (mask_m[59] << 4) 2042 | (mask_m[60] << 2) | (mask_m[61] << 0); 2043 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask); 2044 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask); 2045 2046 tmp_mask = (mask_m[31] << 28) 2047 | (mask_m[32] << 26) | (mask_m[33] << 24) 2048 | (mask_m[34] << 22) | (mask_m[35] << 20) 2049 | (mask_m[36] << 18) | (mask_m[37] << 16) 2050 | (mask_m[48] << 14) | (mask_m[39] << 12) 2051 | (mask_m[40] << 10) | (mask_m[41] << 8) 2052 | (mask_m[42] << 6) | (mask_m[43] << 4) 2053 | (mask_m[44] << 2) | (mask_m[45] << 0); 2054 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask); 2055 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask); 2056 2057 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28) 2058 | (mask_m[18] << 26) | (mask_m[18] << 24) 2059 | (mask_m[20] << 22) | (mask_m[20] << 20) 2060 | (mask_m[22] << 18) | (mask_m[22] << 16) 2061 | (mask_m[24] << 14) | (mask_m[24] << 12) 2062 | (mask_m[25] << 10) | (mask_m[26] << 8) 2063 | (mask_m[27] << 6) | (mask_m[28] << 4) 2064 | (mask_m[29] << 2) | (mask_m[30] << 0); 2065 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask); 2066 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask); 2067 2068 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28) 2069 | (mask_m[2] << 26) | (mask_m[3] << 24) 2070 | (mask_m[4] << 22) | (mask_m[5] << 20) 2071 | (mask_m[6] << 18) | (mask_m[7] << 16) 2072 | (mask_m[8] << 14) | (mask_m[9] << 12) 2073 | (mask_m[10] << 10) | (mask_m[11] << 8) 2074 | (mask_m[12] << 6) | (mask_m[13] << 4) 2075 | (mask_m[14] << 2) | (mask_m[15] << 0); 2076 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask); 2077 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask); 2078 2079 tmp_mask = (mask_p[15] << 28) 2080 | (mask_p[14] << 26) | (mask_p[13] << 24) 2081 | (mask_p[12] << 22) | (mask_p[11] << 20) 2082 | (mask_p[10] << 18) | (mask_p[9] << 16) 2083 | 
(mask_p[8] << 14) | (mask_p[7] << 12) 2084 | (mask_p[6] << 10) | (mask_p[5] << 8) 2085 | (mask_p[4] << 6) | (mask_p[3] << 4) 2086 | (mask_p[2] << 2) | (mask_p[1] << 0); 2087 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask); 2088 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask); 2089 2090 tmp_mask = (mask_p[30] << 28) 2091 | (mask_p[29] << 26) | (mask_p[28] << 24) 2092 | (mask_p[27] << 22) | (mask_p[26] << 20) 2093 | (mask_p[25] << 18) | (mask_p[24] << 16) 2094 | (mask_p[23] << 14) | (mask_p[22] << 12) 2095 | (mask_p[21] << 10) | (mask_p[20] << 8) 2096 | (mask_p[19] << 6) | (mask_p[18] << 4) 2097 | (mask_p[17] << 2) | (mask_p[16] << 0); 2098 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask); 2099 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask); 2100 2101 tmp_mask = (mask_p[45] << 28) 2102 | (mask_p[44] << 26) | (mask_p[43] << 24) 2103 | (mask_p[42] << 22) | (mask_p[41] << 20) 2104 | (mask_p[40] << 18) | (mask_p[39] << 16) 2105 | (mask_p[38] << 14) | (mask_p[37] << 12) 2106 | (mask_p[36] << 10) | (mask_p[35] << 8) 2107 | (mask_p[34] << 6) | (mask_p[33] << 4) 2108 | (mask_p[32] << 2) | (mask_p[31] << 0); 2109 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask); 2110 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask); 2111 2112 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28) 2113 | (mask_p[59] << 26) | (mask_p[58] << 24) 2114 | (mask_p[57] << 22) | (mask_p[56] << 20) 2115 | (mask_p[55] << 18) | (mask_p[54] << 16) 2116 | (mask_p[53] << 14) | (mask_p[52] << 12) 2117 | (mask_p[51] << 10) | (mask_p[50] << 8) 2118 | (mask_p[49] << 6) | (mask_p[48] << 4) 2119 | (mask_p[47] << 2) | (mask_p[46] << 0); 2120 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask); 2121 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask); 2122} 2123 2124static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan) 2125{ 2126 int bb_spur = AR_NO_SPUR; 2127 int bin, cur_bin; 2128 int spur_freq_sd; 2129 int spur_delta_phase; 2130 int denominator; 2131 int upper, lower, cur_vit_mask; 2132 int tmp, new; 2133 int i; 2134 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8, 2135 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60 2136 }; 2137 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10, 2138 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60 2139 }; 2140 int inc[4] = { 0, 100, 0, 0 }; 2141 2142 int8_t mask_m[123]; 2143 int8_t mask_p[123]; 2144 int8_t mask_amt; 2145 int tmp_mask; 2146 int cur_bb_spur; 2147 bool is2GHz = IS_CHAN_2GHZ(chan); 2148 2149 memset(&mask_m, 0, sizeof(int8_t) * 123); 2150 memset(&mask_p, 0, sizeof(int8_t) * 123); 2151 2152 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) { 2153 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz); 2154 if (AR_NO_SPUR == cur_bb_spur) 2155 break; 2156 cur_bb_spur = cur_bb_spur - (chan->channel * 10); 2157 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) { 2158 bb_spur = cur_bb_spur; 2159 break; 2160 } 2161 } 2162 2163 if (AR_NO_SPUR == bb_spur) 2164 return; 2165 2166 bin = bb_spur * 32; 2167 2168 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0)); 2169 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI | 2170 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER | 2171 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK | 2172 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK); 2173 2174 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new); 2175 2176 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL | 2177 AR_PHY_SPUR_REG_ENABLE_MASK_PPM | 2178 AR_PHY_SPUR_REG_MASK_RATE_SELECT | 2179 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI | 2180 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH)); 2181 REG_WRITE(ah, AR_PHY_SPUR_REG, new); 2182 2183 spur_delta_phase = 
((bb_spur * 524288) / 100) & 2184 AR_PHY_TIMING11_SPUR_DELTA_PHASE; 2185 2186 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400; 2187 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff; 2188 2189 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC | 2190 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) | 2191 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE)); 2192 REG_WRITE(ah, AR_PHY_TIMING11, new); 2193 2194 cur_bin = -6000; 2195 upper = bin + 100; 2196 lower = bin - 100; 2197 2198 for (i = 0; i < 4; i++) { 2199 int pilot_mask = 0; 2200 int chan_mask = 0; 2201 int bp = 0; 2202 for (bp = 0; bp < 30; bp++) { 2203 if ((cur_bin > lower) && (cur_bin < upper)) { 2204 pilot_mask = pilot_mask | 0x1 << bp; 2205 chan_mask = chan_mask | 0x1 << bp; 2206 } 2207 cur_bin += 100; 2208 } 2209 cur_bin += inc[i]; 2210 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask); 2211 REG_WRITE(ah, chan_mask_reg[i], chan_mask); 2212 } 2213 2214 cur_vit_mask = 6100; 2215 upper = bin + 120; 2216 lower = bin - 120; 2217 2218 for (i = 0; i < 123; i++) { 2219 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) { 2220 2221 /* workaround for gcc bug #37014 */ 2222 volatile int tmp_v = abs(cur_vit_mask - bin); 2223 2224 if (tmp_v < 75) 2225 mask_amt = 1; 2226 else 2227 mask_amt = 0; 2228 if (cur_vit_mask < 0) 2229 mask_m[abs(cur_vit_mask / 100)] = mask_amt; 2230 else 2231 mask_p[cur_vit_mask / 100] = mask_amt; 2232 } 2233 cur_vit_mask -= 100; 2234 } 2235 2236 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28) 2237 | (mask_m[48] << 26) | (mask_m[49] << 24) 2238 | (mask_m[50] << 22) | (mask_m[51] << 20) 2239 | (mask_m[52] << 18) | (mask_m[53] << 16) 2240 | (mask_m[54] << 14) | (mask_m[55] << 12) 2241 | (mask_m[56] << 10) | (mask_m[57] << 8) 2242 | (mask_m[58] << 6) | (mask_m[59] << 4) 2243 | (mask_m[60] << 2) | (mask_m[61] << 0); 2244 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask); 2245 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask); 2246 2247 tmp_mask = (mask_m[31] << 28) 2248 | (mask_m[32] << 26) | (mask_m[33] << 24) 2249 | (mask_m[34] << 22) | (mask_m[35] << 20) 2250 | (mask_m[36] << 18) | (mask_m[37] << 16) 2251 | (mask_m[48] << 14) | (mask_m[39] << 12) 2252 | (mask_m[40] << 10) | (mask_m[41] << 8) 2253 | (mask_m[42] << 6) | (mask_m[43] << 4) 2254 | (mask_m[44] << 2) | (mask_m[45] << 0); 2255 REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask); 2256 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask); 2257 2258 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28) 2259 | (mask_m[18] << 26) | (mask_m[18] << 24) 2260 | (mask_m[20] << 22) | (mask_m[20] << 20) 2261 | (mask_m[22] << 18) | (mask_m[22] << 16) 2262 | (mask_m[24] << 14) | (mask_m[24] << 12) 2263 | (mask_m[25] << 10) | (mask_m[26] << 8) 2264 | (mask_m[27] << 6) | (mask_m[28] << 4) 2265 | (mask_m[29] << 2) | (mask_m[30] << 0); 2266 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask); 2267 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask); 2268 2269 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28) 2270 | (mask_m[2] << 26) | (mask_m[3] << 24) 2271 | (mask_m[4] << 22) | (mask_m[5] << 20) 2272 | (mask_m[6] << 18) | (mask_m[7] << 16) 2273 | (mask_m[8] << 14) | (mask_m[9] << 12) 2274 | (mask_m[10] << 10) | (mask_m[11] << 8) 2275 | (mask_m[12] << 6) | (mask_m[13] << 4) 2276 | (mask_m[14] << 2) | (mask_m[15] << 0); 2277 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask); 2278 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask); 2279 2280 tmp_mask = (mask_p[15] << 28) 2281 | (mask_p[14] << 26) | (mask_p[13] << 24) 2282 | (mask_p[12] << 22) | (mask_p[11] << 20) 2283 | (mask_p[10] << 18) | (mask_p[9] << 16) 2284 | (mask_p[8] << 14) | 
(mask_p[7] << 12) 2285 | (mask_p[6] << 10) | (mask_p[5] << 8) 2286 | (mask_p[4] << 6) | (mask_p[3] << 4) 2287 | (mask_p[2] << 2) | (mask_p[1] << 0); 2288 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask); 2289 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask); 2290 2291 tmp_mask = (mask_p[30] << 28) 2292 | (mask_p[29] << 26) | (mask_p[28] << 24) 2293 | (mask_p[27] << 22) | (mask_p[26] << 20) 2294 | (mask_p[25] << 18) | (mask_p[24] << 16) 2295 | (mask_p[23] << 14) | (mask_p[22] << 12) 2296 | (mask_p[21] << 10) | (mask_p[20] << 8) 2297 | (mask_p[19] << 6) | (mask_p[18] << 4) 2298 | (mask_p[17] << 2) | (mask_p[16] << 0); 2299 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask); 2300 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask); 2301 2302 tmp_mask = (mask_p[45] << 28) 2303 | (mask_p[44] << 26) | (mask_p[43] << 24) 2304 | (mask_p[42] << 22) | (mask_p[41] << 20) 2305 | (mask_p[40] << 18) | (mask_p[39] << 16) 2306 | (mask_p[38] << 14) | (mask_p[37] << 12) 2307 | (mask_p[36] << 10) | (mask_p[35] << 8) 2308 | (mask_p[34] << 6) | (mask_p[33] << 4) 2309 | (mask_p[32] << 2) | (mask_p[31] << 0); 2310 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask); 2311 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask); 2312 2313 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28) 2314 | (mask_p[59] << 26) | (mask_p[58] << 24) 2315 | (mask_p[57] << 22) | (mask_p[56] << 20) 2316 | (mask_p[55] << 18) | (mask_p[54] << 16) 2317 | (mask_p[53] << 14) | (mask_p[52] << 12) 2318 | (mask_p[51] << 10) | (mask_p[50] << 8) 2319 | (mask_p[49] << 6) | (mask_p[48] << 4) 2320 | (mask_p[47] << 2) | (mask_p[46] << 0); 2321 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask); 2322 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask); 2323} 2324 2325static void ath9k_enable_rfkill(struct ath_hw *ah) 2326{ 2327 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, 2328 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB); 2329 2330 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2, 2331 AR_GPIO_INPUT_MUX2_RFSILENT); 2332 2333 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio); 2334 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB); 2335} 2336 2337int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan, 2338 bool bChannelChange) 2339{ 2340 u32 saveLedState; 2341 struct ath_softc *sc = ah->ah_sc; 2342 struct ath9k_channel *curchan = ah->curchan; 2343 u32 saveDefAntenna; 2344 u32 macStaId1; 2345 int i, rx_chainmask, r; 2346 2347 ah->extprotspacing = sc->ht_extprotspacing; 2348 ah->txchainmask = sc->tx_chainmask; 2349 ah->rxchainmask = sc->rx_chainmask; 2350 2351 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 2352 return -EIO; 2353 2354 if (curchan) 2355 ath9k_hw_getnf(ah, curchan); 2356 2357 if (bChannelChange && 2358 (ah->chip_fullsleep != true) && 2359 (ah->curchan != NULL) && 2360 (chan->channel != ah->curchan->channel) && 2361 ((chan->channelFlags & CHANNEL_ALL) == 2362 (ah->curchan->channelFlags & CHANNEL_ALL)) && 2363 (!AR_SREV_9280(ah) || (!IS_CHAN_A_5MHZ_SPACED(chan) && 2364 !IS_CHAN_A_5MHZ_SPACED(ah->curchan)))) { 2365 2366 if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) { 2367 ath9k_hw_loadnf(ah, ah->curchan); 2368 ath9k_hw_start_nfcal(ah); 2369 return 0; 2370 } 2371 } 2372 2373 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA); 2374 if (saveDefAntenna == 0) 2375 saveDefAntenna = 1; 2376 2377 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B; 2378 2379 saveLedState = REG_READ(ah, AR_CFG_LED) & 2380 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL | 2381 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW); 2382 2383 ath9k_hw_mark_phy_inactive(ah); 2384 2385 if (AR_SREV_9271(ah) && ah->htc_reset_init) { 2386 
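	/*
	 * Editor's note: on the first reset after init of an AR9271, the
	 * radio is held in RF reset here; the matching
	 * AR9271_GATE_MAC_CTL write after the chip reset below releases
	 * the MAC and clears htc_reset_init.
	 */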
REG_WRITE(ah, 2387 AR9271_RESET_POWER_DOWN_CONTROL, 2388 AR9271_RADIO_RF_RST); 2389 udelay(50); 2390 } 2391 2392 if (!ath9k_hw_chip_reset(ah, chan)) { 2393 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, "Chip reset failed\n"); 2394 return -EINVAL; 2395 } 2396 2397 if (AR_SREV_9271(ah) && ah->htc_reset_init) { 2398 ah->htc_reset_init = false; 2399 REG_WRITE(ah, 2400 AR9271_RESET_POWER_DOWN_CONTROL, 2401 AR9271_GATE_MAC_CTL); 2402 udelay(50); 2403 } 2404 2405 if (AR_SREV_9280_10_OR_LATER(ah)) 2406 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE); 2407 2408 if (AR_SREV_9287_12_OR_LATER(ah)) { 2409 /* Enable ASYNC FIFO */ 2410 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3, 2411 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL); 2412 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO); 2413 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3, 2414 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET); 2415 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3, 2416 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET); 2417 } 2418 r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width); 2419 if (r) 2420 return r; 2421 2422 /* Setup MFP options for CCMP */ 2423 if (AR_SREV_9280_20_OR_LATER(ah)) { 2424 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt 2425 * frames when constructing CCMP AAD. */ 2426 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT, 2427 0xc7ff); 2428 ah->sw_mgmt_crypto = false; 2429 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 2430 /* Disable hardware crypto for management frames */ 2431 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2, 2432 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE); 2433 REG_SET_BIT(ah, AR_PCU_MISC_MODE2, 2434 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT); 2435 ah->sw_mgmt_crypto = true; 2436 } else 2437 ah->sw_mgmt_crypto = true; 2438 2439 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan)) 2440 ath9k_hw_set_delta_slope(ah, chan); 2441 2442 if (AR_SREV_9280_10_OR_LATER(ah)) 2443 ath9k_hw_9280_spur_mitigate(ah, chan); 2444 else 2445 ath9k_hw_spur_mitigate(ah, chan); 2446 2447 ah->eep_ops->set_board_values(ah, chan); 2448 2449 ath9k_hw_decrease_chain_power(ah, chan); 2450 2451 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr)); 2452 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4) 2453 | macStaId1 2454 | AR_STA_ID1_RTS_USE_DEF 2455 | (ah->config. 2456 ack_6mb ? 
AR_STA_ID1_ACKCTS_6MB : 0) 2457 | ah->sta_id1_defaults); 2458 ath9k_hw_set_operating_mode(ah, ah->opmode); 2459 2460 REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask)); 2461 REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4)); 2462 2463 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna); 2464 2465 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid)); 2466 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) | 2467 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S)); 2468 2469 REG_WRITE(ah, AR_ISR, ~0); 2470 2471 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR); 2472 2473 if (AR_SREV_9280_10_OR_LATER(ah)) 2474 ath9k_hw_ar9280_set_channel(ah, chan); 2475 else 2476 if (!(ath9k_hw_set_channel(ah, chan))) 2477 return -EIO; 2478 2479 for (i = 0; i < AR_NUM_DCU; i++) 2480 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i); 2481 2482 ah->intr_txqs = 0; 2483 for (i = 0; i < ah->caps.total_queues; i++) 2484 ath9k_hw_resettxqueue(ah, i); 2485 2486 ath9k_hw_init_interrupt_masks(ah, ah->opmode); 2487 ath9k_hw_init_qos(ah); 2488 2489 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT) 2490 ath9k_enable_rfkill(ah); 2491 2492 ath9k_hw_init_user_settings(ah); 2493 2494 if (AR_SREV_9287_12_OR_LATER(ah)) { 2495 REG_WRITE(ah, AR_D_GBL_IFS_SIFS, 2496 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR); 2497 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, 2498 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR); 2499 REG_WRITE(ah, AR_D_GBL_IFS_EIFS, 2500 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR); 2501 2502 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR); 2503 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR); 2504 2505 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER, 2506 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768); 2507 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN, 2508 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL); 2509 } 2510 if (AR_SREV_9287_12_OR_LATER(ah)) { 2511 REG_SET_BIT(ah, AR_PCU_MISC_MODE2, 2512 AR_PCU_MISC_MODE2_ENABLE_AGGWEP); 2513 } 2514 2515 REG_WRITE(ah, AR_STA_ID1, 2516 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM); 2517 2518 ath9k_hw_set_dma(ah); 2519 2520 REG_WRITE(ah, AR_OBS, 8); 2521 2522 if (ah->config.intr_mitigation) { 2523 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500); 2524 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000); 2525 } 2526 2527 ath9k_hw_init_bb(ah, chan); 2528 2529 if (!ath9k_hw_init_cal(ah, chan)) 2530 return -EIO; 2531 2532 rx_chainmask = ah->rxchainmask; 2533 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) { 2534 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask); 2535 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask); 2536 } 2537 2538 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ); 2539 2540 /* 2541 * For big endian systems turn on swapping for descriptors 2542 */ 2543 if (AR_SREV_9100(ah)) { 2544 u32 mask; 2545 mask = REG_READ(ah, AR_CFG); 2546 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) { 2547 DPRINTF(ah->ah_sc, ATH_DBG_RESET, 2548 "CFG Byte Swap Set 0x%x\n", mask); 2549 } else { 2550 mask = 2551 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB; 2552 REG_WRITE(ah, AR_CFG, mask); 2553 DPRINTF(ah->ah_sc, ATH_DBG_RESET, 2554 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG)); 2555 } 2556 } else { 2557 /* Configure AR9271 target WLAN */ 2558 if (AR_SREV_9271(ah)) 2559 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB); 2560#ifdef __BIG_ENDIAN 2561 else 2562 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD); 2563#endif 2564 } 2565 2566 if (ah->ah_sc->sc_flags & SC_OP_BTCOEX_ENABLED) 2567 ath9k_hw_btcoex_enable(ah); 2568 2569 return 0; 2570} 2571 2572/************************/ 2573/* Key Cache Management */ 
2574/************************/ 2575 2576bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry) 2577{ 2578 u32 keyType; 2579 2580 if (entry >= ah->caps.keycache_size) { 2581 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 2582 "keycache entry %u out of range\n", entry); 2583 return false; 2584 } 2585 2586 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry)); 2587 2588 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0); 2589 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0); 2590 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0); 2591 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0); 2592 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0); 2593 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR); 2594 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0); 2595 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0); 2596 2597 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) { 2598 u16 micentry = entry + 64; 2599 2600 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0); 2601 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0); 2602 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0); 2603 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0); 2604 2605 } 2606 2607 return true; 2608} 2609 2610bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac) 2611{ 2612 u32 macHi, macLo; 2613 2614 if (entry >= ah->caps.keycache_size) { 2615 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 2616 "keycache entry %u out of range\n", entry); 2617 return false; 2618 } 2619 2620 if (mac != NULL) { 2621 macHi = (mac[5] << 8) | mac[4]; 2622 macLo = (mac[3] << 24) | 2623 (mac[2] << 16) | 2624 (mac[1] << 8) | 2625 mac[0]; 2626 macLo >>= 1; 2627 macLo |= (macHi & 1) << 31; 2628 macHi >>= 1; 2629 } else { 2630 macLo = macHi = 0; 2631 } 2632 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo); 2633 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID); 2634 2635 return true; 2636} 2637 2638bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry, 2639 const struct ath9k_keyval *k, 2640 const u8 *mac) 2641{ 2642 const struct ath9k_hw_capabilities *pCap = &ah->caps; 2643 u32 key0, key1, key2, key3, key4; 2644 u32 keyType; 2645 2646 if (entry >= pCap->keycache_size) { 2647 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 2648 "keycache entry %u out of range\n", entry); 2649 return false; 2650 } 2651 2652 switch (k->kv_type) { 2653 case ATH9K_CIPHER_AES_OCB: 2654 keyType = AR_KEYTABLE_TYPE_AES; 2655 break; 2656 case ATH9K_CIPHER_AES_CCM: 2657 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) { 2658 DPRINTF(ah->ah_sc, ATH_DBG_ANY, 2659 "AES-CCM not supported by mac rev 0x%x\n", 2660 ah->hw_version.macRev); 2661 return false; 2662 } 2663 keyType = AR_KEYTABLE_TYPE_CCM; 2664 break; 2665 case ATH9K_CIPHER_TKIP: 2666 keyType = AR_KEYTABLE_TYPE_TKIP; 2667 if (ATH9K_IS_MIC_ENABLED(ah) 2668 && entry + 64 >= pCap->keycache_size) { 2669 DPRINTF(ah->ah_sc, ATH_DBG_ANY, 2670 "entry %u inappropriate for TKIP\n", entry); 2671 return false; 2672 } 2673 break; 2674 case ATH9K_CIPHER_WEP: 2675 if (k->kv_len < WLAN_KEY_LEN_WEP40) { 2676 DPRINTF(ah->ah_sc, ATH_DBG_ANY, 2677 "WEP key length %u too small\n", k->kv_len); 2678 return false; 2679 } 2680 if (k->kv_len <= WLAN_KEY_LEN_WEP40) 2681 keyType = AR_KEYTABLE_TYPE_40; 2682 else if (k->kv_len <= WLAN_KEY_LEN_WEP104) 2683 keyType = AR_KEYTABLE_TYPE_104; 2684 else 2685 keyType = AR_KEYTABLE_TYPE_128; 2686 break; 2687 case ATH9K_CIPHER_CLR: 2688 keyType = AR_KEYTABLE_TYPE_CLR; 2689 break; 2690 default: 2691 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 2692 "cipher %u not supported\n", k->kv_type); 2693 return false; 2694 } 2695 2696 key0 = get_unaligned_le32(k->kv_val + 0); 2697 key1 = 
get_unaligned_le16(k->kv_val + 4); 2698 key2 = get_unaligned_le32(k->kv_val + 6); 2699 key3 = get_unaligned_le16(k->kv_val + 10); 2700 key4 = get_unaligned_le32(k->kv_val + 12); 2701 if (k->kv_len <= WLAN_KEY_LEN_WEP104) 2702 key4 &= 0xff; 2703 2704 /* 2705 * Note: Key cache registers access special memory area that requires 2706 * two 32-bit writes to actually update the values in the internal 2707 * memory. Consequently, the exact order and pairs used here must be 2708 * maintained. 2709 */ 2710 2711 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) { 2712 u16 micentry = entry + 64; 2713 2714 /* 2715 * Write inverted key[47:0] first to avoid Michael MIC errors 2716 * on frames that could be sent or received at the same time. 2717 * The correct key will be written in the end once everything 2718 * else is ready. 2719 */ 2720 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0); 2721 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1); 2722 2723 /* Write key[95:48] */ 2724 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2); 2725 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3); 2726 2727 /* Write key[127:96] and key type */ 2728 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4); 2729 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType); 2730 2731 /* Write MAC address for the entry */ 2732 (void) ath9k_hw_keysetmac(ah, entry, mac); 2733 2734 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) { 2735 /* 2736 * TKIP uses two key cache entries: 2737 * Michael MIC TX/RX keys in the same key cache entry 2738 * (idx = main index + 64): 2739 * key0 [31:0] = RX key [31:0] 2740 * key1 [15:0] = TX key [31:16] 2741 * key1 [31:16] = reserved 2742 * key2 [31:0] = RX key [63:32] 2743 * key3 [15:0] = TX key [15:0] 2744 * key3 [31:16] = reserved 2745 * key4 [31:0] = TX key [63:32] 2746 */ 2747 u32 mic0, mic1, mic2, mic3, mic4; 2748 2749 mic0 = get_unaligned_le32(k->kv_mic + 0); 2750 mic2 = get_unaligned_le32(k->kv_mic + 4); 2751 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff; 2752 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff; 2753 mic4 = get_unaligned_le32(k->kv_txmic + 4); 2754 2755 /* Write RX[31:0] and TX[31:16] */ 2756 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0); 2757 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1); 2758 2759 /* Write RX[63:32] and TX[15:0] */ 2760 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2); 2761 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3); 2762 2763 /* Write TX[63:32] and keyType(reserved) */ 2764 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4); 2765 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry), 2766 AR_KEYTABLE_TYPE_CLR); 2767 2768 } else { 2769 /* 2770 * TKIP uses four key cache entries (two for group 2771 * keys): 2772 * Michael MIC TX/RX keys are in different key cache 2773 * entries (idx = main index + 64 for TX and 2774 * main index + 32 + 96 for RX): 2775 * key0 [31:0] = TX/RX MIC key [31:0] 2776 * key1 [31:0] = reserved 2777 * key2 [31:0] = TX/RX MIC key [63:32] 2778 * key3 [31:0] = reserved 2779 * key4 [31:0] = reserved 2780 * 2781 * Upper layer code will call this function separately 2782 * for TX and RX keys when these registers offsets are 2783 * used. 
2784 */ 2785 u32 mic0, mic2; 2786 2787 mic0 = get_unaligned_le32(k->kv_mic + 0); 2788 mic2 = get_unaligned_le32(k->kv_mic + 4); 2789 2790 /* Write MIC key[31:0] */ 2791 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0); 2792 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0); 2793 2794 /* Write MIC key[63:32] */ 2795 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2); 2796 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0); 2797 2798 /* Write TX[63:32] and keyType(reserved) */ 2799 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0); 2800 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry), 2801 AR_KEYTABLE_TYPE_CLR); 2802 } 2803 2804 /* MAC address registers are reserved for the MIC entry */ 2805 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0); 2806 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0); 2807 2808 /* 2809 * Write the correct (un-inverted) key[47:0] last to enable 2810 * TKIP now that all other registers are set with correct 2811 * values. 2812 */ 2813 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0); 2814 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1); 2815 } else { 2816 /* Write key[47:0] */ 2817 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0); 2818 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1); 2819 2820 /* Write key[95:48] */ 2821 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2); 2822 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3); 2823 2824 /* Write key[127:96] and key type */ 2825 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4); 2826 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType); 2827 2828 /* Write MAC address for the entry */ 2829 (void) ath9k_hw_keysetmac(ah, entry, mac); 2830 } 2831 2832 return true; 2833} 2834 2835bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry) 2836{ 2837 if (entry < ah->caps.keycache_size) { 2838 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry)); 2839 if (val & AR_KEYTABLE_VALID) 2840 return true; 2841 } 2842 return false; 2843} 2844 2845/******************************/ 2846/* Power Management (Chipset) */ 2847/******************************/ 2848 2849static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip) 2850{ 2851 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2852 if (setChip) { 2853 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, 2854 AR_RTC_FORCE_WAKE_EN); 2855 if (!AR_SREV_9100(ah)) 2856 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF); 2857 2858 REG_CLR_BIT(ah, (AR_RTC_RESET), 2859 AR_RTC_RESET_EN); 2860 } 2861} 2862 2863static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip) 2864{ 2865 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2866 if (setChip) { 2867 struct ath9k_hw_capabilities *pCap = &ah->caps; 2868 2869 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 2870 REG_WRITE(ah, AR_RTC_FORCE_WAKE, 2871 AR_RTC_FORCE_WAKE_ON_INT); 2872 } else { 2873 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, 2874 AR_RTC_FORCE_WAKE_EN); 2875 } 2876 } 2877} 2878 2879static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip) 2880{ 2881 u32 val; 2882 int i; 2883 2884 if (setChip) { 2885 if ((REG_READ(ah, AR_RTC_STATUS) & 2886 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) { 2887 if (ath9k_hw_set_reset_reg(ah, 2888 ATH9K_RESET_POWER_ON) != true) { 2889 return false; 2890 } 2891 } 2892 if (AR_SREV_9100(ah)) 2893 REG_SET_BIT(ah, AR_RTC_RESET, 2894 AR_RTC_RESET_EN); 2895 2896 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE, 2897 AR_RTC_FORCE_WAKE_EN); 2898 udelay(50); 2899 2900 for (i = POWER_UP_TIME / 50; i > 0; i--) { 2901 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M; 2902 if (val == AR_RTC_STATUS_ON) 2903 break; 2904 udelay(50); 2905 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE, 2906 AR_RTC_FORCE_WAKE_EN); 2907 } 2908 
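	/*
	 * The loop above polls AR_RTC_STATUS roughly every 50us for up to
	 * POWER_UP_TIME microseconds; reaching i == 0 means the RTC never
	 * reported the ON state.
	 */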
if (i == 0) { 2909 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 2910 "Failed to wakeup in %uus\n", POWER_UP_TIME / 20); 2911 return false; 2912 } 2913 } 2914 2915 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2916 2917 return true; 2918} 2919 2920static bool ath9k_hw_setpower_nolock(struct ath_hw *ah, 2921 enum ath9k_power_mode mode) 2922{ 2923 int status = true, setChip = true; 2924 static const char *modes[] = { 2925 "AWAKE", 2926 "FULL-SLEEP", 2927 "NETWORK SLEEP", 2928 "UNDEFINED" 2929 }; 2930 2931 if (ah->power_mode == mode) 2932 return status; 2933 2934 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "%s -> %s\n", 2935 modes[ah->power_mode], modes[mode]); 2936 2937 switch (mode) { 2938 case ATH9K_PM_AWAKE: 2939 status = ath9k_hw_set_power_awake(ah, setChip); 2940 break; 2941 case ATH9K_PM_FULL_SLEEP: 2942 ath9k_set_power_sleep(ah, setChip); 2943 ah->chip_fullsleep = true; 2944 break; 2945 case ATH9K_PM_NETWORK_SLEEP: 2946 ath9k_set_power_network_sleep(ah, setChip); 2947 break; 2948 default: 2949 DPRINTF(ah->ah_sc, ATH_DBG_FATAL, 2950 "Unknown power mode %u\n", mode); 2951 return false; 2952 } 2953 ah->power_mode = mode; 2954 2955 return status; 2956} 2957 2958bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode) 2959{ 2960 unsigned long flags; 2961 bool ret; 2962 2963 spin_lock_irqsave(&ah->ah_sc->sc_pm_lock, flags); 2964 ret = ath9k_hw_setpower_nolock(ah, mode); 2965 spin_unlock_irqrestore(&ah->ah_sc->sc_pm_lock, flags); 2966 2967 return ret; 2968} 2969 2970void ath9k_ps_wakeup(struct ath_softc *sc) 2971{ 2972 unsigned long flags; 2973 2974 spin_lock_irqsave(&sc->sc_pm_lock, flags); 2975 if (++sc->ps_usecount != 1) 2976 goto unlock; 2977 2978 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_AWAKE); 2979 2980 unlock: 2981 spin_unlock_irqrestore(&sc->sc_pm_lock, flags); 2982} 2983 2984void ath9k_ps_restore(struct ath_softc *sc) 2985{ 2986 unsigned long flags; 2987 2988 spin_lock_irqsave(&sc->sc_pm_lock, flags); 2989 if (--sc->ps_usecount != 0) 2990 goto unlock; 2991 2992 if (sc->ps_enabled && 2993 !(sc->sc_flags & (SC_OP_WAIT_FOR_BEACON | 2994 SC_OP_WAIT_FOR_CAB | 2995 SC_OP_WAIT_FOR_PSPOLL_DATA | 2996 SC_OP_WAIT_FOR_TX_ACK))) 2997 ath9k_hw_setpower_nolock(sc->sc_ah, ATH9K_PM_NETWORK_SLEEP); 2998 2999 unlock: 3000 spin_unlock_irqrestore(&sc->sc_pm_lock, flags); 3001} 3002 3003/* 3004 * Helper for ASPM support. 3005 * 3006 * Disable PLL when in L0s as well as receiver clock when in L1. 3007 * This power saving option must be enabled through the SerDes. 3008 * 3009 * Programming the SerDes must go through the same 288 bit serial shift 3010 * register as the other analog registers. Hence the 9 writes. 
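 * (Nine 32-bit writes cover the full shift register: 9 x 32 = 288 bits.)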
3011 */ 3012void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore) 3013{ 3014 u8 i; 3015 3016 if (ah->is_pciexpress != true) 3017 return; 3018 3019 /* Do not touch SerDes registers */ 3020 if (ah->config.pcie_powersave_enable == 2) 3021 return; 3022 3023 /* Nothing to do on restore for 11N */ 3024 if (restore) 3025 return; 3026 3027 if (AR_SREV_9280_20_OR_LATER(ah)) { 3028 /* 3029 * AR9280 2.0 or later chips use SerDes values from the 3030 * initvals.h initialized depending on chipset during 3031 * ath9k_hw_init() 3032 */ 3033 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) { 3034 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0), 3035 INI_RA(&ah->iniPcieSerdes, i, 1)); 3036 } 3037 } else if (AR_SREV_9280(ah) && 3038 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) { 3039 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00); 3040 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 3041 3042 /* RX shut off when elecidle is asserted */ 3043 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019); 3044 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820); 3045 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560); 3046 3047 /* Shut off CLKREQ active in L1 */ 3048 if (ah->config.pcie_clock_req) 3049 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc); 3050 else 3051 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd); 3052 3053 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 3054 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 3055 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007); 3056 3057 /* Load the new settings */ 3058 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 3059 3060 } else { 3061 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00); 3062 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 3063 3064 /* RX shut off when elecidle is asserted */ 3065 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039); 3066 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824); 3067 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579); 3068 3069 /* 3070 * Ignore ah->ah_config.pcie_clock_req setting for 3071 * pre-AR9280 11n 3072 */ 3073 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff); 3074 3075 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 3076 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 3077 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007); 3078 3079 /* Load the new settings */ 3080 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 3081 } 3082 3083 udelay(1000); 3084 3085 /* set bit 19 to allow forcing of pcie core into L1 state */ 3086 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA); 3087 3088 /* Several PCIe messages to ensure proper behaviour */ 3089 if (ah->config.pcie_waen) { 3090 REG_WRITE(ah, AR_WA, ah->config.pcie_waen); 3091 } else { 3092 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) || AR_SREV_9287(ah)) 3093 REG_WRITE(ah, AR_WA, AR9285_WA_DEFAULT); 3094 /* 3095 * On AR9280 chips bit 22 of 0x4004 needs to be set, 3096 * otherwise the card may disappear. 
3097 */ 3098 else if (AR_SREV_9280(ah)) 3099 REG_WRITE(ah, AR_WA, AR9280_WA_DEFAULT); 3100 else 3101 REG_WRITE(ah, AR_WA, AR_WA_DEFAULT); 3102 } 3103} 3104 3105/**********************/ 3106/* Interrupt Handling */ 3107/**********************/ 3108 3109bool ath9k_hw_intrpend(struct ath_hw *ah) 3110{ 3111 u32 host_isr; 3112 3113 if (AR_SREV_9100(ah)) 3114 return true; 3115 3116 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE); 3117 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS)) 3118 return true; 3119 3120 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE); 3121 if ((host_isr & AR_INTR_SYNC_DEFAULT) 3122 && (host_isr != AR_INTR_SPURIOUS)) 3123 return true; 3124 3125 return false; 3126} 3127 3128bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked) 3129{ 3130 u32 isr = 0; 3131 u32 mask2 = 0; 3132 struct ath9k_hw_capabilities *pCap = &ah->caps; 3133 u32 sync_cause = 0; 3134 bool fatal_int = false; 3135 3136 if (!AR_SREV_9100(ah)) { 3137 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) { 3138 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M) 3139 == AR_RTC_STATUS_ON) { 3140 isr = REG_READ(ah, AR_ISR); 3141 } 3142 } 3143 3144 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) & 3145 AR_INTR_SYNC_DEFAULT; 3146 3147 *masked = 0; 3148 3149 if (!isr && !sync_cause) 3150 return false; 3151 } else { 3152 *masked = 0; 3153 isr = REG_READ(ah, AR_ISR); 3154 } 3155 3156 if (isr) { 3157 if (isr & AR_ISR_BCNMISC) { 3158 u32 isr2; 3159 isr2 = REG_READ(ah, AR_ISR_S2); 3160 if (isr2 & AR_ISR_S2_TIM) 3161 mask2 |= ATH9K_INT_TIM; 3162 if (isr2 & AR_ISR_S2_DTIM) 3163 mask2 |= ATH9K_INT_DTIM; 3164 if (isr2 & AR_ISR_S2_DTIMSYNC) 3165 mask2 |= ATH9K_INT_DTIMSYNC; 3166 if (isr2 & (AR_ISR_S2_CABEND)) 3167 mask2 |= ATH9K_INT_CABEND; 3168 if (isr2 & AR_ISR_S2_GTT) 3169 mask2 |= ATH9K_INT_GTT; 3170 if (isr2 & AR_ISR_S2_CST) 3171 mask2 |= ATH9K_INT_CST; 3172 if (isr2 & AR_ISR_S2_TSFOOR) 3173 mask2 |= ATH9K_INT_TSFOOR; 3174 } 3175 3176 isr = REG_READ(ah, AR_ISR_RAC); 3177 if (isr == 0xffffffff) { 3178 *masked = 0; 3179 return false; 3180 } 3181 3182 *masked = isr & ATH9K_INT_COMMON; 3183 3184 if (ah->config.intr_mitigation) { 3185 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM)) 3186 *masked |= ATH9K_INT_RX; 3187 } 3188 3189 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR)) 3190 *masked |= ATH9K_INT_RX; 3191 if (isr & 3192 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR | 3193 AR_ISR_TXEOL)) { 3194 u32 s0_s, s1_s; 3195 3196 *masked |= ATH9K_INT_TX; 3197 3198 s0_s = REG_READ(ah, AR_ISR_S0_S); 3199 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK); 3200 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC); 3201 3202 s1_s = REG_READ(ah, AR_ISR_S1_S); 3203 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR); 3204 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL); 3205 } 3206 3207 if (isr & AR_ISR_RXORN) { 3208 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, 3209 "receive FIFO overrun interrupt\n"); 3210 } 3211 3212 if (!AR_SREV_9100(ah)) { 3213 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 3214 u32 isr5 = REG_READ(ah, AR_ISR_S5_S); 3215 if (isr5 & AR_ISR_S5_TIM_TIMER) 3216 *masked |= ATH9K_INT_TIM_TIMER; 3217 } 3218 } 3219 3220 *masked |= mask2; 3221 } 3222 3223 if (AR_SREV_9100(ah)) 3224 return true; 3225 3226 if (isr & AR_ISR_GENTMR) { 3227 u32 s5_s; 3228 3229 s5_s = REG_READ(ah, AR_ISR_S5_S); 3230 if (isr & AR_ISR_GENTMR) { 3231 ah->intr_gen_timer_trigger = 3232 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG); 3233 3234 ah->intr_gen_timer_thresh = 3235 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH); 3236 3237 if (ah->intr_gen_timer_trigger) 3238 *masked |= 
ATH9K_INT_GENTIMER; 3239 3240 } 3241 } 3242 3243 if (sync_cause) { 3244 fatal_int = 3245 (sync_cause & 3246 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) 3247 ? true : false; 3248 3249 if (fatal_int) { 3250 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) { 3251 DPRINTF(ah->ah_sc, ATH_DBG_ANY, 3252 "received PCI FATAL interrupt\n"); 3253 } 3254 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) { 3255 DPRINTF(ah->ah_sc, ATH_DBG_ANY, 3256 "received PCI PERR interrupt\n"); 3257 } 3258 *masked |= ATH9K_INT_FATAL; 3259 } 3260 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) { 3261 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, 3262 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n"); 3263 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF); 3264 REG_WRITE(ah, AR_RC, 0); 3265 *masked |= ATH9K_INT_FATAL; 3266 } 3267 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) { 3268 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, 3269 "AR_INTR_SYNC_LOCAL_TIMEOUT\n"); 3270 } 3271 3272 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause); 3273 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR); 3274 } 3275 3276 return true; 3277} 3278 3279enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints) 3280{ 3281 u32 omask = ah->mask_reg; 3282 u32 mask, mask2; 3283 struct ath9k_hw_capabilities *pCap = &ah->caps; 3284 3285 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints); 3286 3287 if (omask & ATH9K_INT_GLOBAL) { 3288 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "disable IER\n"); 3289 REG_WRITE(ah, AR_IER, AR_IER_DISABLE); 3290 (void) REG_READ(ah, AR_IER); 3291 if (!AR_SREV_9100(ah)) { 3292 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0); 3293 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE); 3294 3295 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0); 3296 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE); 3297 } 3298 } 3299 3300 mask = ints & ATH9K_INT_COMMON; 3301 mask2 = 0; 3302 3303 if (ints & ATH9K_INT_TX) { 3304 if (ah->txok_interrupt_mask) 3305 mask |= AR_IMR_TXOK; 3306 if (ah->txdesc_interrupt_mask) 3307 mask |= AR_IMR_TXDESC; 3308 if (ah->txerr_interrupt_mask) 3309 mask |= AR_IMR_TXERR; 3310 if (ah->txeol_interrupt_mask) 3311 mask |= AR_IMR_TXEOL; 3312 } 3313 if (ints & ATH9K_INT_RX) { 3314 mask |= AR_IMR_RXERR; 3315 if (ah->config.intr_mitigation) 3316 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM; 3317 else 3318 mask |= AR_IMR_RXOK | AR_IMR_RXDESC; 3319 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) 3320 mask |= AR_IMR_GENTMR; 3321 } 3322 3323 if (ints & (ATH9K_INT_BMISC)) { 3324 mask |= AR_IMR_BCNMISC; 3325 if (ints & ATH9K_INT_TIM) 3326 mask2 |= AR_IMR_S2_TIM; 3327 if (ints & ATH9K_INT_DTIM) 3328 mask2 |= AR_IMR_S2_DTIM; 3329 if (ints & ATH9K_INT_DTIMSYNC) 3330 mask2 |= AR_IMR_S2_DTIMSYNC; 3331 if (ints & ATH9K_INT_CABEND) 3332 mask2 |= AR_IMR_S2_CABEND; 3333 if (ints & ATH9K_INT_TSFOOR) 3334 mask2 |= AR_IMR_S2_TSFOOR; 3335 } 3336 3337 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) { 3338 mask |= AR_IMR_BCNMISC; 3339 if (ints & ATH9K_INT_GTT) 3340 mask2 |= AR_IMR_S2_GTT; 3341 if (ints & ATH9K_INT_CST) 3342 mask2 |= AR_IMR_S2_CST; 3343 } 3344 3345 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask); 3346 REG_WRITE(ah, AR_IMR, mask); 3347 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM | 3348 AR_IMR_S2_DTIM | 3349 AR_IMR_S2_DTIMSYNC | 3350 AR_IMR_S2_CABEND | 3351 AR_IMR_S2_CABTO | 3352 AR_IMR_S2_TSFOOR | 3353 AR_IMR_S2_GTT | AR_IMR_S2_CST); 3354 REG_WRITE(ah, AR_IMR_S2, mask | mask2); 3355 ah->mask_reg = ints; 3356 3357 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 3358 if (ints & ATH9K_INT_TIM_TIMER) 3359 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER); 3360 else 3361 
REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER); 3362 } 3363 3364 if (ints & ATH9K_INT_GLOBAL) { 3365 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "enable IER\n"); 3366 REG_WRITE(ah, AR_IER, AR_IER_ENABLE); 3367 if (!AR_SREV_9100(ah)) { 3368 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 3369 AR_INTR_MAC_IRQ); 3370 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ); 3371 3372 3373 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 3374 AR_INTR_SYNC_DEFAULT); 3375 REG_WRITE(ah, AR_INTR_SYNC_MASK, 3376 AR_INTR_SYNC_DEFAULT); 3377 } 3378 DPRINTF(ah->ah_sc, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n", 3379 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER)); 3380 } 3381 3382 return omask; 3383} 3384 3385/*******************/ 3386/* Beacon Handling */ 3387/*******************/ 3388 3389void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period) 3390{ 3391 int flags = 0; 3392 3393 ah->beacon_interval = beacon_period; 3394 3395 switch (ah->opmode) { 3396 case NL80211_IFTYPE_STATION: 3397 case NL80211_IFTYPE_MONITOR: 3398 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon)); 3399 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff); 3400 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff); 3401 flags |= AR_TBTT_TIMER_EN; 3402 break; 3403 case NL80211_IFTYPE_ADHOC: 3404 case NL80211_IFTYPE_MESH_POINT: 3405 REG_SET_BIT(ah, AR_TXCFG, 3406 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY); 3407 REG_WRITE(ah, AR_NEXT_NDP_TIMER, 3408 TU_TO_USEC(next_beacon + 3409 (ah->atim_window ? ah-> 3410 atim_window : 1))); 3411 flags |= AR_NDP_TIMER_EN; 3412 case NL80211_IFTYPE_AP: 3413 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon)); 3414 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 3415 TU_TO_USEC(next_beacon - 3416 ah->config. 3417 dma_beacon_response_time)); 3418 REG_WRITE(ah, AR_NEXT_SWBA, 3419 TU_TO_USEC(next_beacon - 3420 ah->config. 
3421 sw_beacon_response_time)); 3422 flags |= 3423 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN; 3424 break; 3425 default: 3426 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, 3427 "%s: unsupported opmode: %d\n", 3428 __func__, ah->opmode); 3429 return; 3430 break; 3431 } 3432 3433 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period)); 3434 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period)); 3435 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period)); 3436 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period)); 3437 3438 beacon_period &= ~ATH9K_BEACON_ENA; 3439 if (beacon_period & ATH9K_BEACON_RESET_TSF) { 3440 beacon_period &= ~ATH9K_BEACON_RESET_TSF; 3441 ath9k_hw_reset_tsf(ah); 3442 } 3443 3444 REG_SET_BIT(ah, AR_TIMER_MODE, flags); 3445} 3446 3447void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah, 3448 const struct ath9k_beacon_state *bs) 3449{ 3450 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout; 3451 struct ath9k_hw_capabilities *pCap = &ah->caps; 3452 3453 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt)); 3454 3455 REG_WRITE(ah, AR_BEACON_PERIOD, 3456 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD)); 3457 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, 3458 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD)); 3459 3460 REG_RMW_FIELD(ah, AR_RSSI_THR, 3461 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold); 3462 3463 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD; 3464 3465 if (bs->bs_sleepduration > beaconintval) 3466 beaconintval = bs->bs_sleepduration; 3467 3468 dtimperiod = bs->bs_dtimperiod; 3469 if (bs->bs_sleepduration > dtimperiod) 3470 dtimperiod = bs->bs_sleepduration; 3471 3472 if (beaconintval == dtimperiod) 3473 nextTbtt = bs->bs_nextdtim; 3474 else 3475 nextTbtt = bs->bs_nexttbtt; 3476 3477 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim); 3478 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt); 3479 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "beacon period %d\n", beaconintval); 3480 DPRINTF(ah->ah_sc, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod); 3481 3482 REG_WRITE(ah, AR_NEXT_DTIM, 3483 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP)); 3484 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP)); 3485 3486 REG_WRITE(ah, AR_SLEEP1, 3487 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT) 3488 | AR_SLEEP1_ASSUME_DTIM); 3489 3490 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP) 3491 beacontimeout = (BEACON_TIMEOUT_VAL << 3); 3492 else 3493 beacontimeout = MIN_BEACON_TIMEOUT_VAL; 3494 3495 REG_WRITE(ah, AR_SLEEP2, 3496 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT)); 3497 3498 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval)); 3499 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod)); 3500 3501 REG_SET_BIT(ah, AR_TIMER_MODE, 3502 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN | 3503 AR_DTIM_TIMER_EN); 3504 3505 /* TSF Out of Range Threshold */ 3506 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold); 3507} 3508 3509/*******************/ 3510/* HW Capabilities */ 3511/*******************/ 3512 3513void ath9k_hw_fill_cap_info(struct ath_hw *ah) 3514{ 3515 struct ath9k_hw_capabilities *pCap = &ah->caps; 3516 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3517 struct ath_btcoex_info *btcoex_info = &ah->ah_sc->btcoex_info; 3518 3519 u16 capField = 0, eeval; 3520 3521 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0); 3522 regulatory->current_rd = eeval; 3523 3524 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1); 3525 if (AR_SREV_9285_10_OR_LATER(ah)) 3526 eeval |= AR9285_RDEXT_DEFAULT; 3527 regulatory->current_rd_ext = eeval; 
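	/*
	 * EEP_OP_CAP packs the TX queue count and key cache size; capField
	 * is decoded via AR_EEPROM_EEPCAP_MAXQCU and
	 * AR_EEPROM_EEPCAP_KC_ENTRIES further below.
	 */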
3528 3529 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP); 3530 3531 if (ah->opmode != NL80211_IFTYPE_AP && 3532 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) { 3533 if (regulatory->current_rd == 0x64 || 3534 regulatory->current_rd == 0x65) 3535 regulatory->current_rd += 5; 3536 else if (regulatory->current_rd == 0x41) 3537 regulatory->current_rd = 0x43; 3538 DPRINTF(ah->ah_sc, ATH_DBG_REGULATORY, 3539 "regdomain mapped to 0x%x\n", regulatory->current_rd); 3540 } 3541 3542 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE); 3543 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX); 3544 3545 if (eeval & AR5416_OPFLAGS_11A) { 3546 set_bit(ATH9K_MODE_11A, pCap->wireless_modes); 3547 if (ah->config.ht_enable) { 3548 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20)) 3549 set_bit(ATH9K_MODE_11NA_HT20, 3550 pCap->wireless_modes); 3551 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) { 3552 set_bit(ATH9K_MODE_11NA_HT40PLUS, 3553 pCap->wireless_modes); 3554 set_bit(ATH9K_MODE_11NA_HT40MINUS, 3555 pCap->wireless_modes); 3556 } 3557 } 3558 } 3559 3560 if (eeval & AR5416_OPFLAGS_11G) { 3561 set_bit(ATH9K_MODE_11G, pCap->wireless_modes); 3562 if (ah->config.ht_enable) { 3563 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20)) 3564 set_bit(ATH9K_MODE_11NG_HT20, 3565 pCap->wireless_modes); 3566 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) { 3567 set_bit(ATH9K_MODE_11NG_HT40PLUS, 3568 pCap->wireless_modes); 3569 set_bit(ATH9K_MODE_11NG_HT40MINUS, 3570 pCap->wireless_modes); 3571 } 3572 } 3573 } 3574 3575 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK); 3576 /* 3577 * For AR9271 we will temporarily use the rx chainmask as read from 3578 * the EEPROM. 3579 */ 3580 if ((ah->hw_version.devid == AR5416_DEVID_PCI) && 3581 !(eeval & AR5416_OPFLAGS_11A) && 3582 !(AR_SREV_9271(ah))) 3583 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */ 3584 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7; 3585 else 3586 /* Use rx_chainmask from EEPROM. 
*/ 3587 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK); 3588 3589 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0))) 3590 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA; 3591 3592 pCap->low_2ghz_chan = 2312; 3593 pCap->high_2ghz_chan = 2732; 3594 3595 pCap->low_5ghz_chan = 4920; 3596 pCap->high_5ghz_chan = 6100; 3597 3598 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP; 3599 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP; 3600 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM; 3601 3602 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP; 3603 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP; 3604 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM; 3605 3606 if (ah->config.ht_enable) 3607 pCap->hw_caps |= ATH9K_HW_CAP_HT; 3608 else 3609 pCap->hw_caps &= ~ATH9K_HW_CAP_HT; 3610 3611 pCap->hw_caps |= ATH9K_HW_CAP_GTT; 3612 pCap->hw_caps |= ATH9K_HW_CAP_VEOL; 3613 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK; 3614 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH; 3615 3616 if (capField & AR_EEPROM_EEPCAP_MAXQCU) 3617 pCap->total_queues = 3618 MS(capField, AR_EEPROM_EEPCAP_MAXQCU); 3619 else 3620 pCap->total_queues = ATH9K_NUM_TX_QUEUES; 3621 3622 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES) 3623 pCap->keycache_size = 3624 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES); 3625 else 3626 pCap->keycache_size = AR_KEYTABLE_SIZE; 3627 3628 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC; 3629 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD; 3630 3631 if (AR_SREV_9285_10_OR_LATER(ah)) 3632 pCap->num_gpio_pins = AR9285_NUM_GPIO; 3633 else if (AR_SREV_9280_10_OR_LATER(ah)) 3634 pCap->num_gpio_pins = AR928X_NUM_GPIO; 3635 else 3636 pCap->num_gpio_pins = AR_NUM_GPIO; 3637 3638 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) { 3639 pCap->hw_caps |= ATH9K_HW_CAP_CST; 3640 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX; 3641 } else { 3642 pCap->rts_aggr_limit = (8 * 1024); 3643 } 3644 3645 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM; 3646 3647#if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE) 3648 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT); 3649 if (ah->rfsilent & EEP_RFSILENT_ENABLED) { 3650 ah->rfkill_gpio = 3651 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL); 3652 ah->rfkill_polarity = 3653 MS(ah->rfsilent, EEP_RFSILENT_POLARITY); 3654 3655 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT; 3656 } 3657#endif 3658 3659 if ((ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI) || 3660 (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE) || 3661 (ah->hw_version.macVersion == AR_SREV_VERSION_9160) || 3662 (ah->hw_version.macVersion == AR_SREV_VERSION_9100) || 3663 (ah->hw_version.macVersion == AR_SREV_VERSION_9280) || 3664 (ah->hw_version.macVersion == AR_SREV_VERSION_9285)) 3665 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP; 3666 else 3667 pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP; 3668 3669 if (AR_SREV_9280(ah) || AR_SREV_9285(ah)) 3670 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS; 3671 else 3672 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS; 3673 3674 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) { 3675 pCap->reg_cap = 3676 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A | 3677 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN | 3678 AR_EEPROM_EEREGCAP_EN_KK_U2 | 3679 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND; 3680 } else { 3681 pCap->reg_cap = 3682 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A | 3683 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN; 3684 } 3685 3686 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND; 3687 3688 pCap->num_antcfg_5ghz = 3689 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ); 3690 pCap->num_antcfg_2ghz = 3691 ah->eep_ops->get_num_ant_config(ah, 
ATH9K_HAL_FREQ_BAND_2GHZ); 3692 3693 if (AR_SREV_9280_10_OR_LATER(ah) && 3694 ath_btcoex_supported(ah->hw_version.subsysid)) { 3695 btcoex_info->btactive_gpio = ATH_BTACTIVE_GPIO; 3696 btcoex_info->wlanactive_gpio = ATH_WLANACTIVE_GPIO; 3697 3698 if (AR_SREV_9285(ah)) 3699 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_3WIRE; 3700 else 3701 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_2WIRE; 3702 } else { 3703 btcoex_info->btcoex_scheme = ATH_BTCOEX_CFG_NONE; 3704 } 3705} 3706 3707bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type, 3708 u32 capability, u32 *result) 3709{ 3710 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3711 switch (type) { 3712 case ATH9K_CAP_CIPHER: 3713 switch (capability) { 3714 case ATH9K_CIPHER_AES_CCM: 3715 case ATH9K_CIPHER_AES_OCB: 3716 case ATH9K_CIPHER_TKIP: 3717 case ATH9K_CIPHER_WEP: 3718 case ATH9K_CIPHER_MIC: 3719 case ATH9K_CIPHER_CLR: 3720 return true; 3721 default: 3722 return false; 3723 } 3724 case ATH9K_CAP_TKIP_MIC: 3725 switch (capability) { 3726 case 0: 3727 return true; 3728 case 1: 3729 return (ah->sta_id1_defaults & 3730 AR_STA_ID1_CRPT_MIC_ENABLE) ? true : 3731 false; 3732 } 3733 case ATH9K_CAP_TKIP_SPLIT: 3734 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ? 3735 false : true; 3736 case ATH9K_CAP_DIVERSITY: 3737 return (REG_READ(ah, AR_PHY_CCK_DETECT) & 3738 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ? 3739 true : false; 3740 case ATH9K_CAP_MCAST_KEYSRCH: 3741 switch (capability) { 3742 case 0: 3743 return true; 3744 case 1: 3745 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) { 3746 return false; 3747 } else { 3748 return (ah->sta_id1_defaults & 3749 AR_STA_ID1_MCAST_KSRCH) ? true : 3750 false; 3751 } 3752 } 3753 return false; 3754 case ATH9K_CAP_TXPOW: 3755 switch (capability) { 3756 case 0: 3757 return 0; 3758 case 1: 3759 *result = regulatory->power_limit; 3760 return 0; 3761 case 2: 3762 *result = regulatory->max_power_level; 3763 return 0; 3764 case 3: 3765 *result = regulatory->tp_scale; 3766 return 0; 3767 } 3768 return false; 3769 case ATH9K_CAP_DS: 3770 return (AR_SREV_9280_20_OR_LATER(ah) && 3771 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1)) 3772 ? 
false : true; 3773 default: 3774 return false; 3775 } 3776} 3777 3778bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type, 3779 u32 capability, u32 setting, int *status) 3780{ 3781 u32 v; 3782 3783 switch (type) { 3784 case ATH9K_CAP_TKIP_MIC: 3785 if (setting) 3786 ah->sta_id1_defaults |= 3787 AR_STA_ID1_CRPT_MIC_ENABLE; 3788 else 3789 ah->sta_id1_defaults &= 3790 ~AR_STA_ID1_CRPT_MIC_ENABLE; 3791 return true; 3792 case ATH9K_CAP_DIVERSITY: 3793 v = REG_READ(ah, AR_PHY_CCK_DETECT); 3794 if (setting) 3795 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV; 3796 else 3797 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV; 3798 REG_WRITE(ah, AR_PHY_CCK_DETECT, v); 3799 return true; 3800 case ATH9K_CAP_MCAST_KEYSRCH: 3801 if (setting) 3802 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH; 3803 else 3804 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH; 3805 return true; 3806 default: 3807 return false; 3808 } 3809} 3810 3811/****************************/ 3812/* GPIO / RFKILL / Antennae */ 3813/****************************/ 3814 3815static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah, 3816 u32 gpio, u32 type) 3817{ 3818 int addr; 3819 u32 gpio_shift, tmp; 3820 3821 if (gpio > 11) 3822 addr = AR_GPIO_OUTPUT_MUX3; 3823 else if (gpio > 5) 3824 addr = AR_GPIO_OUTPUT_MUX2; 3825 else 3826 addr = AR_GPIO_OUTPUT_MUX1; 3827 3828 gpio_shift = (gpio % 6) * 5; 3829 3830 if (AR_SREV_9280_20_OR_LATER(ah) 3831 || (addr != AR_GPIO_OUTPUT_MUX1)) { 3832 REG_RMW(ah, addr, (type << gpio_shift), 3833 (0x1f << gpio_shift)); 3834 } else { 3835 tmp = REG_READ(ah, addr); 3836 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0); 3837 tmp &= ~(0x1f << gpio_shift); 3838 tmp |= (type << gpio_shift); 3839 REG_WRITE(ah, addr, tmp); 3840 } 3841} 3842 3843void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio) 3844{ 3845 u32 gpio_shift; 3846 3847 ASSERT(gpio < ah->caps.num_gpio_pins); 3848 3849 gpio_shift = gpio << 1; 3850 3851 REG_RMW(ah, 3852 AR_GPIO_OE_OUT, 3853 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift), 3854 (AR_GPIO_OE_OUT_DRV << gpio_shift)); 3855} 3856 3857u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio) 3858{ 3859#define MS_REG_READ(x, y) \ 3860 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y))) 3861 3862 if (gpio >= ah->caps.num_gpio_pins) 3863 return 0xffffffff; 3864 3865 if (AR_SREV_9287_10_OR_LATER(ah)) 3866 return MS_REG_READ(AR9287, gpio) != 0; 3867 else if (AR_SREV_9285_10_OR_LATER(ah)) 3868 return MS_REG_READ(AR9285, gpio) != 0; 3869 else if (AR_SREV_9280_10_OR_LATER(ah)) 3870 return MS_REG_READ(AR928X, gpio) != 0; 3871 else 3872 return MS_REG_READ(AR, gpio) != 0; 3873} 3874 3875void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio, 3876 u32 ah_signal_type) 3877{ 3878 u32 gpio_shift; 3879 3880 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type); 3881 3882 gpio_shift = 2 * gpio; 3883 3884 REG_RMW(ah, 3885 AR_GPIO_OE_OUT, 3886 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift), 3887 (AR_GPIO_OE_OUT_DRV << gpio_shift)); 3888} 3889 3890void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val) 3891{ 3892 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio), 3893 AR_GPIO_BIT(gpio)); 3894} 3895 3896u32 ath9k_hw_getdefantenna(struct ath_hw *ah) 3897{ 3898 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7; 3899} 3900 3901void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna) 3902{ 3903 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7)); 3904} 3905 3906bool ath9k_hw_setantennaswitch(struct ath_hw *ah, 3907 enum ath9k_ant_setting settings, 3908 struct ath9k_channel *chan, 3909 u8 *tx_chainmask, 3910 u8 
*rx_chainmask, 3911 u8 *antenna_cfgd) 3912{ 3913 static u8 tx_chainmask_cfg, rx_chainmask_cfg; 3914 3915 if (AR_SREV_9280(ah)) { 3916 if (!tx_chainmask_cfg) { 3917 3918 tx_chainmask_cfg = *tx_chainmask; 3919 rx_chainmask_cfg = *rx_chainmask; 3920 } 3921 3922 switch (settings) { 3923 case ATH9K_ANT_FIXED_A: 3924 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK; 3925 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK; 3926 *antenna_cfgd = true; 3927 break; 3928 case ATH9K_ANT_FIXED_B: 3929 if (ah->caps.tx_chainmask > 3930 ATH9K_ANTENNA1_CHAINMASK) { 3931 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK; 3932 } 3933 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK; 3934 *antenna_cfgd = true; 3935 break; 3936 case ATH9K_ANT_VARIABLE: 3937 *tx_chainmask = tx_chainmask_cfg; 3938 *rx_chainmask = rx_chainmask_cfg; 3939 *antenna_cfgd = true; 3940 break; 3941 default: 3942 break; 3943 } 3944 } else { 3945 ah->config.diversity_control = settings; 3946 } 3947 3948 return true; 3949} 3950 3951/*********************/ 3952/* General Operation */ 3953/*********************/ 3954 3955u32 ath9k_hw_getrxfilter(struct ath_hw *ah) 3956{ 3957 u32 bits = REG_READ(ah, AR_RX_FILTER); 3958 u32 phybits = REG_READ(ah, AR_PHY_ERR); 3959 3960 if (phybits & AR_PHY_ERR_RADAR) 3961 bits |= ATH9K_RX_FILTER_PHYRADAR; 3962 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING)) 3963 bits |= ATH9K_RX_FILTER_PHYERR; 3964 3965 return bits; 3966} 3967 3968void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits) 3969{ 3970 u32 phybits; 3971 3972 REG_WRITE(ah, AR_RX_FILTER, bits); 3973 3974 phybits = 0; 3975 if (bits & ATH9K_RX_FILTER_PHYRADAR) 3976 phybits |= AR_PHY_ERR_RADAR; 3977 if (bits & ATH9K_RX_FILTER_PHYERR) 3978 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING; 3979 REG_WRITE(ah, AR_PHY_ERR, phybits); 3980 3981 if (phybits) 3982 REG_WRITE(ah, AR_RXCFG, 3983 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA); 3984 else 3985 REG_WRITE(ah, AR_RXCFG, 3986 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA); 3987} 3988 3989bool ath9k_hw_phy_disable(struct ath_hw *ah) 3990{ 3991 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM); 3992} 3993 3994bool ath9k_hw_disable(struct ath_hw *ah) 3995{ 3996 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 3997 return false; 3998 3999 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD); 4000} 4001 4002void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit) 4003{ 4004 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 4005 struct ath9k_channel *chan = ah->curchan; 4006 struct ieee80211_channel *channel = chan->chan; 4007 4008 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER); 4009 4010 ah->eep_ops->set_txpower(ah, chan, 4011 ath9k_regd_get_ctl(regulatory, chan), 4012 channel->max_antenna_gain * 2, 4013 channel->max_power * 2, 4014 min((u32) MAX_RATE_POWER, 4015 (u32) regulatory->power_limit)); 4016} 4017 4018void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac) 4019{ 4020 memcpy(ah->macaddr, mac, ETH_ALEN); 4021} 4022 4023void ath9k_hw_setopmode(struct ath_hw *ah) 4024{ 4025 ath9k_hw_set_operating_mode(ah, ah->opmode); 4026} 4027 4028void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1) 4029{ 4030 REG_WRITE(ah, AR_MCAST_FIL0, filter0); 4031 REG_WRITE(ah, AR_MCAST_FIL1, filter1); 4032} 4033 4034void ath9k_hw_setbssidmask(struct ath_softc *sc) 4035{ 4036 REG_WRITE(sc->sc_ah, AR_BSSMSKL, get_unaligned_le32(sc->bssidmask)); 4037 REG_WRITE(sc->sc_ah, AR_BSSMSKU, get_unaligned_le16(sc->bssidmask + 4)); 4038} 4039 4040void ath9k_hw_write_associd(struct ath_softc *sc) 4041{ 4042 
REG_WRITE(sc->sc_ah, AR_BSS_ID0, get_unaligned_le32(sc->curbssid)); 4043 REG_WRITE(sc->sc_ah, AR_BSS_ID1, get_unaligned_le16(sc->curbssid + 4) | 4044 ((sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S)); 4045} 4046 4047u64 ath9k_hw_gettsf64(struct ath_hw *ah) 4048{ 4049 u64 tsf; 4050 4051 tsf = REG_READ(ah, AR_TSF_U32); 4052 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32); 4053 4054 return tsf; 4055} 4056 4057void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64) 4058{ 4059 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff); 4060 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff); 4061} 4062 4063void ath9k_hw_reset_tsf(struct ath_hw *ah) 4064{ 4065 ath9k_ps_wakeup(ah->ah_sc); 4066 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0, 4067 AH_TSF_WRITE_TIMEOUT)) 4068 DPRINTF(ah->ah_sc, ATH_DBG_RESET, 4069 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n"); 4070 4071 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE); 4072 ath9k_ps_restore(ah->ah_sc); 4073} 4074 4075void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting) 4076{ 4077 if (setting) 4078 ah->misc_mode |= AR_PCU_TX_ADD_TSF; 4079 else 4080 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF; 4081} 4082 4083bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us) 4084{ 4085 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) { 4086 DPRINTF(ah->ah_sc, ATH_DBG_RESET, "bad slot time %u\n", us); 4087 ah->slottime = (u32) -1; 4088 return false; 4089 } else { 4090 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us)); 4091 ah->slottime = us; 4092 return true; 4093 } 4094} 4095 4096void ath9k_hw_set11nmac2040(struct ath_hw *ah, enum ath9k_ht_macmode mode) 4097{ 4098 u32 macmode; 4099 4100 if (mode == ATH9K_HT_MACMODE_2040 && 4101 !ah->config.cwm_ignore_extcca) 4102 macmode = AR_2040_JOINED_RX_CLEAR; 4103 else 4104 macmode = 0; 4105 4106 REG_WRITE(ah, AR_2040_MODE, macmode); 4107} 4108 4109/* HW Generic timers configuration */ 4110 4111static const struct ath_gen_timer_configuration gen_tmr_configuration[] = 4112{ 4113 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4114 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4115 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4116 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4117 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4118 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4119 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4120 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4121 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001}, 4122 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4, 4123 AR_NDP2_TIMER_MODE, 0x0002}, 4124 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4, 4125 AR_NDP2_TIMER_MODE, 0x0004}, 4126 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4, 4127 AR_NDP2_TIMER_MODE, 0x0008}, 4128 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4, 4129 AR_NDP2_TIMER_MODE, 0x0010}, 4130 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4, 4131 AR_NDP2_TIMER_MODE, 0x0020}, 4132 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4, 4133 AR_NDP2_TIMER_MODE, 0x0040}, 4134 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4, 4135 AR_NDP2_TIMER_MODE, 0x0080} 4136}; 4137 4138/* HW generic timer primitives */ 4139 4140/* compute and clear index of rightmost 1 */ 4141static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask) 4142{ 4143 u32 b; 4144 4145 b = *mask; 4146 b &= (0-b); 4147 *mask &= ~b; 4148 b *= debruijn32; 4149 b >>= 27; 4150 4151 return timer_table->gen_timer_index[b]; 4152} 4153 
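/*
 * Illustrative note (not part of the original driver): rightmost_index()
 * above uses the classic de Bruijn multiply to turn an isolated low bit
 * into a table index.  b &= (0 - b) keeps only the lowest set bit of the
 * mask (a power of two); multiplying that by the 32-bit de Bruijn constant
 * and taking the top five bits yields a unique value in 0..31, which the
 * driver's gen_timer_index[] table maps back to a timer slot.  The sketch
 * below shows the same trick mapping straight to bit positions; it assumes
 * debruijn32 is the usual 0x077CB531 constant, and the helper name and
 * lookup table here are hypothetical, for illustration only.
 */
#if 0
static u32 example_lowest_set_bit_pos(u32 mask)
{
	static const u32 pos[32] = {
		0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8,
		31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9
	};
	u32 b = mask & (0 - mask);	/* isolate the lowest set bit */

	return pos[(b * 0x077CB531U) >> 27];	/* e.g. mask 0x8 -> 3 */
}
#endif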
u32 ath9k_hw_gettsf32(struct ath_hw *ah)
{
	return REG_READ(ah, AR_TSF_L32);
}

struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
					  void (*trigger)(void *),
					  void (*overflow)(void *),
					  void *arg,
					  u8 timer_index)
{
	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
	struct ath_gen_timer *timer;

	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);

	if (timer == NULL) {
		printk(KERN_DEBUG "Failed to allocate memory "
		       "for hw timer[%d]\n", timer_index);
		return NULL;
	}

	/* allocate a hardware generic timer slot */
	timer_table->timers[timer_index] = timer;
	timer->index = timer_index;
	timer->trigger = trigger;
	timer->overflow = overflow;
	timer->arg = arg;

	return timer;
}

void ath_gen_timer_start(struct ath_hw *ah,
			 struct ath_gen_timer *timer,
			 u32 timer_next, u32 timer_period)
{
	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
	u32 tsf;

	BUG_ON(!timer_period);

	set_bit(timer->index, &timer_table->timer_mask.timer_bits);

	tsf = ath9k_hw_gettsf32(ah);

	DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER, "current tsf %x period %x "
		"timer_next %x\n", tsf, timer_period, timer_next);

	/*
	 * Pull timer_next forward if the current TSF already passed it
	 * because of software latency
	 */
	if (timer_next < tsf)
		timer_next = tsf + timer_period;

	/*
	 * Program generic timer registers
	 */
	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
		  timer_next);
	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
		  timer_period);
	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
		    gen_tmr_configuration[timer->index].mode_mask);

	/* Enable both trigger and thresh interrupt masks */
	REG_SET_BIT(ah, AR_IMR_S5,
		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
		 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));

	if ((ah->ah_sc->imask & ATH9K_INT_GENTIMER) == 0) {
		ath9k_hw_set_interrupts(ah, 0);
		ah->ah_sc->imask |= ATH9K_INT_GENTIMER;
		ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
	}
}

void ath_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
{
	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;

	if ((timer->index < AR_FIRST_NDP_TIMER) ||
	    (timer->index >= ATH_MAX_GEN_TIMER)) {
		return;
	}

	/* Clear generic timer enable bits.
*/ 4241 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr, 4242 gen_tmr_configuration[timer->index].mode_mask); 4243 4244 /* Disable both trigger and thresh interrupt masks */ 4245 REG_CLR_BIT(ah, AR_IMR_S5, 4246 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) | 4247 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG))); 4248 4249 clear_bit(timer->index, &timer_table->timer_mask.timer_bits); 4250 4251 /* if no timer is enabled, turn off interrupt mask */ 4252 if (timer_table->timer_mask.val == 0) { 4253 ath9k_hw_set_interrupts(ah, 0); 4254 ah->ah_sc->imask &= ~ATH9K_INT_GENTIMER; 4255 ath9k_hw_set_interrupts(ah, ah->ah_sc->imask); 4256 } 4257} 4258 4259void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer) 4260{ 4261 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4262 4263 /* free the hardware generic timer slot */ 4264 timer_table->timers[timer->index] = NULL; 4265 kfree(timer); 4266} 4267 4268/* 4269 * Generic Timer Interrupts handling 4270 */ 4271void ath_gen_timer_isr(struct ath_hw *ah) 4272{ 4273 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4274 struct ath_gen_timer *timer; 4275 u32 trigger_mask, thresh_mask, index; 4276 4277 /* get hardware generic timer interrupt status */ 4278 trigger_mask = ah->intr_gen_timer_trigger; 4279 thresh_mask = ah->intr_gen_timer_thresh; 4280 trigger_mask &= timer_table->timer_mask.val; 4281 thresh_mask &= timer_table->timer_mask.val; 4282 4283 trigger_mask &= ~thresh_mask; 4284 4285 while (thresh_mask) { 4286 index = rightmost_index(timer_table, &thresh_mask); 4287 timer = timer_table->timers[index]; 4288 BUG_ON(!timer); 4289 DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER, 4290 "TSF overflow for Gen timer %d\n", index); 4291 timer->overflow(timer->arg); 4292 } 4293 4294 while (trigger_mask) { 4295 index = rightmost_index(timer_table, &trigger_mask); 4296 timer = timer_table->timers[index]; 4297 BUG_ON(!timer); 4298 DPRINTF(ah->ah_sc, ATH_DBG_HWTIMER, 4299 "Gen timer[%d] trigger\n", index); 4300 timer->trigger(timer->arg); 4301 } 4302} 4303 4304/* 4305 * Primitive to disable ASPM 4306 */ 4307void ath_pcie_aspm_disable(struct ath_softc *sc) 4308{ 4309 struct pci_dev *pdev = to_pci_dev(sc->dev); 4310 u8 aspm; 4311 4312 pci_read_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, &aspm); 4313 aspm &= ~(ATH_PCIE_CAP_LINK_L0S | ATH_PCIE_CAP_LINK_L1); 4314 pci_write_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, aspm); 4315} 4316
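/*
 * Illustrative usage sketch (not part of the original file): how the
 * generic timer primitives above are meant to be combined by a caller.
 * The callback bodies, the context pointer and the timer slot passed in
 * are hypothetical; a real user picks one of the hardware timer slots
 * (>= AR_FIRST_NDP_TIMER, < ATH_MAX_GEN_TIMER) and supplies its own state.
 */
#if 0
static void example_trigger(void *arg)
{
	/* called from ath_gen_timer_isr() when the timer fires */
}

static void example_overflow(void *arg)
{
	/* called from ath_gen_timer_isr() on a TSF threshold/overflow event */
}

static void example_gen_timer_usage(struct ath_hw *ah, void *ctx, u8 slot)
{
	struct ath_gen_timer *timer;

	timer = ath_gen_timer_alloc(ah, example_trigger, example_overflow,
				    ctx, slot);
	if (!timer)
		return;

	/* first event ~10000 TSF ticks from now, then every 10000 ticks */
	ath_gen_timer_start(ah, timer, ath9k_hw_gettsf32(ah) + 10000, 10000);

	/* ... when done ... */
	ath_gen_timer_stop(ah, timer);
	ath_gen_timer_free(ah, timer);
}
#endif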