hw.c revision cfe8cba982cda73d4970dab712411bebdcc3b9cd
/*
 * Copyright (c) 2008-2009 Atheros Communications Inc.
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#include <linux/io.h>
#include <asm/unaligned.h>

#include "hw.h"
#include "rc.h"
#include "initvals.h"

#define ATH9K_CLOCK_RATE_CCK		22
#define ATH9K_CLOCK_RATE_5GHZ_OFDM	40
#define ATH9K_CLOCK_RATE_2GHZ_OFDM	44

static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan);
static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
			      struct ar5416_eeprom_def *pEepData,
			      u32 reg, u32 value);
static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);

/********************/
/* Helper Functions */
/********************/

static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (!ah->curchan) /* should really check for CCK instead */
		return clks / ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;

	return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
}

static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (conf_is_ht40(conf))
		return ath9k_hw_mac_usec(ah, clks) / 2;
	else
		return ath9k_hw_mac_usec(ah, clks);
}

static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (!ah->curchan) /* should really check for CCK instead */
		return usecs * ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
	return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
}

static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
{
	struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf;

	if (conf_is_ht40(conf))
		return ath9k_hw_mac_clks(ah, usecs) * 2;
	else
		return ath9k_hw_mac_clks(ah, usecs);
}

bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
{
	int i;

	BUG_ON(timeout < AH_TIME_QUANTUM);

	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
		if ((REG_READ(ah, reg) & mask) == val)
			return true;

		udelay(AH_TIME_QUANTUM);
	}

	ath_print(ath9k_hw_common(ah), ATH_DBG_ANY,
		  "timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
		  timeout, reg, REG_READ(ah, reg), mask, val);

	return false;
}

u32 ath9k_hw_reverse_bits(u32 val, u32 n)
{
	u32 retval;
	int i;

	for (i = 0, retval = 0; i < n; i++) {
		retval = (retval << 1) | (val & 1);
		val >>= 1;
	}
	return retval;
}
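
/*
 * Worked example for the conversion helpers above, added for clarity:
 * on a 2 GHz OFDM channel the MAC runs at ATH9K_CLOCK_RATE_2GHZ_OFDM =
 * 44 clocks per microsecond, so ath9k_hw_mac_clks(ah, 10) returns 440
 * clocks and ath9k_hw_mac_to_clks() doubles that to 880 when the
 * current channel is HT40, since the MAC clock runs twice as fast there.
 * ath9k_hw_mac_usec()/ath9k_hw_mac_to_usec() perform the inverse
 * conversion from clocks back to microseconds.
 */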

bool ath9k_get_channel_edges(struct ath_hw *ah,
			     u16 flags, u16 *low,
			     u16 *high)
{
	struct ath9k_hw_capabilities *pCap = &ah->caps;

	if (flags & CHANNEL_5GHZ) {
		*low = pCap->low_5ghz_chan;
		*high = pCap->high_5ghz_chan;
		return true;
	}
	if ((flags & CHANNEL_2GHZ)) {
		*low = pCap->low_2ghz_chan;
		*high = pCap->high_2ghz_chan;
		return true;
	}
	return false;
}

u16 ath9k_hw_computetxtime(struct ath_hw *ah,
			   const struct ath_rate_table *rates,
			   u32 frameLen, u16 rateix,
			   bool shortPreamble)
{
	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
	u32 kbps;

	kbps = rates->info[rateix].ratekbps;

	if (kbps == 0)
		return 0;

	switch (rates->info[rateix].phy) {
	case WLAN_RC_PHY_CCK:
		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
		if (shortPreamble && rates->info[rateix].short_preamble)
			phyTime >>= 1;
		numBits = frameLen << 3;
		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
		break;
	case WLAN_RC_PHY_OFDM:
		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME_QUARTER
				+ OFDM_PREAMBLE_TIME_QUARTER
				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
		} else if (ah->curchan &&
			   IS_CHAN_HALF_RATE(ah->curchan)) {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME_HALF +
				OFDM_PREAMBLE_TIME_HALF
				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
		} else {
			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
			numBits = OFDM_PLCP_BITS + (frameLen << 3);
			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
				+ (numSymbols * OFDM_SYMBOL_TIME);
		}
		break;
	default:
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "Unknown phy %u (rate ix %u)\n",
			  rates->info[rateix].phy, rateix);
		txTime = 0;
		break;
	}

	return txTime;
}

void ath9k_hw_get_channel_centers(struct ath_hw *ah,
				  struct ath9k_channel *chan,
				  struct chan_centers *centers)
{
	int8_t extoff;

	if (!IS_CHAN_HT40(chan)) {
		centers->ctl_center = centers->ext_center =
			centers->synth_center = chan->channel;
		return;
	}

	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
		centers->synth_center =
			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
		extoff = 1;
	} else {
		centers->synth_center =
			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
		extoff = -1;
	}

	centers->ctl_center =
		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
	/* 25 MHz spacing is supported by hw but not on upper layers */
	centers->ext_center =
		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
}

/******************/
/* Chip Revisions */
/******************/

static void ath9k_hw_read_revisions(struct ath_hw *ah)
{
	u32 val;

	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;

	if (val == 0xFF) {
		val = REG_READ(ah, AR_SREV);
		ah->hw_version.macVersion =
			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
		ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
	} else {
		if (!AR_SREV_9100(ah))
			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);

		ah->hw_version.macRev = val & AR_SREV_REVISION;

		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
			ah->is_pciexpress = true;
	}
}

static int ath9k_hw_get_radiorev(struct ath_hw *ah)
{
	u32 val;
	int i;

	REG_WRITE(ah, AR_PHY(0x36), 0x00007058);

	for (i = 0; i < 8; i++)
		REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
	val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
	val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);

	return ath9k_hw_reverse_bits(val, 8);
}

/************************************/
/* HW Attach, Detach, Init Routines */
/************************************/

static void ath9k_hw_disablepcie(struct ath_hw *ah)
{
	if (AR_SREV_9100(ah))
		return;

	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);

	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
}

static bool ath9k_hw_chip_test(struct ath_hw *ah)
{
	struct ath_common *common = ath9k_hw_common(ah);
	u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
	u32 regHold[2];
	u32 patternData[4] = { 0x55555555,
			       0xaaaaaaaa,
			       0x66666666,
			       0x99999999 };
	int i, j;

	for (i = 0; i < 2; i++) {
		u32 addr = regAddr[i];
		u32 wrData, rdData;

		regHold[i] = REG_READ(ah, addr);
		for (j = 0; j < 0x100; j++) {
			wrData = (j << 16) | j;
			REG_WRITE(ah, addr, wrData);
			rdData = REG_READ(ah, addr);
			if (rdData != wrData) {
				ath_print(common, ATH_DBG_FATAL,
					  "address test failed "
					  "addr: 0x%08x - wr:0x%08x != "
					  "rd:0x%08x\n",
					  addr, wrData, rdData);
				return false;
			}
		}
		for (j = 0; j < 4; j++) {
			wrData = patternData[j];
			REG_WRITE(ah, addr, wrData);
			rdData = REG_READ(ah, addr);
			if (wrData != rdData) {
				ath_print(common, ATH_DBG_FATAL,
					  "address test failed "
					  "addr: 0x%08x - wr:0x%08x != "
					  "rd:0x%08x\n",
					  addr, wrData, rdData);
				return false;
			}
		}
		REG_WRITE(ah, regAddr[i], regHold[i]);
	}
	udelay(100);

	return true;
}

static const char *ath9k_hw_devname(u16 devid)
{
	switch (devid) {
	case AR5416_DEVID_PCI:
		return "Atheros 5416";
	case AR5416_DEVID_PCIE:
		return "Atheros 5418";
	case AR9160_DEVID_PCI:
		return "Atheros 9160";
	case AR5416_AR9100_DEVID:
		return "Atheros 9100";
	case AR9280_DEVID_PCI:
	case AR9280_DEVID_PCIE:
		return "Atheros 9280";
	case AR9285_DEVID_PCIE:
		return "Atheros 9285";
	case AR5416_DEVID_AR9287_PCI:
	case AR5416_DEVID_AR9287_PCIE:
		return "Atheros 9287";
	}

	return NULL;
}

static void ath9k_hw_init_config(struct ath_hw *ah)
{
	int i;

	ah->config.dma_beacon_response_time = 2;
	ah->config.sw_beacon_response_time = 10;
	ah->config.additional_swba_backoff = 0;
	ah->config.ack_6mb = 0x0;
	ah->config.cwm_ignore_extcca = 0;
	ah->config.pcie_powersave_enable = 0;
	ah->config.pcie_clock_req = 0;
	ah->config.pcie_waen = 0;
	ah->config.analog_shiftreg = 1;
	ah->config.ht_enable = 1;
ah->config.ofdm_trig_low = 200; 372 ah->config.ofdm_trig_high = 500; 373 ah->config.cck_trig_high = 200; 374 ah->config.cck_trig_low = 100; 375 ah->config.enable_ani = 1; 376 ah->config.diversity_control = ATH9K_ANT_VARIABLE; 377 ah->config.antenna_switch_swap = 0; 378 379 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) { 380 ah->config.spurchans[i][0] = AR_NO_SPUR; 381 ah->config.spurchans[i][1] = AR_NO_SPUR; 382 } 383 384 ah->config.intr_mitigation = true; 385 386 /* 387 * We need this for PCI devices only (Cardbus, PCI, miniPCI) 388 * _and_ if on non-uniprocessor systems (Multiprocessor/HT). 389 * This means we use it for all AR5416 devices, and the few 390 * minor PCI AR9280 devices out there. 391 * 392 * Serialization is required because these devices do not handle 393 * well the case of two concurrent reads/writes due to the latency 394 * involved. During one read/write another read/write can be issued 395 * on another CPU while the previous read/write may still be working 396 * on our hardware, if we hit this case the hardware poops in a loop. 397 * We prevent this by serializing reads and writes. 398 * 399 * This issue is not present on PCI-Express devices or pre-AR5416 400 * devices (legacy, 802.11abg). 401 */ 402 if (num_possible_cpus() > 1) 403 ah->config.serialize_regmode = SER_REG_MODE_AUTO; 404} 405 406static void ath9k_hw_init_defaults(struct ath_hw *ah) 407{ 408 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 409 410 regulatory->country_code = CTRY_DEFAULT; 411 regulatory->power_limit = MAX_RATE_POWER; 412 regulatory->tp_scale = ATH9K_TP_SCALE_MAX; 413 414 ah->hw_version.magic = AR5416_MAGIC; 415 ah->hw_version.subvendorid = 0; 416 417 ah->ah_flags = 0; 418 if (ah->hw_version.devid == AR5416_AR9100_DEVID) 419 ah->hw_version.macVersion = AR_SREV_VERSION_9100; 420 if (!AR_SREV_9100(ah)) 421 ah->ah_flags = AH_USE_EEPROM; 422 423 ah->atim_window = 0; 424 ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE; 425 ah->beacon_interval = 100; 426 ah->enable_32kHz_clock = DONT_USE_32KHZ; 427 ah->slottime = (u32) -1; 428 ah->acktimeout = (u32) -1; 429 ah->ctstimeout = (u32) -1; 430 ah->globaltxtimeout = (u32) -1; 431 432 ah->gbeacon_rate = 0; 433 434 ah->power_mode = ATH9K_PM_UNDEFINED; 435} 436 437static int ath9k_hw_rfattach(struct ath_hw *ah) 438{ 439 bool rfStatus = false; 440 int ecode = 0; 441 442 rfStatus = ath9k_hw_init_rf(ah, &ecode); 443 if (!rfStatus) { 444 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 445 "RF setup failed, status: %u\n", ecode); 446 return ecode; 447 } 448 449 return 0; 450} 451 452static int ath9k_hw_rf_claim(struct ath_hw *ah) 453{ 454 u32 val; 455 456 REG_WRITE(ah, AR_PHY(0), 0x00000007); 457 458 val = ath9k_hw_get_radiorev(ah); 459 switch (val & AR_RADIO_SREV_MAJOR) { 460 case 0: 461 val = AR_RAD5133_SREV_MAJOR; 462 break; 463 case AR_RAD5133_SREV_MAJOR: 464 case AR_RAD5122_SREV_MAJOR: 465 case AR_RAD2133_SREV_MAJOR: 466 case AR_RAD2122_SREV_MAJOR: 467 break; 468 default: 469 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 470 "Radio Chip Rev 0x%02X not supported\n", 471 val & AR_RADIO_SREV_MAJOR); 472 return -EOPNOTSUPP; 473 } 474 475 ah->hw_version.analog5GhzRev = val; 476 477 return 0; 478} 479 480static int ath9k_hw_init_macaddr(struct ath_hw *ah) 481{ 482 struct ath_common *common = ath9k_hw_common(ah); 483 u32 sum; 484 int i; 485 u16 eeval; 486 487 sum = 0; 488 for (i = 0; i < 3; i++) { 489 eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i)); 490 sum += eeval; 491 common->macaddr[2 * i] = eeval >> 8; 492 common->macaddr[2 * i + 1] = eeval & 
0xff; 493 } 494 if (sum == 0 || sum == 0xffff * 3) 495 return -EADDRNOTAVAIL; 496 497 return 0; 498} 499 500static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah) 501{ 502 u32 rxgain_type; 503 504 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) { 505 rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE); 506 507 if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF) 508 INIT_INI_ARRAY(&ah->iniModesRxGain, 509 ar9280Modes_backoff_13db_rxgain_9280_2, 510 ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6); 511 else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF) 512 INIT_INI_ARRAY(&ah->iniModesRxGain, 513 ar9280Modes_backoff_23db_rxgain_9280_2, 514 ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6); 515 else 516 INIT_INI_ARRAY(&ah->iniModesRxGain, 517 ar9280Modes_original_rxgain_9280_2, 518 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6); 519 } else { 520 INIT_INI_ARRAY(&ah->iniModesRxGain, 521 ar9280Modes_original_rxgain_9280_2, 522 ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6); 523 } 524} 525 526static void ath9k_hw_init_txgain_ini(struct ath_hw *ah) 527{ 528 u32 txgain_type; 529 530 if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) { 531 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE); 532 533 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) 534 INIT_INI_ARRAY(&ah->iniModesTxGain, 535 ar9280Modes_high_power_tx_gain_9280_2, 536 ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6); 537 else 538 INIT_INI_ARRAY(&ah->iniModesTxGain, 539 ar9280Modes_original_tx_gain_9280_2, 540 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6); 541 } else { 542 INIT_INI_ARRAY(&ah->iniModesTxGain, 543 ar9280Modes_original_tx_gain_9280_2, 544 ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6); 545 } 546} 547 548static int ath9k_hw_post_init(struct ath_hw *ah) 549{ 550 int ecode; 551 552 if (!ath9k_hw_chip_test(ah)) 553 return -ENODEV; 554 555 ecode = ath9k_hw_rf_claim(ah); 556 if (ecode != 0) 557 return ecode; 558 559 ecode = ath9k_hw_eeprom_init(ah); 560 if (ecode != 0) 561 return ecode; 562 563 ath_print(ath9k_hw_common(ah), ATH_DBG_CONFIG, 564 "Eeprom VER: %d, REV: %d\n", 565 ah->eep_ops->get_eeprom_ver(ah), 566 ah->eep_ops->get_eeprom_rev(ah)); 567 568 ecode = ath9k_hw_rfattach(ah); 569 if (ecode != 0) 570 return ecode; 571 572 if (!AR_SREV_9100(ah)) { 573 ath9k_hw_ani_setup(ah); 574 ath9k_hw_ani_init(ah); 575 } 576 577 return 0; 578} 579 580static bool ath9k_hw_devid_supported(u16 devid) 581{ 582 switch (devid) { 583 case AR5416_DEVID_PCI: 584 case AR5416_DEVID_PCIE: 585 case AR5416_AR9100_DEVID: 586 case AR9160_DEVID_PCI: 587 case AR9280_DEVID_PCI: 588 case AR9280_DEVID_PCIE: 589 case AR9285_DEVID_PCIE: 590 case AR5416_DEVID_AR9287_PCI: 591 case AR5416_DEVID_AR9287_PCIE: 592 return true; 593 default: 594 break; 595 } 596 return false; 597} 598 599static bool ath9k_hw_macversion_supported(u32 macversion) 600{ 601 switch (macversion) { 602 case AR_SREV_VERSION_5416_PCI: 603 case AR_SREV_VERSION_5416_PCIE: 604 case AR_SREV_VERSION_9160: 605 case AR_SREV_VERSION_9100: 606 case AR_SREV_VERSION_9280: 607 case AR_SREV_VERSION_9285: 608 case AR_SREV_VERSION_9287: 609 return true; 610 /* Not yet */ 611 case AR_SREV_VERSION_9271: 612 default: 613 break; 614 } 615 return false; 616} 617 618static void ath9k_hw_init_cal_settings(struct ath_hw *ah) 619{ 620 if (AR_SREV_9160_10_OR_LATER(ah)) { 621 if (AR_SREV_9280_10_OR_LATER(ah)) { 622 ah->iq_caldata.calData = &iq_cal_single_sample; 623 ah->adcgain_caldata.calData = 624 
&adc_gain_cal_single_sample; 625 ah->adcdc_caldata.calData = 626 &adc_dc_cal_single_sample; 627 ah->adcdc_calinitdata.calData = 628 &adc_init_dc_cal; 629 } else { 630 ah->iq_caldata.calData = &iq_cal_multi_sample; 631 ah->adcgain_caldata.calData = 632 &adc_gain_cal_multi_sample; 633 ah->adcdc_caldata.calData = 634 &adc_dc_cal_multi_sample; 635 ah->adcdc_calinitdata.calData = 636 &adc_init_dc_cal; 637 } 638 ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL; 639 } 640} 641 642static void ath9k_hw_init_mode_regs(struct ath_hw *ah) 643{ 644 if (AR_SREV_9271(ah)) { 645 INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0, 646 ARRAY_SIZE(ar9271Modes_9271_1_0), 6); 647 INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0, 648 ARRAY_SIZE(ar9271Common_9271_1_0), 2); 649 return; 650 } 651 652 if (AR_SREV_9287_11_OR_LATER(ah)) { 653 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1, 654 ARRAY_SIZE(ar9287Modes_9287_1_1), 6); 655 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1, 656 ARRAY_SIZE(ar9287Common_9287_1_1), 2); 657 if (ah->config.pcie_clock_req) 658 INIT_INI_ARRAY(&ah->iniPcieSerdes, 659 ar9287PciePhy_clkreq_off_L1_9287_1_1, 660 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2); 661 else 662 INIT_INI_ARRAY(&ah->iniPcieSerdes, 663 ar9287PciePhy_clkreq_always_on_L1_9287_1_1, 664 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1), 665 2); 666 } else if (AR_SREV_9287_10_OR_LATER(ah)) { 667 INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0, 668 ARRAY_SIZE(ar9287Modes_9287_1_0), 6); 669 INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0, 670 ARRAY_SIZE(ar9287Common_9287_1_0), 2); 671 672 if (ah->config.pcie_clock_req) 673 INIT_INI_ARRAY(&ah->iniPcieSerdes, 674 ar9287PciePhy_clkreq_off_L1_9287_1_0, 675 ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2); 676 else 677 INIT_INI_ARRAY(&ah->iniPcieSerdes, 678 ar9287PciePhy_clkreq_always_on_L1_9287_1_0, 679 ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0), 680 2); 681 } else if (AR_SREV_9285_12_OR_LATER(ah)) { 682 683 684 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2, 685 ARRAY_SIZE(ar9285Modes_9285_1_2), 6); 686 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2, 687 ARRAY_SIZE(ar9285Common_9285_1_2), 2); 688 689 if (ah->config.pcie_clock_req) { 690 INIT_INI_ARRAY(&ah->iniPcieSerdes, 691 ar9285PciePhy_clkreq_off_L1_9285_1_2, 692 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2); 693 } else { 694 INIT_INI_ARRAY(&ah->iniPcieSerdes, 695 ar9285PciePhy_clkreq_always_on_L1_9285_1_2, 696 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2), 697 2); 698 } 699 } else if (AR_SREV_9285_10_OR_LATER(ah)) { 700 INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285, 701 ARRAY_SIZE(ar9285Modes_9285), 6); 702 INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285, 703 ARRAY_SIZE(ar9285Common_9285), 2); 704 705 if (ah->config.pcie_clock_req) { 706 INIT_INI_ARRAY(&ah->iniPcieSerdes, 707 ar9285PciePhy_clkreq_off_L1_9285, 708 ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2); 709 } else { 710 INIT_INI_ARRAY(&ah->iniPcieSerdes, 711 ar9285PciePhy_clkreq_always_on_L1_9285, 712 ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2); 713 } 714 } else if (AR_SREV_9280_20_OR_LATER(ah)) { 715 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2, 716 ARRAY_SIZE(ar9280Modes_9280_2), 6); 717 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2, 718 ARRAY_SIZE(ar9280Common_9280_2), 2); 719 720 if (ah->config.pcie_clock_req) { 721 INIT_INI_ARRAY(&ah->iniPcieSerdes, 722 ar9280PciePhy_clkreq_off_L1_9280, 723 ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2); 724 } else 
{ 725 INIT_INI_ARRAY(&ah->iniPcieSerdes, 726 ar9280PciePhy_clkreq_always_on_L1_9280, 727 ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2); 728 } 729 INIT_INI_ARRAY(&ah->iniModesAdditional, 730 ar9280Modes_fast_clock_9280_2, 731 ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3); 732 } else if (AR_SREV_9280_10_OR_LATER(ah)) { 733 INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280, 734 ARRAY_SIZE(ar9280Modes_9280), 6); 735 INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280, 736 ARRAY_SIZE(ar9280Common_9280), 2); 737 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 738 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160, 739 ARRAY_SIZE(ar5416Modes_9160), 6); 740 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160, 741 ARRAY_SIZE(ar5416Common_9160), 2); 742 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160, 743 ARRAY_SIZE(ar5416Bank0_9160), 2); 744 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160, 745 ARRAY_SIZE(ar5416BB_RfGain_9160), 3); 746 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160, 747 ARRAY_SIZE(ar5416Bank1_9160), 2); 748 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160, 749 ARRAY_SIZE(ar5416Bank2_9160), 2); 750 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160, 751 ARRAY_SIZE(ar5416Bank3_9160), 3); 752 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160, 753 ARRAY_SIZE(ar5416Bank6_9160), 3); 754 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160, 755 ARRAY_SIZE(ar5416Bank6TPC_9160), 3); 756 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160, 757 ARRAY_SIZE(ar5416Bank7_9160), 2); 758 if (AR_SREV_9160_11(ah)) { 759 INIT_INI_ARRAY(&ah->iniAddac, 760 ar5416Addac_91601_1, 761 ARRAY_SIZE(ar5416Addac_91601_1), 2); 762 } else { 763 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160, 764 ARRAY_SIZE(ar5416Addac_9160), 2); 765 } 766 } else if (AR_SREV_9100_OR_LATER(ah)) { 767 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100, 768 ARRAY_SIZE(ar5416Modes_9100), 6); 769 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100, 770 ARRAY_SIZE(ar5416Common_9100), 2); 771 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100, 772 ARRAY_SIZE(ar5416Bank0_9100), 2); 773 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100, 774 ARRAY_SIZE(ar5416BB_RfGain_9100), 3); 775 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100, 776 ARRAY_SIZE(ar5416Bank1_9100), 2); 777 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100, 778 ARRAY_SIZE(ar5416Bank2_9100), 2); 779 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100, 780 ARRAY_SIZE(ar5416Bank3_9100), 3); 781 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100, 782 ARRAY_SIZE(ar5416Bank6_9100), 3); 783 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100, 784 ARRAY_SIZE(ar5416Bank6TPC_9100), 3); 785 INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100, 786 ARRAY_SIZE(ar5416Bank7_9100), 2); 787 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100, 788 ARRAY_SIZE(ar5416Addac_9100), 2); 789 } else { 790 INIT_INI_ARRAY(&ah->iniModes, ar5416Modes, 791 ARRAY_SIZE(ar5416Modes), 6); 792 INIT_INI_ARRAY(&ah->iniCommon, ar5416Common, 793 ARRAY_SIZE(ar5416Common), 2); 794 INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0, 795 ARRAY_SIZE(ar5416Bank0), 2); 796 INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain, 797 ARRAY_SIZE(ar5416BB_RfGain), 3); 798 INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1, 799 ARRAY_SIZE(ar5416Bank1), 2); 800 INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2, 801 ARRAY_SIZE(ar5416Bank2), 2); 802 INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3, 803 ARRAY_SIZE(ar5416Bank3), 3); 804 INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6, 805 ARRAY_SIZE(ar5416Bank6), 3); 806 INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC, 807 ARRAY_SIZE(ar5416Bank6TPC), 3); 808 
INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7, 809 ARRAY_SIZE(ar5416Bank7), 2); 810 INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac, 811 ARRAY_SIZE(ar5416Addac), 2); 812 } 813} 814 815static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah) 816{ 817 if (AR_SREV_9287_11_OR_LATER(ah)) 818 INIT_INI_ARRAY(&ah->iniModesRxGain, 819 ar9287Modes_rx_gain_9287_1_1, 820 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6); 821 else if (AR_SREV_9287_10(ah)) 822 INIT_INI_ARRAY(&ah->iniModesRxGain, 823 ar9287Modes_rx_gain_9287_1_0, 824 ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6); 825 else if (AR_SREV_9280_20(ah)) 826 ath9k_hw_init_rxgain_ini(ah); 827 828 if (AR_SREV_9287_11_OR_LATER(ah)) { 829 INIT_INI_ARRAY(&ah->iniModesTxGain, 830 ar9287Modes_tx_gain_9287_1_1, 831 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6); 832 } else if (AR_SREV_9287_10(ah)) { 833 INIT_INI_ARRAY(&ah->iniModesTxGain, 834 ar9287Modes_tx_gain_9287_1_0, 835 ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6); 836 } else if (AR_SREV_9280_20(ah)) { 837 ath9k_hw_init_txgain_ini(ah); 838 } else if (AR_SREV_9285_12_OR_LATER(ah)) { 839 u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE); 840 841 /* txgain table */ 842 if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) { 843 INIT_INI_ARRAY(&ah->iniModesTxGain, 844 ar9285Modes_high_power_tx_gain_9285_1_2, 845 ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6); 846 } else { 847 INIT_INI_ARRAY(&ah->iniModesTxGain, 848 ar9285Modes_original_tx_gain_9285_1_2, 849 ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6); 850 } 851 852 } 853} 854 855static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah) 856{ 857 u32 i, j; 858 859 if ((ah->hw_version.devid == AR9280_DEVID_PCI) && 860 test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) { 861 862 /* EEPROM Fixup */ 863 for (i = 0; i < ah->iniModes.ia_rows; i++) { 864 u32 reg = INI_RA(&ah->iniModes, i, 0); 865 866 for (j = 1; j < ah->iniModes.ia_columns; j++) { 867 u32 val = INI_RA(&ah->iniModes, i, j); 868 869 INI_RA(&ah->iniModes, i, j) = 870 ath9k_hw_ini_fixup(ah, 871 &ah->eeprom.def, 872 reg, val); 873 } 874 } 875 } 876} 877 878int ath9k_hw_init(struct ath_hw *ah) 879{ 880 struct ath_common *common = ath9k_hw_common(ah); 881 int r = 0; 882 883 if (!ath9k_hw_devid_supported(ah->hw_version.devid)) 884 return -EOPNOTSUPP; 885 886 ath9k_hw_init_defaults(ah); 887 ath9k_hw_init_config(ah); 888 889 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) { 890 ath_print(common, ATH_DBG_FATAL, 891 "Couldn't reset chip\n"); 892 return -EIO; 893 } 894 895 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) { 896 ath_print(common, ATH_DBG_FATAL, "Couldn't wakeup chip\n"); 897 return -EIO; 898 } 899 900 if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) { 901 if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI || 902 (AR_SREV_9280(ah) && !ah->is_pciexpress)) { 903 ah->config.serialize_regmode = 904 SER_REG_MODE_ON; 905 } else { 906 ah->config.serialize_regmode = 907 SER_REG_MODE_OFF; 908 } 909 } 910 911 ath_print(common, ATH_DBG_RESET, "serialize_regmode is %d\n", 912 ah->config.serialize_regmode); 913 914 if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) { 915 ath_print(common, ATH_DBG_FATAL, 916 "Mac Chip Rev 0x%02x.%x is not supported by " 917 "this driver\n", ah->hw_version.macVersion, 918 ah->hw_version.macRev); 919 return -EOPNOTSUPP; 920 } 921 922 if (AR_SREV_9100(ah)) { 923 ah->iq_caldata.calData = &iq_cal_multi_sample; 924 ah->supp_cals = IQ_MISMATCH_CAL; 925 ah->is_pciexpress = false; 926 } 927 928 if (AR_SREV_9271(ah)) 929 ah->is_pciexpress = 
false; 930 931 ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID); 932 933 ath9k_hw_init_cal_settings(ah); 934 935 ah->ani_function = ATH9K_ANI_ALL; 936 if (AR_SREV_9280_10_OR_LATER(ah)) 937 ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL; 938 939 ath9k_hw_init_mode_regs(ah); 940 941 if (ah->is_pciexpress) 942 ath9k_hw_configpcipowersave(ah, 0, 0); 943 else 944 ath9k_hw_disablepcie(ah); 945 946 r = ath9k_hw_post_init(ah); 947 if (r) 948 return r; 949 950 ath9k_hw_init_mode_gain_regs(ah); 951 ath9k_hw_fill_cap_info(ah); 952 ath9k_hw_init_11a_eeprom_fix(ah); 953 954 r = ath9k_hw_init_macaddr(ah); 955 if (r) { 956 ath_print(common, ATH_DBG_FATAL, 957 "Failed to initialize MAC address\n"); 958 return r; 959 } 960 961 if (AR_SREV_9285(ah) || AR_SREV_9271(ah)) 962 ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S); 963 else 964 ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S); 965 966 ath9k_init_nfcal_hist_buffer(ah); 967 968 return 0; 969} 970 971static void ath9k_hw_init_bb(struct ath_hw *ah, 972 struct ath9k_channel *chan) 973{ 974 u32 synthDelay; 975 976 synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY; 977 if (IS_CHAN_B(chan)) 978 synthDelay = (4 * synthDelay) / 22; 979 else 980 synthDelay /= 10; 981 982 REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN); 983 984 udelay(synthDelay + BASE_ACTIVATE_DELAY); 985} 986 987static void ath9k_hw_init_qos(struct ath_hw *ah) 988{ 989 REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa); 990 REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210); 991 992 REG_WRITE(ah, AR_QOS_NO_ACK, 993 SM(2, AR_QOS_NO_ACK_TWO_BIT) | 994 SM(5, AR_QOS_NO_ACK_BIT_OFF) | 995 SM(0, AR_QOS_NO_ACK_BYTE_OFF)); 996 997 REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL); 998 REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF); 999 REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF); 1000 REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF); 1001 REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF); 1002} 1003 1004static void ath9k_hw_init_pll(struct ath_hw *ah, 1005 struct ath9k_channel *chan) 1006{ 1007 u32 pll; 1008 1009 if (AR_SREV_9100(ah)) { 1010 if (chan && IS_CHAN_5GHZ(chan)) 1011 pll = 0x1450; 1012 else 1013 pll = 0x1458; 1014 } else { 1015 if (AR_SREV_9280_10_OR_LATER(ah)) { 1016 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV); 1017 1018 if (chan && IS_CHAN_HALF_RATE(chan)) 1019 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL); 1020 else if (chan && IS_CHAN_QUARTER_RATE(chan)) 1021 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL); 1022 1023 if (chan && IS_CHAN_5GHZ(chan)) { 1024 pll |= SM(0x28, AR_RTC_9160_PLL_DIV); 1025 1026 1027 if (AR_SREV_9280_20(ah)) { 1028 if (((chan->channel % 20) == 0) 1029 || ((chan->channel % 10) == 0)) 1030 pll = 0x2850; 1031 else 1032 pll = 0x142c; 1033 } 1034 } else { 1035 pll |= SM(0x2c, AR_RTC_9160_PLL_DIV); 1036 } 1037 1038 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 1039 1040 pll = SM(0x5, AR_RTC_9160_PLL_REFDIV); 1041 1042 if (chan && IS_CHAN_HALF_RATE(chan)) 1043 pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL); 1044 else if (chan && IS_CHAN_QUARTER_RATE(chan)) 1045 pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL); 1046 1047 if (chan && IS_CHAN_5GHZ(chan)) 1048 pll |= SM(0x50, AR_RTC_9160_PLL_DIV); 1049 else 1050 pll |= SM(0x58, AR_RTC_9160_PLL_DIV); 1051 } else { 1052 pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2; 1053 1054 if (chan && IS_CHAN_HALF_RATE(chan)) 1055 pll |= SM(0x1, AR_RTC_PLL_CLKSEL); 1056 else if (chan && IS_CHAN_QUARTER_RATE(chan)) 1057 pll |= SM(0x2, AR_RTC_PLL_CLKSEL); 1058 1059 if (chan && IS_CHAN_5GHZ(chan)) 1060 pll |= SM(0xa, AR_RTC_PLL_DIV); 1061 else 1062 pll |= SM(0xb, AR_RTC_PLL_DIV); 1063 } 1064 } 1065 REG_WRITE(ah, 
AR_RTC_PLL_CONTROL, pll); 1066 1067 udelay(RTC_PLL_SETTLE_DELAY); 1068 1069 REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK); 1070} 1071 1072static void ath9k_hw_init_chain_masks(struct ath_hw *ah) 1073{ 1074 int rx_chainmask, tx_chainmask; 1075 1076 rx_chainmask = ah->rxchainmask; 1077 tx_chainmask = ah->txchainmask; 1078 1079 switch (rx_chainmask) { 1080 case 0x5: 1081 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP, 1082 AR_PHY_SWAP_ALT_CHAIN); 1083 case 0x3: 1084 if (((ah)->hw_version.macVersion <= AR_SREV_VERSION_9160)) { 1085 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7); 1086 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7); 1087 break; 1088 } 1089 case 0x1: 1090 case 0x2: 1091 case 0x7: 1092 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask); 1093 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask); 1094 break; 1095 default: 1096 break; 1097 } 1098 1099 REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask); 1100 if (tx_chainmask == 0x5) { 1101 REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP, 1102 AR_PHY_SWAP_ALT_CHAIN); 1103 } 1104 if (AR_SREV_9100(ah)) 1105 REG_WRITE(ah, AR_PHY_ANALOG_SWAP, 1106 REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001); 1107} 1108 1109static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah, 1110 enum nl80211_iftype opmode) 1111{ 1112 ah->mask_reg = AR_IMR_TXERR | 1113 AR_IMR_TXURN | 1114 AR_IMR_RXERR | 1115 AR_IMR_RXORN | 1116 AR_IMR_BCNMISC; 1117 1118 if (ah->config.intr_mitigation) 1119 ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR; 1120 else 1121 ah->mask_reg |= AR_IMR_RXOK; 1122 1123 ah->mask_reg |= AR_IMR_TXOK; 1124 1125 if (opmode == NL80211_IFTYPE_AP) 1126 ah->mask_reg |= AR_IMR_MIB; 1127 1128 REG_WRITE(ah, AR_IMR, ah->mask_reg); 1129 REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT); 1130 1131 if (!AR_SREV_9100(ah)) { 1132 REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF); 1133 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT); 1134 REG_WRITE(ah, AR_INTR_SYNC_MASK, 0); 1135 } 1136} 1137 1138static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us) 1139{ 1140 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) { 1141 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 1142 "bad ack timeout %u\n", us); 1143 ah->acktimeout = (u32) -1; 1144 return false; 1145 } else { 1146 REG_RMW_FIELD(ah, AR_TIME_OUT, 1147 AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us)); 1148 ah->acktimeout = us; 1149 return true; 1150 } 1151} 1152 1153static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us) 1154{ 1155 if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) { 1156 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 1157 "bad cts timeout %u\n", us); 1158 ah->ctstimeout = (u32) -1; 1159 return false; 1160 } else { 1161 REG_RMW_FIELD(ah, AR_TIME_OUT, 1162 AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us)); 1163 ah->ctstimeout = us; 1164 return true; 1165 } 1166} 1167 1168static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu) 1169{ 1170 if (tu > 0xFFFF) { 1171 ath_print(ath9k_hw_common(ah), ATH_DBG_XMIT, 1172 "bad global tx timeout %u\n", tu); 1173 ah->globaltxtimeout = (u32) -1; 1174 return false; 1175 } else { 1176 REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu); 1177 ah->globaltxtimeout = tu; 1178 return true; 1179 } 1180} 1181 1182static void ath9k_hw_init_user_settings(struct ath_hw *ah) 1183{ 1184 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, "ah->misc_mode 0x%x\n", 1185 ah->misc_mode); 1186 1187 if (ah->misc_mode != 0) 1188 REG_WRITE(ah, AR_PCU_MISC, 1189 REG_READ(ah, AR_PCU_MISC) | ah->misc_mode); 1190 if (ah->slottime != (u32) -1) 1191 
		ath9k_hw_setslottime(ah, ah->slottime);
	if (ah->acktimeout != (u32) -1)
		ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
	if (ah->ctstimeout != (u32) -1)
		ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
	if (ah->globaltxtimeout != (u32) -1)
		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
}

const char *ath9k_hw_probe(u16 vendorid, u16 devid)
{
	return vendorid == ATHEROS_VENDOR_ID ?
		ath9k_hw_devname(devid) : NULL;
}

void ath9k_hw_detach(struct ath_hw *ah)
{
	if (!AR_SREV_9100(ah))
		ath9k_hw_ani_disable(ah);

	ath9k_hw_rf_free(ah);
	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
	kfree(ah);
	ah = NULL;
}

/*******/
/* INI */
/*******/

static void ath9k_hw_override_ini(struct ath_hw *ah,
				  struct ath9k_channel *chan)
{
	u32 val;

	if (AR_SREV_9271(ah)) {
		/*
		 * Enable spectral scan as a workaround for issues with
		 * stuck beacons on AR9271 1.0. The stuck-beacon issue is
		 * not seen on AR9271 1.1.
		 */
		if (AR_SREV_9271_10(ah)) {
			val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) | AR_PHY_SPECTRAL_SCAN_ENABLE;
			REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
		}
		else if (AR_SREV_9271_11(ah))
			/*
			 * change AR_PHY_RF_CTL3 setting to fix MAC issue
			 * present on AR9271 1.1
			 */
			REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
		return;
	}

	/*
	 * Set the RX_ABORT and RX_DIS bits, and clear them only after
	 * RXE is set for the MAC. This prevents frames with corrupted
	 * descriptor status.
	 */
	REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));

	if (AR_SREV_9280_10_OR_LATER(ah)) {
		val = REG_READ(ah, AR_PCU_MISC_MODE2) &
			(~AR_PCU_MISC_MODE2_HWWAR1);

		if (AR_SREV_9287_10_OR_LATER(ah))
			val = val & (~AR_PCU_MISC_MODE2_HWWAR2);

		REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
	}

	if (!AR_SREV_5416_20_OR_LATER(ah) ||
	    AR_SREV_9280_10_OR_LATER(ah))
		return;
	/*
	 * Disable BB clock gating
	 * Necessary to avoid issues on AR5416 2.0
	 */
	REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
}

static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
				  struct ar5416_eeprom_def *pEepData,
				  u32 reg, u32 value)
{
	struct base_eep_header *pBase = &(pEepData->baseEepHeader);
	struct ath_common *common = ath9k_hw_common(ah);

	switch (ah->hw_version.devid) {
	case AR9280_DEVID_PCI:
		if (reg == 0x7894) {
			ath_print(common, ATH_DBG_EEPROM,
				  "ini VAL: %x EEPROM: %x\n", value,
				  (pBase->version & 0xff));

			if ((pBase->version & 0xff) > 0x0a) {
				ath_print(common, ATH_DBG_EEPROM,
					  "PWDCLKIND: %d\n",
					  pBase->pwdclkind);
				value &= ~AR_AN_TOP2_PWDCLKIND;
				value |= AR_AN_TOP2_PWDCLKIND &
					(pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
			} else {
				ath_print(common, ATH_DBG_EEPROM,
					  "PWDCLKIND Earlier Rev\n");
			}

			ath_print(common, ATH_DBG_EEPROM,
				  "final ini VAL: %x\n", value);
		}
		break;
	}

	return value;
}

static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
			      struct ar5416_eeprom_def *pEepData,
			      u32 reg, u32 value)
{
	if (ah->eep_map == EEP_MAP_4KBITS)
		return value;
	else
		return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
}

static void ath9k_olc_init(struct ath_hw *ah)
{
	u32 i;

	if (OLC_FOR_AR9287_10_LATER) {
		REG_SET_BIT(ah,
AR_PHY_TX_PWRCTRL9, 1323 AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL); 1324 ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0, 1325 AR9287_AN_TXPC0_TXPCMODE, 1326 AR9287_AN_TXPC0_TXPCMODE_S, 1327 AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE); 1328 udelay(100); 1329 } else { 1330 for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++) 1331 ah->originalGain[i] = 1332 MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4), 1333 AR_PHY_TX_GAIN); 1334 ah->PDADCdelta = 0; 1335 } 1336} 1337 1338static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, 1339 struct ath9k_channel *chan) 1340{ 1341 u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band); 1342 1343 if (IS_CHAN_B(chan)) 1344 ctl |= CTL_11B; 1345 else if (IS_CHAN_G(chan)) 1346 ctl |= CTL_11G; 1347 else 1348 ctl |= CTL_11A; 1349 1350 return ctl; 1351} 1352 1353static int ath9k_hw_process_ini(struct ath_hw *ah, 1354 struct ath9k_channel *chan) 1355{ 1356 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 1357 int i, regWrites = 0; 1358 struct ieee80211_channel *channel = chan->chan; 1359 u32 modesIndex, freqIndex; 1360 1361 switch (chan->chanmode) { 1362 case CHANNEL_A: 1363 case CHANNEL_A_HT20: 1364 modesIndex = 1; 1365 freqIndex = 1; 1366 break; 1367 case CHANNEL_A_HT40PLUS: 1368 case CHANNEL_A_HT40MINUS: 1369 modesIndex = 2; 1370 freqIndex = 1; 1371 break; 1372 case CHANNEL_G: 1373 case CHANNEL_G_HT20: 1374 case CHANNEL_B: 1375 modesIndex = 4; 1376 freqIndex = 2; 1377 break; 1378 case CHANNEL_G_HT40PLUS: 1379 case CHANNEL_G_HT40MINUS: 1380 modesIndex = 3; 1381 freqIndex = 2; 1382 break; 1383 1384 default: 1385 return -EINVAL; 1386 } 1387 1388 REG_WRITE(ah, AR_PHY(0), 0x00000007); 1389 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO); 1390 ah->eep_ops->set_addac(ah, chan); 1391 1392 if (AR_SREV_5416_22_OR_LATER(ah)) { 1393 REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites); 1394 } else { 1395 struct ar5416IniArray temp; 1396 u32 addacSize = 1397 sizeof(u32) * ah->iniAddac.ia_rows * 1398 ah->iniAddac.ia_columns; 1399 1400 memcpy(ah->addac5416_21, 1401 ah->iniAddac.ia_array, addacSize); 1402 1403 (ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0; 1404 1405 temp.ia_array = ah->addac5416_21; 1406 temp.ia_columns = ah->iniAddac.ia_columns; 1407 temp.ia_rows = ah->iniAddac.ia_rows; 1408 REG_WRITE_ARRAY(&temp, 1, regWrites); 1409 } 1410 1411 REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC); 1412 1413 for (i = 0; i < ah->iniModes.ia_rows; i++) { 1414 u32 reg = INI_RA(&ah->iniModes, i, 0); 1415 u32 val = INI_RA(&ah->iniModes, i, modesIndex); 1416 1417 REG_WRITE(ah, reg, val); 1418 1419 if (reg >= 0x7800 && reg < 0x78a0 1420 && ah->config.analog_shiftreg) { 1421 udelay(100); 1422 } 1423 1424 DO_DELAY(regWrites); 1425 } 1426 1427 if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah)) 1428 REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites); 1429 1430 if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) || 1431 AR_SREV_9287_10_OR_LATER(ah)) 1432 REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites); 1433 1434 for (i = 0; i < ah->iniCommon.ia_rows; i++) { 1435 u32 reg = INI_RA(&ah->iniCommon, i, 0); 1436 u32 val = INI_RA(&ah->iniCommon, i, 1); 1437 1438 REG_WRITE(ah, reg, val); 1439 1440 if (reg >= 0x7800 && reg < 0x78a0 1441 && ah->config.analog_shiftreg) { 1442 udelay(100); 1443 } 1444 1445 DO_DELAY(regWrites); 1446 } 1447 1448 ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites); 1449 1450 if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) { 1451 REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex, 1452 regWrites); 1453 } 
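
	/*
	 * Summary, added for clarity: at this point the mode-dependent INI
	 * tables have been written, with modesIndex selecting the column
	 * (1: 11A/HT20, 2: A-HT40, 3: G-HT40, 4: 11B/11G/G-HT20) and
	 * freqIndex distinguishing 5 GHz (1) from 2 GHz (2).  What remains
	 * is applying chip-specific overrides, programming the PHY mode and
	 * chain masks, setting TX power and writing the RF registers.
	 */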

	ath9k_hw_override_ini(ah, chan);
	ath9k_hw_set_regs(ah, chan);
	ath9k_hw_init_chain_masks(ah);

	if (OLC_FOR_AR9280_20_LATER)
		ath9k_olc_init(ah);

	ah->eep_ops->set_txpower(ah, chan,
				 ath9k_regd_get_ctl(regulatory, chan),
				 channel->max_antenna_gain * 2,
				 channel->max_power * 2,
				 min((u32) MAX_RATE_POWER,
				     (u32) regulatory->power_limit));

	if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
		ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL,
			  "ar5416SetRfRegs failed\n");
		return -EIO;
	}

	return 0;
}

/****************************************/
/* Reset and Channel Switching Routines */
/****************************************/

static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
{
	u32 rfMode = 0;

	if (chan == NULL)
		return;

	rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
		? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;

	if (!AR_SREV_9280_10_OR_LATER(ah))
		rfMode |= (IS_CHAN_5GHZ(chan)) ?
			AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;

	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
		rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);

	REG_WRITE(ah, AR_PHY_MODE, rfMode);
}

static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
{
	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
}

static inline void ath9k_hw_set_dma(struct ath_hw *ah)
{
	u32 regval;

	/*
	 * set AHB_MODE not to do cacheline prefetches
	 */
	regval = REG_READ(ah, AR_AHB_MODE);
	REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);

	/*
	 * let mac dma reads be in 128 byte chunks
	 */
	regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
	REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);

	/*
	 * Restore TX Trigger Level to its pre-reset value.
	 * The initial value depends on whether aggregation is enabled, and is
	 * adjusted whenever underruns are detected.
	 */
	REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);

	/*
	 * let mac dma writes be in 128 byte chunks
	 */
	regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
	REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);

	/*
	 * Setup receive FIFO threshold to hold off TX activities
	 */
	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);

	/*
	 * reduce the number of usable entries in PCU TXBUF to avoid
	 * wrap around issues.
	 */
	if (AR_SREV_9285(ah)) {
		/* For AR9285 the number of FIFOs is reduced by half,
		 * so set the usable tx buf size to half as well to
		 * avoid data/delimiter underruns.
		 */
		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
	} else if (!AR_SREV_9271(ah)) {
		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
	}
}

static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
{
	u32 val;

	val = REG_READ(ah, AR_STA_ID1);
	val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
	switch (opmode) {
	case NL80211_IFTYPE_AP:
		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
			  | AR_STA_ID1_KSRCH_MODE);
		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
		break;
	case NL80211_IFTYPE_ADHOC:
	case NL80211_IFTYPE_MESH_POINT:
		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
			  | AR_STA_ID1_KSRCH_MODE);
		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
		break;
	case NL80211_IFTYPE_STATION:
	case NL80211_IFTYPE_MONITOR:
		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
		break;
	}
}

static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
						 u32 coef_scaled,
						 u32 *coef_mantissa,
						 u32 *coef_exponent)
{
	u32 coef_exp, coef_man;

	for (coef_exp = 31; coef_exp > 0; coef_exp--)
		if ((coef_scaled >> coef_exp) & 0x1)
			break;

	coef_exp = 14 - (coef_exp - COEF_SCALE_S);

	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));

	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
	*coef_exponent = coef_exp - 16;
}

static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
				     struct ath9k_channel *chan)
{
	u32 coef_scaled, ds_coef_exp, ds_coef_man;
	u32 clockMhzScaled = 0x64000000;
	struct chan_centers centers;

	if (IS_CHAN_HALF_RATE(chan))
		clockMhzScaled = clockMhzScaled >> 1;
	else if (IS_CHAN_QUARTER_RATE(chan))
		clockMhzScaled = clockMhzScaled >> 2;

	ath9k_hw_get_channel_centers(ah, chan, &centers);
	coef_scaled = clockMhzScaled / centers.synth_center;

	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
				      &ds_coef_exp);

	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
		      AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
		      AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);

	coef_scaled = (9 * coef_scaled) / 10;

	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
				      &ds_coef_exp);

	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
		      AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
		      AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
}

static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
{
	u32 rst_flags;
	u32 tmpReg;

	if (AR_SREV_9100(ah)) {
		u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
		val &= ~AR_RTC_DERIVED_CLK_PERIOD;
		val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
		REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
	}

	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
		  AR_RTC_FORCE_WAKE_ON_INT);

	if (AR_SREV_9100(ah)) {
		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
	} else {
		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
		if (tmpReg &
		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
			REG_WRITE(ah, AR_RC, AR_RC_AHB |
AR_RC_HOSTIF); 1662 } else { 1663 REG_WRITE(ah, AR_RC, AR_RC_AHB); 1664 } 1665 1666 rst_flags = AR_RTC_RC_MAC_WARM; 1667 if (type == ATH9K_RESET_COLD) 1668 rst_flags |= AR_RTC_RC_MAC_COLD; 1669 } 1670 1671 REG_WRITE(ah, AR_RTC_RC, rst_flags); 1672 udelay(50); 1673 1674 REG_WRITE(ah, AR_RTC_RC, 0); 1675 if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) { 1676 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 1677 "RTC stuck in MAC reset\n"); 1678 return false; 1679 } 1680 1681 if (!AR_SREV_9100(ah)) 1682 REG_WRITE(ah, AR_RC, 0); 1683 1684 ath9k_hw_init_pll(ah, NULL); 1685 1686 if (AR_SREV_9100(ah)) 1687 udelay(50); 1688 1689 return true; 1690} 1691 1692static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah) 1693{ 1694 REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN | 1695 AR_RTC_FORCE_WAKE_ON_INT); 1696 1697 if (!AR_SREV_9100(ah)) 1698 REG_WRITE(ah, AR_RC, AR_RC_AHB); 1699 1700 REG_WRITE(ah, AR_RTC_RESET, 0); 1701 udelay(2); 1702 1703 if (!AR_SREV_9100(ah)) 1704 REG_WRITE(ah, AR_RC, 0); 1705 1706 REG_WRITE(ah, AR_RTC_RESET, 1); 1707 1708 if (!ath9k_hw_wait(ah, 1709 AR_RTC_STATUS, 1710 AR_RTC_STATUS_M, 1711 AR_RTC_STATUS_ON, 1712 AH_WAIT_TIMEOUT)) { 1713 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 1714 "RTC not waking up\n"); 1715 return false; 1716 } 1717 1718 ath9k_hw_read_revisions(ah); 1719 1720 return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM); 1721} 1722 1723static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type) 1724{ 1725 REG_WRITE(ah, AR_RTC_FORCE_WAKE, 1726 AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT); 1727 1728 switch (type) { 1729 case ATH9K_RESET_POWER_ON: 1730 return ath9k_hw_set_reset_power_on(ah); 1731 case ATH9K_RESET_WARM: 1732 case ATH9K_RESET_COLD: 1733 return ath9k_hw_set_reset(ah, type); 1734 default: 1735 return false; 1736 } 1737} 1738 1739static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan) 1740{ 1741 u32 phymode; 1742 u32 enableDacFifo = 0; 1743 1744 if (AR_SREV_9285_10_OR_LATER(ah)) 1745 enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) & 1746 AR_PHY_FC_ENABLE_DAC_FIFO); 1747 1748 phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40 1749 | AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo; 1750 1751 if (IS_CHAN_HT40(chan)) { 1752 phymode |= AR_PHY_FC_DYN2040_EN; 1753 1754 if ((chan->chanmode == CHANNEL_A_HT40PLUS) || 1755 (chan->chanmode == CHANNEL_G_HT40PLUS)) 1756 phymode |= AR_PHY_FC_DYN2040_PRI_CH; 1757 1758 } 1759 REG_WRITE(ah, AR_PHY_TURBO, phymode); 1760 1761 ath9k_hw_set11nmac2040(ah); 1762 1763 REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S); 1764 REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S); 1765} 1766 1767static bool ath9k_hw_chip_reset(struct ath_hw *ah, 1768 struct ath9k_channel *chan) 1769{ 1770 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) { 1771 if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) 1772 return false; 1773 } else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM)) 1774 return false; 1775 1776 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 1777 return false; 1778 1779 ah->chip_fullsleep = false; 1780 ath9k_hw_init_pll(ah, chan); 1781 ath9k_hw_set_rfmode(ah, chan); 1782 1783 return true; 1784} 1785 1786static bool ath9k_hw_channel_change(struct ath_hw *ah, 1787 struct ath9k_channel *chan) 1788{ 1789 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 1790 struct ath_common *common = ath9k_hw_common(ah); 1791 struct ieee80211_channel *channel = chan->chan; 1792 u32 synthDelay, qnum; 1793 1794 for (qnum = 0; qnum < AR_NUM_QCU; qnum++) 
{
		if (ath9k_hw_numtxpending(ah, qnum)) {
			ath_print(common, ATH_DBG_QUEUE,
				  "Transmit frames pending on "
				  "queue %d\n", qnum);
			return false;
		}
	}

	REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
	if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
			   AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
		ath_print(common, ATH_DBG_FATAL,
			  "Could not kill baseband RX\n");
		return false;
	}

	ath9k_hw_set_regs(ah, chan);

	if (AR_SREV_9280_10_OR_LATER(ah)) {
		ath9k_hw_ar9280_set_channel(ah, chan);
	} else {
		if (!(ath9k_hw_set_channel(ah, chan))) {
			ath_print(common, ATH_DBG_FATAL,
				  "Failed to set channel\n");
			return false;
		}
	}

	ah->eep_ops->set_txpower(ah, chan,
				 ath9k_regd_get_ctl(regulatory, chan),
				 channel->max_antenna_gain * 2,
				 channel->max_power * 2,
				 min((u32) MAX_RATE_POWER,
				     (u32) regulatory->power_limit));

	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
	if (IS_CHAN_B(chan))
		synthDelay = (4 * synthDelay) / 22;
	else
		synthDelay /= 10;

	udelay(synthDelay + BASE_ACTIVATE_DELAY);

	REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);

	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
		ath9k_hw_set_delta_slope(ah, chan);

	if (AR_SREV_9280_10_OR_LATER(ah))
		ath9k_hw_9280_spur_mitigate(ah, chan);
	else
		ath9k_hw_spur_mitigate(ah, chan);

	if (!chan->oneTimeCalsDone)
		chan->oneTimeCalsDone = true;

	return true;
}

static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
{
	int bb_spur = AR_NO_SPUR;
	int freq;
	int bin, cur_bin;
	int bb_spur_off, spur_subchannel_sd;
	int spur_freq_sd;
	int spur_delta_phase;
	int denominator;
	int upper, lower, cur_vit_mask;
	int tmp, newVal;
	int i;
	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
				  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
	};
	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
				 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
	};
	int inc[4] = { 0, 100, 0, 0 };
	struct chan_centers centers;

	int8_t mask_m[123];
	int8_t mask_p[123];
	int8_t mask_amt;
	int tmp_mask;
	int cur_bb_spur;
	bool is2GHz = IS_CHAN_2GHZ(chan);

	memset(&mask_m, 0, sizeof(int8_t) * 123);
	memset(&mask_p, 0, sizeof(int8_t) * 123);

	ath9k_hw_get_channel_centers(ah, chan, &centers);
	freq = centers.synth_center;

	ah->config.spurmode = SPUR_ENABLE_EEPROM;
	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);

		if (is2GHz)
			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
		else
			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;

		if (AR_NO_SPUR == cur_bb_spur)
			break;
		cur_bb_spur = cur_bb_spur - freq;

		if (IS_CHAN_HT40(chan)) {
			if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
			    (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
				bb_spur = cur_bb_spur;
				break;
			}
		} else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
			   (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
			bb_spur = cur_bb_spur;
			break;
		}
	}

	if (AR_NO_SPUR == bb_spur) {
		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
		return;
	} else {
		REG_CLR_BIT(ah,
AR_PHY_FORCE_CLKEN_CCK, 1920 AR_PHY_FORCE_CLKEN_CCK_MRC_MUX); 1921 } 1922 1923 bin = bb_spur * 320; 1924 1925 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0)); 1926 1927 newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI | 1928 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER | 1929 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK | 1930 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK); 1931 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal); 1932 1933 newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL | 1934 AR_PHY_SPUR_REG_ENABLE_MASK_PPM | 1935 AR_PHY_SPUR_REG_MASK_RATE_SELECT | 1936 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI | 1937 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH)); 1938 REG_WRITE(ah, AR_PHY_SPUR_REG, newVal); 1939 1940 if (IS_CHAN_HT40(chan)) { 1941 if (bb_spur < 0) { 1942 spur_subchannel_sd = 1; 1943 bb_spur_off = bb_spur + 10; 1944 } else { 1945 spur_subchannel_sd = 0; 1946 bb_spur_off = bb_spur - 10; 1947 } 1948 } else { 1949 spur_subchannel_sd = 0; 1950 bb_spur_off = bb_spur; 1951 } 1952 1953 if (IS_CHAN_HT40(chan)) 1954 spur_delta_phase = 1955 ((bb_spur * 262144) / 1956 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE; 1957 else 1958 spur_delta_phase = 1959 ((bb_spur * 524288) / 1960 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE; 1961 1962 denominator = IS_CHAN_2GHZ(chan) ? 44 : 40; 1963 spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff; 1964 1965 newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC | 1966 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) | 1967 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE)); 1968 REG_WRITE(ah, AR_PHY_TIMING11, newVal); 1969 1970 newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S; 1971 REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal); 1972 1973 cur_bin = -6000; 1974 upper = bin + 100; 1975 lower = bin - 100; 1976 1977 for (i = 0; i < 4; i++) { 1978 int pilot_mask = 0; 1979 int chan_mask = 0; 1980 int bp = 0; 1981 for (bp = 0; bp < 30; bp++) { 1982 if ((cur_bin > lower) && (cur_bin < upper)) { 1983 pilot_mask = pilot_mask | 0x1 << bp; 1984 chan_mask = chan_mask | 0x1 << bp; 1985 } 1986 cur_bin += 100; 1987 } 1988 cur_bin += inc[i]; 1989 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask); 1990 REG_WRITE(ah, chan_mask_reg[i], chan_mask); 1991 } 1992 1993 cur_vit_mask = 6100; 1994 upper = bin + 120; 1995 lower = bin - 120; 1996 1997 for (i = 0; i < 123; i++) { 1998 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) { 1999 2000 /* workaround for gcc bug #37014 */ 2001 volatile int tmp_v = abs(cur_vit_mask - bin); 2002 2003 if (tmp_v < 75) 2004 mask_amt = 1; 2005 else 2006 mask_amt = 0; 2007 if (cur_vit_mask < 0) 2008 mask_m[abs(cur_vit_mask / 100)] = mask_amt; 2009 else 2010 mask_p[cur_vit_mask / 100] = mask_amt; 2011 } 2012 cur_vit_mask -= 100; 2013 } 2014 2015 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28) 2016 | (mask_m[48] << 26) | (mask_m[49] << 24) 2017 | (mask_m[50] << 22) | (mask_m[51] << 20) 2018 | (mask_m[52] << 18) | (mask_m[53] << 16) 2019 | (mask_m[54] << 14) | (mask_m[55] << 12) 2020 | (mask_m[56] << 10) | (mask_m[57] << 8) 2021 | (mask_m[58] << 6) | (mask_m[59] << 4) 2022 | (mask_m[60] << 2) | (mask_m[61] << 0); 2023 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask); 2024 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask); 2025 2026 tmp_mask = (mask_m[31] << 28) 2027 | (mask_m[32] << 26) | (mask_m[33] << 24) 2028 | (mask_m[34] << 22) | (mask_m[35] << 20) 2029 | (mask_m[36] << 18) | (mask_m[37] << 16) 2030 | (mask_m[48] << 14) | (mask_m[39] << 12) 2031 | (mask_m[40] << 10) | (mask_m[41] << 8) 2032 | (mask_m[42] << 6) | (mask_m[43] << 4) 2033 | (mask_m[44] << 2) | (mask_m[45] << 0); 2034 
REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask); 2035 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask); 2036 2037 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28) 2038 | (mask_m[18] << 26) | (mask_m[18] << 24) 2039 | (mask_m[20] << 22) | (mask_m[20] << 20) 2040 | (mask_m[22] << 18) | (mask_m[22] << 16) 2041 | (mask_m[24] << 14) | (mask_m[24] << 12) 2042 | (mask_m[25] << 10) | (mask_m[26] << 8) 2043 | (mask_m[27] << 6) | (mask_m[28] << 4) 2044 | (mask_m[29] << 2) | (mask_m[30] << 0); 2045 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask); 2046 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask); 2047 2048 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28) 2049 | (mask_m[2] << 26) | (mask_m[3] << 24) 2050 | (mask_m[4] << 22) | (mask_m[5] << 20) 2051 | (mask_m[6] << 18) | (mask_m[7] << 16) 2052 | (mask_m[8] << 14) | (mask_m[9] << 12) 2053 | (mask_m[10] << 10) | (mask_m[11] << 8) 2054 | (mask_m[12] << 6) | (mask_m[13] << 4) 2055 | (mask_m[14] << 2) | (mask_m[15] << 0); 2056 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask); 2057 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask); 2058 2059 tmp_mask = (mask_p[15] << 28) 2060 | (mask_p[14] << 26) | (mask_p[13] << 24) 2061 | (mask_p[12] << 22) | (mask_p[11] << 20) 2062 | (mask_p[10] << 18) | (mask_p[9] << 16) 2063 | (mask_p[8] << 14) | (mask_p[7] << 12) 2064 | (mask_p[6] << 10) | (mask_p[5] << 8) 2065 | (mask_p[4] << 6) | (mask_p[3] << 4) 2066 | (mask_p[2] << 2) | (mask_p[1] << 0); 2067 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask); 2068 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask); 2069 2070 tmp_mask = (mask_p[30] << 28) 2071 | (mask_p[29] << 26) | (mask_p[28] << 24) 2072 | (mask_p[27] << 22) | (mask_p[26] << 20) 2073 | (mask_p[25] << 18) | (mask_p[24] << 16) 2074 | (mask_p[23] << 14) | (mask_p[22] << 12) 2075 | (mask_p[21] << 10) | (mask_p[20] << 8) 2076 | (mask_p[19] << 6) | (mask_p[18] << 4) 2077 | (mask_p[17] << 2) | (mask_p[16] << 0); 2078 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask); 2079 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask); 2080 2081 tmp_mask = (mask_p[45] << 28) 2082 | (mask_p[44] << 26) | (mask_p[43] << 24) 2083 | (mask_p[42] << 22) | (mask_p[41] << 20) 2084 | (mask_p[40] << 18) | (mask_p[39] << 16) 2085 | (mask_p[38] << 14) | (mask_p[37] << 12) 2086 | (mask_p[36] << 10) | (mask_p[35] << 8) 2087 | (mask_p[34] << 6) | (mask_p[33] << 4) 2088 | (mask_p[32] << 2) | (mask_p[31] << 0); 2089 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask); 2090 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask); 2091 2092 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28) 2093 | (mask_p[59] << 26) | (mask_p[58] << 24) 2094 | (mask_p[57] << 22) | (mask_p[56] << 20) 2095 | (mask_p[55] << 18) | (mask_p[54] << 16) 2096 | (mask_p[53] << 14) | (mask_p[52] << 12) 2097 | (mask_p[51] << 10) | (mask_p[50] << 8) 2098 | (mask_p[49] << 6) | (mask_p[48] << 4) 2099 | (mask_p[47] << 2) | (mask_p[46] << 0); 2100 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask); 2101 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask); 2102} 2103 2104static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan) 2105{ 2106 int bb_spur = AR_NO_SPUR; 2107 int bin, cur_bin; 2108 int spur_freq_sd; 2109 int spur_delta_phase; 2110 int denominator; 2111 int upper, lower, cur_vit_mask; 2112 int tmp, new; 2113 int i; 2114 int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8, 2115 AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60 2116 }; 2117 int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10, 2118 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60 2119 }; 2120 int inc[4] = { 0, 100, 0, 0 }; 2121 2122 int8_t mask_m[123]; 
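 /*
  * Note (added): one Viterbi-mask flag per bin; mask_m[] covers
  * negative bin offsets and mask_p[] positive ones, indexed by
  * |offset| / 100 in the cur_vit_mask loop further down.
  */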
2123 int8_t mask_p[123]; 2124 int8_t mask_amt; 2125 int tmp_mask; 2126 int cur_bb_spur; 2127 bool is2GHz = IS_CHAN_2GHZ(chan); 2128 2129 memset(&mask_m, 0, sizeof(int8_t) * 123); 2130 memset(&mask_p, 0, sizeof(int8_t) * 123); 2131 2132 for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) { 2133 cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz); 2134 if (AR_NO_SPUR == cur_bb_spur) 2135 break; 2136 cur_bb_spur = cur_bb_spur - (chan->channel * 10); 2137 if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) { 2138 bb_spur = cur_bb_spur; 2139 break; 2140 } 2141 } 2142 2143 if (AR_NO_SPUR == bb_spur) 2144 return; 2145 2146 bin = bb_spur * 32; 2147 2148 tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0)); 2149 new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI | 2150 AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER | 2151 AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK | 2152 AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK); 2153 2154 REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new); 2155 2156 new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL | 2157 AR_PHY_SPUR_REG_ENABLE_MASK_PPM | 2158 AR_PHY_SPUR_REG_MASK_RATE_SELECT | 2159 AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI | 2160 SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH)); 2161 REG_WRITE(ah, AR_PHY_SPUR_REG, new); 2162 2163 spur_delta_phase = ((bb_spur * 524288) / 100) & 2164 AR_PHY_TIMING11_SPUR_DELTA_PHASE; 2165 2166 denominator = IS_CHAN_2GHZ(chan) ? 440 : 400; 2167 spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff; 2168 2169 new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC | 2170 SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) | 2171 SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE)); 2172 REG_WRITE(ah, AR_PHY_TIMING11, new); 2173 2174 cur_bin = -6000; 2175 upper = bin + 100; 2176 lower = bin - 100; 2177 2178 for (i = 0; i < 4; i++) { 2179 int pilot_mask = 0; 2180 int chan_mask = 0; 2181 int bp = 0; 2182 for (bp = 0; bp < 30; bp++) { 2183 if ((cur_bin > lower) && (cur_bin < upper)) { 2184 pilot_mask = pilot_mask | 0x1 << bp; 2185 chan_mask = chan_mask | 0x1 << bp; 2186 } 2187 cur_bin += 100; 2188 } 2189 cur_bin += inc[i]; 2190 REG_WRITE(ah, pilot_mask_reg[i], pilot_mask); 2191 REG_WRITE(ah, chan_mask_reg[i], chan_mask); 2192 } 2193 2194 cur_vit_mask = 6100; 2195 upper = bin + 120; 2196 lower = bin - 120; 2197 2198 for (i = 0; i < 123; i++) { 2199 if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) { 2200 2201 /* workaround for gcc bug #37014 */ 2202 volatile int tmp_v = abs(cur_vit_mask - bin); 2203 2204 if (tmp_v < 75) 2205 mask_amt = 1; 2206 else 2207 mask_amt = 0; 2208 if (cur_vit_mask < 0) 2209 mask_m[abs(cur_vit_mask / 100)] = mask_amt; 2210 else 2211 mask_p[cur_vit_mask / 100] = mask_amt; 2212 } 2213 cur_vit_mask -= 100; 2214 } 2215 2216 tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28) 2217 | (mask_m[48] << 26) | (mask_m[49] << 24) 2218 | (mask_m[50] << 22) | (mask_m[51] << 20) 2219 | (mask_m[52] << 18) | (mask_m[53] << 16) 2220 | (mask_m[54] << 14) | (mask_m[55] << 12) 2221 | (mask_m[56] << 10) | (mask_m[57] << 8) 2222 | (mask_m[58] << 6) | (mask_m[59] << 4) 2223 | (mask_m[60] << 2) | (mask_m[61] << 0); 2224 REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask); 2225 REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask); 2226 2227 tmp_mask = (mask_m[31] << 28) 2228 | (mask_m[32] << 26) | (mask_m[33] << 24) 2229 | (mask_m[34] << 22) | (mask_m[35] << 20) 2230 | (mask_m[36] << 18) | (mask_m[37] << 16) 2231 | (mask_m[48] << 14) | (mask_m[39] << 12) 2232 | (mask_m[40] << 10) | (mask_m[41] << 8) 2233 | (mask_m[42] << 6) | (mask_m[43] << 4) 2234 | (mask_m[44] << 2) | (mask_m[45] << 0); 2235 REG_WRITE(ah, 
AR_PHY_BIN_MASK_2, tmp_mask); 2236 REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask); 2237 2238 tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28) 2239 | (mask_m[18] << 26) | (mask_m[18] << 24) 2240 | (mask_m[20] << 22) | (mask_m[20] << 20) 2241 | (mask_m[22] << 18) | (mask_m[22] << 16) 2242 | (mask_m[24] << 14) | (mask_m[24] << 12) 2243 | (mask_m[25] << 10) | (mask_m[26] << 8) 2244 | (mask_m[27] << 6) | (mask_m[28] << 4) 2245 | (mask_m[29] << 2) | (mask_m[30] << 0); 2246 REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask); 2247 REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask); 2248 2249 tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28) 2250 | (mask_m[2] << 26) | (mask_m[3] << 24) 2251 | (mask_m[4] << 22) | (mask_m[5] << 20) 2252 | (mask_m[6] << 18) | (mask_m[7] << 16) 2253 | (mask_m[8] << 14) | (mask_m[9] << 12) 2254 | (mask_m[10] << 10) | (mask_m[11] << 8) 2255 | (mask_m[12] << 6) | (mask_m[13] << 4) 2256 | (mask_m[14] << 2) | (mask_m[15] << 0); 2257 REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask); 2258 REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask); 2259 2260 tmp_mask = (mask_p[15] << 28) 2261 | (mask_p[14] << 26) | (mask_p[13] << 24) 2262 | (mask_p[12] << 22) | (mask_p[11] << 20) 2263 | (mask_p[10] << 18) | (mask_p[9] << 16) 2264 | (mask_p[8] << 14) | (mask_p[7] << 12) 2265 | (mask_p[6] << 10) | (mask_p[5] << 8) 2266 | (mask_p[4] << 6) | (mask_p[3] << 4) 2267 | (mask_p[2] << 2) | (mask_p[1] << 0); 2268 REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask); 2269 REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask); 2270 2271 tmp_mask = (mask_p[30] << 28) 2272 | (mask_p[29] << 26) | (mask_p[28] << 24) 2273 | (mask_p[27] << 22) | (mask_p[26] << 20) 2274 | (mask_p[25] << 18) | (mask_p[24] << 16) 2275 | (mask_p[23] << 14) | (mask_p[22] << 12) 2276 | (mask_p[21] << 10) | (mask_p[20] << 8) 2277 | (mask_p[19] << 6) | (mask_p[18] << 4) 2278 | (mask_p[17] << 2) | (mask_p[16] << 0); 2279 REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask); 2280 REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask); 2281 2282 tmp_mask = (mask_p[45] << 28) 2283 | (mask_p[44] << 26) | (mask_p[43] << 24) 2284 | (mask_p[42] << 22) | (mask_p[41] << 20) 2285 | (mask_p[40] << 18) | (mask_p[39] << 16) 2286 | (mask_p[38] << 14) | (mask_p[37] << 12) 2287 | (mask_p[36] << 10) | (mask_p[35] << 8) 2288 | (mask_p[34] << 6) | (mask_p[33] << 4) 2289 | (mask_p[32] << 2) | (mask_p[31] << 0); 2290 REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask); 2291 REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask); 2292 2293 tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28) 2294 | (mask_p[59] << 26) | (mask_p[58] << 24) 2295 | (mask_p[57] << 22) | (mask_p[56] << 20) 2296 | (mask_p[55] << 18) | (mask_p[54] << 16) 2297 | (mask_p[53] << 14) | (mask_p[52] << 12) 2298 | (mask_p[51] << 10) | (mask_p[50] << 8) 2299 | (mask_p[49] << 6) | (mask_p[48] << 4) 2300 | (mask_p[47] << 2) | (mask_p[46] << 0); 2301 REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask); 2302 REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask); 2303} 2304 2305static void ath9k_enable_rfkill(struct ath_hw *ah) 2306{ 2307 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, 2308 AR_GPIO_INPUT_EN_VAL_RFSILENT_BB); 2309 2310 REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2, 2311 AR_GPIO_INPUT_MUX2_RFSILENT); 2312 2313 ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio); 2314 REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB); 2315} 2316 2317int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan, 2318 bool bChannelChange) 2319{ 2320 struct ath_common *common = ath9k_hw_common(ah); 2321 u32 saveLedState; 2322 struct ath9k_channel *curchan = ah->curchan; 2323 u32 saveDefAntenna; 2324 u32 macStaId1; 2325 
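 /*
  * Note (added): TSF snapshot; on AR9280 parts using open-loop power
  * control the full reset below clears the TSF, so it is saved here
  * and written back once the chip has been reset.
  */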
u64 tsf = 0; 2326 int i, rx_chainmask, r; 2327 2328 ah->txchainmask = common->tx_chainmask; 2329 ah->rxchainmask = common->rx_chainmask; 2330 2331 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 2332 return -EIO; 2333 2334 if (curchan && !ah->chip_fullsleep) 2335 ath9k_hw_getnf(ah, curchan); 2336 2337 if (bChannelChange && 2338 (ah->chip_fullsleep != true) && 2339 (ah->curchan != NULL) && 2340 (chan->channel != ah->curchan->channel) && 2341 ((chan->channelFlags & CHANNEL_ALL) == 2342 (ah->curchan->channelFlags & CHANNEL_ALL)) && 2343 !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) || 2344 IS_CHAN_A_5MHZ_SPACED(ah->curchan))) { 2345 2346 if (ath9k_hw_channel_change(ah, chan)) { 2347 ath9k_hw_loadnf(ah, ah->curchan); 2348 ath9k_hw_start_nfcal(ah); 2349 return 0; 2350 } 2351 } 2352 2353 saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA); 2354 if (saveDefAntenna == 0) 2355 saveDefAntenna = 1; 2356 2357 macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B; 2358 2359 /* For chips on which RTC reset is done, save TSF before it gets cleared */ 2360 if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) 2361 tsf = ath9k_hw_gettsf64(ah); 2362 2363 saveLedState = REG_READ(ah, AR_CFG_LED) & 2364 (AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL | 2365 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW); 2366 2367 ath9k_hw_mark_phy_inactive(ah); 2368 2369 if (AR_SREV_9271(ah) && ah->htc_reset_init) { 2370 REG_WRITE(ah, 2371 AR9271_RESET_POWER_DOWN_CONTROL, 2372 AR9271_RADIO_RF_RST); 2373 udelay(50); 2374 } 2375 2376 if (!ath9k_hw_chip_reset(ah, chan)) { 2377 ath_print(common, ATH_DBG_FATAL, "Chip reset failed\n"); 2378 return -EINVAL; 2379 } 2380 2381 if (AR_SREV_9271(ah) && ah->htc_reset_init) { 2382 ah->htc_reset_init = false; 2383 REG_WRITE(ah, 2384 AR9271_RESET_POWER_DOWN_CONTROL, 2385 AR9271_GATE_MAC_CTL); 2386 udelay(50); 2387 } 2388 2389 /* Restore TSF */ 2390 if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) 2391 ath9k_hw_settsf64(ah, tsf); 2392 2393 if (AR_SREV_9280_10_OR_LATER(ah)) 2394 REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE); 2395 2396 if (AR_SREV_9287_12_OR_LATER(ah)) { 2397 /* Enable ASYNC FIFO */ 2398 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3, 2399 AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL); 2400 REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO); 2401 REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3, 2402 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET); 2403 REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3, 2404 AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET); 2405 } 2406 r = ath9k_hw_process_ini(ah, chan); 2407 if (r) 2408 return r; 2409 2410 /* Setup MFP options for CCMP */ 2411 if (AR_SREV_9280_20_OR_LATER(ah)) { 2412 /* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt 2413 * frames when constructing CCMP AAD. 
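 * (Added note: the value 0xc7ff written below leaves every other
 * Frame Control bit set; only bits 11-13 are zeroed.)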
*/ 2414 REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT, 2415 0xc7ff); 2416 ah->sw_mgmt_crypto = false; 2417 } else if (AR_SREV_9160_10_OR_LATER(ah)) { 2418 /* Disable hardware crypto for management frames */ 2419 REG_CLR_BIT(ah, AR_PCU_MISC_MODE2, 2420 AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE); 2421 REG_SET_BIT(ah, AR_PCU_MISC_MODE2, 2422 AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT); 2423 ah->sw_mgmt_crypto = true; 2424 } else 2425 ah->sw_mgmt_crypto = true; 2426 2427 if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan)) 2428 ath9k_hw_set_delta_slope(ah, chan); 2429 2430 if (AR_SREV_9280_10_OR_LATER(ah)) 2431 ath9k_hw_9280_spur_mitigate(ah, chan); 2432 else 2433 ath9k_hw_spur_mitigate(ah, chan); 2434 2435 ah->eep_ops->set_board_values(ah, chan); 2436 2437 ath9k_hw_decrease_chain_power(ah, chan); 2438 2439 REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(common->macaddr)); 2440 REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(common->macaddr + 4) 2441 | macStaId1 2442 | AR_STA_ID1_RTS_USE_DEF 2443 | (ah->config. 2444 ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0) 2445 | ah->sta_id1_defaults); 2446 ath9k_hw_set_operating_mode(ah, ah->opmode); 2447 2448 ath_hw_setbssidmask(common); 2449 2450 REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna); 2451 2452 ath9k_hw_write_associd(ah); 2453 2454 REG_WRITE(ah, AR_ISR, ~0); 2455 2456 REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR); 2457 2458 if (AR_SREV_9280_10_OR_LATER(ah)) 2459 ath9k_hw_ar9280_set_channel(ah, chan); 2460 else 2461 if (!(ath9k_hw_set_channel(ah, chan))) 2462 return -EIO; 2463 2464 for (i = 0; i < AR_NUM_DCU; i++) 2465 REG_WRITE(ah, AR_DQCUMASK(i), 1 << i); 2466 2467 ah->intr_txqs = 0; 2468 for (i = 0; i < ah->caps.total_queues; i++) 2469 ath9k_hw_resettxqueue(ah, i); 2470 2471 ath9k_hw_init_interrupt_masks(ah, ah->opmode); 2472 ath9k_hw_init_qos(ah); 2473 2474 if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT) 2475 ath9k_enable_rfkill(ah); 2476 2477 ath9k_hw_init_user_settings(ah); 2478 2479 if (AR_SREV_9287_12_OR_LATER(ah)) { 2480 REG_WRITE(ah, AR_D_GBL_IFS_SIFS, 2481 AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR); 2482 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, 2483 AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR); 2484 REG_WRITE(ah, AR_D_GBL_IFS_EIFS, 2485 AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR); 2486 2487 REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR); 2488 REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR); 2489 2490 REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER, 2491 AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768); 2492 REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN, 2493 AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL); 2494 } 2495 if (AR_SREV_9287_12_OR_LATER(ah)) { 2496 REG_SET_BIT(ah, AR_PCU_MISC_MODE2, 2497 AR_PCU_MISC_MODE2_ENABLE_AGGWEP); 2498 } 2499 2500 REG_WRITE(ah, AR_STA_ID1, 2501 REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM); 2502 2503 ath9k_hw_set_dma(ah); 2504 2505 REG_WRITE(ah, AR_OBS, 8); 2506 2507 if (ah->config.intr_mitigation) { 2508 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500); 2509 REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000); 2510 } 2511 2512 ath9k_hw_init_bb(ah, chan); 2513 2514 if (!ath9k_hw_init_cal(ah, chan)) 2515 return -EIO; 2516 2517 rx_chainmask = ah->rxchainmask; 2518 if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) { 2519 REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask); 2520 REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask); 2521 } 2522 2523 REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ); 2524 2525 /* 2526 * For big endian systems turn on swapping for descriptors 2527 */ 2528 if (AR_SREV_9100(ah)) { 2529 u32 mask; 2530 mask = REG_READ(ah, 
AR_CFG); 2531 if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) { 2532 ath_print(common, ATH_DBG_RESET, 2533 "CFG Byte Swap Set 0x%x\n", mask); 2534 } else { 2535 mask = 2536 INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB; 2537 REG_WRITE(ah, AR_CFG, mask); 2538 ath_print(common, ATH_DBG_RESET, 2539 "Setting CFG 0x%x\n", REG_READ(ah, AR_CFG)); 2540 } 2541 } else { 2542 /* Configure AR9271 target WLAN */ 2543 if (AR_SREV_9271(ah)) 2544 REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB); 2545#ifdef __BIG_ENDIAN 2546 else 2547 REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD); 2548#endif 2549 } 2550 2551 if (ah->btcoex_hw.enabled) 2552 ath9k_hw_btcoex_enable(ah); 2553 2554 return 0; 2555} 2556 2557/************************/ 2558/* Key Cache Management */ 2559/************************/ 2560 2561bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry) 2562{ 2563 u32 keyType; 2564 2565 if (entry >= ah->caps.keycache_size) { 2566 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 2567 "keychache entry %u out of range\n", entry); 2568 return false; 2569 } 2570 2571 keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry)); 2572 2573 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0); 2574 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0); 2575 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0); 2576 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0); 2577 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0); 2578 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR); 2579 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0); 2580 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0); 2581 2582 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) { 2583 u16 micentry = entry + 64; 2584 2585 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0); 2586 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0); 2587 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0); 2588 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0); 2589 2590 } 2591 2592 return true; 2593} 2594 2595bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac) 2596{ 2597 u32 macHi, macLo; 2598 2599 if (entry >= ah->caps.keycache_size) { 2600 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 2601 "keychache entry %u out of range\n", entry); 2602 return false; 2603 } 2604 2605 if (mac != NULL) { 2606 macHi = (mac[5] << 8) | mac[4]; 2607 macLo = (mac[3] << 24) | 2608 (mac[2] << 16) | 2609 (mac[1] << 8) | 2610 mac[0]; 2611 macLo >>= 1; 2612 macLo |= (macHi & 1) << 31; 2613 macHi >>= 1; 2614 } else { 2615 macLo = macHi = 0; 2616 } 2617 REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo); 2618 REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID); 2619 2620 return true; 2621} 2622 2623bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry, 2624 const struct ath9k_keyval *k, 2625 const u8 *mac) 2626{ 2627 const struct ath9k_hw_capabilities *pCap = &ah->caps; 2628 struct ath_common *common = ath9k_hw_common(ah); 2629 u32 key0, key1, key2, key3, key4; 2630 u32 keyType; 2631 2632 if (entry >= pCap->keycache_size) { 2633 ath_print(common, ATH_DBG_FATAL, 2634 "keycache entry %u out of range\n", entry); 2635 return false; 2636 } 2637 2638 switch (k->kv_type) { 2639 case ATH9K_CIPHER_AES_OCB: 2640 keyType = AR_KEYTABLE_TYPE_AES; 2641 break; 2642 case ATH9K_CIPHER_AES_CCM: 2643 if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) { 2644 ath_print(common, ATH_DBG_ANY, 2645 "AES-CCM not supported by mac rev 0x%x\n", 2646 ah->hw_version.macRev); 2647 return false; 2648 } 2649 keyType = AR_KEYTABLE_TYPE_CCM; 2650 break; 2651 case ATH9K_CIPHER_TKIP: 2652 keyType = AR_KEYTABLE_TYPE_TKIP; 2653 if (ATH9K_IS_MIC_ENABLED(ah) 2654 && entry + 
64 >= pCap->keycache_size) { 2655 ath_print(common, ATH_DBG_ANY, 2656 "entry %u inappropriate for TKIP\n", entry); 2657 return false; 2658 } 2659 break; 2660 case ATH9K_CIPHER_WEP: 2661 if (k->kv_len < WLAN_KEY_LEN_WEP40) { 2662 ath_print(common, ATH_DBG_ANY, 2663 "WEP key length %u too small\n", k->kv_len); 2664 return false; 2665 } 2666 if (k->kv_len <= WLAN_KEY_LEN_WEP40) 2667 keyType = AR_KEYTABLE_TYPE_40; 2668 else if (k->kv_len <= WLAN_KEY_LEN_WEP104) 2669 keyType = AR_KEYTABLE_TYPE_104; 2670 else 2671 keyType = AR_KEYTABLE_TYPE_128; 2672 break; 2673 case ATH9K_CIPHER_CLR: 2674 keyType = AR_KEYTABLE_TYPE_CLR; 2675 break; 2676 default: 2677 ath_print(common, ATH_DBG_FATAL, 2678 "cipher %u not supported\n", k->kv_type); 2679 return false; 2680 } 2681 2682 key0 = get_unaligned_le32(k->kv_val + 0); 2683 key1 = get_unaligned_le16(k->kv_val + 4); 2684 key2 = get_unaligned_le32(k->kv_val + 6); 2685 key3 = get_unaligned_le16(k->kv_val + 10); 2686 key4 = get_unaligned_le32(k->kv_val + 12); 2687 if (k->kv_len <= WLAN_KEY_LEN_WEP104) 2688 key4 &= 0xff; 2689 2690 /* 2691 * Note: Key cache registers access special memory area that requires 2692 * two 32-bit writes to actually update the values in the internal 2693 * memory. Consequently, the exact order and pairs used here must be 2694 * maintained. 2695 */ 2696 2697 if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) { 2698 u16 micentry = entry + 64; 2699 2700 /* 2701 * Write inverted key[47:0] first to avoid Michael MIC errors 2702 * on frames that could be sent or received at the same time. 2703 * The correct key will be written in the end once everything 2704 * else is ready. 2705 */ 2706 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0); 2707 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1); 2708 2709 /* Write key[95:48] */ 2710 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2); 2711 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3); 2712 2713 /* Write key[127:96] and key type */ 2714 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4); 2715 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType); 2716 2717 /* Write MAC address for the entry */ 2718 (void) ath9k_hw_keysetmac(ah, entry, mac); 2719 2720 if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) { 2721 /* 2722 * TKIP uses two key cache entries: 2723 * Michael MIC TX/RX keys in the same key cache entry 2724 * (idx = main index + 64): 2725 * key0 [31:0] = RX key [31:0] 2726 * key1 [15:0] = TX key [31:16] 2727 * key1 [31:16] = reserved 2728 * key2 [31:0] = RX key [63:32] 2729 * key3 [15:0] = TX key [15:0] 2730 * key3 [31:16] = reserved 2731 * key4 [31:0] = TX key [63:32] 2732 */ 2733 u32 mic0, mic1, mic2, mic3, mic4; 2734 2735 mic0 = get_unaligned_le32(k->kv_mic + 0); 2736 mic2 = get_unaligned_le32(k->kv_mic + 4); 2737 mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff; 2738 mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff; 2739 mic4 = get_unaligned_le32(k->kv_txmic + 4); 2740 2741 /* Write RX[31:0] and TX[31:16] */ 2742 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0); 2743 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1); 2744 2745 /* Write RX[63:32] and TX[15:0] */ 2746 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2); 2747 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3); 2748 2749 /* Write TX[63:32] and keyType(reserved) */ 2750 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4); 2751 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry), 2752 AR_KEYTABLE_TYPE_CLR); 2753 2754 } else { 2755 /* 2756 * TKIP uses four key cache entries (two for group 2757 * keys): 2758 * Michael MIC TX/RX keys are in different key cache 2759 * 
entries (idx = main index + 64 for TX and 2760 * main index + 32 + 96 for RX): 2761 * key0 [31:0] = TX/RX MIC key [31:0] 2762 * key1 [31:0] = reserved 2763 * key2 [31:0] = TX/RX MIC key [63:32] 2764 * key3 [31:0] = reserved 2765 * key4 [31:0] = reserved 2766 * 2767 * Upper layer code will call this function separately 2768 * for TX and RX keys when these registers offsets are 2769 * used. 2770 */ 2771 u32 mic0, mic2; 2772 2773 mic0 = get_unaligned_le32(k->kv_mic + 0); 2774 mic2 = get_unaligned_le32(k->kv_mic + 4); 2775 2776 /* Write MIC key[31:0] */ 2777 REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0); 2778 REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0); 2779 2780 /* Write MIC key[63:32] */ 2781 REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2); 2782 REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0); 2783 2784 /* Write TX[63:32] and keyType(reserved) */ 2785 REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0); 2786 REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry), 2787 AR_KEYTABLE_TYPE_CLR); 2788 } 2789 2790 /* MAC address registers are reserved for the MIC entry */ 2791 REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0); 2792 REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0); 2793 2794 /* 2795 * Write the correct (un-inverted) key[47:0] last to enable 2796 * TKIP now that all other registers are set with correct 2797 * values. 2798 */ 2799 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0); 2800 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1); 2801 } else { 2802 /* Write key[47:0] */ 2803 REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0); 2804 REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1); 2805 2806 /* Write key[95:48] */ 2807 REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2); 2808 REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3); 2809 2810 /* Write key[127:96] and key type */ 2811 REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4); 2812 REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType); 2813 2814 /* Write MAC address for the entry */ 2815 (void) ath9k_hw_keysetmac(ah, entry, mac); 2816 } 2817 2818 return true; 2819} 2820 2821bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry) 2822{ 2823 if (entry < ah->caps.keycache_size) { 2824 u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry)); 2825 if (val & AR_KEYTABLE_VALID) 2826 return true; 2827 } 2828 return false; 2829} 2830 2831/******************************/ 2832/* Power Management (Chipset) */ 2833/******************************/ 2834 2835static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip) 2836{ 2837 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2838 if (setChip) { 2839 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, 2840 AR_RTC_FORCE_WAKE_EN); 2841 if (!AR_SREV_9100(ah)) 2842 REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF); 2843 2844 REG_CLR_BIT(ah, (AR_RTC_RESET), 2845 AR_RTC_RESET_EN); 2846 } 2847} 2848 2849static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip) 2850{ 2851 REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2852 if (setChip) { 2853 struct ath9k_hw_capabilities *pCap = &ah->caps; 2854 2855 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 2856 REG_WRITE(ah, AR_RTC_FORCE_WAKE, 2857 AR_RTC_FORCE_WAKE_ON_INT); 2858 } else { 2859 REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, 2860 AR_RTC_FORCE_WAKE_EN); 2861 } 2862 } 2863} 2864 2865static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip) 2866{ 2867 u32 val; 2868 int i; 2869 2870 if (setChip) { 2871 if ((REG_READ(ah, AR_RTC_STATUS) & 2872 AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) { 2873 if (ath9k_hw_set_reset_reg(ah, 2874 ATH9K_RESET_POWER_ON) != true) { 2875 return false; 2876 } 2877 } 2878 if (AR_SREV_9100(ah)) 2879 
REG_SET_BIT(ah, AR_RTC_RESET, 2880 AR_RTC_RESET_EN); 2881 2882 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE, 2883 AR_RTC_FORCE_WAKE_EN); 2884 udelay(50); 2885 2886 for (i = POWER_UP_TIME / 50; i > 0; i--) { 2887 val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M; 2888 if (val == AR_RTC_STATUS_ON) 2889 break; 2890 udelay(50); 2891 REG_SET_BIT(ah, AR_RTC_FORCE_WAKE, 2892 AR_RTC_FORCE_WAKE_EN); 2893 } 2894 if (i == 0) { 2895 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 2896 "Failed to wakeup in %uus\n", 2897 POWER_UP_TIME / 20); 2898 return false; 2899 } 2900 } 2901 2902 REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV); 2903 2904 return true; 2905} 2906 2907bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode) 2908{ 2909 struct ath_common *common = ath9k_hw_common(ah); 2910 int status = true, setChip = true; 2911 static const char *modes[] = { 2912 "AWAKE", 2913 "FULL-SLEEP", 2914 "NETWORK SLEEP", 2915 "UNDEFINED" 2916 }; 2917 2918 if (ah->power_mode == mode) 2919 return status; 2920 2921 ath_print(common, ATH_DBG_RESET, "%s -> %s\n", 2922 modes[ah->power_mode], modes[mode]); 2923 2924 switch (mode) { 2925 case ATH9K_PM_AWAKE: 2926 status = ath9k_hw_set_power_awake(ah, setChip); 2927 break; 2928 case ATH9K_PM_FULL_SLEEP: 2929 ath9k_set_power_sleep(ah, setChip); 2930 ah->chip_fullsleep = true; 2931 break; 2932 case ATH9K_PM_NETWORK_SLEEP: 2933 ath9k_set_power_network_sleep(ah, setChip); 2934 break; 2935 default: 2936 ath_print(common, ATH_DBG_FATAL, 2937 "Unknown power mode %u\n", mode); 2938 return false; 2939 } 2940 ah->power_mode = mode; 2941 2942 return status; 2943} 2944 2945/* 2946 * Helper for ASPM support. 2947 * 2948 * Disable PLL when in L0s as well as receiver clock when in L1. 2949 * This power saving option must be enabled through the SerDes. 2950 * 2951 * Programming the SerDes must go through the same 288 bit serial shift 2952 * register as the other analog registers. Hence the 9 writes. 
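 * (Added note: 288 bits at 32 bits per access works out to the 9
 * AR_PCIE_SERDES writes issued below.)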
2953 */ 2954void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off) 2955{ 2956 u8 i; 2957 u32 val; 2958 2959 if (ah->is_pciexpress != true) 2960 return; 2961 2962 /* Do not touch SerDes registers */ 2963 if (ah->config.pcie_powersave_enable == 2) 2964 return; 2965 2966 /* Nothing to do on restore for 11N */ 2967 if (!restore) { 2968 if (AR_SREV_9280_20_OR_LATER(ah)) { 2969 /* 2970 * AR9280 2.0 or later chips use SerDes values from the 2971 * initvals.h initialized depending on chipset during 2972 * ath9k_hw_init() 2973 */ 2974 for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) { 2975 REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0), 2976 INI_RA(&ah->iniPcieSerdes, i, 1)); 2977 } 2978 } else if (AR_SREV_9280(ah) && 2979 (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) { 2980 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00); 2981 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 2982 2983 /* RX shut off when elecidle is asserted */ 2984 REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019); 2985 REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820); 2986 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560); 2987 2988 /* Shut off CLKREQ active in L1 */ 2989 if (ah->config.pcie_clock_req) 2990 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc); 2991 else 2992 REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd); 2993 2994 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 2995 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 2996 REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007); 2997 2998 /* Load the new settings */ 2999 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 3000 3001 } else { 3002 REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00); 3003 REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924); 3004 3005 /* RX shut off when elecidle is asserted */ 3006 REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039); 3007 REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824); 3008 REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579); 3009 3010 /* 3011 * Ignore ah->ah_config.pcie_clock_req setting for 3012 * pre-AR9280 11n 3013 */ 3014 REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff); 3015 3016 REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40); 3017 REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554); 3018 REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007); 3019 3020 /* Load the new settings */ 3021 REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000); 3022 } 3023 3024 udelay(1000); 3025 3026 /* set bit 19 to allow forcing of pcie core into L1 state */ 3027 REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA); 3028 3029 /* Several PCIe massages to ensure proper behaviour */ 3030 if (ah->config.pcie_waen) { 3031 val = ah->config.pcie_waen; 3032 if (!power_off) 3033 val &= (~AR_WA_D3_L1_DISABLE); 3034 } else { 3035 if (AR_SREV_9285(ah) || AR_SREV_9271(ah) || 3036 AR_SREV_9287(ah)) { 3037 val = AR9285_WA_DEFAULT; 3038 if (!power_off) 3039 val &= (~AR_WA_D3_L1_DISABLE); 3040 } else if (AR_SREV_9280(ah)) { 3041 /* 3042 * On AR9280 chips bit 22 of 0x4004 needs to be 3043 * set otherwise card may disappear. 3044 */ 3045 val = AR9280_WA_DEFAULT; 3046 if (!power_off) 3047 val &= (~AR_WA_D3_L1_DISABLE); 3048 } else 3049 val = AR_WA_DEFAULT; 3050 } 3051 3052 REG_WRITE(ah, AR_WA, val); 3053 } 3054 3055 if (power_off) { 3056 /* 3057 * Set PCIe workaround bits 3058 * bit 14 in WA register (disable L1) should only 3059 * be set when device enters D3 and be cleared 3060 * when device comes back to D0. 
3061 */ 3062 if (ah->config.pcie_waen) { 3063 if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE) 3064 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE); 3065 } else { 3066 if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) || 3067 AR_SREV_9287(ah)) && 3068 (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) || 3069 (AR_SREV_9280(ah) && 3070 (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) { 3071 REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE); 3072 } 3073 } 3074 } 3075} 3076 3077/**********************/ 3078/* Interrupt Handling */ 3079/**********************/ 3080 3081bool ath9k_hw_intrpend(struct ath_hw *ah) 3082{ 3083 u32 host_isr; 3084 3085 if (AR_SREV_9100(ah)) 3086 return true; 3087 3088 host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE); 3089 if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS)) 3090 return true; 3091 3092 host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE); 3093 if ((host_isr & AR_INTR_SYNC_DEFAULT) 3094 && (host_isr != AR_INTR_SPURIOUS)) 3095 return true; 3096 3097 return false; 3098} 3099 3100bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked) 3101{ 3102 u32 isr = 0; 3103 u32 mask2 = 0; 3104 struct ath9k_hw_capabilities *pCap = &ah->caps; 3105 u32 sync_cause = 0; 3106 bool fatal_int = false; 3107 struct ath_common *common = ath9k_hw_common(ah); 3108 3109 if (!AR_SREV_9100(ah)) { 3110 if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) { 3111 if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M) 3112 == AR_RTC_STATUS_ON) { 3113 isr = REG_READ(ah, AR_ISR); 3114 } 3115 } 3116 3117 sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) & 3118 AR_INTR_SYNC_DEFAULT; 3119 3120 *masked = 0; 3121 3122 if (!isr && !sync_cause) 3123 return false; 3124 } else { 3125 *masked = 0; 3126 isr = REG_READ(ah, AR_ISR); 3127 } 3128 3129 if (isr) { 3130 if (isr & AR_ISR_BCNMISC) { 3131 u32 isr2; 3132 isr2 = REG_READ(ah, AR_ISR_S2); 3133 if (isr2 & AR_ISR_S2_TIM) 3134 mask2 |= ATH9K_INT_TIM; 3135 if (isr2 & AR_ISR_S2_DTIM) 3136 mask2 |= ATH9K_INT_DTIM; 3137 if (isr2 & AR_ISR_S2_DTIMSYNC) 3138 mask2 |= ATH9K_INT_DTIMSYNC; 3139 if (isr2 & (AR_ISR_S2_CABEND)) 3140 mask2 |= ATH9K_INT_CABEND; 3141 if (isr2 & AR_ISR_S2_GTT) 3142 mask2 |= ATH9K_INT_GTT; 3143 if (isr2 & AR_ISR_S2_CST) 3144 mask2 |= ATH9K_INT_CST; 3145 if (isr2 & AR_ISR_S2_TSFOOR) 3146 mask2 |= ATH9K_INT_TSFOOR; 3147 } 3148 3149 isr = REG_READ(ah, AR_ISR_RAC); 3150 if (isr == 0xffffffff) { 3151 *masked = 0; 3152 return false; 3153 } 3154 3155 *masked = isr & ATH9K_INT_COMMON; 3156 3157 if (ah->config.intr_mitigation) { 3158 if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM)) 3159 *masked |= ATH9K_INT_RX; 3160 } 3161 3162 if (isr & (AR_ISR_RXOK | AR_ISR_RXERR)) 3163 *masked |= ATH9K_INT_RX; 3164 if (isr & 3165 (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR | 3166 AR_ISR_TXEOL)) { 3167 u32 s0_s, s1_s; 3168 3169 *masked |= ATH9K_INT_TX; 3170 3171 s0_s = REG_READ(ah, AR_ISR_S0_S); 3172 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK); 3173 ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC); 3174 3175 s1_s = REG_READ(ah, AR_ISR_S1_S); 3176 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR); 3177 ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL); 3178 } 3179 3180 if (isr & AR_ISR_RXORN) { 3181 ath_print(common, ATH_DBG_INTERRUPT, 3182 "receive FIFO overrun interrupt\n"); 3183 } 3184 3185 if (!AR_SREV_9100(ah)) { 3186 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 3187 u32 isr5 = REG_READ(ah, AR_ISR_S5_S); 3188 if (isr5 & AR_ISR_S5_TIM_TIMER) 3189 *masked |= ATH9K_INT_TIM_TIMER; 3190 } 3191 } 3192 3193 *masked |= mask2; 3194 } 3195 3196 if (AR_SREV_9100(ah)) 3197 return true; 3198 
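 /*
  * Note (added): generic (TSF based) hardware timer causes. AR_ISR_S5
  * reports which timers triggered and which crossed their threshold;
  * only the trigger bits are forwarded as ATH9K_INT_GENTIMER.
  */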
3199 if (isr & AR_ISR_GENTMR) { 3200 u32 s5_s; 3201 3202 s5_s = REG_READ(ah, AR_ISR_S5_S); 3203 if (isr & AR_ISR_GENTMR) { 3204 ah->intr_gen_timer_trigger = 3205 MS(s5_s, AR_ISR_S5_GENTIMER_TRIG); 3206 3207 ah->intr_gen_timer_thresh = 3208 MS(s5_s, AR_ISR_S5_GENTIMER_THRESH); 3209 3210 if (ah->intr_gen_timer_trigger) 3211 *masked |= ATH9K_INT_GENTIMER; 3212 3213 } 3214 } 3215 3216 if (sync_cause) { 3217 fatal_int = 3218 (sync_cause & 3219 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR)) 3220 ? true : false; 3221 3222 if (fatal_int) { 3223 if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) { 3224 ath_print(common, ATH_DBG_ANY, 3225 "received PCI FATAL interrupt\n"); 3226 } 3227 if (sync_cause & AR_INTR_SYNC_HOST1_PERR) { 3228 ath_print(common, ATH_DBG_ANY, 3229 "received PCI PERR interrupt\n"); 3230 } 3231 *masked |= ATH9K_INT_FATAL; 3232 } 3233 if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) { 3234 ath_print(common, ATH_DBG_INTERRUPT, 3235 "AR_INTR_SYNC_RADM_CPL_TIMEOUT\n"); 3236 REG_WRITE(ah, AR_RC, AR_RC_HOSTIF); 3237 REG_WRITE(ah, AR_RC, 0); 3238 *masked |= ATH9K_INT_FATAL; 3239 } 3240 if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) { 3241 ath_print(common, ATH_DBG_INTERRUPT, 3242 "AR_INTR_SYNC_LOCAL_TIMEOUT\n"); 3243 } 3244 3245 REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause); 3246 (void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR); 3247 } 3248 3249 return true; 3250} 3251 3252enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints) 3253{ 3254 u32 omask = ah->mask_reg; 3255 u32 mask, mask2; 3256 struct ath9k_hw_capabilities *pCap = &ah->caps; 3257 struct ath_common *common = ath9k_hw_common(ah); 3258 3259 ath_print(common, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints); 3260 3261 if (omask & ATH9K_INT_GLOBAL) { 3262 ath_print(common, ATH_DBG_INTERRUPT, "disable IER\n"); 3263 REG_WRITE(ah, AR_IER, AR_IER_DISABLE); 3264 (void) REG_READ(ah, AR_IER); 3265 if (!AR_SREV_9100(ah)) { 3266 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0); 3267 (void) REG_READ(ah, AR_INTR_ASYNC_ENABLE); 3268 3269 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0); 3270 (void) REG_READ(ah, AR_INTR_SYNC_ENABLE); 3271 } 3272 } 3273 3274 mask = ints & ATH9K_INT_COMMON; 3275 mask2 = 0; 3276 3277 if (ints & ATH9K_INT_TX) { 3278 if (ah->txok_interrupt_mask) 3279 mask |= AR_IMR_TXOK; 3280 if (ah->txdesc_interrupt_mask) 3281 mask |= AR_IMR_TXDESC; 3282 if (ah->txerr_interrupt_mask) 3283 mask |= AR_IMR_TXERR; 3284 if (ah->txeol_interrupt_mask) 3285 mask |= AR_IMR_TXEOL; 3286 } 3287 if (ints & ATH9K_INT_RX) { 3288 mask |= AR_IMR_RXERR; 3289 if (ah->config.intr_mitigation) 3290 mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM; 3291 else 3292 mask |= AR_IMR_RXOK | AR_IMR_RXDESC; 3293 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) 3294 mask |= AR_IMR_GENTMR; 3295 } 3296 3297 if (ints & (ATH9K_INT_BMISC)) { 3298 mask |= AR_IMR_BCNMISC; 3299 if (ints & ATH9K_INT_TIM) 3300 mask2 |= AR_IMR_S2_TIM; 3301 if (ints & ATH9K_INT_DTIM) 3302 mask2 |= AR_IMR_S2_DTIM; 3303 if (ints & ATH9K_INT_DTIMSYNC) 3304 mask2 |= AR_IMR_S2_DTIMSYNC; 3305 if (ints & ATH9K_INT_CABEND) 3306 mask2 |= AR_IMR_S2_CABEND; 3307 if (ints & ATH9K_INT_TSFOOR) 3308 mask2 |= AR_IMR_S2_TSFOOR; 3309 } 3310 3311 if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) { 3312 mask |= AR_IMR_BCNMISC; 3313 if (ints & ATH9K_INT_GTT) 3314 mask2 |= AR_IMR_S2_GTT; 3315 if (ints & ATH9K_INT_CST) 3316 mask2 |= AR_IMR_S2_CST; 3317 } 3318 3319 ath_print(common, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask); 3320 REG_WRITE(ah, AR_IMR, mask); 3321 mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM | 3322 
AR_IMR_S2_DTIM | 3323 AR_IMR_S2_DTIMSYNC | 3324 AR_IMR_S2_CABEND | 3325 AR_IMR_S2_CABTO | 3326 AR_IMR_S2_TSFOOR | 3327 AR_IMR_S2_GTT | AR_IMR_S2_CST); 3328 REG_WRITE(ah, AR_IMR_S2, mask | mask2); 3329 ah->mask_reg = ints; 3330 3331 if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) { 3332 if (ints & ATH9K_INT_TIM_TIMER) 3333 REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER); 3334 else 3335 REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER); 3336 } 3337 3338 if (ints & ATH9K_INT_GLOBAL) { 3339 ath_print(common, ATH_DBG_INTERRUPT, "enable IER\n"); 3340 REG_WRITE(ah, AR_IER, AR_IER_ENABLE); 3341 if (!AR_SREV_9100(ah)) { 3342 REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 3343 AR_INTR_MAC_IRQ); 3344 REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ); 3345 3346 3347 REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 3348 AR_INTR_SYNC_DEFAULT); 3349 REG_WRITE(ah, AR_INTR_SYNC_MASK, 3350 AR_INTR_SYNC_DEFAULT); 3351 } 3352 ath_print(common, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n", 3353 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER)); 3354 } 3355 3356 return omask; 3357} 3358 3359/*******************/ 3360/* Beacon Handling */ 3361/*******************/ 3362 3363void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period) 3364{ 3365 int flags = 0; 3366 3367 ah->beacon_interval = beacon_period; 3368 3369 switch (ah->opmode) { 3370 case NL80211_IFTYPE_STATION: 3371 case NL80211_IFTYPE_MONITOR: 3372 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon)); 3373 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff); 3374 REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff); 3375 flags |= AR_TBTT_TIMER_EN; 3376 break; 3377 case NL80211_IFTYPE_ADHOC: 3378 case NL80211_IFTYPE_MESH_POINT: 3379 REG_SET_BIT(ah, AR_TXCFG, 3380 AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY); 3381 REG_WRITE(ah, AR_NEXT_NDP_TIMER, 3382 TU_TO_USEC(next_beacon + 3383 (ah->atim_window ? ah-> 3384 atim_window : 1))); 3385 flags |= AR_NDP_TIMER_EN; 3386 case NL80211_IFTYPE_AP: 3387 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon)); 3388 REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 3389 TU_TO_USEC(next_beacon - 3390 ah->config. 3391 dma_beacon_response_time)); 3392 REG_WRITE(ah, AR_NEXT_SWBA, 3393 TU_TO_USEC(next_beacon - 3394 ah->config. 
3395 sw_beacon_response_time)); 3396 flags |= 3397 AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN; 3398 break; 3399 default: 3400 ath_print(ath9k_hw_common(ah), ATH_DBG_BEACON, 3401 "%s: unsupported opmode: %d\n", 3402 __func__, ah->opmode); 3403 return; 3404 break; 3405 } 3406 3407 REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period)); 3408 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period)); 3409 REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period)); 3410 REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period)); 3411 3412 beacon_period &= ~ATH9K_BEACON_ENA; 3413 if (beacon_period & ATH9K_BEACON_RESET_TSF) { 3414 beacon_period &= ~ATH9K_BEACON_RESET_TSF; 3415 ath9k_hw_reset_tsf(ah); 3416 } 3417 3418 REG_SET_BIT(ah, AR_TIMER_MODE, flags); 3419} 3420 3421void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah, 3422 const struct ath9k_beacon_state *bs) 3423{ 3424 u32 nextTbtt, beaconintval, dtimperiod, beacontimeout; 3425 struct ath9k_hw_capabilities *pCap = &ah->caps; 3426 struct ath_common *common = ath9k_hw_common(ah); 3427 3428 REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt)); 3429 3430 REG_WRITE(ah, AR_BEACON_PERIOD, 3431 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD)); 3432 REG_WRITE(ah, AR_DMA_BEACON_PERIOD, 3433 TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD)); 3434 3435 REG_RMW_FIELD(ah, AR_RSSI_THR, 3436 AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold); 3437 3438 beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD; 3439 3440 if (bs->bs_sleepduration > beaconintval) 3441 beaconintval = bs->bs_sleepduration; 3442 3443 dtimperiod = bs->bs_dtimperiod; 3444 if (bs->bs_sleepduration > dtimperiod) 3445 dtimperiod = bs->bs_sleepduration; 3446 3447 if (beaconintval == dtimperiod) 3448 nextTbtt = bs->bs_nextdtim; 3449 else 3450 nextTbtt = bs->bs_nexttbtt; 3451 3452 ath_print(common, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim); 3453 ath_print(common, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt); 3454 ath_print(common, ATH_DBG_BEACON, "beacon period %d\n", beaconintval); 3455 ath_print(common, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod); 3456 3457 REG_WRITE(ah, AR_NEXT_DTIM, 3458 TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP)); 3459 REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP)); 3460 3461 REG_WRITE(ah, AR_SLEEP1, 3462 SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT) 3463 | AR_SLEEP1_ASSUME_DTIM); 3464 3465 if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP) 3466 beacontimeout = (BEACON_TIMEOUT_VAL << 3); 3467 else 3468 beacontimeout = MIN_BEACON_TIMEOUT_VAL; 3469 3470 REG_WRITE(ah, AR_SLEEP2, 3471 SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT)); 3472 3473 REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval)); 3474 REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod)); 3475 3476 REG_SET_BIT(ah, AR_TIMER_MODE, 3477 AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN | 3478 AR_DTIM_TIMER_EN); 3479 3480 /* TSF Out of Range Threshold */ 3481 REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold); 3482} 3483 3484/*******************/ 3485/* HW Capabilities */ 3486/*******************/ 3487 3488void ath9k_hw_fill_cap_info(struct ath_hw *ah) 3489{ 3490 struct ath9k_hw_capabilities *pCap = &ah->caps; 3491 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3492 struct ath_common *common = ath9k_hw_common(ah); 3493 struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw; 3494 3495 u16 capField = 0, eeval; 3496 3497 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0); 3498 regulatory->current_rd = eeval; 3499 3500 eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1); 3501 if 
(AR_SREV_9285_10_OR_LATER(ah)) 3502 eeval |= AR9285_RDEXT_DEFAULT; 3503 regulatory->current_rd_ext = eeval; 3504 3505 capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP); 3506 3507 if (ah->opmode != NL80211_IFTYPE_AP && 3508 ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) { 3509 if (regulatory->current_rd == 0x64 || 3510 regulatory->current_rd == 0x65) 3511 regulatory->current_rd += 5; 3512 else if (regulatory->current_rd == 0x41) 3513 regulatory->current_rd = 0x43; 3514 ath_print(common, ATH_DBG_REGULATORY, 3515 "regdomain mapped to 0x%x\n", regulatory->current_rd); 3516 } 3517 3518 eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE); 3519 bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX); 3520 3521 if (eeval & AR5416_OPFLAGS_11A) { 3522 set_bit(ATH9K_MODE_11A, pCap->wireless_modes); 3523 if (ah->config.ht_enable) { 3524 if (!(eeval & AR5416_OPFLAGS_N_5G_HT20)) 3525 set_bit(ATH9K_MODE_11NA_HT20, 3526 pCap->wireless_modes); 3527 if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) { 3528 set_bit(ATH9K_MODE_11NA_HT40PLUS, 3529 pCap->wireless_modes); 3530 set_bit(ATH9K_MODE_11NA_HT40MINUS, 3531 pCap->wireless_modes); 3532 } 3533 } 3534 } 3535 3536 if (eeval & AR5416_OPFLAGS_11G) { 3537 set_bit(ATH9K_MODE_11G, pCap->wireless_modes); 3538 if (ah->config.ht_enable) { 3539 if (!(eeval & AR5416_OPFLAGS_N_2G_HT20)) 3540 set_bit(ATH9K_MODE_11NG_HT20, 3541 pCap->wireless_modes); 3542 if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) { 3543 set_bit(ATH9K_MODE_11NG_HT40PLUS, 3544 pCap->wireless_modes); 3545 set_bit(ATH9K_MODE_11NG_HT40MINUS, 3546 pCap->wireless_modes); 3547 } 3548 } 3549 } 3550 3551 pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK); 3552 /* 3553 * For AR9271 we will temporarily use the rx chainmask as read from 3554 * the EEPROM. 3555 */ 3556 if ((ah->hw_version.devid == AR5416_DEVID_PCI) && 3557 !(eeval & AR5416_OPFLAGS_11A) && 3558 !(AR_SREV_9271(ah))) 3559 /* CB71: GPIO 0 is pulled down to indicate 3 rx chains */ 3560 pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7; 3561 else 3562 /* Use rx_chainmask from EEPROM. 
*/ 3563 pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK); 3564 3565 if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0))) 3566 ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA; 3567 3568 pCap->low_2ghz_chan = 2312; 3569 pCap->high_2ghz_chan = 2732; 3570 3571 pCap->low_5ghz_chan = 4920; 3572 pCap->high_5ghz_chan = 6100; 3573 3574 pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP; 3575 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP; 3576 pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM; 3577 3578 pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP; 3579 pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP; 3580 pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM; 3581 3582 if (ah->config.ht_enable) 3583 pCap->hw_caps |= ATH9K_HW_CAP_HT; 3584 else 3585 pCap->hw_caps &= ~ATH9K_HW_CAP_HT; 3586 3587 pCap->hw_caps |= ATH9K_HW_CAP_GTT; 3588 pCap->hw_caps |= ATH9K_HW_CAP_VEOL; 3589 pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK; 3590 pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH; 3591 3592 if (capField & AR_EEPROM_EEPCAP_MAXQCU) 3593 pCap->total_queues = 3594 MS(capField, AR_EEPROM_EEPCAP_MAXQCU); 3595 else 3596 pCap->total_queues = ATH9K_NUM_TX_QUEUES; 3597 3598 if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES) 3599 pCap->keycache_size = 3600 1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES); 3601 else 3602 pCap->keycache_size = AR_KEYTABLE_SIZE; 3603 3604 pCap->hw_caps |= ATH9K_HW_CAP_FASTCC; 3605 pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD; 3606 3607 if (AR_SREV_9285_10_OR_LATER(ah)) 3608 pCap->num_gpio_pins = AR9285_NUM_GPIO; 3609 else if (AR_SREV_9280_10_OR_LATER(ah)) 3610 pCap->num_gpio_pins = AR928X_NUM_GPIO; 3611 else 3612 pCap->num_gpio_pins = AR_NUM_GPIO; 3613 3614 if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) { 3615 pCap->hw_caps |= ATH9K_HW_CAP_CST; 3616 pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX; 3617 } else { 3618 pCap->rts_aggr_limit = (8 * 1024); 3619 } 3620 3621 pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM; 3622 3623#if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE) 3624 ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT); 3625 if (ah->rfsilent & EEP_RFSILENT_ENABLED) { 3626 ah->rfkill_gpio = 3627 MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL); 3628 ah->rfkill_polarity = 3629 MS(ah->rfsilent, EEP_RFSILENT_POLARITY); 3630 3631 pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT; 3632 } 3633#endif 3634 3635 pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP; 3636 3637 if (AR_SREV_9280(ah) || AR_SREV_9285(ah)) 3638 pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS; 3639 else 3640 pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS; 3641 3642 if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) { 3643 pCap->reg_cap = 3644 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A | 3645 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN | 3646 AR_EEPROM_EEREGCAP_EN_KK_U2 | 3647 AR_EEPROM_EEREGCAP_EN_KK_MIDBAND; 3648 } else { 3649 pCap->reg_cap = 3650 AR_EEPROM_EEREGCAP_EN_KK_NEW_11A | 3651 AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN; 3652 } 3653 3654 pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND; 3655 3656 pCap->num_antcfg_5ghz = 3657 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ); 3658 pCap->num_antcfg_2ghz = 3659 ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ); 3660 3661 if (AR_SREV_9280_10_OR_LATER(ah) && 3662 ath9k_hw_btcoex_supported(ah)) { 3663 btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO; 3664 btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO; 3665 3666 if (AR_SREV_9285(ah)) { 3667 btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE; 3668 btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO; 3669 } else { 3670 btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE; 3671 } 3672 } else { 3673 
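 /* Note (added): bluetooth coexistence is only wired up on AR9280 or
  * later parts that report btcoex support; everything else runs with
  * it disabled. */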
btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE; 3674 } 3675} 3676 3677bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type, 3678 u32 capability, u32 *result) 3679{ 3680 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3681 switch (type) { 3682 case ATH9K_CAP_CIPHER: 3683 switch (capability) { 3684 case ATH9K_CIPHER_AES_CCM: 3685 case ATH9K_CIPHER_AES_OCB: 3686 case ATH9K_CIPHER_TKIP: 3687 case ATH9K_CIPHER_WEP: 3688 case ATH9K_CIPHER_MIC: 3689 case ATH9K_CIPHER_CLR: 3690 return true; 3691 default: 3692 return false; 3693 } 3694 case ATH9K_CAP_TKIP_MIC: 3695 switch (capability) { 3696 case 0: 3697 return true; 3698 case 1: 3699 return (ah->sta_id1_defaults & 3700 AR_STA_ID1_CRPT_MIC_ENABLE) ? true : 3701 false; 3702 } 3703 case ATH9K_CAP_TKIP_SPLIT: 3704 return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ? 3705 false : true; 3706 case ATH9K_CAP_DIVERSITY: 3707 return (REG_READ(ah, AR_PHY_CCK_DETECT) & 3708 AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ? 3709 true : false; 3710 case ATH9K_CAP_MCAST_KEYSRCH: 3711 switch (capability) { 3712 case 0: 3713 return true; 3714 case 1: 3715 if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) { 3716 return false; 3717 } else { 3718 return (ah->sta_id1_defaults & 3719 AR_STA_ID1_MCAST_KSRCH) ? true : 3720 false; 3721 } 3722 } 3723 return false; 3724 case ATH9K_CAP_TXPOW: 3725 switch (capability) { 3726 case 0: 3727 return 0; 3728 case 1: 3729 *result = regulatory->power_limit; 3730 return 0; 3731 case 2: 3732 *result = regulatory->max_power_level; 3733 return 0; 3734 case 3: 3735 *result = regulatory->tp_scale; 3736 return 0; 3737 } 3738 return false; 3739 case ATH9K_CAP_DS: 3740 return (AR_SREV_9280_20_OR_LATER(ah) && 3741 (ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1)) 3742 ? false : true; 3743 default: 3744 return false; 3745 } 3746} 3747 3748bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type, 3749 u32 capability, u32 setting, int *status) 3750{ 3751 u32 v; 3752 3753 switch (type) { 3754 case ATH9K_CAP_TKIP_MIC: 3755 if (setting) 3756 ah->sta_id1_defaults |= 3757 AR_STA_ID1_CRPT_MIC_ENABLE; 3758 else 3759 ah->sta_id1_defaults &= 3760 ~AR_STA_ID1_CRPT_MIC_ENABLE; 3761 return true; 3762 case ATH9K_CAP_DIVERSITY: 3763 v = REG_READ(ah, AR_PHY_CCK_DETECT); 3764 if (setting) 3765 v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV; 3766 else 3767 v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV; 3768 REG_WRITE(ah, AR_PHY_CCK_DETECT, v); 3769 return true; 3770 case ATH9K_CAP_MCAST_KEYSRCH: 3771 if (setting) 3772 ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH; 3773 else 3774 ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH; 3775 return true; 3776 default: 3777 return false; 3778 } 3779} 3780 3781/****************************/ 3782/* GPIO / RFKILL / Antennae */ 3783/****************************/ 3784 3785static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah, 3786 u32 gpio, u32 type) 3787{ 3788 int addr; 3789 u32 gpio_shift, tmp; 3790 3791 if (gpio > 11) 3792 addr = AR_GPIO_OUTPUT_MUX3; 3793 else if (gpio > 5) 3794 addr = AR_GPIO_OUTPUT_MUX2; 3795 else 3796 addr = AR_GPIO_OUTPUT_MUX1; 3797 3798 gpio_shift = (gpio % 6) * 5; 3799 3800 if (AR_SREV_9280_20_OR_LATER(ah) 3801 || (addr != AR_GPIO_OUTPUT_MUX1)) { 3802 REG_RMW(ah, addr, (type << gpio_shift), 3803 (0x1f << gpio_shift)); 3804 } else { 3805 tmp = REG_READ(ah, addr); 3806 tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0); 3807 tmp &= ~(0x1f << gpio_shift); 3808 tmp |= (type << gpio_shift); 3809 REG_WRITE(ah, addr, tmp); 3810 } 3811} 3812 3813void 
ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio) 3814{ 3815 u32 gpio_shift; 3816 3817 BUG_ON(gpio >= ah->caps.num_gpio_pins); 3818 3819 gpio_shift = gpio << 1; 3820 3821 REG_RMW(ah, 3822 AR_GPIO_OE_OUT, 3823 (AR_GPIO_OE_OUT_DRV_NO << gpio_shift), 3824 (AR_GPIO_OE_OUT_DRV << gpio_shift)); 3825} 3826 3827u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio) 3828{ 3829#define MS_REG_READ(x, y) \ 3830 (MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y))) 3831 3832 if (gpio >= ah->caps.num_gpio_pins) 3833 return 0xffffffff; 3834 3835 if (AR_SREV_9287_10_OR_LATER(ah)) 3836 return MS_REG_READ(AR9287, gpio) != 0; 3837 else if (AR_SREV_9285_10_OR_LATER(ah)) 3838 return MS_REG_READ(AR9285, gpio) != 0; 3839 else if (AR_SREV_9280_10_OR_LATER(ah)) 3840 return MS_REG_READ(AR928X, gpio) != 0; 3841 else 3842 return MS_REG_READ(AR, gpio) != 0; 3843} 3844 3845void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio, 3846 u32 ah_signal_type) 3847{ 3848 u32 gpio_shift; 3849 3850 ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type); 3851 3852 gpio_shift = 2 * gpio; 3853 3854 REG_RMW(ah, 3855 AR_GPIO_OE_OUT, 3856 (AR_GPIO_OE_OUT_DRV_ALL << gpio_shift), 3857 (AR_GPIO_OE_OUT_DRV << gpio_shift)); 3858} 3859 3860void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val) 3861{ 3862 REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio), 3863 AR_GPIO_BIT(gpio)); 3864} 3865 3866u32 ath9k_hw_getdefantenna(struct ath_hw *ah) 3867{ 3868 return REG_READ(ah, AR_DEF_ANTENNA) & 0x7; 3869} 3870 3871void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna) 3872{ 3873 REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7)); 3874} 3875 3876bool ath9k_hw_setantennaswitch(struct ath_hw *ah, 3877 enum ath9k_ant_setting settings, 3878 struct ath9k_channel *chan, 3879 u8 *tx_chainmask, 3880 u8 *rx_chainmask, 3881 u8 *antenna_cfgd) 3882{ 3883 static u8 tx_chainmask_cfg, rx_chainmask_cfg; 3884 3885 if (AR_SREV_9280(ah)) { 3886 if (!tx_chainmask_cfg) { 3887 3888 tx_chainmask_cfg = *tx_chainmask; 3889 rx_chainmask_cfg = *rx_chainmask; 3890 } 3891 3892 switch (settings) { 3893 case ATH9K_ANT_FIXED_A: 3894 *tx_chainmask = ATH9K_ANTENNA0_CHAINMASK; 3895 *rx_chainmask = ATH9K_ANTENNA0_CHAINMASK; 3896 *antenna_cfgd = true; 3897 break; 3898 case ATH9K_ANT_FIXED_B: 3899 if (ah->caps.tx_chainmask > 3900 ATH9K_ANTENNA1_CHAINMASK) { 3901 *tx_chainmask = ATH9K_ANTENNA1_CHAINMASK; 3902 } 3903 *rx_chainmask = ATH9K_ANTENNA1_CHAINMASK; 3904 *antenna_cfgd = true; 3905 break; 3906 case ATH9K_ANT_VARIABLE: 3907 *tx_chainmask = tx_chainmask_cfg; 3908 *rx_chainmask = rx_chainmask_cfg; 3909 *antenna_cfgd = true; 3910 break; 3911 default: 3912 break; 3913 } 3914 } else { 3915 ah->config.diversity_control = settings; 3916 } 3917 3918 return true; 3919} 3920 3921/*********************/ 3922/* General Operation */ 3923/*********************/ 3924 3925u32 ath9k_hw_getrxfilter(struct ath_hw *ah) 3926{ 3927 u32 bits = REG_READ(ah, AR_RX_FILTER); 3928 u32 phybits = REG_READ(ah, AR_PHY_ERR); 3929 3930 if (phybits & AR_PHY_ERR_RADAR) 3931 bits |= ATH9K_RX_FILTER_PHYRADAR; 3932 if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING)) 3933 bits |= ATH9K_RX_FILTER_PHYERR; 3934 3935 return bits; 3936} 3937 3938void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits) 3939{ 3940 u32 phybits; 3941 3942 REG_WRITE(ah, AR_RX_FILTER, bits); 3943 3944 phybits = 0; 3945 if (bits & ATH9K_RX_FILTER_PHYRADAR) 3946 phybits |= AR_PHY_ERR_RADAR; 3947 if (bits & ATH9K_RX_FILTER_PHYERR) 3948 phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING; 3949 REG_WRITE(ah, AR_PHY_ERR, 
phybits); 3950 3951 if (phybits) 3952 REG_WRITE(ah, AR_RXCFG, 3953 REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA); 3954 else 3955 REG_WRITE(ah, AR_RXCFG, 3956 REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA); 3957} 3958 3959bool ath9k_hw_phy_disable(struct ath_hw *ah) 3960{ 3961 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM); 3962} 3963 3964bool ath9k_hw_disable(struct ath_hw *ah) 3965{ 3966 if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) 3967 return false; 3968 3969 return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD); 3970} 3971 3972void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit) 3973{ 3974 struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah); 3975 struct ath9k_channel *chan = ah->curchan; 3976 struct ieee80211_channel *channel = chan->chan; 3977 3978 regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER); 3979 3980 ah->eep_ops->set_txpower(ah, chan, 3981 ath9k_regd_get_ctl(regulatory, chan), 3982 channel->max_antenna_gain * 2, 3983 channel->max_power * 2, 3984 min((u32) MAX_RATE_POWER, 3985 (u32) regulatory->power_limit)); 3986} 3987 3988void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac) 3989{ 3990 memcpy(ath9k_hw_common(ah)->macaddr, mac, ETH_ALEN); 3991} 3992 3993void ath9k_hw_setopmode(struct ath_hw *ah) 3994{ 3995 ath9k_hw_set_operating_mode(ah, ah->opmode); 3996} 3997 3998void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1) 3999{ 4000 REG_WRITE(ah, AR_MCAST_FIL0, filter0); 4001 REG_WRITE(ah, AR_MCAST_FIL1, filter1); 4002} 4003 4004void ath9k_hw_write_associd(struct ath_hw *ah) 4005{ 4006 struct ath_common *common = ath9k_hw_common(ah); 4007 4008 REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid)); 4009 REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) | 4010 ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S)); 4011} 4012 4013u64 ath9k_hw_gettsf64(struct ath_hw *ah) 4014{ 4015 u64 tsf; 4016 4017 tsf = REG_READ(ah, AR_TSF_U32); 4018 tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32); 4019 4020 return tsf; 4021} 4022 4023void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64) 4024{ 4025 REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff); 4026 REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff); 4027} 4028 4029void ath9k_hw_reset_tsf(struct ath_hw *ah) 4030{ 4031 if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0, 4032 AH_TSF_WRITE_TIMEOUT)) 4033 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 4034 "AR_SLP32_TSF_WRITE_STATUS limit exceeded\n"); 4035 4036 REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE); 4037} 4038 4039void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting) 4040{ 4041 if (setting) 4042 ah->misc_mode |= AR_PCU_TX_ADD_TSF; 4043 else 4044 ah->misc_mode &= ~AR_PCU_TX_ADD_TSF; 4045} 4046 4047bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us) 4048{ 4049 if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) { 4050 ath_print(ath9k_hw_common(ah), ATH_DBG_RESET, 4051 "bad slot time %u\n", us); 4052 ah->slottime = (u32) -1; 4053 return false; 4054 } else { 4055 REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us)); 4056 ah->slottime = us; 4057 return true; 4058 } 4059} 4060 4061void ath9k_hw_set11nmac2040(struct ath_hw *ah) 4062{ 4063 struct ieee80211_conf *conf = &ath9k_hw_common(ah)->hw->conf; 4064 u32 macmode; 4065 4066 if (conf_is_ht40(conf) && !ah->config.cwm_ignore_extcca) 4067 macmode = AR_2040_JOINED_RX_CLEAR; 4068 else 4069 macmode = 0; 4070 4071 REG_WRITE(ah, AR_2040_MODE, macmode); 4072} 4073 4074/* HW Generic timers configuration */ 4075 4076static const struct 
ath_gen_timer_configuration gen_tmr_configuration[] = 4077{ 4078 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4079 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4080 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4081 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4082 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4083 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4084 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4085 {AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080}, 4086 {AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001}, 4087 {AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4, 4088 AR_NDP2_TIMER_MODE, 0x0002}, 4089 {AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4, 4090 AR_NDP2_TIMER_MODE, 0x0004}, 4091 {AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4, 4092 AR_NDP2_TIMER_MODE, 0x0008}, 4093 {AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4, 4094 AR_NDP2_TIMER_MODE, 0x0010}, 4095 {AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4, 4096 AR_NDP2_TIMER_MODE, 0x0020}, 4097 {AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4, 4098 AR_NDP2_TIMER_MODE, 0x0040}, 4099 {AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4, 4100 AR_NDP2_TIMER_MODE, 0x0080} 4101}; 4102 4103/* HW generic timer primitives */ 4104 4105/* compute and clear index of rightmost 1 */ 4106static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask) 4107{ 4108 u32 b; 4109 4110 b = *mask; 4111 b &= (0-b); 4112 *mask &= ~b; 4113 b *= debruijn32; 4114 b >>= 27; 4115 4116 return timer_table->gen_timer_index[b]; 4117} 4118 4119u32 ath9k_hw_gettsf32(struct ath_hw *ah) 4120{ 4121 return REG_READ(ah, AR_TSF_L32); 4122} 4123 4124struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah, 4125 void (*trigger)(void *), 4126 void (*overflow)(void *), 4127 void *arg, 4128 u8 timer_index) 4129{ 4130 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4131 struct ath_gen_timer *timer; 4132 4133 timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL); 4134 4135 if (timer == NULL) { 4136 ath_print(ath9k_hw_common(ah), ATH_DBG_FATAL, 4137 "Failed to allocate memory " 4138 "for hw timer[%d]\n", timer_index); 4139 return NULL; 4140 } 4141 4142 /* allocate a hardware generic timer slot */ 4143 timer_table->timers[timer_index] = timer; 4144 timer->index = timer_index; 4145 timer->trigger = trigger; 4146 timer->overflow = overflow; 4147 timer->arg = arg; 4148 4149 return timer; 4150} 4151 4152void ath9k_hw_gen_timer_start(struct ath_hw *ah, 4153 struct ath_gen_timer *timer, 4154 u32 timer_next, 4155 u32 timer_period) 4156{ 4157 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4158 u32 tsf; 4159 4160 BUG_ON(!timer_period); 4161 4162 set_bit(timer->index, &timer_table->timer_mask.timer_bits); 4163 4164 tsf = ath9k_hw_gettsf32(ah); 4165 4166 ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER, 4167 "current tsf %x period %x " 4168 "timer_next %x\n", tsf, timer_period, timer_next); 4169 4170 /* 4171 * Pull timer_next forward if the current TSF already passed it 4172 * because of software latency 4173 */ 4174 if (timer_next < tsf) 4175 timer_next = tsf + timer_period; 4176 4177 /* 4178 * Program generic timer registers 4179 */ 4180 REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr, 4181 timer_next); 4182 REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr, 4183 timer_period); 4184 REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr, 4185 gen_tmr_configuration[timer->index].mode_mask); 4186 4187 /*
Enable both trigger and thresh interrupt masks */ 4188 REG_SET_BIT(ah, AR_IMR_S5, 4189 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) | 4190 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG))); 4191} 4192 4193void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer) 4194{ 4195 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4196 4197 if ((timer->index < AR_FIRST_NDP_TIMER) || 4198 (timer->index >= ATH_MAX_GEN_TIMER)) { 4199 return; 4200 } 4201 4202 /* Clear generic timer enable bits. */ 4203 REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr, 4204 gen_tmr_configuration[timer->index].mode_mask); 4205 4206 /* Disable both trigger and thresh interrupt masks */ 4207 REG_CLR_BIT(ah, AR_IMR_S5, 4208 (SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) | 4209 SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG))); 4210 4211 clear_bit(timer->index, &timer_table->timer_mask.timer_bits); 4212} 4213 4214void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer) 4215{ 4216 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4217 4218 /* free the hardware generic timer slot */ 4219 timer_table->timers[timer->index] = NULL; 4220 kfree(timer); 4221} 4222 4223/* 4224 * Generic Timer Interrupts handling 4225 */ 4226void ath_gen_timer_isr(struct ath_hw *ah) 4227{ 4228 struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers; 4229 struct ath_gen_timer *timer; 4230 struct ath_common *common = ath9k_hw_common(ah); 4231 u32 trigger_mask, thresh_mask, index; 4232 4233 /* get hardware generic timer interrupt status */ 4234 trigger_mask = ah->intr_gen_timer_trigger; 4235 thresh_mask = ah->intr_gen_timer_thresh; 4236 trigger_mask &= timer_table->timer_mask.val; 4237 thresh_mask &= timer_table->timer_mask.val; 4238 4239 trigger_mask &= ~thresh_mask; 4240 4241 while (thresh_mask) { 4242 index = rightmost_index(timer_table, &thresh_mask); 4243 timer = timer_table->timers[index]; 4244 BUG_ON(!timer); 4245 ath_print(common, ATH_DBG_HWTIMER, 4246 "TSF overflow for Gen timer %d\n", index); 4247 timer->overflow(timer->arg); 4248 } 4249 4250 while (trigger_mask) { 4251 index = rightmost_index(timer_table, &trigger_mask); 4252 timer = timer_table->timers[index]; 4253 BUG_ON(!timer); 4254 ath_print(common, ATH_DBG_HWTIMER, 4255 "Gen timer[%d] trigger\n", index); 4256 timer->trigger(timer->arg); 4257 } 4258} 4259
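/*
 * Usage sketch for the generic timer primitives above: a minimal,
 * hypothetical example assuming only the functions defined in this file
 * (ath_gen_timer_alloc(), ath9k_hw_gen_timer_start(),
 * ath9k_hw_gen_timer_stop(), ath_gen_timer_free()).  The example_*
 * identifiers and the 10000 usec period are illustrative, not driver
 * code; the block is kept under #if 0 so it is never compiled.
 */
#if 0
/* trigger callback: called from ath_gen_timer_isr() when the timer fires */
static void example_timer_trigger(void *arg)
{
	struct ath_hw *ah = arg;

	ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER, "example trigger\n");
}

/* overflow callback: called on the TSF threshold/overflow case */
static void example_timer_overflow(void *arg)
{
	struct ath_hw *ah = arg;

	ath_print(ath9k_hw_common(ah), ATH_DBG_HWTIMER, "example overflow\n");
}

static void example_timer_usage(struct ath_hw *ah)
{
	struct ath_gen_timer *timer;

	/* claim the first NDP hardware timer slot */
	timer = ath_gen_timer_alloc(ah, example_timer_trigger,
				    example_timer_overflow, ah,
				    AR_FIRST_NDP_TIMER);
	if (!timer)
		return;

	/* fire ~10000 TSF usecs from now, then repeat every 10000 usecs */
	ath9k_hw_gen_timer_start(ah, timer,
				 ath9k_hw_gettsf32(ah) + 10000, 10000);

	/* ... on teardown: disable the timer and release the slot ... */
	ath9k_hw_gen_timer_stop(ah, timer);
	ath_gen_timer_free(ah, timer);
}
#endif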