hw.c revision 49685634c6cb943ba9b35ff182ee479ca5b73302
1/*
2 * Copyright (c) 2008-2011 Atheros Communications Inc.
3 *
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
7 *
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
17#include <linux/io.h>
18#include <linux/slab.h>
19#include <linux/module.h>
20#include <linux/time.h>
21#include <linux/bitops.h>
22#include <asm/unaligned.h>
23
24#include "hw.h"
25#include "hw-ops.h"
26#include "rc.h"
27#include "ar9003_mac.h"
28#include "ar9003_mci.h"
29#include "ar9003_phy.h"
30#include "debug.h"
31#include "ath9k.h"
32
33static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
34
35MODULE_AUTHOR("Atheros Communications");
36MODULE_DESCRIPTION("Support for Atheros 802.11n wireless LAN cards.");
37MODULE_SUPPORTED_DEVICE("Atheros 802.11n WLAN cards");
38MODULE_LICENSE("Dual BSD/GPL");
39
40static int __init ath9k_init(void)
41{
42	return 0;
43}
44module_init(ath9k_init);
45
46static void __exit ath9k_exit(void)
47{
48	return;
49}
50module_exit(ath9k_exit);
51
52/* Private hardware callbacks */
53
54static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
55{
56	ath9k_hw_private_ops(ah)->init_cal_settings(ah);
57}
58
59static u32 ath9k_hw_compute_pll_control(struct ath_hw *ah,
60					struct ath9k_channel *chan)
61{
62	return ath9k_hw_private_ops(ah)->compute_pll_control(ah, chan);
63}
64
65static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
66{
67	if (!ath9k_hw_private_ops(ah)->init_mode_gain_regs)
68		return;
69
70	ath9k_hw_private_ops(ah)->init_mode_gain_regs(ah);
71}
72
73static void ath9k_hw_ani_cache_ini_regs(struct ath_hw *ah)
74{
75	/* You will not have this callback if using the old ANI */
76	if (!ath9k_hw_private_ops(ah)->ani_cache_ini_regs)
77		return;
78
79	ath9k_hw_private_ops(ah)->ani_cache_ini_regs(ah);
80}
81
82/********************/
83/* Helper Functions */
84/********************/
85
86#ifdef CONFIG_ATH9K_DEBUGFS
87
88void ath9k_debug_sync_cause(struct ath_common *common, u32 sync_cause)
89{
90	struct ath_softc *sc = common->priv;
91	if (sync_cause)
92		sc->debug.stats.istats.sync_cause_all++;
93	if (sync_cause & AR_INTR_SYNC_RTC_IRQ)
94		sc->debug.stats.istats.sync_rtc_irq++;
95	if (sync_cause & AR_INTR_SYNC_MAC_IRQ)
96		sc->debug.stats.istats.sync_mac_irq++;
97	if (sync_cause & AR_INTR_SYNC_EEPROM_ILLEGAL_ACCESS)
98		sc->debug.stats.istats.eeprom_illegal_access++;
99	if (sync_cause & AR_INTR_SYNC_APB_TIMEOUT)
100		sc->debug.stats.istats.apb_timeout++;
101	if (sync_cause & AR_INTR_SYNC_PCI_MODE_CONFLICT)
102		sc->debug.stats.istats.pci_mode_conflict++;
103	if (sync_cause & AR_INTR_SYNC_HOST1_FATAL)
104		sc->debug.stats.istats.host1_fatal++;
105	if (sync_cause & AR_INTR_SYNC_HOST1_PERR)
106		sc->debug.stats.istats.host1_perr++;
107	if (sync_cause & AR_INTR_SYNC_TRCV_FIFO_PERR)
108		sc->debug.stats.istats.trcv_fifo_perr++;
109	if (sync_cause & AR_INTR_SYNC_RADM_CPL_EP)
110		sc->debug.stats.istats.radm_cpl_ep++;
111	if (sync_cause & AR_INTR_SYNC_RADM_CPL_DLLP_ABORT)
112		sc->debug.stats.istats.radm_cpl_dllp_abort++;
113	if (sync_cause & AR_INTR_SYNC_RADM_CPL_TLP_ABORT)
114		sc->debug.stats.istats.radm_cpl_tlp_abort++;
115	if (sync_cause & AR_INTR_SYNC_RADM_CPL_ECRC_ERR)
116		sc->debug.stats.istats.radm_cpl_ecrc_err++;
117	if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT)
118		sc->debug.stats.istats.radm_cpl_timeout++;
119	if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT)
120		sc->debug.stats.istats.local_timeout++;
121	if (sync_cause & AR_INTR_SYNC_PM_ACCESS)
122		sc->debug.stats.istats.pm_access++;
123	if (sync_cause & AR_INTR_SYNC_MAC_AWAKE)
124		sc->debug.stats.istats.mac_awake++;
125	if (sync_cause & AR_INTR_SYNC_MAC_ASLEEP)
126		sc->debug.stats.istats.mac_asleep++;
127	if (sync_cause & AR_INTR_SYNC_MAC_SLEEP_ACCESS)
128		sc->debug.stats.istats.mac_sleep_access++;
129}
130#endif
131
132
133static void ath9k_hw_set_clockrate(struct ath_hw *ah)
134{
135	struct ath_common *common = ath9k_hw_common(ah);
136	struct ath9k_channel *chan = ah->curchan;
137	unsigned int clockrate;
138
139	/* AR9287 v1.3+ uses async FIFO and runs the MAC at 117 MHz */
140	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah))
141		clockrate = 117;
142	else if (!chan) /* should really check for CCK instead */
143		clockrate = ATH9K_CLOCK_RATE_CCK;
144	else if (IS_CHAN_2GHZ(chan))
145		clockrate = ATH9K_CLOCK_RATE_2GHZ_OFDM;
146	else if (ah->caps.hw_caps & ATH9K_HW_CAP_FASTCLOCK)
147		clockrate = ATH9K_CLOCK_FAST_RATE_5GHZ_OFDM;
148	else
149		clockrate = ATH9K_CLOCK_RATE_5GHZ_OFDM;
150
151	if (chan) {
152		if (IS_CHAN_HT40(chan))
153			clockrate *= 2;
154		if (IS_CHAN_HALF_RATE(chan))
155			clockrate /= 2;
156		if (IS_CHAN_QUARTER_RATE(chan))
157			clockrate /= 4;
158	}
159
160	common->clockrate = clockrate;
161}
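/*
 * A rough worked example of the selection above, assuming the usual
 * clock-rate constants from hw.h (44 MHz for 2 GHz OFDM, 40 MHz for
 * 5 GHz OFDM): an HT40 channel at 2.4 GHz gives a MAC clock of
 * 44 * 2 = 88 MHz, while a quarter-rate 5 GHz channel gives
 * 40 / 4 = 10 MHz.
 */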
162
163static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
164{
165	struct ath_common *common = ath9k_hw_common(ah);
166
167	return usecs * common->clockrate;
168}
169
170bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
171{
172	int i;
173
174	BUG_ON(timeout < AH_TIME_QUANTUM);
175
176	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
177		if ((REG_READ(ah, reg) & mask) == val)
178			return true;
179
180		udelay(AH_TIME_QUANTUM);
181	}
182
183	ath_dbg(ath9k_hw_common(ah), ANY,
184		"timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
185		timeout, reg, REG_READ(ah, reg), mask, val);
186
187	return false;
188}
189EXPORT_SYMBOL(ath9k_hw_wait);
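/*
 * Usage example (taken from the reset path later in this file): poll
 * AR_RTC_RC until the MAC reset bits clear, giving up after
 * AH_WAIT_TIMEOUT microseconds:
 *
 *	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT))
 *		return false;
 *
 * The register is sampled once every AH_TIME_QUANTUM microseconds, so
 * the timeout must be at least one quantum.
 */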
190
191void ath9k_hw_synth_delay(struct ath_hw *ah, struct ath9k_channel *chan,
192			  int hw_delay)
193{
194	hw_delay /= 10;
195
196	if (IS_CHAN_HALF_RATE(chan))
197		hw_delay *= 2;
198	else if (IS_CHAN_QUARTER_RATE(chan))
199		hw_delay *= 4;
200
201	udelay(hw_delay + BASE_ACTIVATE_DELAY);
202}
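/*
 * Example: a hw_delay of 100 becomes 10 us after the divide, 20 us on a
 * half-rate channel and 40 us on a quarter-rate one, plus the fixed
 * BASE_ACTIVATE_DELAY settling time in every case.
 */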
203
204void ath9k_hw_write_array(struct ath_hw *ah, const struct ar5416IniArray *array,
205			  int column, unsigned int *writecnt)
206{
207	int r;
208
209	ENABLE_REGWRITE_BUFFER(ah);
210	for (r = 0; r < array->ia_rows; r++) {
211		REG_WRITE(ah, INI_RA(array, r, 0),
212			  INI_RA(array, r, column));
213		DO_DELAY(*writecnt);
214	}
215	REGWRITE_BUFFER_FLUSH(ah);
216}
217
218u32 ath9k_hw_reverse_bits(u32 val, u32 n)
219{
220	u32 retval;
221	int i;
222
223	for (i = 0, retval = 0; i < n; i++) {
224		retval = (retval << 1) | (val & 1);
225		val >>= 1;
226	}
227	return retval;
228}
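/*
 * Worked example: ath9k_hw_reverse_bits(0b00110, 5) consumes the five
 * low bits LSB-first and returns 0b01100, i.e. the 5-bit value mirrored.
 */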
229
230u16 ath9k_hw_computetxtime(struct ath_hw *ah,
231			   u8 phy, int kbps,
232			   u32 frameLen, u16 rateix,
233			   bool shortPreamble)
234{
235	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
236
237	if (kbps == 0)
238		return 0;
239
240	switch (phy) {
241	case WLAN_RC_PHY_CCK:
242		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
243		if (shortPreamble)
244			phyTime >>= 1;
245		numBits = frameLen << 3;
246		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
247		break;
248	case WLAN_RC_PHY_OFDM:
249		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
250			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
251			numBits = OFDM_PLCP_BITS + (frameLen << 3);
252			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
253			txTime = OFDM_SIFS_TIME_QUARTER
254				+ OFDM_PREAMBLE_TIME_QUARTER
255				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
256		} else if (ah->curchan &&
257			   IS_CHAN_HALF_RATE(ah->curchan)) {
258			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
259			numBits = OFDM_PLCP_BITS + (frameLen << 3);
260			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
261			txTime = OFDM_SIFS_TIME_HALF +
262				OFDM_PREAMBLE_TIME_HALF
263				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
264		} else {
265			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
266			numBits = OFDM_PLCP_BITS + (frameLen << 3);
267			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
268			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
269				+ (numSymbols * OFDM_SYMBOL_TIME);
270		}
271		break;
272	default:
273		ath_err(ath9k_hw_common(ah),
274			"Unknown phy %u (rate ix %u)\n", phy, rateix);
275		txTime = 0;
276		break;
277	}
278
279	return txTime;
280}
281EXPORT_SYMBOL(ath9k_hw_computetxtime);
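/*
 * Worked example, assuming the usual 802.11a OFDM constants (22 PLCP
 * bits, 4 us symbols, 20 us preamble, 16 us SIFS): a 100-byte frame at
 * 6 Mbps on a full-rate channel needs 22 + 800 = 822 bits, i.e. 35
 * symbols at 24 bits/symbol, so the function returns
 * 16 + 20 + 35 * 4 = 176 us.
 */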
282
283void ath9k_hw_get_channel_centers(struct ath_hw *ah,
284				  struct ath9k_channel *chan,
285				  struct chan_centers *centers)
286{
287	int8_t extoff;
288
289	if (!IS_CHAN_HT40(chan)) {
290		centers->ctl_center = centers->ext_center =
291			centers->synth_center = chan->channel;
292		return;
293	}
294
295	if (IS_CHAN_HT40PLUS(chan)) {
296		centers->synth_center =
297			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
298		extoff = 1;
299	} else {
300		centers->synth_center =
301			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
302		extoff = -1;
303	}
304
305	centers->ctl_center =
306		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
307	/* 25 MHz spacing is supported by hw but not by the upper layers */
308	centers->ext_center =
309		centers->synth_center + (extoff * HT40_CHANNEL_CENTER_SHIFT);
310}
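/*
 * Example: for an HT40+ channel whose primary is at 2437 MHz
 * (HT40_CHANNEL_CENTER_SHIFT is 10 MHz), the synthesizer center becomes
 * 2447 MHz, the control (primary) center stays at 2437 MHz and the
 * extension center is reported as 2457 MHz.
 */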
311
312/******************/
313/* Chip Revisions */
314/******************/
315
316static void ath9k_hw_read_revisions(struct ath_hw *ah)
317{
318	u32 val;
319
320	switch (ah->hw_version.devid) {
321	case AR5416_AR9100_DEVID:
322		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
323		break;
324	case AR9300_DEVID_AR9330:
325		ah->hw_version.macVersion = AR_SREV_VERSION_9330;
326		if (ah->get_mac_revision) {
327			ah->hw_version.macRev = ah->get_mac_revision();
328		} else {
329			val = REG_READ(ah, AR_SREV);
330			ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
331		}
332		return;
333	case AR9300_DEVID_AR9340:
334		ah->hw_version.macVersion = AR_SREV_VERSION_9340;
335		val = REG_READ(ah, AR_SREV);
336		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
337		return;
338	case AR9300_DEVID_QCA955X:
339		ah->hw_version.macVersion = AR_SREV_VERSION_9550;
340		return;
341	}
342
343	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
344
345	if (val == 0xFF) {
346		val = REG_READ(ah, AR_SREV);
347		ah->hw_version.macVersion =
348			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
349		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
350
351		if (AR_SREV_9462(ah) || AR_SREV_9565(ah))
352			ah->is_pciexpress = true;
353		else
354			ah->is_pciexpress = (val &
355					     AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
356	} else {
357		if (!AR_SREV_9100(ah))
358			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
359
360		ah->hw_version.macRev = val & AR_SREV_REVISION;
361
362		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
363			ah->is_pciexpress = true;
364	}
365}
366
367/************************************/
368/* HW Attach, Detach, Init Routines */
369/************************************/
370
371static void ath9k_hw_disablepcie(struct ath_hw *ah)
372{
373	if (!AR_SREV_5416(ah))
374		return;
375
376	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
377	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
378	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
379	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
380	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
381	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
382	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
383	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
384	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
385
386	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
387}
388
389/* This should work for all families including legacy */
390static bool ath9k_hw_chip_test(struct ath_hw *ah)
391{
392	struct ath_common *common = ath9k_hw_common(ah);
393	u32 regAddr[2] = { AR_STA_ID0 };
394	u32 regHold[2];
395	static const u32 patternData[4] = {
396		0x55555555, 0xaaaaaaaa, 0x66666666, 0x99999999
397	};
398	int i, j, loop_max;
399
400	if (!AR_SREV_9300_20_OR_LATER(ah)) {
401		loop_max = 2;
402		regAddr[1] = AR_PHY_BASE + (8 << 2);
403	} else
404		loop_max = 1;
405
406	for (i = 0; i < loop_max; i++) {
407		u32 addr = regAddr[i];
408		u32 wrData, rdData;
409
410		regHold[i] = REG_READ(ah, addr);
411		for (j = 0; j < 0x100; j++) {
412			wrData = (j << 16) | j;
413			REG_WRITE(ah, addr, wrData);
414			rdData = REG_READ(ah, addr);
415			if (rdData != wrData) {
416				ath_err(common,
417					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
418					addr, wrData, rdData);
419				return false;
420			}
421		}
422		for (j = 0; j < 4; j++) {
423			wrData = patternData[j];
424			REG_WRITE(ah, addr, wrData);
425			rdData = REG_READ(ah, addr);
426			if (wrData != rdData) {
427				ath_err(common,
428					"address test failed addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
429					addr, wrData, rdData);
430				return false;
431			}
432		}
433		REG_WRITE(ah, regAddr[i], regHold[i]);
434	}
435	udelay(100);
436
437	return true;
438}
439
440static void ath9k_hw_init_config(struct ath_hw *ah)
441{
442	ah->config.dma_beacon_response_time = 1;
443	ah->config.sw_beacon_response_time = 6;
444	ah->config.ack_6mb = 0x0;
445	ah->config.cwm_ignore_extcca = 0;
446	ah->config.analog_shiftreg = 1;
447
448	ah->config.rx_intr_mitigation = true;
449
450	/*
451	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
452	 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
453	 * This means we use it for all AR5416 devices, and the few
454	 * minor PCI AR9280 devices out there.
455	 *
456	 * Serialization is required because these devices do not handle
457	 * well the case of two concurrent reads/writes due to the latency
458	 * involved. While one access is in progress, another can be issued
459	 * on a different CPU before the first one has completed in hardware;
460	 * when that happens, the hardware can get stuck in a loop.
461	 * We prevent this by serializing reads and writes.
462	 *
463	 * This issue is not present on PCI-Express devices or pre-AR5416
464	 * devices (legacy, 802.11abg).
465	 */
466	if (num_possible_cpus() > 1)
467		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
468}
469
470static void ath9k_hw_init_defaults(struct ath_hw *ah)
471{
472	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
473
474	regulatory->country_code = CTRY_DEFAULT;
475	regulatory->power_limit = MAX_RATE_POWER;
476
477	ah->hw_version.magic = AR5416_MAGIC;
478	ah->hw_version.subvendorid = 0;
479
480	ah->sta_id1_defaults =
481		AR_STA_ID1_CRPT_MIC_ENABLE |
482		AR_STA_ID1_MCAST_KSRCH;
483	if (AR_SREV_9100(ah))
484		ah->sta_id1_defaults |= AR_STA_ID1_AR9100_BA_FIX;
485	ah->slottime = ATH9K_SLOT_TIME_9;
486	ah->globaltxtimeout = (u32) -1;
487	ah->power_mode = ATH9K_PM_UNDEFINED;
488	ah->htc_reset_init = true;
489}
490
491static int ath9k_hw_init_macaddr(struct ath_hw *ah)
492{
493	struct ath_common *common = ath9k_hw_common(ah);
494	u32 sum;
495	int i;
496	u16 eeval;
497	static const u32 EEP_MAC[] = { EEP_MAC_LSW, EEP_MAC_MID, EEP_MAC_MSW };
498
499	sum = 0;
500	for (i = 0; i < 3; i++) {
501		eeval = ah->eep_ops->get_eeprom(ah, EEP_MAC[i]);
502		sum += eeval;
503		common->macaddr[2 * i] = eeval >> 8;
504		common->macaddr[2 * i + 1] = eeval & 0xff;
505	}
506	if (sum == 0 || sum == 0xffff * 3)
507		return -EADDRNOTAVAIL;
508
509	return 0;
510}
511
512static int ath9k_hw_post_init(struct ath_hw *ah)
513{
514	struct ath_common *common = ath9k_hw_common(ah);
515	int ecode;
516
517	if (common->bus_ops->ath_bus_type != ATH_USB) {
518		if (!ath9k_hw_chip_test(ah))
519			return -ENODEV;
520	}
521
522	if (!AR_SREV_9300_20_OR_LATER(ah)) {
523		ecode = ar9002_hw_rf_claim(ah);
524		if (ecode != 0)
525			return ecode;
526	}
527
528	ecode = ath9k_hw_eeprom_init(ah);
529	if (ecode != 0)
530		return ecode;
531
532	ath_dbg(ath9k_hw_common(ah), CONFIG, "Eeprom VER: %d, REV: %d\n",
533		ah->eep_ops->get_eeprom_ver(ah),
534		ah->eep_ops->get_eeprom_rev(ah));
535
536	ath9k_hw_ani_init(ah);
537
538	/*
539	 * EEPROM needs to be initialized before we do this.
540	 * This is required for regulatory compliance.
541	 */
542	if (AR_SREV_9300_20_OR_LATER(ah)) {
543		u16 regdmn = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
544		if ((regdmn & 0xF0) == CTL_FCC) {
545			ah->nf_2g.max = AR_PHY_CCA_MAX_GOOD_VAL_9300_FCC_2GHZ;
546			ah->nf_5g.max = AR_PHY_CCA_MAX_GOOD_VAL_9300_FCC_5GHZ;
547		}
548	}
549
550	return 0;
551}
552
553static int ath9k_hw_attach_ops(struct ath_hw *ah)
554{
555	if (!AR_SREV_9300_20_OR_LATER(ah))
556		return ar9002_hw_attach_ops(ah);
557
558	ar9003_hw_attach_ops(ah);
559	return 0;
560}
561
562/* Called for all hardware families */
563static int __ath9k_hw_init(struct ath_hw *ah)
564{
565	struct ath_common *common = ath9k_hw_common(ah);
566	int r = 0;
567
568	ath9k_hw_read_revisions(ah);
569
570	/*
571	 * Read back AR_WA into a permanent copy and set bits 14 and 17.
572	 * We need to do this to avoid RMW of this register. We cannot
573	 * read the reg when chip is asleep.
574	 */
575	if (AR_SREV_9300_20_OR_LATER(ah)) {
576		ah->WARegVal = REG_READ(ah, AR_WA);
577		ah->WARegVal |= (AR_WA_D3_L1_DISABLE |
578				 AR_WA_ASPM_TIMER_BASED_DISABLE);
579	}
580
581	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
582		ath_err(common, "Couldn't reset chip\n");
583		return -EIO;
584	}
585
586	if (AR_SREV_9565(ah)) {
587		ah->WARegVal |= AR_WA_BIT22;
588		REG_WRITE(ah, AR_WA, ah->WARegVal);
589	}
590
591	ath9k_hw_init_defaults(ah);
592	ath9k_hw_init_config(ah);
593
594	r = ath9k_hw_attach_ops(ah);
595	if (r)
596		return r;
597
598	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
599		ath_err(common, "Couldn't wakeup chip\n");
600		return -EIO;
601	}
602
603	if (NR_CPUS > 1 && ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
604		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
605		    ((AR_SREV_9160(ah) || AR_SREV_9280(ah) || AR_SREV_9287(ah)) &&
606		     !ah->is_pciexpress)) {
607			ah->config.serialize_regmode =
608				SER_REG_MODE_ON;
609		} else {
610			ah->config.serialize_regmode =
611				SER_REG_MODE_OFF;
612		}
613	}
614
615	ath_dbg(common, RESET, "serialize_regmode is %d\n",
616		ah->config.serialize_regmode);
617
618	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
619		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD >> 1;
620	else
621		ah->config.max_txtrig_level = MAX_TX_FIFO_THRESHOLD;
622
623	switch (ah->hw_version.macVersion) {
624	case AR_SREV_VERSION_5416_PCI:
625	case AR_SREV_VERSION_5416_PCIE:
626	case AR_SREV_VERSION_9160:
627	case AR_SREV_VERSION_9100:
628	case AR_SREV_VERSION_9280:
629	case AR_SREV_VERSION_9285:
630	case AR_SREV_VERSION_9287:
631	case AR_SREV_VERSION_9271:
632	case AR_SREV_VERSION_9300:
633	case AR_SREV_VERSION_9330:
634	case AR_SREV_VERSION_9485:
635	case AR_SREV_VERSION_9340:
636	case AR_SREV_VERSION_9462:
637	case AR_SREV_VERSION_9550:
638	case AR_SREV_VERSION_9565:
639		break;
640	default:
641		ath_err(common,
642			"Mac Chip Rev 0x%02x.%x is not supported by this driver\n",
643			ah->hw_version.macVersion, ah->hw_version.macRev);
644		return -EOPNOTSUPP;
645	}
646
647	if (AR_SREV_9271(ah) || AR_SREV_9100(ah) || AR_SREV_9340(ah) ||
648	    AR_SREV_9330(ah) || AR_SREV_9550(ah))
649		ah->is_pciexpress = false;
650
651	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
652	ath9k_hw_init_cal_settings(ah);
653
654	ah->ani_function = ATH9K_ANI_ALL;
655	if (!AR_SREV_9300_20_OR_LATER(ah))
656		ah->ani_function &= ~ATH9K_ANI_MRC_CCK;
657
658	if (!ah->is_pciexpress)
659		ath9k_hw_disablepcie(ah);
660
661	r = ath9k_hw_post_init(ah);
662	if (r)
663		return r;
664
665	ath9k_hw_init_mode_gain_regs(ah);
666	r = ath9k_hw_fill_cap_info(ah);
667	if (r)
668		return r;
669
670	r = ath9k_hw_init_macaddr(ah);
671	if (r) {
672		ath_err(common, "Failed to initialize MAC address\n");
673		return r;
674	}
675
676	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
677		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
678	else
679		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
680
681	if (AR_SREV_9330(ah))
682		ah->bb_watchdog_timeout_ms = 85;
683	else
684		ah->bb_watchdog_timeout_ms = 25;
685
686	common->state = ATH_HW_INITIALIZED;
687
688	return 0;
689}
690
691int ath9k_hw_init(struct ath_hw *ah)
692{
693	int ret;
694	struct ath_common *common = ath9k_hw_common(ah);
695
696	/* These are all chipsets from the AR5008/AR9001/AR9002/AR9003 hardware families */
697	switch (ah->hw_version.devid) {
698	case AR5416_DEVID_PCI:
699	case AR5416_DEVID_PCIE:
700	case AR5416_AR9100_DEVID:
701	case AR9160_DEVID_PCI:
702	case AR9280_DEVID_PCI:
703	case AR9280_DEVID_PCIE:
704	case AR9285_DEVID_PCIE:
705	case AR9287_DEVID_PCI:
706	case AR9287_DEVID_PCIE:
707	case AR2427_DEVID_PCIE:
708	case AR9300_DEVID_PCIE:
709	case AR9300_DEVID_AR9485_PCIE:
710	case AR9300_DEVID_AR9330:
711	case AR9300_DEVID_AR9340:
712	case AR9300_DEVID_QCA955X:
713	case AR9300_DEVID_AR9580:
714	case AR9300_DEVID_AR9462:
715	case AR9485_DEVID_AR1111:
716	case AR9300_DEVID_AR9565:
717		break;
718	default:
719		if (common->bus_ops->ath_bus_type == ATH_USB)
720			break;
721		ath_err(common, "Hardware device ID 0x%04x not supported\n",
722			ah->hw_version.devid);
723		return -EOPNOTSUPP;
724	}
725
726	ret = __ath9k_hw_init(ah);
727	if (ret) {
728		ath_err(common,
729			"Unable to initialize hardware; initialization status: %d\n",
730			ret);
731		return ret;
732	}
733
734	return 0;
735}
736EXPORT_SYMBOL(ath9k_hw_init);
737
738static void ath9k_hw_init_qos(struct ath_hw *ah)
739{
740	ENABLE_REGWRITE_BUFFER(ah);
741
742	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
743	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
744
745	REG_WRITE(ah, AR_QOS_NO_ACK,
746		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
747		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
748		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));
749
750	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
751	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
752	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
753	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
754	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
755
756	REGWRITE_BUFFER_FLUSH(ah);
757}
758
759u32 ar9003_get_pll_sqsum_dvc(struct ath_hw *ah)
760{
761	struct ath_common *common = ath9k_hw_common(ah);
762	int i = 0;
763
764	REG_CLR_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
765	udelay(100);
766	REG_SET_BIT(ah, PLL3, PLL3_DO_MEAS_MASK);
767
768	while ((REG_READ(ah, PLL4) & PLL4_MEAS_DONE) == 0) {
769
770		udelay(100);
771
772		if (WARN_ON_ONCE(i >= 100)) {
773			ath_err(common, "PLL4 measurement not done\n");
774			break;
775		}
776
777		i++;
778	}
779
780	return (REG_READ(ah, PLL3) & SQSUM_DVC_MASK) >> 3;
781}
782EXPORT_SYMBOL(ar9003_get_pll_sqsum_dvc);
783
784static void ath9k_hw_init_pll(struct ath_hw *ah,
785			      struct ath9k_channel *chan)
786{
787	u32 pll;
788
789	if (AR_SREV_9485(ah) || AR_SREV_9565(ah)) {
790		/* program BB PLL ki and kd value, ki=0x4, kd=0x40 */
791		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
792			      AR_CH0_BB_DPLL2_PLL_PWD, 0x1);
793		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
794			      AR_CH0_DPLL2_KD, 0x40);
795		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
796			      AR_CH0_DPLL2_KI, 0x4);
797
798		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
799			      AR_CH0_BB_DPLL1_REFDIV, 0x5);
800		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
801			      AR_CH0_BB_DPLL1_NINI, 0x58);
802		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL1,
803			      AR_CH0_BB_DPLL1_NFRAC, 0x0);
804
805		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
806			      AR_CH0_BB_DPLL2_OUTDIV, 0x1);
807		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
808			      AR_CH0_BB_DPLL2_LOCAL_PLL, 0x1);
809		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
810			      AR_CH0_BB_DPLL2_EN_NEGTRIG, 0x1);
811
812		/* program BB PLL phase_shift to 0x6 */
813		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
814			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x6);
815
816		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2,
817			      AR_CH0_BB_DPLL2_PLL_PWD, 0x0);
818		udelay(1000);
819	} else if (AR_SREV_9330(ah)) {
820		u32 ddr_dpll2, pll_control2, kd;
821
822		if (ah->is_clk_25mhz) {
823			ddr_dpll2 = 0x18e82f01;
824			pll_control2 = 0xe04a3d;
825			kd = 0x1d;
826		} else {
827			ddr_dpll2 = 0x19e82f01;
828			pll_control2 = 0x886666;
829			kd = 0x3d;
830		}
831
832		/* program DDR PLL ki and kd value */
833		REG_WRITE(ah, AR_CH0_DDR_DPLL2, ddr_dpll2);
834
835		/* program DDR PLL phase_shift */
836		REG_RMW_FIELD(ah, AR_CH0_DDR_DPLL3,
837			      AR_CH0_DPLL3_PHASE_SHIFT, 0x1);
838
839		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
840		udelay(1000);
841
842		/* program refdiv, nint, frac to RTC register */
843		REG_WRITE(ah, AR_RTC_PLL_CONTROL2, pll_control2);
844
845		/* program BB PLL kd and ki value */
846		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KD, kd);
847		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL2, AR_CH0_DPLL2_KI, 0x06);
848
849		/* program BB PLL phase_shift */
850		REG_RMW_FIELD(ah, AR_CH0_BB_DPLL3,
851			      AR_CH0_BB_DPLL3_PHASE_SHIFT, 0x1);
852	} else if (AR_SREV_9340(ah) || AR_SREV_9550(ah)) {
853		u32 regval, pll2_divint, pll2_divfrac, refdiv;
854
855		REG_WRITE(ah, AR_RTC_PLL_CONTROL, 0x1142c);
856		udelay(1000);
857
858		REG_SET_BIT(ah, AR_PHY_PLL_MODE, 0x1 << 16);
859		udelay(100);
860
861		if (ah->is_clk_25mhz) {
862			pll2_divint = 0x54;
863			pll2_divfrac = 0x1eb85;
864			refdiv = 3;
865		} else {
866			if (AR_SREV_9340(ah)) {
867				pll2_divint = 88;
868				pll2_divfrac = 0;
869				refdiv = 5;
870			} else {
871				pll2_divint = 0x11;
872				pll2_divfrac = 0x26666;
873				refdiv = 1;
874			}
875		}
876
877		regval = REG_READ(ah, AR_PHY_PLL_MODE);
878		regval |= (0x1 << 16);
879		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
880		udelay(100);
881
882		REG_WRITE(ah, AR_PHY_PLL_CONTROL, (refdiv << 27) |
883			  (pll2_divint << 18) | pll2_divfrac);
884		udelay(100);
885
886		regval = REG_READ(ah, AR_PHY_PLL_MODE);
887		if (AR_SREV_9340(ah))
888			regval = (regval & 0x80071fff) | (0x1 << 30) |
889				 (0x1 << 13) | (0x4 << 26) | (0x18 << 19);
890		else
891			regval = (regval & 0x80071fff) | (0x3 << 30) |
892				 (0x1 << 13) | (0x4 << 26) | (0x60 << 19);
893		REG_WRITE(ah, AR_PHY_PLL_MODE, regval);
894		REG_WRITE(ah, AR_PHY_PLL_MODE,
895			  REG_READ(ah, AR_PHY_PLL_MODE) & 0xfffeffff);
896		udelay(1000);
897	}
898
899	pll = ath9k_hw_compute_pll_control(ah, chan);
900	if (AR_SREV_9565(ah))
901		pll |= 0x40000;
902	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
903
904	if (AR_SREV_9485(ah) || AR_SREV_9340(ah) || AR_SREV_9330(ah) ||
905	    AR_SREV_9550(ah))
906		udelay(1000);
907
908	/* Switch the core clock for AR9271 to 117 MHz */
909	if (AR_SREV_9271(ah)) {
910		udelay(500);
911		REG_WRITE(ah, 0x50040, 0x304);
912	}
913
914	udelay(RTC_PLL_SETTLE_DELAY);
915
916	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
917
918	if (AR_SREV_9340(ah) || AR_SREV_9550(ah)) {
919		if (ah->is_clk_25mhz) {
920			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x17c << 1);
921			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f3d7);
922			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e7ae);
923		} else {
924			REG_WRITE(ah, AR_RTC_DERIVED_CLK, 0x261 << 1);
925			REG_WRITE(ah, AR_SLP32_MODE, 0x0010f400);
926			REG_WRITE(ah,  AR_SLP32_INC, 0x0001e800);
927		}
928		udelay(100);
929	}
930}
931
932static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
933					  enum nl80211_iftype opmode)
934{
935	u32 sync_default = AR_INTR_SYNC_DEFAULT;
936	u32 imr_reg = AR_IMR_TXERR |
937		AR_IMR_TXURN |
938		AR_IMR_RXERR |
939		AR_IMR_RXORN |
940		AR_IMR_BCNMISC;
941
942	if (AR_SREV_9340(ah) || AR_SREV_9550(ah))
943		sync_default &= ~AR_INTR_SYNC_HOST1_FATAL;
944
945	if (AR_SREV_9300_20_OR_LATER(ah)) {
946		imr_reg |= AR_IMR_RXOK_HP;
947		if (ah->config.rx_intr_mitigation)
948			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
949		else
950			imr_reg |= AR_IMR_RXOK_LP;
951
952	} else {
953		if (ah->config.rx_intr_mitigation)
954			imr_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
955		else
956			imr_reg |= AR_IMR_RXOK;
957	}
958
959	if (ah->config.tx_intr_mitigation)
960		imr_reg |= AR_IMR_TXINTM | AR_IMR_TXMINTR;
961	else
962		imr_reg |= AR_IMR_TXOK;
963
964	ENABLE_REGWRITE_BUFFER(ah);
965
966	REG_WRITE(ah, AR_IMR, imr_reg);
967	ah->imrs2_reg |= AR_IMR_S2_GTT;
968	REG_WRITE(ah, AR_IMR_S2, ah->imrs2_reg);
969
970	if (!AR_SREV_9100(ah)) {
971		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
972		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, sync_default);
973		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
974	}
975
976	REGWRITE_BUFFER_FLUSH(ah);
977
978	if (AR_SREV_9300_20_OR_LATER(ah)) {
979		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_ENABLE, 0);
980		REG_WRITE(ah, AR_INTR_PRIO_ASYNC_MASK, 0);
981		REG_WRITE(ah, AR_INTR_PRIO_SYNC_ENABLE, 0);
982		REG_WRITE(ah, AR_INTR_PRIO_SYNC_MASK, 0);
983	}
984}
985
986static void ath9k_hw_set_sifs_time(struct ath_hw *ah, u32 us)
987{
988	u32 val = ath9k_hw_mac_to_clks(ah, us - 2);
989	val = min(val, (u32) 0xFFFF);
990	REG_WRITE(ah, AR_D_GBL_IFS_SIFS, val);
991}
992
993static void ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
994{
995	u32 val = ath9k_hw_mac_to_clks(ah, us);
996	val = min(val, (u32) 0xFFFF);
997	REG_WRITE(ah, AR_D_GBL_IFS_SLOT, val);
998}
999
1000static void ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1001{
1002	u32 val = ath9k_hw_mac_to_clks(ah, us);
1003	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_ACK));
1004	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_ACK, val);
1005}
1006
1007static void ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1008{
1009	u32 val = ath9k_hw_mac_to_clks(ah, us);
1010	val = min(val, (u32) MS(0xFFFFFFFF, AR_TIME_OUT_CTS));
1011	REG_RMW_FIELD(ah, AR_TIME_OUT, AR_TIME_OUT_CTS, val);
1012}
1013
1014static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1015{
1016	if (tu > 0xFFFF) {
1017		ath_dbg(ath9k_hw_common(ah), XMIT, "bad global tx timeout %u\n",
1018			tu);
1019		ah->globaltxtimeout = (u32) -1;
1020		return false;
1021	} else {
1022		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1023		ah->globaltxtimeout = tu;
1024		return true;
1025	}
1026}
1027
1028void ath9k_hw_init_global_settings(struct ath_hw *ah)
1029{
1030	struct ath_common *common = ath9k_hw_common(ah);
1031	const struct ath9k_channel *chan = ah->curchan;
1032	int acktimeout, ctstimeout, ack_offset = 0;
1033	int slottime;
1034	int sifstime;
1035	int rx_lat = 0, tx_lat = 0, eifs = 0;
1036	u32 reg;
1037
1038	ath_dbg(ath9k_hw_common(ah), RESET, "ah->misc_mode 0x%x\n",
1039		ah->misc_mode);
1040
1041	if (!chan)
1042		return;
1043
1044	if (ah->misc_mode != 0)
1045		REG_SET_BIT(ah, AR_PCU_MISC, ah->misc_mode);
1046
1047	if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1048		rx_lat = 41;
1049	else
1050		rx_lat = 37;
1051	tx_lat = 54;
1052
1053	if (IS_CHAN_5GHZ(chan))
1054		sifstime = 16;
1055	else
1056		sifstime = 10;
1057
1058	if (IS_CHAN_HALF_RATE(chan)) {
1059		eifs = 175;
1060		rx_lat *= 2;
1061		tx_lat *= 2;
1062		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1063		    tx_lat += 11;
1064
1065		sifstime = 32;
1066		ack_offset = 16;
1067		slottime = 13;
1068	} else if (IS_CHAN_QUARTER_RATE(chan)) {
1069		eifs = 340;
1070		rx_lat = (rx_lat * 4) - 1;
1071		tx_lat *= 4;
1072		if (IS_CHAN_A_FAST_CLOCK(ah, chan))
1073		    tx_lat += 22;
1074
1075		sifstime = 64;
1076		ack_offset = 32;
1077		slottime = 21;
1078	} else {
1079		if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1080			eifs = AR_D_GBL_IFS_EIFS_ASYNC_FIFO;
1081			reg = AR_USEC_ASYNC_FIFO;
1082		} else {
1083			eifs = REG_READ(ah, AR_D_GBL_IFS_EIFS)/
1084				common->clockrate;
1085			reg = REG_READ(ah, AR_USEC);
1086		}
1087		rx_lat = MS(reg, AR_USEC_RX_LAT);
1088		tx_lat = MS(reg, AR_USEC_TX_LAT);
1089
1090		slottime = ah->slottime;
1091	}
1092
1093	/* As defined by IEEE 802.11-2007 17.3.8.6 */
1094	slottime += 3 * ah->coverage_class;
1095	acktimeout = slottime + sifstime + ack_offset;
1096	ctstimeout = acktimeout;
1097
1098	/*
1099	 * Workaround for early ACK timeouts, add an offset to match the
1100	 * initval's 64us ack timeout value. Use 48us for the CTS timeout.
1101	 * This was initially only meant to work around an issue with delayed
1102	 * BA frames in some implementations, but it has been found to fix ACK
1103	 * timeout issues in other cases as well.
1104	 */
1105	if (IS_CHAN_2GHZ(chan) &&
1106	    !IS_CHAN_HALF_RATE(chan) && !IS_CHAN_QUARTER_RATE(chan)) {
1107		acktimeout += 64 - sifstime - ah->slottime;
1108		ctstimeout += 48 - sifstime - ah->slottime;
1109	}
1110
1111	ath9k_hw_set_sifs_time(ah, sifstime);
1112	ath9k_hw_setslottime(ah, slottime);
1113	ath9k_hw_set_ack_timeout(ah, acktimeout);
1114	ath9k_hw_set_cts_timeout(ah, ctstimeout);
1115	if (ah->globaltxtimeout != (u32) -1)
1116		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1117
1118	REG_WRITE(ah, AR_D_GBL_IFS_EIFS, ath9k_hw_mac_to_clks(ah, eifs));
1119	REG_RMW(ah, AR_USEC,
1120		(common->clockrate - 1) |
1121		SM(rx_lat, AR_USEC_RX_LAT) |
1122		SM(tx_lat, AR_USEC_TX_LAT),
1123		AR_USEC_TX_LAT | AR_USEC_RX_LAT | AR_USEC_USEC);
1124
1125}
1126EXPORT_SYMBOL(ath9k_hw_init_global_settings);
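/*
 * Example of the timeout math above: on a full-rate 2 GHz channel with
 * coverage_class 0 (slottime 9 us, sifstime 10 us) the base ACK timeout
 * is 9 + 10 = 19 us; the 2 GHz workaround then adds 64 - 10 - 9 = 45 us,
 * so AR_TIME_OUT ends up programmed with the initvals' 64 us (and 48 us
 * for CTS).
 */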
1127
1128void ath9k_hw_deinit(struct ath_hw *ah)
1129{
1130	struct ath_common *common = ath9k_hw_common(ah);
1131
1132	if (common->state < ATH_HW_INITIALIZED)
1133		return;
1134
1135	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1136}
1137EXPORT_SYMBOL(ath9k_hw_deinit);
1138
1139/*******/
1140/* INI */
1141/*******/
1142
1143u32 ath9k_regd_get_ctl(struct ath_regulatory *reg, struct ath9k_channel *chan)
1144{
1145	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1146
1147	if (IS_CHAN_2GHZ(chan))
1148		ctl |= CTL_11G;
1149	else
1150		ctl |= CTL_11A;
1151
1152	return ctl;
1153}
1154
1155/****************************************/
1156/* Reset and Channel Switching Routines */
1157/****************************************/
1158
1159static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1160{
1161	struct ath_common *common = ath9k_hw_common(ah);
1162	int txbuf_size;
1163
1164	ENABLE_REGWRITE_BUFFER(ah);
1165
1166	/*
1167	 * set AHB_MODE not to do cacheline prefetches
1168	 */
1169	if (!AR_SREV_9300_20_OR_LATER(ah))
1170		REG_SET_BIT(ah, AR_AHB_MODE, AR_AHB_PREFETCH_RD_EN);
1171
1172	/*
1173	 * let mac dma reads be in 128 byte chunks
1174	 */
1175	REG_RMW(ah, AR_TXCFG, AR_TXCFG_DMASZ_128B, AR_TXCFG_DMASZ_MASK);
1176
1177	REGWRITE_BUFFER_FLUSH(ah);
1178
1179	/*
1180	 * Restore TX Trigger Level to its pre-reset value.
1181	 * The initial value depends on whether aggregation is enabled, and is
1182	 * adjusted whenever underruns are detected.
1183	 */
1184	if (!AR_SREV_9300_20_OR_LATER(ah))
1185		REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1186
1187	ENABLE_REGWRITE_BUFFER(ah);
1188
1189	/*
1190	 * let mac dma writes be in 128 byte chunks
1191	 */
1192	REG_RMW(ah, AR_RXCFG, AR_RXCFG_DMASZ_128B, AR_RXCFG_DMASZ_MASK);
1193
1194	/*
1195	 * Setup receive FIFO threshold to hold off TX activities
1196	 */
1197	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1198
1199	if (AR_SREV_9300_20_OR_LATER(ah)) {
1200		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_HP, 0x1);
1201		REG_RMW_FIELD(ah, AR_RXBP_THRESH, AR_RXBP_THRESH_LP, 0x1);
1202
1203		ath9k_hw_set_rx_bufsize(ah, common->rx_bufsize -
1204			ah->caps.rx_status_len);
1205	}
1206
1207	/*
1208	 * reduce the number of usable entries in PCU TXBUF to avoid
1209	 * wrap around issues.
1210	 */
1211	if (AR_SREV_9285(ah)) {
1212		/* For AR9285 the number of FIFOs is reduced to half,
1213		 * so set the usable tx buf size to half as well to
1214		 * avoid data/delimiter underruns
1215		 */
1216		txbuf_size = AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE;
1217	} else if (AR_SREV_9340_13_OR_LATER(ah)) {
1218		/* Uses fewer entries for AR934x v1.3+ to prevent rx overruns */
1219		txbuf_size = AR_9340_PCU_TXBUF_CTRL_USABLE_SIZE;
1220	} else {
1221		txbuf_size = AR_PCU_TXBUF_CTRL_USABLE_SIZE;
1222	}
1223
1224	if (!AR_SREV_9271(ah))
1225		REG_WRITE(ah, AR_PCU_TXBUF_CTRL, txbuf_size);
1226
1227	REGWRITE_BUFFER_FLUSH(ah);
1228
1229	if (AR_SREV_9300_20_OR_LATER(ah))
1230		ath9k_hw_reset_txstatus_ring(ah);
1231}
1232
1233static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1234{
1235	u32 mask = AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC;
1236	u32 set = AR_STA_ID1_KSRCH_MODE;
1237
1238	switch (opmode) {
1239	case NL80211_IFTYPE_ADHOC:
1240		set |= AR_STA_ID1_ADHOC;
1241		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1242		break;
1243	case NL80211_IFTYPE_MESH_POINT:
1244	case NL80211_IFTYPE_AP:
1245		set |= AR_STA_ID1_STA_AP;
1246		/* fall through */
1247	case NL80211_IFTYPE_STATION:
1248		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1249		break;
1250	default:
1251		if (!ah->is_monitoring)
1252			set = 0;
1253		break;
1254	}
1255	REG_RMW(ah, AR_STA_ID1, set, mask);
1256}
1257
1258void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah, u32 coef_scaled,
1259				   u32 *coef_mantissa, u32 *coef_exponent)
1260{
1261	u32 coef_exp, coef_man;
1262
1263	for (coef_exp = 31; coef_exp > 0; coef_exp--)
1264		if ((coef_scaled >> coef_exp) & 0x1)
1265			break;
1266
1267	coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1268
1269	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1270
1271	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1272	*coef_exponent = coef_exp - 16;
1273}
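/*
 * Rough worked example, assuming COEF_SCALE_S is 24 as in hw.h: for
 * coef_scaled = 1 << 22 the highest set bit is 22, so
 * coef_exp = 14 - (22 - 24) = 16; after rounding, *coef_mantissa is
 * 0x4000 and *coef_exponent is 16 - 16 = 0.
 */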
1274
1275static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1276{
1277	u32 rst_flags;
1278	u32 tmpReg;
1279
1280	if (AR_SREV_9100(ah)) {
1281		REG_RMW_FIELD(ah, AR_RTC_DERIVED_CLK,
1282			      AR_RTC_DERIVED_CLK_PERIOD, 1);
1283		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1284	}
1285
1286	ENABLE_REGWRITE_BUFFER(ah);
1287
1288	if (AR_SREV_9300_20_OR_LATER(ah)) {
1289		REG_WRITE(ah, AR_WA, ah->WARegVal);
1290		udelay(10);
1291	}
1292
1293	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1294		  AR_RTC_FORCE_WAKE_ON_INT);
1295
1296	if (AR_SREV_9100(ah)) {
1297		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1298			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1299	} else {
1300		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1301		if (AR_SREV_9340(ah))
1302			tmpReg &= AR9340_INTR_SYNC_LOCAL_TIMEOUT;
1303		else
1304			tmpReg &= AR_INTR_SYNC_LOCAL_TIMEOUT |
1305				  AR_INTR_SYNC_RADM_CPL_TIMEOUT;
1306
1307		if (tmpReg) {
1308			u32 val;
1309			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1310
1311			val = AR_RC_HOSTIF;
1312			if (!AR_SREV_9300_20_OR_LATER(ah))
1313				val |= AR_RC_AHB;
1314			REG_WRITE(ah, AR_RC, val);
1315
1316		} else if (!AR_SREV_9300_20_OR_LATER(ah))
1317			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1318
1319		rst_flags = AR_RTC_RC_MAC_WARM;
1320		if (type == ATH9K_RESET_COLD)
1321			rst_flags |= AR_RTC_RC_MAC_COLD;
1322	}
1323
1324	if (AR_SREV_9330(ah)) {
1325		int npend = 0;
1326		int i;
1327
1328		/* AR9330 WAR:
1329		 * call external reset function to reset WMAC if:
1330		 * - doing a cold reset
1331		 * - we have pending frames in the TX queues
1332		 */
1333
1334		for (i = 0; i < AR_NUM_QCU; i++) {
1335			npend = ath9k_hw_numtxpending(ah, i);
1336			if (npend)
1337				break;
1338		}
1339
1340		if (ah->external_reset &&
1341		    (npend || type == ATH9K_RESET_COLD)) {
1342			int reset_err = 0;
1343
1344			ath_dbg(ath9k_hw_common(ah), RESET,
1345				"reset MAC via external reset\n");
1346
1347			reset_err = ah->external_reset();
1348			if (reset_err) {
1349				ath_err(ath9k_hw_common(ah),
1350					"External reset failed, err=%d\n",
1351					reset_err);
1352				return false;
1353			}
1354
1355			REG_WRITE(ah, AR_RTC_RESET, 1);
1356		}
1357	}
1358
1359	if (ath9k_hw_mci_is_enabled(ah))
1360		ar9003_mci_check_gpm_offset(ah);
1361
1362	REG_WRITE(ah, AR_RTC_RC, rst_flags);
1363
1364	REGWRITE_BUFFER_FLUSH(ah);
1365
1366	udelay(50);
1367
1368	REG_WRITE(ah, AR_RTC_RC, 0);
1369	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1370		ath_dbg(ath9k_hw_common(ah), RESET, "RTC stuck in MAC reset\n");
1371		return false;
1372	}
1373
1374	if (!AR_SREV_9100(ah))
1375		REG_WRITE(ah, AR_RC, 0);
1376
1377	if (AR_SREV_9100(ah))
1378		udelay(50);
1379
1380	return true;
1381}
1382
1383static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1384{
1385	ENABLE_REGWRITE_BUFFER(ah);
1386
1387	if (AR_SREV_9300_20_OR_LATER(ah)) {
1388		REG_WRITE(ah, AR_WA, ah->WARegVal);
1389		udelay(10);
1390	}
1391
1392	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1393		  AR_RTC_FORCE_WAKE_ON_INT);
1394
1395	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1396		REG_WRITE(ah, AR_RC, AR_RC_AHB);
1397
1398	REG_WRITE(ah, AR_RTC_RESET, 0);
1399
1400	REGWRITE_BUFFER_FLUSH(ah);
1401
1402	if (!AR_SREV_9300_20_OR_LATER(ah))
1403		udelay(2);
1404
1405	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
1406		REG_WRITE(ah, AR_RC, 0);
1407
1408	REG_WRITE(ah, AR_RTC_RESET, 1);
1409
1410	if (!ath9k_hw_wait(ah,
1411			   AR_RTC_STATUS,
1412			   AR_RTC_STATUS_M,
1413			   AR_RTC_STATUS_ON,
1414			   AH_WAIT_TIMEOUT)) {
1415		ath_dbg(ath9k_hw_common(ah), RESET, "RTC not waking up\n");
1416		return false;
1417	}
1418
1419	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1420}
1421
1422static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1423{
1424	bool ret = false;
1425
1426	if (AR_SREV_9300_20_OR_LATER(ah)) {
1427		REG_WRITE(ah, AR_WA, ah->WARegVal);
1428		udelay(10);
1429	}
1430
1431	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1432		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1433
1434	if (!ah->reset_power_on)
1435		type = ATH9K_RESET_POWER_ON;
1436
1437	switch (type) {
1438	case ATH9K_RESET_POWER_ON:
1439		ret = ath9k_hw_set_reset_power_on(ah);
1440		if (ret)
1441			ah->reset_power_on = true;
1442		break;
1443	case ATH9K_RESET_WARM:
1444	case ATH9K_RESET_COLD:
1445		ret = ath9k_hw_set_reset(ah, type);
1446		break;
1447	default:
1448		break;
1449	}
1450
1451	return ret;
1452}
1453
1454static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1455				struct ath9k_channel *chan)
1456{
1457	int reset_type = ATH9K_RESET_WARM;
1458
1459	if (AR_SREV_9280(ah)) {
1460		if (ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
1461			reset_type = ATH9K_RESET_POWER_ON;
1462		else
1463			reset_type = ATH9K_RESET_COLD;
1464	} else if (ah->chip_fullsleep || REG_READ(ah, AR_Q_TXE) ||
1465		   (REG_READ(ah, AR_CR) & AR_CR_RXE))
1466		reset_type = ATH9K_RESET_COLD;
1467
1468	if (!ath9k_hw_set_reset_reg(ah, reset_type))
1469		return false;
1470
1471	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1472		return false;
1473
1474	ah->chip_fullsleep = false;
1475
1476	if (AR_SREV_9330(ah))
1477		ar9003_hw_internal_regulator_apply(ah);
1478	ath9k_hw_init_pll(ah, chan);
1479
1480	return true;
1481}
1482
1483static bool ath9k_hw_channel_change(struct ath_hw *ah,
1484				    struct ath9k_channel *chan)
1485{
1486	struct ath_common *common = ath9k_hw_common(ah);
1487	struct ath9k_hw_capabilities *pCap = &ah->caps;
1488	bool band_switch = false, mode_diff = false;
1489	u8 ini_reloaded = 0;
1490	u32 qnum;
1491	int r;
1492
1493	if (pCap->hw_caps & ATH9K_HW_CAP_FCC_BAND_SWITCH) {
1494		u32 flags_diff = chan->channelFlags ^ ah->curchan->channelFlags;
1495		band_switch = !!(flags_diff & CHANNEL_5GHZ);
1496		mode_diff = !!(flags_diff & ~CHANNEL_HT);
1497	}
1498
1499	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1500		if (ath9k_hw_numtxpending(ah, qnum)) {
1501			ath_dbg(common, QUEUE,
1502				"Transmit frames pending on queue %d\n", qnum);
1503			return false;
1504		}
1505	}
1506
1507	if (!ath9k_hw_rfbus_req(ah)) {
1508		ath_err(common, "Could not kill baseband RX\n");
1509		return false;
1510	}
1511
1512	if (band_switch || mode_diff) {
1513		ath9k_hw_mark_phy_inactive(ah);
1514		udelay(5);
1515
1516		if (band_switch)
1517			ath9k_hw_init_pll(ah, chan);
1518
1519		if (ath9k_hw_fast_chan_change(ah, chan, &ini_reloaded)) {
1520			ath_err(common, "Failed to do fast channel change\n");
1521			return false;
1522		}
1523	}
1524
1525	ath9k_hw_set_channel_regs(ah, chan);
1526
1527	r = ath9k_hw_rf_set_freq(ah, chan);
1528	if (r) {
1529		ath_err(common, "Failed to set channel\n");
1530		return false;
1531	}
1532	ath9k_hw_set_clockrate(ah);
1533	ath9k_hw_apply_txpower(ah, chan, false);
1534
1535	ath9k_hw_set_delta_slope(ah, chan);
1536	ath9k_hw_spur_mitigate_freq(ah, chan);
1537
1538	if (band_switch || ini_reloaded)
1539		ah->eep_ops->set_board_values(ah, chan);
1540
1541	ath9k_hw_init_bb(ah, chan);
1542	ath9k_hw_rfbus_done(ah);
1543
1544	if (band_switch || ini_reloaded) {
1545		ah->ah_flags |= AH_FASTCC;
1546		ath9k_hw_init_cal(ah, chan);
1547		ah->ah_flags &= ~AH_FASTCC;
1548	}
1549
1550	return true;
1551}
1552
1553static void ath9k_hw_apply_gpio_override(struct ath_hw *ah)
1554{
1555	u32 gpio_mask = ah->gpio_mask;
1556	int i;
1557
1558	for (i = 0; gpio_mask; i++, gpio_mask >>= 1) {
1559		if (!(gpio_mask & 1))
1560			continue;
1561
1562		ath9k_hw_cfg_output(ah, i, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
1563		ath9k_hw_set_gpio(ah, i, !!(ah->gpio_val & BIT(i)));
1564	}
1565}
1566
1567static bool ath9k_hw_check_dcs(u32 dma_dbg, u32 num_dcu_states,
1568			       int *hang_state, int *hang_pos)
1569{
1570	static u32 dcu_chain_state[] = {5, 6, 9}; /* DCU chain stuck states */
1571	u32 chain_state, dcs_pos, i;
1572
1573	for (dcs_pos = 0; dcs_pos < num_dcu_states; dcs_pos++) {
1574		chain_state = (dma_dbg >> (5 * dcs_pos)) & 0x1f;
1575		for (i = 0; i < 3; i++) {
1576			if (chain_state == dcu_chain_state[i]) {
1577				*hang_state = chain_state;
1578				*hang_pos = dcs_pos;
1579				return true;
1580			}
1581		}
1582	}
1583	return false;
1584}
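/*
 * Example: a debug word of 0x00000126 decodes to chain state
 * 0x126 & 0x1f == 6 at dcs_pos 0, which is one of the stuck states, so
 * the helper reports a hang with hang_state 6 at position 0.
 */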
1585
1586#define DCU_COMPLETE_STATE        1
1587#define DCU_COMPLETE_STATE_MASK 0x3
1588#define NUM_STATUS_READS         50
1589static bool ath9k_hw_detect_mac_hang(struct ath_hw *ah)
1590{
1591	u32 chain_state, comp_state, dcs_reg = AR_DMADBG_4;
1592	u32 i, hang_pos, hang_state, num_state = 6;
1593
1594	comp_state = REG_READ(ah, AR_DMADBG_6);
1595
1596	if ((comp_state & DCU_COMPLETE_STATE_MASK) != DCU_COMPLETE_STATE) {
1597		ath_dbg(ath9k_hw_common(ah), RESET,
1598			"MAC Hang signature not found at DCU complete\n");
1599		return false;
1600	}
1601
1602	chain_state = REG_READ(ah, dcs_reg);
1603	if (ath9k_hw_check_dcs(chain_state, num_state, &hang_state, &hang_pos))
1604		goto hang_check_iter;
1605
1606	dcs_reg = AR_DMADBG_5;
1607	num_state = 4;
1608	chain_state = REG_READ(ah, dcs_reg);
1609	if (ath9k_hw_check_dcs(chain_state, num_state, &hang_state, &hang_pos))
1610		goto hang_check_iter;
1611
1612	ath_dbg(ath9k_hw_common(ah), RESET,
1613		"MAC Hang signature 1 not found\n");
1614	return false;
1615
1616hang_check_iter:
1617	ath_dbg(ath9k_hw_common(ah), RESET,
1618		"DCU registers: chain %08x complete %08x Hang: state %d pos %d\n",
1619		chain_state, comp_state, hang_state, hang_pos);
1620
1621	for (i = 0; i < NUM_STATUS_READS; i++) {
1622		chain_state = REG_READ(ah, dcs_reg);
1623		chain_state = (chain_state >> (5 * hang_pos)) & 0x1f;
1624		comp_state = REG_READ(ah, AR_DMADBG_6);
1625
1626		if (((comp_state & DCU_COMPLETE_STATE_MASK) !=
1627					DCU_COMPLETE_STATE) ||
1628		    (chain_state != hang_state))
1629			return false;
1630	}
1631
1632	ath_dbg(ath9k_hw_common(ah), RESET, "MAC Hang signature 1 found\n");
1633
1634	return true;
1635}
1636
1637void ath9k_hw_check_nav(struct ath_hw *ah)
1638{
1639	struct ath_common *common = ath9k_hw_common(ah);
1640	u32 val;
1641
1642	val = REG_READ(ah, AR_NAV);
1643	if (val != 0xdeadbeef && val > 0x7fff) {
1644		ath_dbg(common, BSTUCK, "Abnormal NAV: 0x%x\n", val);
1645		REG_WRITE(ah, AR_NAV, 0);
1646	}
1647}
1648EXPORT_SYMBOL(ath9k_hw_check_nav);
1649
1650bool ath9k_hw_check_alive(struct ath_hw *ah)
1651{
1652	int count = 50;
1653	u32 reg;
1654
1655	if (AR_SREV_9300(ah))
1656		return !ath9k_hw_detect_mac_hang(ah);
1657
1658	if (AR_SREV_9285_12_OR_LATER(ah))
1659		return true;
1660
1661	do {
1662		reg = REG_READ(ah, AR_OBS_BUS_1);
1663
1664		if ((reg & 0x7E7FFFEF) == 0x00702400)
1665			continue;
1666
1667		switch (reg & 0x7E000B00) {
1668		case 0x1E000000:
1669		case 0x52000B00:
1670		case 0x18000B00:
1671			continue;
1672		default:
1673			return true;
1674		}
1675	} while (count-- > 0);
1676
1677	return false;
1678}
1679EXPORT_SYMBOL(ath9k_hw_check_alive);
1680
1681static void ath9k_hw_init_mfp(struct ath_hw *ah)
1682{
1683	/* Setup MFP options for CCMP */
1684	if (AR_SREV_9280_20_OR_LATER(ah)) {
1685		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
1686		 * frames when constructing CCMP AAD. */
1687		REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
1688			      0xc7ff);
1689		ah->sw_mgmt_crypto = false;
1690	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
1691		/* Disable hardware crypto for management frames */
1692		REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
1693			    AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
1694		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1695			    AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
1696		ah->sw_mgmt_crypto = true;
1697	} else {
1698		ah->sw_mgmt_crypto = true;
1699	}
1700}
1701
1702static void ath9k_hw_reset_opmode(struct ath_hw *ah,
1703				  u32 macStaId1, u32 saveDefAntenna)
1704{
1705	struct ath_common *common = ath9k_hw_common(ah);
1706
1707	ENABLE_REGWRITE_BUFFER(ah);
1708
1709	REG_RMW(ah, AR_STA_ID1, macStaId1
1710		  | AR_STA_ID1_RTS_USE_DEF
1711		  | (ah->config.ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
1712		  | ah->sta_id1_defaults,
1713		  ~AR_STA_ID1_SADH_MASK);
1714	ath_hw_setbssidmask(common);
1715	REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
1716	ath9k_hw_write_associd(ah);
1717	REG_WRITE(ah, AR_ISR, ~0);
1718	REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
1719
1720	REGWRITE_BUFFER_FLUSH(ah);
1721
1722	ath9k_hw_set_operating_mode(ah, ah->opmode);
1723}
1724
1725static void ath9k_hw_init_queues(struct ath_hw *ah)
1726{
1727	int i;
1728
1729	ENABLE_REGWRITE_BUFFER(ah);
1730
1731	for (i = 0; i < AR_NUM_DCU; i++)
1732		REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
1733
1734	REGWRITE_BUFFER_FLUSH(ah);
1735
1736	ah->intr_txqs = 0;
1737	for (i = 0; i < ATH9K_NUM_TX_QUEUES; i++)
1738		ath9k_hw_resettxqueue(ah, i);
1739}
1740
1741/*
1742 * For big endian systems turn on swapping for descriptors
1743 */
1744static void ath9k_hw_init_desc(struct ath_hw *ah)
1745{
1746	struct ath_common *common = ath9k_hw_common(ah);
1747
1748	if (AR_SREV_9100(ah)) {
1749		u32 mask;
1750		mask = REG_READ(ah, AR_CFG);
1751		if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
1752			ath_dbg(common, RESET, "CFG Byte Swap Set 0x%x\n",
1753				mask);
1754		} else {
1755			mask = INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
1756			REG_WRITE(ah, AR_CFG, mask);
1757			ath_dbg(common, RESET, "Setting CFG 0x%x\n",
1758				REG_READ(ah, AR_CFG));
1759		}
1760	} else {
1761		if (common->bus_ops->ath_bus_type == ATH_USB) {
1762			/* Configure AR9271 target WLAN */
1763			if (AR_SREV_9271(ah))
1764				REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
1765			else
1766				REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1767		}
1768#ifdef __BIG_ENDIAN
1769		else if (AR_SREV_9330(ah) || AR_SREV_9340(ah) ||
1770			 AR_SREV_9550(ah))
1771			REG_RMW(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB, 0);
1772		else
1773			REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
1774#endif
1775	}
1776}
1777
1778/*
1779 * Fast channel change:
1780 * (Change synthesizer based on channel freq without resetting chip)
1781 */
1782static int ath9k_hw_do_fastcc(struct ath_hw *ah, struct ath9k_channel *chan)
1783{
1784	struct ath_common *common = ath9k_hw_common(ah);
1785	struct ath9k_hw_capabilities *pCap = &ah->caps;
1786	int ret;
1787
1788	if (AR_SREV_9280(ah) && common->bus_ops->ath_bus_type == ATH_PCI)
1789		goto fail;
1790
1791	if (ah->chip_fullsleep)
1792		goto fail;
1793
1794	if (!ah->curchan)
1795		goto fail;
1796
1797	if (chan->channel == ah->curchan->channel)
1798		goto fail;
1799
1800	if ((ah->curchan->channelFlags | chan->channelFlags) &
1801	    (CHANNEL_HALF | CHANNEL_QUARTER))
1802		goto fail;
1803
1804	/*
1805	 * If cross-band fcc is not supported, bail out if channelFlags differ.
1806	 */
1807	if (!(pCap->hw_caps & ATH9K_HW_CAP_FCC_BAND_SWITCH) &&
1808	    ((chan->channelFlags ^ ah->curchan->channelFlags) & ~CHANNEL_HT))
1809		goto fail;
1810
1811	if (!ath9k_hw_check_alive(ah))
1812		goto fail;
1813
1814	/*
1815	 * For AR9462, make sure that the calibration data needed for
1816	 * re-use are present.
1817	 */
1818	if (AR_SREV_9462(ah) && (ah->caldata &&
1819				 (!test_bit(TXIQCAL_DONE, &ah->caldata->cal_flags) ||
1820				  !test_bit(TXCLCAL_DONE, &ah->caldata->cal_flags) ||
1821				  !test_bit(RTT_DONE, &ah->caldata->cal_flags))))
1822		goto fail;
1823
1824	ath_dbg(common, RESET, "FastChannelChange for %d -> %d\n",
1825		ah->curchan->channel, chan->channel);
1826
1827	ret = ath9k_hw_channel_change(ah, chan);
1828	if (!ret)
1829		goto fail;
1830
1831	if (ath9k_hw_mci_is_enabled(ah))
1832		ar9003_mci_2g5g_switch(ah, false);
1833
1834	ath9k_hw_loadnf(ah, ah->curchan);
1835	ath9k_hw_start_nfcal(ah, true);
1836
1837	if (AR_SREV_9271(ah))
1838		ar9002_hw_load_ani_reg(ah, chan);
1839
1840	return 0;
1841fail:
1842	return -EINVAL;
1843}
1844
1845int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
1846		   struct ath9k_hw_cal_data *caldata, bool fastcc)
1847{
1848	struct ath_common *common = ath9k_hw_common(ah);
1849	struct timespec ts;
1850	u32 saveLedState;
1851	u32 saveDefAntenna;
1852	u32 macStaId1;
1853	u64 tsf = 0;
1854	s64 usec = 0;
1855	int r;
1856	bool start_mci_reset = false;
1857	bool save_fullsleep = ah->chip_fullsleep;
1858
1859	if (ath9k_hw_mci_is_enabled(ah)) {
1860		start_mci_reset = ar9003_mci_start_reset(ah, chan);
1861		if (start_mci_reset)
1862			return 0;
1863	}
1864
1865	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1866		return -EIO;
1867
1868	if (ah->curchan && !ah->chip_fullsleep)
1869		ath9k_hw_getnf(ah, ah->curchan);
1870
1871	ah->caldata = caldata;
1872	if (caldata && (chan->channel != caldata->channel ||
1873			chan->channelFlags != caldata->channelFlags)) {
1874		/* Operating channel changed, reset channel calibration data */
1875		memset(caldata, 0, sizeof(*caldata));
1876		ath9k_init_nfcal_hist_buffer(ah, chan);
1877	} else if (caldata) {
1878		clear_bit(PAPRD_PACKET_SENT, &caldata->cal_flags);
1879	}
1880	ah->noise = ath9k_hw_getchan_noise(ah, chan, chan->noisefloor);
1881
1882	if (fastcc) {
1883		r = ath9k_hw_do_fastcc(ah, chan);
1884		if (!r)
1885			return r;
1886	}
1887
1888	if (ath9k_hw_mci_is_enabled(ah))
1889		ar9003_mci_stop_bt(ah, save_fullsleep);
1890
1891	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
1892	if (saveDefAntenna == 0)
1893		saveDefAntenna = 1;
1894
1895	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
1896
1897	/* Save TSF before chip reset, a cold reset clears it */
1898	tsf = ath9k_hw_gettsf64(ah);
1899	getrawmonotonic(&ts);
1900	usec = ts.tv_sec * 1000000ULL + ts.tv_nsec / 1000;
1901
1902	saveLedState = REG_READ(ah, AR_CFG_LED) &
1903		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
1904		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
1905
1906	ath9k_hw_mark_phy_inactive(ah);
1907
1908	ah->paprd_table_write_done = false;
1909
1910	/* Only required on the first reset */
1911	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1912		REG_WRITE(ah,
1913			  AR9271_RESET_POWER_DOWN_CONTROL,
1914			  AR9271_RADIO_RF_RST);
1915		udelay(50);
1916	}
1917
1918	if (!ath9k_hw_chip_reset(ah, chan)) {
1919		ath_err(common, "Chip reset failed\n");
1920		return -EINVAL;
1921	}
1922
1923	/* Only required on the first reset */
1924	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
1925		ah->htc_reset_init = false;
1926		REG_WRITE(ah,
1927			  AR9271_RESET_POWER_DOWN_CONTROL,
1928			  AR9271_GATE_MAC_CTL);
1929		udelay(50);
1930	}
1931
1932	/* Restore TSF */
1933	getrawmonotonic(&ts);
1934	usec = ts.tv_sec * 1000000ULL + ts.tv_nsec / 1000 - usec;
1935	ath9k_hw_settsf64(ah, tsf + usec);
1936
1937	if (AR_SREV_9280_20_OR_LATER(ah))
1938		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
1939
1940	if (!AR_SREV_9300_20_OR_LATER(ah))
1941		ar9002_hw_enable_async_fifo(ah);
1942
1943	r = ath9k_hw_process_ini(ah, chan);
1944	if (r)
1945		return r;
1946
1947	ath9k_hw_set_rfmode(ah, chan);
1948
1949	if (ath9k_hw_mci_is_enabled(ah))
1950		ar9003_mci_reset(ah, false, IS_CHAN_2GHZ(chan), save_fullsleep);
1951
1952	/*
1953	 * Some AR91xx SoC devices frequently fail to accept TSF writes
1954	 * right after the chip reset. When that happens, write a new
1955	 * value after the initvals have been applied, with an offset
1956	 * based on the measured time difference.
1957	 */
1958	if (AR_SREV_9100(ah) && (ath9k_hw_gettsf64(ah) < tsf)) {
1959		tsf += 1500;
1960		ath9k_hw_settsf64(ah, tsf);
1961	}
1962
1963	ath9k_hw_init_mfp(ah);
1964
1965	ath9k_hw_set_delta_slope(ah, chan);
1966	ath9k_hw_spur_mitigate_freq(ah, chan);
1967	ah->eep_ops->set_board_values(ah, chan);
1968
1969	ath9k_hw_reset_opmode(ah, macStaId1, saveDefAntenna);
1970
1971	r = ath9k_hw_rf_set_freq(ah, chan);
1972	if (r)
1973		return r;
1974
1975	ath9k_hw_set_clockrate(ah);
1976
1977	ath9k_hw_init_queues(ah);
1978	ath9k_hw_init_interrupt_masks(ah, ah->opmode);
1979	ath9k_hw_ani_cache_ini_regs(ah);
1980	ath9k_hw_init_qos(ah);
1981
1982	if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
1983		ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
1984
1985	ath9k_hw_init_global_settings(ah);
1986
1987	if (AR_SREV_9287(ah) && AR_SREV_9287_13_OR_LATER(ah)) {
1988		REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
1989			    AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
1990		REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
1991			      AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
1992		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
1993			    AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
1994	}
1995
1996	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PRESERVE_SEQNUM);
1997
1998	ath9k_hw_set_dma(ah);
1999
2000	if (!ath9k_hw_mci_is_enabled(ah))
2001		REG_WRITE(ah, AR_OBS, 8);
2002
2003	if (ah->config.rx_intr_mitigation) {
2004		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2005		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2006	}
2007
2008	if (ah->config.tx_intr_mitigation) {
2009		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_LAST, 300);
2010		REG_RMW_FIELD(ah, AR_TIMT, AR_TIMT_FIRST, 750);
2011	}
2012
2013	ath9k_hw_init_bb(ah, chan);
2014
2015	if (caldata) {
2016		clear_bit(TXIQCAL_DONE, &caldata->cal_flags);
2017		clear_bit(TXCLCAL_DONE, &caldata->cal_flags);
2018	}
2019	if (!ath9k_hw_init_cal(ah, chan))
2020		return -EIO;
2021
2022	if (ath9k_hw_mci_is_enabled(ah) && ar9003_mci_end_reset(ah, chan, caldata))
2023		return -EIO;
2024
2025	ENABLE_REGWRITE_BUFFER(ah);
2026
2027	ath9k_hw_restore_chainmask(ah);
2028	REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2029
2030	REGWRITE_BUFFER_FLUSH(ah);
2031
2032	ath9k_hw_init_desc(ah);
2033
2034	if (ath9k_hw_btcoex_is_enabled(ah))
2035		ath9k_hw_btcoex_enable(ah);
2036
2037	if (ath9k_hw_mci_is_enabled(ah))
2038		ar9003_mci_check_bt(ah);
2039
2040	ath9k_hw_loadnf(ah, chan);
2041	ath9k_hw_start_nfcal(ah, true);
2042
2043	if (AR_SREV_9300_20_OR_LATER(ah)) {
2044		ar9003_hw_bb_watchdog_config(ah);
2045		ar9003_hw_disable_phy_restart(ah);
2046	}
2047
2048	ath9k_hw_apply_gpio_override(ah);
2049
2050	if (AR_SREV_9565(ah) && common->bt_ant_diversity)
2051		REG_SET_BIT(ah, AR_BTCOEX_WL_LNADIV, AR_BTCOEX_WL_LNADIV_FORCE_ON);
2052
2053	return 0;
2054}
2055EXPORT_SYMBOL(ath9k_hw_reset);
2056
2057/******************************/
2058/* Power Management (Chipset) */
2059/******************************/
2060
2061/*
2062 * Notify that power management is disabled in self-generated frames.
2063 * If requested, force the chip to sleep.
2064 */
2065static void ath9k_set_power_sleep(struct ath_hw *ah)
2066{
2067	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2068
2069	if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
2070		REG_CLR_BIT(ah, AR_TIMER_MODE, 0xff);
2071		REG_CLR_BIT(ah, AR_NDP2_TIMER_MODE, 0xff);
2072		REG_CLR_BIT(ah, AR_SLP32_INC, 0xfffff);
2073		/* XXX: is this required for the WLAN-only case? */
2074		REG_WRITE(ah, AR_MCI_INTERRUPT_RX_MSG_EN, 0);
2075		udelay(100);
2076	}
2077
2078	/*
2079	 * Clear the RTC force wake bit to allow the
2080	 * mac to go to sleep.
2081	 */
2082	REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2083
2084	if (ath9k_hw_mci_is_enabled(ah))
2085		udelay(100);
2086
2087	if (!AR_SREV_9100(ah) && !AR_SREV_9300_20_OR_LATER(ah))
2088		REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2089
2090	/* Shut down the chip (the reset is active low). */
2091	if (!AR_SREV_5416(ah) && !AR_SREV_9271(ah)) {
2092		REG_CLR_BIT(ah, AR_RTC_RESET, AR_RTC_RESET_EN);
2093		udelay(2);
2094	}
2095
2096	/* Clear Bit 14 of AR_WA after putting chip into Full Sleep mode. */
2097	if (AR_SREV_9300_20_OR_LATER(ah))
2098		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2099}
2100
2101/*
2102 * Notify that power management is enabled in self-generated
2103 * frames. If requested, set the chip's power mode to
2104 * auto/normal. Duration is in units of 128us (1/8 TU).
2105 */
2106static void ath9k_set_power_network_sleep(struct ath_hw *ah)
2107{
2108	struct ath9k_hw_capabilities *pCap = &ah->caps;
2109
2110	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2111
2112	if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2113		/* Set WakeOnInterrupt bit; clear ForceWake bit */
2114		REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2115			  AR_RTC_FORCE_WAKE_ON_INT);
2116	} else {
2117
2118		/* When the chip goes into network sleep, it can be woken
2119		 * up by an MCI_INT interrupt caused by BT's HW messages
2120		 * (LNA_xxx, CONT_xxx), which can arrive at a very high
2121		 * rate (~100us apart). This makes the chip leave and
2122		 * re-enter network sleep frequently, which in turn makes
2123		 * the WLAN MCI HW generate a flood of SYS_WAKING and
2124		 * SYS_SLEEPING messages that keep the BT CPU too busy
2125		 * to process them.
2126		 */
2127		if (ath9k_hw_mci_is_enabled(ah))
2128			REG_CLR_BIT(ah, AR_MCI_INTERRUPT_RX_MSG_EN,
2129				    AR_MCI_INTERRUPT_RX_HW_MSG_MASK);
2130		/*
2131		 * Clear the RTC force wake bit to allow the
2132		 * mac to go to sleep.
2133		 */
2134		REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN);
2135
2136		if (ath9k_hw_mci_is_enabled(ah))
2137			udelay(30);
2138	}
2139
2140	/* Clear Bit 14 of AR_WA after putting chip into Net Sleep mode. */
2141	if (AR_SREV_9300_20_OR_LATER(ah))
2142		REG_WRITE(ah, AR_WA, ah->WARegVal & ~AR_WA_D3_L1_DISABLE);
2143}
2144
2145static bool ath9k_hw_set_power_awake(struct ath_hw *ah)
2146{
2147	u32 val;
2148	int i;
2149
2150	/* Set Bits 14 and 17 of AR_WA before powering on the chip. */
2151	if (AR_SREV_9300_20_OR_LATER(ah)) {
2152		REG_WRITE(ah, AR_WA, ah->WARegVal);
2153		udelay(10);
2154	}
2155
2156	if ((REG_READ(ah, AR_RTC_STATUS) &
2157	     AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2158		if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
2159			return false;
2160		}
2161		if (!AR_SREV_9300_20_OR_LATER(ah))
2162			ath9k_hw_init_pll(ah, NULL);
2163	}
2164	if (AR_SREV_9100(ah))
2165		REG_SET_BIT(ah, AR_RTC_RESET,
2166			    AR_RTC_RESET_EN);
2167
2168	REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2169		    AR_RTC_FORCE_WAKE_EN);
2170	udelay(50);
2171
2172	for (i = POWER_UP_TIME / 50; i > 0; i--) {
2173		val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2174		if (val == AR_RTC_STATUS_ON)
2175			break;
2176		udelay(50);
2177		REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2178			    AR_RTC_FORCE_WAKE_EN);
2179	}
2180	if (i == 0) {
2181		ath_err(ath9k_hw_common(ah),
2182			"Failed to wake up in %uus\n",
2183			POWER_UP_TIME);
2184		return false;
2185	}
2186
2187	if (ath9k_hw_mci_is_enabled(ah))
2188		ar9003_mci_set_power_awake(ah);
2189
2190	REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2191
2192	return true;
2193}
2194
2195bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2196{
2197	struct ath_common *common = ath9k_hw_common(ah);
2198	bool status = true;
2199	static const char *modes[] = {
2200		"AWAKE",
2201		"FULL-SLEEP",
2202		"NETWORK SLEEP",
2203		"UNDEFINED"
2204	};
2205
2206	if (ah->power_mode == mode)
2207		return status;
2208
2209	ath_dbg(common, RESET, "%s -> %s\n",
2210		modes[ah->power_mode], modes[mode]);
2211
2212	switch (mode) {
2213	case ATH9K_PM_AWAKE:
2214		status = ath9k_hw_set_power_awake(ah);
2215		break;
2216	case ATH9K_PM_FULL_SLEEP:
2217		if (ath9k_hw_mci_is_enabled(ah))
2218			ar9003_mci_set_full_sleep(ah);
2219
2220		ath9k_set_power_sleep(ah);
2221		ah->chip_fullsleep = true;
2222		break;
2223	case ATH9K_PM_NETWORK_SLEEP:
2224		ath9k_set_power_network_sleep(ah);
2225		break;
2226	default:
2227		ath_err(common, "Unknown power mode %u\n", mode);
2228		return false;
2229	}
2230	ah->power_mode = mode;
2231
2232	/*
2233	 * XXX: If this warning never comes up after a while then
2234	 * simply keep the ATH_DBG_WARN_ON_ONCE() but make
2235	 * ath9k_hw_setpower() return type void.
2236	 */
2237
2238	if (!(ah->ah_flags & AH_UNPLUGGED))
2239		ATH_DBG_WARN_ON_ONCE(!status);
2240
2241	return status;
2242}
2243EXPORT_SYMBOL(ath9k_hw_setpower);
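
/*
 * Illustrative caller-side sketch (not part of this file): code that needs
 * register access while the chip may be asleep typically saves the current
 * power mode, forces ATH9K_PM_AWAKE, and restores the old mode afterwards.
 * The helper name ath9k_dump_sta_id() is hypothetical.
 *
 *	static void ath9k_dump_sta_id(struct ath_hw *ah)
 *	{
 *		enum ath9k_power_mode old_mode = ah->power_mode;
 *
 *		if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
 *			return;
 *
 *		ath_dbg(ath9k_hw_common(ah), RESET, "AR_STA_ID1: 0x%08x\n",
 *			REG_READ(ah, AR_STA_ID1));
 *
 *		ath9k_hw_setpower(ah, old_mode);
 *	}
 */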
2244
2245/*******************/
2246/* Beacon Handling */
2247/*******************/
2248
2249void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
2250{
2251	int flags = 0;
2252
2253	ENABLE_REGWRITE_BUFFER(ah);
2254
2255	switch (ah->opmode) {
2256	case NL80211_IFTYPE_ADHOC:
2257		REG_SET_BIT(ah, AR_TXCFG,
2258			    AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
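		/* fall through */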
2259	case NL80211_IFTYPE_MESH_POINT:
2260	case NL80211_IFTYPE_AP:
2261		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, next_beacon);
2262		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, next_beacon -
2263			  TU_TO_USEC(ah->config.dma_beacon_response_time));
2264		REG_WRITE(ah, AR_NEXT_SWBA, next_beacon -
2265			  TU_TO_USEC(ah->config.sw_beacon_response_time));
2266		flags |=
2267			AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
2268		break;
2269	default:
2270		ath_dbg(ath9k_hw_common(ah), BEACON,
2271			"%s: unsupported opmode: %d\n", __func__, ah->opmode);
2272		return;
2274	}
2275
2276	REG_WRITE(ah, AR_BEACON_PERIOD, beacon_period);
2277	REG_WRITE(ah, AR_DMA_BEACON_PERIOD, beacon_period);
2278	REG_WRITE(ah, AR_SWBA_PERIOD, beacon_period);
2279
2280	REGWRITE_BUFFER_FLUSH(ah);
2281
2282	REG_SET_BIT(ah, AR_TIMER_MODE, flags);
2283}
2284EXPORT_SYMBOL(ath9k_hw_beaconinit);
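
/*
 * Note on units: next_beacon and beacon_period are written directly to the
 * timer registers, while the response times are converted with TU_TO_USEC()
 * before being subtracted, so callers are expected to pass microsecond
 * values. A minimal, illustrative call (nexttbtt and the 100 TU interval
 * are placeholders):
 *
 *	ath9k_hw_beaconinit(ah, TU_TO_USEC(nexttbtt), TU_TO_USEC(100));
 */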
2285
2286void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
2287				    const struct ath9k_beacon_state *bs)
2288{
2289	u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
2290	struct ath9k_hw_capabilities *pCap = &ah->caps;
2291	struct ath_common *common = ath9k_hw_common(ah);
2292
2293	ENABLE_REGWRITE_BUFFER(ah);
2294
2295	REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
2296
2297	REG_WRITE(ah, AR_BEACON_PERIOD,
2298		  TU_TO_USEC(bs->bs_intval));
2299	REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
2300		  TU_TO_USEC(bs->bs_intval));
2301
2302	REGWRITE_BUFFER_FLUSH(ah);
2303
2304	REG_RMW_FIELD(ah, AR_RSSI_THR,
2305		      AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
2306
2307	beaconintval = bs->bs_intval;
2308
2309	if (bs->bs_sleepduration > beaconintval)
2310		beaconintval = bs->bs_sleepduration;
2311
2312	dtimperiod = bs->bs_dtimperiod;
2313	if (bs->bs_sleepduration > dtimperiod)
2314		dtimperiod = bs->bs_sleepduration;
2315
2316	if (beaconintval == dtimperiod)
2317		nextTbtt = bs->bs_nextdtim;
2318	else
2319		nextTbtt = bs->bs_nexttbtt;
2320
2321	ath_dbg(common, BEACON, "next DTIM %d\n", bs->bs_nextdtim);
2322	ath_dbg(common, BEACON, "next beacon %d\n", nextTbtt);
2323	ath_dbg(common, BEACON, "beacon period %d\n", beaconintval);
2324	ath_dbg(common, BEACON, "DTIM period %d\n", dtimperiod);
2325
2326	ENABLE_REGWRITE_BUFFER(ah);
2327
2328	REG_WRITE(ah, AR_NEXT_DTIM,
2329		  TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
2330	REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
2331
2332	REG_WRITE(ah, AR_SLEEP1,
2333		  SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
2334		  | AR_SLEEP1_ASSUME_DTIM);
2335
2336	if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
2337		beacontimeout = (BEACON_TIMEOUT_VAL << 3);
2338	else
2339		beacontimeout = MIN_BEACON_TIMEOUT_VAL;
2340
2341	REG_WRITE(ah, AR_SLEEP2,
2342		  SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
2343
2344	REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
2345	REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
2346
2347	REGWRITE_BUFFER_FLUSH(ah);
2348
2349	REG_SET_BIT(ah, AR_TIMER_MODE,
2350		    AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
2351		    AR_DTIM_TIMER_EN);
2352
2353	/* TSF Out of Range Threshold */
2354	REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
2355}
2356EXPORT_SYMBOL(ath9k_hw_set_sta_beacon_timers);
2357
2358/*******************/
2359/* HW Capabilities */
2360/*******************/
2361
2362static u8 fixup_chainmask(u8 chip_chainmask, u8 eeprom_chainmask)
2363{
2364	eeprom_chainmask &= chip_chainmask;
2365	if (eeprom_chainmask)
2366		return eeprom_chainmask;
2367	else
2368		return chip_chainmask;
2369}
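
/*
 * For illustration: with a two-chain chip (chip_chainmask == 0x3), an EEPROM
 * chainmask of 0x5 is masked down to 0x1 and returned, while an EEPROM
 * chainmask of 0x4 masks to zero, so the full chip_chainmask (0x3) is used
 * instead.
 */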
2370
2371/**
2372 * ath9k_hw_dfs_tested - checks if DFS has been tested with the chipset in use
2373 * @ah: the atheros hardware data structure
2374 *
2375 * We enable DFS support upstream only on chipsets which have passed a
2376 * series of tests. The desired test requirements are documented at:
2377 *
2378 * http://wireless.kernel.org/en/users/Drivers/ath9k/dfs
2379 *
2380 * Once a new chipset gets properly tested, an individual commit can be
2381 * used to document the DFS testing for that chipset.
2382 */
2384static bool ath9k_hw_dfs_tested(struct ath_hw *ah)
2385{
2386
2387	switch (ah->hw_version.macVersion) {
2388	/* for temporary testing DFS with 9280 */
2389	case AR_SREV_VERSION_9280:
2390	/* AR9580 will likely be our first target to get testing on */
2391	case AR_SREV_VERSION_9580:
2392		return true;
2393	default:
2394		return false;
2395	}
2396}
2397
2398int ath9k_hw_fill_cap_info(struct ath_hw *ah)
2399{
2400	struct ath9k_hw_capabilities *pCap = &ah->caps;
2401	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
2402	struct ath_common *common = ath9k_hw_common(ah);
2403	unsigned int chip_chainmask;
2404
2405	u16 eeval;
2406	u8 ant_div_ctl1, tx_chainmask, rx_chainmask;
2407
2408	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
2409	regulatory->current_rd = eeval;
2410
2411	if (ah->opmode != NL80211_IFTYPE_AP &&
2412	    ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
2413		if (regulatory->current_rd == 0x64 ||
2414		    regulatory->current_rd == 0x65)
2415			regulatory->current_rd += 5;
2416		else if (regulatory->current_rd == 0x41)
2417			regulatory->current_rd = 0x43;
2418		ath_dbg(common, REGULATORY, "regdomain mapped to 0x%x\n",
2419			regulatory->current_rd);
2420	}
2421
2422	eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
2423	if ((eeval & (AR5416_OPFLAGS_11G | AR5416_OPFLAGS_11A)) == 0) {
2424		ath_err(common,
2425			"no band has been marked as supported in EEPROM\n");
2426		return -EINVAL;
2427	}
2428
2429	if (eeval & AR5416_OPFLAGS_11A)
2430		pCap->hw_caps |= ATH9K_HW_CAP_5GHZ;
2431
2432	if (eeval & AR5416_OPFLAGS_11G)
2433		pCap->hw_caps |= ATH9K_HW_CAP_2GHZ;
2434
2435	if (AR_SREV_9485(ah) ||
2436	    AR_SREV_9285(ah) ||
2437	    AR_SREV_9330(ah) ||
2438	    AR_SREV_9565(ah))
2439		chip_chainmask = 1;
2440	else if (AR_SREV_9462(ah))
2441		chip_chainmask = 3;
2442	else if (!AR_SREV_9280_20_OR_LATER(ah))
2443		chip_chainmask = 7;
2444	else if (!AR_SREV_9300_20_OR_LATER(ah) || AR_SREV_9340(ah))
2445		chip_chainmask = 3;
2446	else
2447		chip_chainmask = 7;
2448
2449	pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
2450	/*
2451	 * For AR9271 we will temporarily use the rx chainmask as read from
2452	 * the EEPROM.
2453	 */
2454	if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
2455	    !(eeval & AR5416_OPFLAGS_11A) &&
2456	    !(AR_SREV_9271(ah)))
2457		/* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
2458		pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
2459	else if (AR_SREV_9100(ah))
2460		pCap->rx_chainmask = 0x7;
2461	else
2462		/* Use rx_chainmask from EEPROM. */
2463		pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
2464
2465	pCap->tx_chainmask = fixup_chainmask(chip_chainmask, pCap->tx_chainmask);
2466	pCap->rx_chainmask = fixup_chainmask(chip_chainmask, pCap->rx_chainmask);
2467	ah->txchainmask = pCap->tx_chainmask;
2468	ah->rxchainmask = pCap->rx_chainmask;
2469
2470	ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
2471
2472	/* enable key search for every frame in an aggregate */
2473	if (AR_SREV_9300_20_OR_LATER(ah))
2474		ah->misc_mode |= AR_PCU_ALWAYS_PERFORM_KEYSEARCH;
2475
2476	common->crypt_caps |= ATH_CRYPT_CAP_CIPHER_AESCCM;
2477
2478	if (ah->hw_version.devid != AR2427_DEVID_PCIE)
2479		pCap->hw_caps |= ATH9K_HW_CAP_HT;
2480	else
2481		pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
2482
2483	if (AR_SREV_9271(ah))
2484		pCap->num_gpio_pins = AR9271_NUM_GPIO;
2485	else if (AR_DEVID_7010(ah))
2486		pCap->num_gpio_pins = AR7010_NUM_GPIO;
2487	else if (AR_SREV_9300_20_OR_LATER(ah))
2488		pCap->num_gpio_pins = AR9300_NUM_GPIO;
2489	else if (AR_SREV_9287_11_OR_LATER(ah))
2490		pCap->num_gpio_pins = AR9287_NUM_GPIO;
2491	else if (AR_SREV_9285_12_OR_LATER(ah))
2492		pCap->num_gpio_pins = AR9285_NUM_GPIO;
2493	else if (AR_SREV_9280_20_OR_LATER(ah))
2494		pCap->num_gpio_pins = AR928X_NUM_GPIO;
2495	else
2496		pCap->num_gpio_pins = AR_NUM_GPIO;
2497
2498	if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah))
2499		pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
2500	else
2501		pCap->rts_aggr_limit = (8 * 1024);
2502
2503#ifdef CONFIG_ATH9K_RFKILL
2504	ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
2505	if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
2506		ah->rfkill_gpio =
2507			MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
2508		ah->rfkill_polarity =
2509			MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
2510
2511		pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
2512	}
2513#endif
2514	if (AR_SREV_9271(ah) || AR_SREV_9300_20_OR_LATER(ah))
2515		pCap->hw_caps |= ATH9K_HW_CAP_AUTOSLEEP;
2516	else
2517		pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
2518
2519	if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
2520		pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
2521	else
2522		pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
2523
2524	if (AR_SREV_9300_20_OR_LATER(ah)) {
2525		pCap->hw_caps |= ATH9K_HW_CAP_EDMA | ATH9K_HW_CAP_FASTCLOCK;
2526		if (!AR_SREV_9330(ah) && !AR_SREV_9485(ah) && !AR_SREV_9565(ah))
2527			pCap->hw_caps |= ATH9K_HW_CAP_LDPC;
2528
2529		pCap->rx_hp_qdepth = ATH9K_HW_RX_HP_QDEPTH;
2530		pCap->rx_lp_qdepth = ATH9K_HW_RX_LP_QDEPTH;
2531		pCap->rx_status_len = sizeof(struct ar9003_rxs);
2532		pCap->tx_desc_len = sizeof(struct ar9003_txc);
2533		pCap->txs_len = sizeof(struct ar9003_txs);
2534	} else {
2535		pCap->tx_desc_len = sizeof(struct ath_desc);
2536		if (AR_SREV_9280_20(ah))
2537			pCap->hw_caps |= ATH9K_HW_CAP_FASTCLOCK;
2538	}
2539
2540	if (AR_SREV_9300_20_OR_LATER(ah))
2541		pCap->hw_caps |= ATH9K_HW_CAP_RAC_SUPPORTED;
2542
2543	if (AR_SREV_9300_20_OR_LATER(ah))
2544		ah->ent_mode = REG_READ(ah, AR_ENT_OTP);
2545
2546	if (AR_SREV_9287_11_OR_LATER(ah) || AR_SREV_9271(ah))
2547		pCap->hw_caps |= ATH9K_HW_CAP_SGI_20;
2548
2549	if (AR_SREV_9285(ah)) {
2550		if (ah->eep_ops->get_eeprom(ah, EEP_MODAL_VER) >= 3) {
2551			ant_div_ctl1 =
2552				ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2553			if ((ant_div_ctl1 & 0x1) && ((ant_div_ctl1 >> 3) & 0x1)) {
2554				pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2555				ath_info(common, "Enable LNA combining\n");
2556			}
2557		}
2558	}
2559
2560	if (AR_SREV_9300_20_OR_LATER(ah)) {
2561		if (ah->eep_ops->get_eeprom(ah, EEP_CHAIN_MASK_REDUCE))
2562			pCap->hw_caps |= ATH9K_HW_CAP_APM;
2563	}
2564
2565	if (AR_SREV_9330(ah) || AR_SREV_9485(ah) || AR_SREV_9565(ah)) {
2566		ant_div_ctl1 = ah->eep_ops->get_eeprom(ah, EEP_ANT_DIV_CTL1);
2567		if ((ant_div_ctl1 >> 0x6) == 0x3) {
2568			pCap->hw_caps |= ATH9K_HW_CAP_ANT_DIV_COMB;
2569			ath_info(common, "Enable LNA combining\n");
2570		}
2571	}
2572
2573	if (ath9k_hw_dfs_tested(ah))
2574		pCap->hw_caps |= ATH9K_HW_CAP_DFS;
2575
2576	tx_chainmask = pCap->tx_chainmask;
2577	rx_chainmask = pCap->rx_chainmask;
2578	while (tx_chainmask || rx_chainmask) {
2579		if (tx_chainmask & BIT(0))
2580			pCap->max_txchains++;
2581		if (rx_chainmask & BIT(0))
2582			pCap->max_rxchains++;
2583
2584		tx_chainmask >>= 1;
2585		rx_chainmask >>= 1;
2586	}
2587
2588	if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
2589		if (!(ah->ent_mode & AR_ENT_OTP_49GHZ_DISABLE))
2590			pCap->hw_caps |= ATH9K_HW_CAP_MCI;
2591
2592		if (AR_SREV_9462_20_OR_LATER(ah))
2593			pCap->hw_caps |= ATH9K_HW_CAP_RTT;
2594	}
2595
2596	if (AR_SREV_9462(ah))
2597		pCap->hw_caps |= ATH9K_HW_WOW_DEVICE_CAPABLE;
2598
2599	if (AR_SREV_9300_20_OR_LATER(ah) &&
2600	    ah->eep_ops->get_eeprom(ah, EEP_PAPRD))
2601		pCap->hw_caps |= ATH9K_HW_CAP_PAPRD;
2602
2603	/*
2604	 * Fast channel change across bands is available
2605	 * only for AR9462 and AR9565.
2606	 */
2607	if (AR_SREV_9462(ah) || AR_SREV_9565(ah))
2608		pCap->hw_caps |= ATH9K_HW_CAP_FCC_BAND_SWITCH;
2609
2610	return 0;
2611}
2612
2613/****************************/
2614/* GPIO / RFKILL / Antennae */
2615/****************************/
2616
2617static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
2618					 u32 gpio, u32 type)
2619{
2620	int addr;
2621	u32 gpio_shift, tmp;
2622
2623	if (gpio > 11)
2624		addr = AR_GPIO_OUTPUT_MUX3;
2625	else if (gpio > 5)
2626		addr = AR_GPIO_OUTPUT_MUX2;
2627	else
2628		addr = AR_GPIO_OUTPUT_MUX1;
2629
2630	gpio_shift = (gpio % 6) * 5;
2631
2632	if (AR_SREV_9280_20_OR_LATER(ah)
2633	    || (addr != AR_GPIO_OUTPUT_MUX1)) {
2634		REG_RMW(ah, addr, (type << gpio_shift),
2635			(0x1f << gpio_shift));
2636	} else {
2637		tmp = REG_READ(ah, addr);
2638		tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
2639		tmp &= ~(0x1f << gpio_shift);
2640		tmp |= (type << gpio_shift);
2641		REG_WRITE(ah, addr, tmp);
2642	}
2643}
2644
2645void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
2646{
2647	u32 gpio_shift;
2648
2649	BUG_ON(gpio >= ah->caps.num_gpio_pins);
2650
2651	if (AR_DEVID_7010(ah)) {
2652		gpio_shift = gpio;
2653		REG_RMW(ah, AR7010_GPIO_OE,
2654			(AR7010_GPIO_OE_AS_INPUT << gpio_shift),
2655			(AR7010_GPIO_OE_MASK << gpio_shift));
2656		return;
2657	}
2658
2659	gpio_shift = gpio << 1;
2660	REG_RMW(ah,
2661		AR_GPIO_OE_OUT,
2662		(AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
2663		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2664}
2665EXPORT_SYMBOL(ath9k_hw_cfg_gpio_input);
2666
2667u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
2668{
2669#define MS_REG_READ(x, y) \
2670	(MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
2671
2672	if (gpio >= ah->caps.num_gpio_pins)
2673		return 0xffffffff;
2674
2675	if (AR_DEVID_7010(ah)) {
2676		u32 val;
2677		val = REG_READ(ah, AR7010_GPIO_IN);
2678		return (MS(val, AR7010_GPIO_IN_VAL) & AR_GPIO_BIT(gpio)) == 0;
2679	} else if (AR_SREV_9300_20_OR_LATER(ah))
2680		return (MS(REG_READ(ah, AR_GPIO_IN), AR9300_GPIO_IN_VAL) &
2681			AR_GPIO_BIT(gpio)) != 0;
2682	else if (AR_SREV_9271(ah))
2683		return MS_REG_READ(AR9271, gpio) != 0;
2684	else if (AR_SREV_9287_11_OR_LATER(ah))
2685		return MS_REG_READ(AR9287, gpio) != 0;
2686	else if (AR_SREV_9285_12_OR_LATER(ah))
2687		return MS_REG_READ(AR9285, gpio) != 0;
2688	else if (AR_SREV_9280_20_OR_LATER(ah))
2689		return MS_REG_READ(AR928X, gpio) != 0;
2690	else
2691		return MS_REG_READ(AR, gpio) != 0;
2692}
2693EXPORT_SYMBOL(ath9k_hw_gpio_get);
2694
2695void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
2696			 u32 ah_signal_type)
2697{
2698	u32 gpio_shift;
2699
2700	if (AR_DEVID_7010(ah)) {
2701		gpio_shift = gpio;
2702		REG_RMW(ah, AR7010_GPIO_OE,
2703			(AR7010_GPIO_OE_AS_OUTPUT << gpio_shift),
2704			(AR7010_GPIO_OE_MASK << gpio_shift));
2705		return;
2706	}
2707
2708	ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
2709	gpio_shift = 2 * gpio;
2710	REG_RMW(ah,
2711		AR_GPIO_OE_OUT,
2712		(AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
2713		(AR_GPIO_OE_OUT_DRV << gpio_shift));
2714}
2715EXPORT_SYMBOL(ath9k_hw_cfg_output);
2716
2717void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
2718{
2719	if (AR_DEVID_7010(ah)) {
2720		val = val ? 0 : 1;
2721		REG_RMW(ah, AR7010_GPIO_OUT, ((val&1) << gpio),
2722			AR_GPIO_BIT(gpio));
2723		return;
2724	}
2725
2726	if (AR_SREV_9271(ah))
2727		val = ~val;
2728
2729	REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
2730		AR_GPIO_BIT(gpio));
2731}
2732EXPORT_SYMBOL(ath9k_hw_set_gpio);
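
/*
 * Illustrative sketch (not part of this file): driving a pin as a plain GPIO
 * output, e.g. for an LED. The pin number is an example, and
 * AR_GPIO_OUTPUT_MUX_AS_OUTPUT is assumed to be the generic output signal
 * type used elsewhere in the driver; AR9271/AR7010 polarity quirks are
 * handled inside ath9k_hw_set_gpio() itself.
 *
 *	u32 pin = 1;
 *
 *	ath9k_hw_cfg_output(ah, pin, AR_GPIO_OUTPUT_MUX_AS_OUTPUT);
 *	ath9k_hw_set_gpio(ah, pin, 0);
 */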
2733
2734void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
2735{
2736	REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
2737}
2738EXPORT_SYMBOL(ath9k_hw_setantenna);
2739
2740/*********************/
2741/* General Operation */
2742/*********************/
2743
2744u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
2745{
2746	u32 bits = REG_READ(ah, AR_RX_FILTER);
2747	u32 phybits = REG_READ(ah, AR_PHY_ERR);
2748
2749	if (phybits & AR_PHY_ERR_RADAR)
2750		bits |= ATH9K_RX_FILTER_PHYRADAR;
2751	if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
2752		bits |= ATH9K_RX_FILTER_PHYERR;
2753
2754	return bits;
2755}
2756EXPORT_SYMBOL(ath9k_hw_getrxfilter);
2757
2758void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
2759{
2760	u32 phybits;
2761
2762	ENABLE_REGWRITE_BUFFER(ah);
2763
2764	if (AR_SREV_9462(ah) || AR_SREV_9565(ah))
2765		bits |= ATH9K_RX_FILTER_CONTROL_WRAPPER;
2766
2767	REG_WRITE(ah, AR_RX_FILTER, bits);
2768
2769	phybits = 0;
2770	if (bits & ATH9K_RX_FILTER_PHYRADAR)
2771		phybits |= AR_PHY_ERR_RADAR;
2772	if (bits & ATH9K_RX_FILTER_PHYERR)
2773		phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
2774	REG_WRITE(ah, AR_PHY_ERR, phybits);
2775
2776	if (phybits)
2777		REG_SET_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2778	else
2779		REG_CLR_BIT(ah, AR_RXCFG, AR_RXCFG_ZLFDMA);
2780
2781	REGWRITE_BUFFER_FLUSH(ah);
2782}
2783EXPORT_SYMBOL(ath9k_hw_setrxfilter);
2784
2785bool ath9k_hw_phy_disable(struct ath_hw *ah)
2786{
2787	if (ath9k_hw_mci_is_enabled(ah))
2788		ar9003_mci_bt_gain_ctrl(ah);
2789
2790	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
2791		return false;
2792
2793	ath9k_hw_init_pll(ah, NULL);
2794	ah->htc_reset_init = true;
2795	return true;
2796}
2797EXPORT_SYMBOL(ath9k_hw_phy_disable);
2798
2799bool ath9k_hw_disable(struct ath_hw *ah)
2800{
2801	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2802		return false;
2803
2804	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD))
2805		return false;
2806
2807	ath9k_hw_init_pll(ah, NULL);
2808	return true;
2809}
2810EXPORT_SYMBOL(ath9k_hw_disable);
2811
2812static int get_antenna_gain(struct ath_hw *ah, struct ath9k_channel *chan)
2813{
2814	enum eeprom_param gain_param;
2815
2816	if (IS_CHAN_2GHZ(chan))
2817		gain_param = EEP_ANTENNA_GAIN_2G;
2818	else
2819		gain_param = EEP_ANTENNA_GAIN_5G;
2820
2821	return ah->eep_ops->get_eeprom(ah, gain_param);
2822}
2823
2824void ath9k_hw_apply_txpower(struct ath_hw *ah, struct ath9k_channel *chan,
2825			    bool test)
2826{
2827	struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2828	struct ieee80211_channel *channel;
2829	int chan_pwr, new_pwr, max_gain;
2830	int ant_gain, ant_reduction = 0;
2831
2832	if (!chan)
2833		return;
2834
2835	channel = chan->chan;
2836	chan_pwr = min_t(int, channel->max_power * 2, MAX_RATE_POWER);
2837	new_pwr = min_t(int, chan_pwr, reg->power_limit);
2838	max_gain = chan_pwr - new_pwr + channel->max_antenna_gain * 2;
2839
2840	ant_gain = get_antenna_gain(ah, chan);
2841	if (ant_gain > max_gain)
2842		ant_reduction = ant_gain - max_gain;
2843
2844	ah->eep_ops->set_txpower(ah, chan,
2845				 ath9k_regd_get_ctl(reg, chan),
2846				 ant_reduction, new_pwr, test);
2847}
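
/*
 * Worked example with made-up numbers (all power values in half-dB steps):
 * for a channel with max_power = 20 dBm and max_antenna_gain = 3 dBi,
 * chan_pwr = 40 and the antenna gain term is 6. With a regulatory
 * power_limit of 34, new_pwr = 34 and max_gain = 40 - 34 + 6 = 12. An EEPROM
 * antenna gain of 16 then gives ant_reduction = 16 - 12 = 4, which is what
 * gets passed down to set_txpower().
 */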
2848
2849void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit, bool test)
2850{
2851	struct ath_regulatory *reg = ath9k_hw_regulatory(ah);
2852	struct ath9k_channel *chan = ah->curchan;
2853	struct ieee80211_channel *channel = chan->chan;
2854
2855	reg->power_limit = min_t(u32, limit, MAX_RATE_POWER);
2856	if (test)
2857		channel->max_power = MAX_RATE_POWER / 2;
2858
2859	ath9k_hw_apply_txpower(ah, chan, test);
2860
2861	if (test)
2862		channel->max_power = DIV_ROUND_UP(reg->max_power_level, 2);
2863}
2864EXPORT_SYMBOL(ath9k_hw_set_txpowerlimit);
2865
2866void ath9k_hw_setopmode(struct ath_hw *ah)
2867{
2868	ath9k_hw_set_operating_mode(ah, ah->opmode);
2869}
2870EXPORT_SYMBOL(ath9k_hw_setopmode);
2871
2872void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
2873{
2874	REG_WRITE(ah, AR_MCAST_FIL0, filter0);
2875	REG_WRITE(ah, AR_MCAST_FIL1, filter1);
2876}
2877EXPORT_SYMBOL(ath9k_hw_setmcastfilter);
2878
2879void ath9k_hw_write_associd(struct ath_hw *ah)
2880{
2881	struct ath_common *common = ath9k_hw_common(ah);
2882
2883	REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(common->curbssid));
2884	REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(common->curbssid + 4) |
2885		  ((common->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
2886}
2887EXPORT_SYMBOL(ath9k_hw_write_associd);
2888
2889#define ATH9K_MAX_TSF_READ 10
2890
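/*
 * The TSF is a 64-bit counter exposed as two 32-bit registers. The upper
 * half is read both before and after the lower half, and the read is
 * retried while the two upper values disagree; this guards against the low
 * word wrapping between the reads, which would otherwise combine mismatched
 * halves.
 */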
2891u64 ath9k_hw_gettsf64(struct ath_hw *ah)
2892{
2893	u32 tsf_lower, tsf_upper1, tsf_upper2;
2894	int i;
2895
2896	tsf_upper1 = REG_READ(ah, AR_TSF_U32);
2897	for (i = 0; i < ATH9K_MAX_TSF_READ; i++) {
2898		tsf_lower = REG_READ(ah, AR_TSF_L32);
2899		tsf_upper2 = REG_READ(ah, AR_TSF_U32);
2900		if (tsf_upper2 == tsf_upper1)
2901			break;
2902		tsf_upper1 = tsf_upper2;
2903	}
2904
2905	WARN_ON(i == ATH9K_MAX_TSF_READ);
2906
2907	return (((u64)tsf_upper1 << 32) | tsf_lower);
2908}
2909EXPORT_SYMBOL(ath9k_hw_gettsf64);
2910
2911void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
2912{
2913	REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
2914	REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
2915}
2916EXPORT_SYMBOL(ath9k_hw_settsf64);
2917
2918void ath9k_hw_reset_tsf(struct ath_hw *ah)
2919{
2920	if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
2921			   AH_TSF_WRITE_TIMEOUT))
2922		ath_dbg(ath9k_hw_common(ah), RESET,
2923			"AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
2924
2925	REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
2926}
2927EXPORT_SYMBOL(ath9k_hw_reset_tsf);
2928
2929void ath9k_hw_set_tsfadjust(struct ath_hw *ah, bool set)
2930{
2931	if (set)
2932		ah->misc_mode |= AR_PCU_TX_ADD_TSF;
2933	else
2934		ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
2935}
2936EXPORT_SYMBOL(ath9k_hw_set_tsfadjust);
2937
2938void ath9k_hw_set11nmac2040(struct ath_hw *ah, struct ath9k_channel *chan)
2939{
2940	u32 macmode;
2941
2942	if (IS_CHAN_HT40(chan) && !ah->config.cwm_ignore_extcca)
2943		macmode = AR_2040_JOINED_RX_CLEAR;
2944	else
2945		macmode = 0;
2946
2947	REG_WRITE(ah, AR_2040_MODE, macmode);
2948}
2949
2950/* HW Generic timers configuration */
2951
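/*
 * Note: ath_gen_timer_alloc() below refuses indices smaller than
 * AR_FIRST_NDP_TIMER, so the repeated leading entries in this table appear
 * to be placeholders; only the NDP/NDP2 timer slots are used in practice.
 */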
2952static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
2953{
2954	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2955	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2956	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2957	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2958	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2959	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2960	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2961	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
2962	{AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
2963	{AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
2964				AR_NDP2_TIMER_MODE, 0x0002},
2965	{AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
2966				AR_NDP2_TIMER_MODE, 0x0004},
2967	{AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
2968				AR_NDP2_TIMER_MODE, 0x0008},
2969	{AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
2970				AR_NDP2_TIMER_MODE, 0x0010},
2971	{AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
2972				AR_NDP2_TIMER_MODE, 0x0020},
2973	{AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
2974				AR_NDP2_TIMER_MODE, 0x0040},
2975	{AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
2976				AR_NDP2_TIMER_MODE, 0x0080}
2977};
2978
2979/* HW generic timer primitives */
2980
2981u32 ath9k_hw_gettsf32(struct ath_hw *ah)
2982{
2983	return REG_READ(ah, AR_TSF_L32);
2984}
2985EXPORT_SYMBOL(ath9k_hw_gettsf32);
2986
2987struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
2988					  void (*trigger)(void *),
2989					  void (*overflow)(void *),
2990					  void *arg,
2991					  u8 timer_index)
2992{
2993	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
2994	struct ath_gen_timer *timer;
2995
2996	if ((timer_index < AR_FIRST_NDP_TIMER) ||
2997		(timer_index >= ATH_MAX_GEN_TIMER))
2998		return NULL;
2999
3000	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
3001	if (timer == NULL)
3002		return NULL;
3003
3004	/* allocate a hardware generic timer slot */
3005	timer_table->timers[timer_index] = timer;
3006	timer->index = timer_index;
3007	timer->trigger = trigger;
3008	timer->overflow = overflow;
3009	timer->arg = arg;
3010
3011	return timer;
3012}
3013EXPORT_SYMBOL(ath_gen_timer_alloc);
3014
3015void ath9k_hw_gen_timer_start(struct ath_hw *ah,
3016			      struct ath_gen_timer *timer,
3017			      u32 timer_next,
3018			      u32 timer_period)
3019{
3020	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3021	u32 mask = 0;
3022
3023	timer_table->timer_mask |= BIT(timer->index);
3024
3025	/*
3026	 * Program generic timer registers
3027	 */
3028	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
3029		 timer_next);
3030	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
3031		  timer_period);
3032	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3033		    gen_tmr_configuration[timer->index].mode_mask);
3034
3035	if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
3036		/*
3037		 * Starting from AR9462, each generic timer can select which
3038		 * TSF to use, but we still follow the old rule: timers 0 - 7
3039		 * use TSF and 8 - 15 use TSF2.
3040		 */
3041		if ((timer->index < AR_GEN_TIMER_BANK_1_LEN))
3042			REG_CLR_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3043				       (1 << timer->index));
3044		else
3045			REG_SET_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3046				       (1 << timer->index));
3047	}
3048
3049	if (timer->trigger)
3050		mask |= SM(AR_GENTMR_BIT(timer->index),
3051			   AR_IMR_S5_GENTIMER_TRIG);
3052	if (timer->overflow)
3053		mask |= SM(AR_GENTMR_BIT(timer->index),
3054			   AR_IMR_S5_GENTIMER_THRESH);
3055
3056	REG_SET_BIT(ah, AR_IMR_S5, mask);
3057
3058	if ((ah->imask & ATH9K_INT_GENTIMER) == 0) {
3059		ah->imask |= ATH9K_INT_GENTIMER;
3060		ath9k_hw_set_interrupts(ah);
3061	}
3062}
3063EXPORT_SYMBOL(ath9k_hw_gen_timer_start);
3064
3065void ath9k_hw_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
3066{
3067	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3068
3069	/* Clear generic timer enable bits. */
3070	REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
3071			gen_tmr_configuration[timer->index].mode_mask);
3072
3073	if (AR_SREV_9462(ah) || AR_SREV_9565(ah)) {
3074		/*
3075		 * Need to switch back to TSF if it was using TSF2.
3076		 */
3077		if ((timer->index >= AR_GEN_TIMER_BANK_1_LEN)) {
3078			REG_CLR_BIT(ah, AR_MAC_PCU_GEN_TIMER_TSF_SEL,
3079				    (1 << timer->index));
3080		}
3081	}
3082
3083	/* Disable both trigger and thresh interrupt masks */
3084	REG_CLR_BIT(ah, AR_IMR_S5,
3085		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
3086		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
3087
3088	timer_table->timer_mask &= ~BIT(timer->index);
3089
3090	if (timer_table->timer_mask == 0) {
3091		ah->imask &= ~ATH9K_INT_GENTIMER;
3092		ath9k_hw_set_interrupts(ah);
3093	}
3094}
3095EXPORT_SYMBOL(ath9k_hw_gen_timer_stop);
3096
3097void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
3098{
3099	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3100
3101	/* free the hardware generic timer slot */
3102	timer_table->timers[timer->index] = NULL;
3103	kfree(timer);
3104}
3105EXPORT_SYMBOL(ath_gen_timer_free);
3106
3107/*
3108 * Generic Timer Interrupts handling
3109 */
3110void ath_gen_timer_isr(struct ath_hw *ah)
3111{
3112	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
3113	struct ath_gen_timer *timer;
3114	unsigned long trigger_mask, thresh_mask;
3115	unsigned int index;
3116
3117	/* get hardware generic timer interrupt status */
3118	trigger_mask = ah->intr_gen_timer_trigger;
3119	thresh_mask = ah->intr_gen_timer_thresh;
3120	trigger_mask &= timer_table->timer_mask;
3121	thresh_mask &= timer_table->timer_mask;
3122
3123	trigger_mask &= ~thresh_mask;
3124
3125	for_each_set_bit(index, &thresh_mask, ARRAY_SIZE(timer_table->timers)) {
3126		timer = timer_table->timers[index];
3127		if (!timer)
3128		    continue;
3129		if (!timer->overflow)
3130		    continue;
3131		timer->overflow(timer->arg);
3132	}
3133
3134	for_each_set_bit(index, &trigger_mask, ARRAY_SIZE(timer_table->timers)) {
3135		timer = timer_table->timers[index];
3136		if (!timer)
3137		    continue;
3138		if (!timer->trigger)
3139		    continue;
3140		timer->trigger(timer->arg);
3141	}
3142}
3143EXPORT_SYMBOL(ath_gen_timer_isr);
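
/*
 * Illustrative sketch (not part of this file) of the generic timer API
 * exported above. The callback names, the timer index and the start values
 * are examples only; real users (e.g. btcoex) pick a free NDP timer slot
 * and their own period.
 *
 *	static void example_trigger(void *arg)  { ... }
 *	static void example_overflow(void *arg) { ... }
 *
 *	struct ath_gen_timer *gt;
 *
 *	gt = ath_gen_timer_alloc(ah, example_trigger, example_overflow,
 *				 ah, AR_FIRST_NDP_TIMER);
 *	if (gt)
 *		ath9k_hw_gen_timer_start(ah, gt, timer_next, timer_period);
 *
 *	...
 *
 *	if (gt) {
 *		ath9k_hw_gen_timer_stop(ah, gt);
 *		ath_gen_timer_free(ah, gt);
 *	}
 */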
3144
3145/********/
3146/* HTC  */
3147/********/
3148
3149static struct {
3150	u32 version;
3151	const char *name;
3152} ath_mac_bb_names[] = {
3153	/* Devices with external radios */
3154	{ AR_SREV_VERSION_5416_PCI,	"5416" },
3155	{ AR_SREV_VERSION_5416_PCIE,	"5418" },
3156	{ AR_SREV_VERSION_9100,		"9100" },
3157	{ AR_SREV_VERSION_9160,		"9160" },
3158	/* Single-chip solutions */
3159	{ AR_SREV_VERSION_9280,		"9280" },
3160	{ AR_SREV_VERSION_9285,		"9285" },
3161	{ AR_SREV_VERSION_9287,         "9287" },
3162	{ AR_SREV_VERSION_9271,         "9271" },
3163	{ AR_SREV_VERSION_9300,         "9300" },
3164	{ AR_SREV_VERSION_9330,         "9330" },
3165	{ AR_SREV_VERSION_9340,		"9340" },
3166	{ AR_SREV_VERSION_9485,         "9485" },
3167	{ AR_SREV_VERSION_9462,         "9462" },
3168	{ AR_SREV_VERSION_9550,         "9550" },
3169	{ AR_SREV_VERSION_9565,         "9565" },
3170};
3171
3172/* For devices with external radios */
3173static struct {
3174	u16 version;
3175	const char *name;
3176} ath_rf_names[] = {
3177	{ 0,				"5133" },
3178	{ AR_RAD5133_SREV_MAJOR,	"5133" },
3179	{ AR_RAD5122_SREV_MAJOR,	"5122" },
3180	{ AR_RAD2133_SREV_MAJOR,	"2133" },
3181	{ AR_RAD2122_SREV_MAJOR,	"2122" }
3182};
3183
3184/*
3185 * Return the MAC/BB name. "????" is returned if the MAC/BB is unknown.
3186 */
3187static const char *ath9k_hw_mac_bb_name(u32 mac_bb_version)
3188{
3189	int i;
3190
3191	for (i = 0; i < ARRAY_SIZE(ath_mac_bb_names); i++) {
3192		if (ath_mac_bb_names[i].version == mac_bb_version)
3193			return ath_mac_bb_names[i].name;
3194	}
3196
3197	return "????";
3198}
3199
3200/*
3201 * Return the RF name. "????" is returned if the RF is unknown.
3202 * Used for devices with external radios.
3203 */
3204static const char *ath9k_hw_rf_name(u16 rf_version)
3205{
3206	int i;
3207
3208	for (i = 0; i < ARRAY_SIZE(ath_rf_names); i++) {
3209		if (ath_rf_names[i].version == rf_version)
3210			return ath_rf_names[i].name;
3211	}
3213
3214	return "????";
3215}
3216
3217void ath9k_hw_name(struct ath_hw *ah, char *hw_name, size_t len)
3218{
3219	int used;
3220
3221	/* chipsets >= AR9280 are single-chip */
3222	if (AR_SREV_9280_20_OR_LATER(ah)) {
3223		used = scnprintf(hw_name, len,
3224				 "Atheros AR%s Rev:%x",
3225				 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3226				 ah->hw_version.macRev);
3227	} else {
3229		used = scnprintf(hw_name, len,
3230				 "Atheros AR%s MAC/BB Rev:%x AR%s RF Rev:%x",
3231				 ath9k_hw_mac_bb_name(ah->hw_version.macVersion),
3232				 ah->hw_version.macRev,
3233				 ath9k_hw_rf_name((ah->hw_version.analog5GhzRev
3234						  & AR_RADIO_SREV_MAJOR)),
3235				 ah->hw_version.phyRev);
3236	}
3237
3238	hw_name[used] = '\0';
3239}
3240EXPORT_SYMBOL(ath9k_hw_name);
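
/*
 * Illustrative usage (not part of this file); the buffer size is only an
 * example:
 *
 *	char hw_name[64];
 *
 *	ath9k_hw_name(ah, hw_name, sizeof(hw_name));
 *	ath_info(ath9k_hw_common(ah), "%s\n", hw_name);
 */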
3241