/* hw.c, revision 3453ad8839ca91e1c11211d4d87dc3657c5a2b44 */
1/*
2 * Copyright (c) 2008-2009 Atheros Communications Inc.
3 *
4 * Permission to use, copy, modify, and/or distribute this software for any
5 * purpose with or without fee is hereby granted, provided that the above
6 * copyright notice and this permission notice appear in all copies.
7 *
8 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
9 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
10 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
11 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
12 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
13 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
14 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
15 */
16
17#include <linux/io.h>
18#include <asm/unaligned.h>
19#include <linux/pci.h>
20
21#include "hw.h"
22#include "ath9k.h"
23#include "initvals.h"
24
25#define ATH9K_CLOCK_RATE_CCK		22
26#define ATH9K_CLOCK_RATE_5GHZ_OFDM	40
27#define ATH9K_CLOCK_RATE_2GHZ_OFDM	44
28
29static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type);
30static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
31			      enum ath9k_ht_macmode macmode);
32static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
33			      struct ar5416_eeprom_def *pEepData,
34			      u32 reg, u32 value);
35static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
36static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan);
37
38/********************/
39/* Helper Functions */
40/********************/
41
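/*
 * Helpers to convert between MAC clock ticks and microseconds. The
 * ATH9K_CLOCK_RATE_* constants above are baseband clock rates in MHz:
 * 22 for CCK, 44 for 2.4 GHz OFDM and 40 for 5 GHz OFDM. On HT40
 * channels the MAC clock runs twice as fast, hence the extra *2 and /2
 * in the *_to_* variants below.
 */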
42static u32 ath9k_hw_mac_usec(struct ath_hw *ah, u32 clks)
43{
44	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
45
46	if (!ah->curchan) /* should really check for CCK instead */
47		return clks / ATH9K_CLOCK_RATE_CCK;
48	if (conf->channel->band == IEEE80211_BAND_2GHZ)
49		return clks / ATH9K_CLOCK_RATE_2GHZ_OFDM;
50
51	return clks / ATH9K_CLOCK_RATE_5GHZ_OFDM;
52}
53
54static u32 ath9k_hw_mac_to_usec(struct ath_hw *ah, u32 clks)
55{
56	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
57
58	if (conf_is_ht40(conf))
59		return ath9k_hw_mac_usec(ah, clks) / 2;
60	else
61		return ath9k_hw_mac_usec(ah, clks);
62}
63
64static u32 ath9k_hw_mac_clks(struct ath_hw *ah, u32 usecs)
65{
66	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
67
68	if (!ah->curchan) /* should really check for CCK instead */
		return usecs * ATH9K_CLOCK_RATE_CCK;
	if (conf->channel->band == IEEE80211_BAND_2GHZ)
		return usecs * ATH9K_CLOCK_RATE_2GHZ_OFDM;
	return usecs * ATH9K_CLOCK_RATE_5GHZ_OFDM;
73}
74
75static u32 ath9k_hw_mac_to_clks(struct ath_hw *ah, u32 usecs)
76{
77	struct ieee80211_conf *conf = &ah->ah_sc->hw->conf;
78
79	if (conf_is_ht40(conf))
80		return ath9k_hw_mac_clks(ah, usecs) * 2;
81	else
82		return ath9k_hw_mac_clks(ah, usecs);
83}
84
/*
 * Reads and writes share the same lock. We do this to serialize
 * reads and writes on Atheros 802.11n PCI devices only. This is required
 * as the FIFO on these devices can only sanely accept two outstanding
 * requests; beyond that the device misbehaves. Serializing the
 * reads/writes prevents this from happening.
 */
92
93void ath9k_iowrite32(struct ath_hw *ah, u32 reg_offset, u32 val)
94{
95	if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
96		unsigned long flags;
97		spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
98		iowrite32(val, ah->ah_sc->mem + reg_offset);
99		spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
100	} else
101		iowrite32(val, ah->ah_sc->mem + reg_offset);
102}
103
104unsigned int ath9k_ioread32(struct ath_hw *ah, u32 reg_offset)
105{
106	u32 val;
107	if (ah->config.serialize_regmode == SER_REG_MODE_ON) {
108		unsigned long flags;
109		spin_lock_irqsave(&ah->ah_sc->sc_serial_rw, flags);
110		val = ioread32(ah->ah_sc->mem + reg_offset);
111		spin_unlock_irqrestore(&ah->ah_sc->sc_serial_rw, flags);
112	} else
113		val = ioread32(ah->ah_sc->mem + reg_offset);
114	return val;
115}
116
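/*
 * Poll a register until (value & mask) == val. The register is sampled
 * every AH_TIME_QUANTUM microseconds, for at most 'timeout' microseconds
 * in total.
 */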
117bool ath9k_hw_wait(struct ath_hw *ah, u32 reg, u32 mask, u32 val, u32 timeout)
118{
119	int i;
120
121	BUG_ON(timeout < AH_TIME_QUANTUM);
122
123	for (i = 0; i < (timeout / AH_TIME_QUANTUM); i++) {
124		if ((REG_READ(ah, reg) & mask) == val)
125			return true;
126
127		udelay(AH_TIME_QUANTUM);
128	}
129
130	DPRINTF(ah, ATH_DBG_ANY,
131		"timeout (%d us) on reg 0x%x: 0x%08x & 0x%08x != 0x%08x\n",
132		timeout, reg, REG_READ(ah, reg), mask, val);
133
134	return false;
135}
136
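/* Reverse the order of the lowest n bits of val. */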
137u32 ath9k_hw_reverse_bits(u32 val, u32 n)
138{
139	u32 retval;
140	int i;
141
142	for (i = 0, retval = 0; i < n; i++) {
143		retval = (retval << 1) | (val & 1);
144		val >>= 1;
145	}
146	return retval;
147}
148
149bool ath9k_get_channel_edges(struct ath_hw *ah,
150			     u16 flags, u16 *low,
151			     u16 *high)
152{
153	struct ath9k_hw_capabilities *pCap = &ah->caps;
154
155	if (flags & CHANNEL_5GHZ) {
156		*low = pCap->low_5ghz_chan;
157		*high = pCap->high_5ghz_chan;
158		return true;
159	}
160	if ((flags & CHANNEL_2GHZ)) {
161		*low = pCap->low_2ghz_chan;
162		*high = pCap->high_2ghz_chan;
163		return true;
164	}
165	return false;
166}
167
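/*
 * Compute the air time, in microseconds, of a frame of 'frameLen' bytes
 * sent at rate index 'rateix': SIFS + preamble/PLCP + payload time for
 * CCK, SIFS + preamble + OFDM symbol count (with half- and quarter-rate
 * channel variants) for OFDM. Returns 0 for an unknown PHY type.
 */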
168u16 ath9k_hw_computetxtime(struct ath_hw *ah,
169			   const struct ath_rate_table *rates,
170			   u32 frameLen, u16 rateix,
171			   bool shortPreamble)
172{
173	u32 bitsPerSymbol, numBits, numSymbols, phyTime, txTime;
174	u32 kbps;
175
176	kbps = rates->info[rateix].ratekbps;
177
178	if (kbps == 0)
179		return 0;
180
181	switch (rates->info[rateix].phy) {
182	case WLAN_RC_PHY_CCK:
183		phyTime = CCK_PREAMBLE_BITS + CCK_PLCP_BITS;
184		if (shortPreamble && rates->info[rateix].short_preamble)
185			phyTime >>= 1;
186		numBits = frameLen << 3;
187		txTime = CCK_SIFS_TIME + phyTime + ((numBits * 1000) / kbps);
188		break;
189	case WLAN_RC_PHY_OFDM:
190		if (ah->curchan && IS_CHAN_QUARTER_RATE(ah->curchan)) {
191			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_QUARTER) / 1000;
192			numBits = OFDM_PLCP_BITS + (frameLen << 3);
193			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
194			txTime = OFDM_SIFS_TIME_QUARTER
195				+ OFDM_PREAMBLE_TIME_QUARTER
196				+ (numSymbols * OFDM_SYMBOL_TIME_QUARTER);
197		} else if (ah->curchan &&
198			   IS_CHAN_HALF_RATE(ah->curchan)) {
199			bitsPerSymbol =	(kbps * OFDM_SYMBOL_TIME_HALF) / 1000;
200			numBits = OFDM_PLCP_BITS + (frameLen << 3);
201			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
202			txTime = OFDM_SIFS_TIME_HALF +
203				OFDM_PREAMBLE_TIME_HALF
204				+ (numSymbols * OFDM_SYMBOL_TIME_HALF);
205		} else {
206			bitsPerSymbol = (kbps * OFDM_SYMBOL_TIME) / 1000;
207			numBits = OFDM_PLCP_BITS + (frameLen << 3);
208			numSymbols = DIV_ROUND_UP(numBits, bitsPerSymbol);
209			txTime = OFDM_SIFS_TIME + OFDM_PREAMBLE_TIME
210				+ (numSymbols * OFDM_SYMBOL_TIME);
211		}
212		break;
213	default:
214		DPRINTF(ah, ATH_DBG_FATAL,
215			"Unknown phy %u (rate ix %u)\n",
216			rates->info[rateix].phy, rateix);
217		txTime = 0;
218		break;
219	}
220
221	return txTime;
222}
223
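/*
 * Work out the control, extension and synthesizer centre frequencies for
 * a channel. For HT20 all three are the primary channel. For HT40 the
 * synthesizer is tuned 10 MHz above (HT40+) or below (HT40-) the control
 * channel, and the extension channel centre ends up 20 or 25 MHz away
 * from the control channel depending on extprotspacing.
 */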
224void ath9k_hw_get_channel_centers(struct ath_hw *ah,
225				  struct ath9k_channel *chan,
226				  struct chan_centers *centers)
227{
228	int8_t extoff;
229
230	if (!IS_CHAN_HT40(chan)) {
231		centers->ctl_center = centers->ext_center =
232			centers->synth_center = chan->channel;
233		return;
234	}
235
236	if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
237	    (chan->chanmode == CHANNEL_G_HT40PLUS)) {
238		centers->synth_center =
239			chan->channel + HT40_CHANNEL_CENTER_SHIFT;
240		extoff = 1;
241	} else {
242		centers->synth_center =
243			chan->channel - HT40_CHANNEL_CENTER_SHIFT;
244		extoff = -1;
245	}
246
247	centers->ctl_center =
248		centers->synth_center - (extoff * HT40_CHANNEL_CENTER_SHIFT);
249	centers->ext_center =
250		centers->synth_center + (extoff *
251			 ((ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_20) ?
252			  HT40_CHANNEL_CENTER_SHIFT : 15));
253}
254
255/******************/
256/* Chip Revisions */
257/******************/
258
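/*
 * Decode the MAC version and revision from AR_SREV. Newer parts report
 * 0xFF in the legacy ID field and use the extended register layout,
 * which also tells us whether the chip is wired up as PCI-Express.
 */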
259static void ath9k_hw_read_revisions(struct ath_hw *ah)
260{
261	u32 val;
262
263	val = REG_READ(ah, AR_SREV) & AR_SREV_ID;
264
265	if (val == 0xFF) {
266		val = REG_READ(ah, AR_SREV);
267		ah->hw_version.macVersion =
268			(val & AR_SREV_VERSION2) >> AR_SREV_TYPE2_S;
269		ah->hw_version.macRev = MS(val, AR_SREV_REVISION2);
270		ah->is_pciexpress = (val & AR_SREV_TYPE2_HOST_MODE) ? 0 : 1;
271	} else {
272		if (!AR_SREV_9100(ah))
273			ah->hw_version.macVersion = MS(val, AR_SREV_VERSION);
274
275		ah->hw_version.macRev = val & AR_SREV_REVISION;
276
277		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCIE)
278			ah->is_pciexpress = true;
279	}
280}
281
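/*
 * Read the analog radio revision: kick the RF serial interface with the
 * fixed sequence below, read back the revision byte, then swap its
 * nibbles and reverse the bit order to get the conventional value.
 */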
282static int ath9k_hw_get_radiorev(struct ath_hw *ah)
283{
284	u32 val;
285	int i;
286
287	REG_WRITE(ah, AR_PHY(0x36), 0x00007058);
288
289	for (i = 0; i < 8; i++)
290		REG_WRITE(ah, AR_PHY(0x20), 0x00010000);
291	val = (REG_READ(ah, AR_PHY(256)) >> 24) & 0xff;
292	val = ((val & 0xf0) >> 4) | ((val & 0x0f) << 4);
293
294	return ath9k_hw_reverse_bits(val, 8);
295}
296
297/************************************/
298/* HW Attach, Detach, Init Routines */
299/************************************/
300
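/*
 * Program the PCIe SERDES with a fixed sequence; called from
 * ath9k_hw_init() when the device is not operating as PCI-Express.
 * AR9100 has no PCIe block, so it is skipped there.
 */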
301static void ath9k_hw_disablepcie(struct ath_hw *ah)
302{
303	if (AR_SREV_9100(ah))
304		return;
305
306	REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
307	REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
308	REG_WRITE(ah, AR_PCIE_SERDES, 0x28000029);
309	REG_WRITE(ah, AR_PCIE_SERDES, 0x57160824);
310	REG_WRITE(ah, AR_PCIE_SERDES, 0x25980579);
311	REG_WRITE(ah, AR_PCIE_SERDES, 0x00000000);
312	REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
313	REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
314	REG_WRITE(ah, AR_PCIE_SERDES, 0x000e1007);
315
316	REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
317}
318
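/*
 * Basic register access self-test: walk two scratch registers (STA_ID0
 * and a PHY register) with a counting pattern and a few fixed patterns,
 * verifying each readback, and restore the original values afterwards.
 */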
319static bool ath9k_hw_chip_test(struct ath_hw *ah)
320{
321	u32 regAddr[2] = { AR_STA_ID0, AR_PHY_BASE + (8 << 2) };
322	u32 regHold[2];
323	u32 patternData[4] = { 0x55555555,
324			       0xaaaaaaaa,
325			       0x66666666,
326			       0x99999999 };
327	int i, j;
328
329	for (i = 0; i < 2; i++) {
330		u32 addr = regAddr[i];
331		u32 wrData, rdData;
332
333		regHold[i] = REG_READ(ah, addr);
334		for (j = 0; j < 0x100; j++) {
335			wrData = (j << 16) | j;
336			REG_WRITE(ah, addr, wrData);
337			rdData = REG_READ(ah, addr);
338			if (rdData != wrData) {
339				DPRINTF(ah, ATH_DBG_FATAL,
340					"address test failed "
341					"addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
342					addr, wrData, rdData);
343				return false;
344			}
345		}
346		for (j = 0; j < 4; j++) {
347			wrData = patternData[j];
348			REG_WRITE(ah, addr, wrData);
349			rdData = REG_READ(ah, addr);
350			if (wrData != rdData) {
351				DPRINTF(ah, ATH_DBG_FATAL,
352					"address test failed "
353					"addr: 0x%08x - wr:0x%08x != rd:0x%08x\n",
354					addr, wrData, rdData);
355				return false;
356			}
357		}
358		REG_WRITE(ah, regAddr[i], regHold[i]);
359	}
360	udelay(100);
361
362	return true;
363}
364
365static const char *ath9k_hw_devname(u16 devid)
366{
367	switch (devid) {
368	case AR5416_DEVID_PCI:
369		return "Atheros 5416";
370	case AR5416_DEVID_PCIE:
371		return "Atheros 5418";
372	case AR9160_DEVID_PCI:
373		return "Atheros 9160";
374	case AR5416_AR9100_DEVID:
375		return "Atheros 9100";
376	case AR9280_DEVID_PCI:
377	case AR9280_DEVID_PCIE:
378		return "Atheros 9280";
379	case AR9285_DEVID_PCIE:
380		return "Atheros 9285";
381	case AR5416_DEVID_AR9287_PCI:
382	case AR5416_DEVID_AR9287_PCIE:
383		return "Atheros 9287";
384	}
385
386	return NULL;
387}
388
389static void ath9k_hw_init_config(struct ath_hw *ah)
390{
391	int i;
392
393	ah->config.dma_beacon_response_time = 2;
394	ah->config.sw_beacon_response_time = 10;
395	ah->config.additional_swba_backoff = 0;
396	ah->config.ack_6mb = 0x0;
397	ah->config.cwm_ignore_extcca = 0;
398	ah->config.pcie_powersave_enable = 0;
399	ah->config.pcie_clock_req = 0;
400	ah->config.pcie_waen = 0;
401	ah->config.analog_shiftreg = 1;
402	ah->config.ht_enable = 1;
403	ah->config.ofdm_trig_low = 200;
404	ah->config.ofdm_trig_high = 500;
405	ah->config.cck_trig_high = 200;
406	ah->config.cck_trig_low = 100;
407	ah->config.enable_ani = 1;
408	ah->config.diversity_control = ATH9K_ANT_VARIABLE;
409	ah->config.antenna_switch_swap = 0;
410
411	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
412		ah->config.spurchans[i][0] = AR_NO_SPUR;
413		ah->config.spurchans[i][1] = AR_NO_SPUR;
414	}
415
416	ah->config.intr_mitigation = true;
417
	/*
	 * We need this for PCI devices only (Cardbus, PCI, miniPCI)
	 * _and_ if on non-uniprocessor systems (Multiprocessor/HT).
	 * This means we use it for all AR5416 devices, and the few
	 * minor PCI AR9280 devices out there.
	 *
	 * Serialization is required because these devices do not handle
	 * the case of two concurrent reads/writes well, due to the latency
	 * involved. During one read/write, another read/write can be issued
	 * on another CPU while the previous read/write may still be working
	 * on our hardware; if we hit this case the hardware hangs in a loop.
	 * We prevent this by serializing reads and writes.
	 *
	 * This issue is not present on PCI-Express devices or pre-AR5416
	 * devices (legacy, 802.11abg).
	 */
434	if (num_possible_cpus() > 1)
435		ah->config.serialize_regmode = SER_REG_MODE_AUTO;
436}
437
438static void ath9k_hw_init_defaults(struct ath_hw *ah)
439{
440	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
441
442	regulatory->country_code = CTRY_DEFAULT;
443	regulatory->power_limit = MAX_RATE_POWER;
444	regulatory->tp_scale = ATH9K_TP_SCALE_MAX;
445
446	ah->hw_version.magic = AR5416_MAGIC;
447	ah->hw_version.subvendorid = 0;
448
449	ah->ah_flags = 0;
450	if (ah->hw_version.devid == AR5416_AR9100_DEVID)
451		ah->hw_version.macVersion = AR_SREV_VERSION_9100;
452	if (!AR_SREV_9100(ah))
453		ah->ah_flags = AH_USE_EEPROM;
454
455	ah->atim_window = 0;
456	ah->sta_id1_defaults = AR_STA_ID1_CRPT_MIC_ENABLE;
457	ah->beacon_interval = 100;
458	ah->enable_32kHz_clock = DONT_USE_32KHZ;
459	ah->slottime = (u32) -1;
460	ah->acktimeout = (u32) -1;
461	ah->ctstimeout = (u32) -1;
462	ah->globaltxtimeout = (u32) -1;
463
464	ah->gbeacon_rate = 0;
465
466	ah->power_mode = ATH9K_PM_UNDEFINED;
467}
468
469static int ath9k_hw_rfattach(struct ath_hw *ah)
470{
471	bool rfStatus = false;
472	int ecode = 0;
473
474	rfStatus = ath9k_hw_init_rf(ah, &ecode);
475	if (!rfStatus) {
476		DPRINTF(ah, ATH_DBG_FATAL,
477			"RF setup failed, status: %u\n", ecode);
478		return ecode;
479	}
480
481	return 0;
482}
483
484static int ath9k_hw_rf_claim(struct ath_hw *ah)
485{
486	u32 val;
487
488	REG_WRITE(ah, AR_PHY(0), 0x00000007);
489
490	val = ath9k_hw_get_radiorev(ah);
491	switch (val & AR_RADIO_SREV_MAJOR) {
492	case 0:
493		val = AR_RAD5133_SREV_MAJOR;
494		break;
495	case AR_RAD5133_SREV_MAJOR:
496	case AR_RAD5122_SREV_MAJOR:
497	case AR_RAD2133_SREV_MAJOR:
498	case AR_RAD2122_SREV_MAJOR:
499		break;
500	default:
501		DPRINTF(ah, ATH_DBG_FATAL,
502			"Radio Chip Rev 0x%02X not supported\n",
503			val & AR_RADIO_SREV_MAJOR);
504		return -EOPNOTSUPP;
505	}
506
507	ah->hw_version.analog5GhzRev = val;
508
509	return 0;
510}
511
512static int ath9k_hw_init_macaddr(struct ath_hw *ah)
513{
514	u32 sum;
515	int i;
516	u16 eeval;
517
518	sum = 0;
519	for (i = 0; i < 3; i++) {
520		eeval = ah->eep_ops->get_eeprom(ah, AR_EEPROM_MAC(i));
521		sum += eeval;
522		ah->macaddr[2 * i] = eeval >> 8;
523		ah->macaddr[2 * i + 1] = eeval & 0xff;
524	}
525	if (sum == 0 || sum == 0xffff * 3)
526		return -EADDRNOTAVAIL;
527
528	return 0;
529}
530
531static void ath9k_hw_init_rxgain_ini(struct ath_hw *ah)
532{
533	u32 rxgain_type;
534
535	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_17) {
536		rxgain_type = ah->eep_ops->get_eeprom(ah, EEP_RXGAIN_TYPE);
537
538		if (rxgain_type == AR5416_EEP_RXGAIN_13DB_BACKOFF)
539			INIT_INI_ARRAY(&ah->iniModesRxGain,
540			ar9280Modes_backoff_13db_rxgain_9280_2,
541			ARRAY_SIZE(ar9280Modes_backoff_13db_rxgain_9280_2), 6);
542		else if (rxgain_type == AR5416_EEP_RXGAIN_23DB_BACKOFF)
543			INIT_INI_ARRAY(&ah->iniModesRxGain,
544			ar9280Modes_backoff_23db_rxgain_9280_2,
545			ARRAY_SIZE(ar9280Modes_backoff_23db_rxgain_9280_2), 6);
546		else
547			INIT_INI_ARRAY(&ah->iniModesRxGain,
548			ar9280Modes_original_rxgain_9280_2,
549			ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
550	} else {
551		INIT_INI_ARRAY(&ah->iniModesRxGain,
552			ar9280Modes_original_rxgain_9280_2,
553			ARRAY_SIZE(ar9280Modes_original_rxgain_9280_2), 6);
554	}
555}
556
557static void ath9k_hw_init_txgain_ini(struct ath_hw *ah)
558{
559	u32 txgain_type;
560
561	if (ah->eep_ops->get_eeprom(ah, EEP_MINOR_REV) >= AR5416_EEP_MINOR_VER_19) {
562		txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
563
564		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER)
565			INIT_INI_ARRAY(&ah->iniModesTxGain,
566			ar9280Modes_high_power_tx_gain_9280_2,
567			ARRAY_SIZE(ar9280Modes_high_power_tx_gain_9280_2), 6);
568		else
569			INIT_INI_ARRAY(&ah->iniModesTxGain,
570			ar9280Modes_original_tx_gain_9280_2,
571			ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
572	} else {
573		INIT_INI_ARRAY(&ah->iniModesTxGain,
574		ar9280Modes_original_tx_gain_9280_2,
575		ARRAY_SIZE(ar9280Modes_original_tx_gain_9280_2), 6);
576	}
577}
578
579static int ath9k_hw_post_init(struct ath_hw *ah)
580{
581	int ecode;
582
583	if (!ath9k_hw_chip_test(ah))
584		return -ENODEV;
585
586	ecode = ath9k_hw_rf_claim(ah);
587	if (ecode != 0)
588		return ecode;
589
590	ecode = ath9k_hw_eeprom_init(ah);
591	if (ecode != 0)
592		return ecode;
593
594	DPRINTF(ah, ATH_DBG_CONFIG, "Eeprom VER: %d, REV: %d\n",
595		ah->eep_ops->get_eeprom_ver(ah), ah->eep_ops->get_eeprom_rev(ah));
596
597	ecode = ath9k_hw_rfattach(ah);
598	if (ecode != 0)
599		return ecode;
600
601	if (!AR_SREV_9100(ah)) {
602		ath9k_hw_ani_setup(ah);
603		ath9k_hw_ani_init(ah);
604	}
605
606	return 0;
607}
608
609static bool ath9k_hw_devid_supported(u16 devid)
610{
611	switch (devid) {
612	case AR5416_DEVID_PCI:
613	case AR5416_DEVID_PCIE:
614	case AR5416_AR9100_DEVID:
615	case AR9160_DEVID_PCI:
616	case AR9280_DEVID_PCI:
617	case AR9280_DEVID_PCIE:
618	case AR9285_DEVID_PCIE:
619	case AR5416_DEVID_AR9287_PCI:
620	case AR5416_DEVID_AR9287_PCIE:
621		return true;
622	default:
623		break;
624	}
625	return false;
626}
627
628static bool ath9k_hw_macversion_supported(u32 macversion)
629{
630	switch (macversion) {
631	case AR_SREV_VERSION_5416_PCI:
632	case AR_SREV_VERSION_5416_PCIE:
633	case AR_SREV_VERSION_9160:
634	case AR_SREV_VERSION_9100:
635	case AR_SREV_VERSION_9280:
636	case AR_SREV_VERSION_9285:
637	case AR_SREV_VERSION_9287:
638		return true;
639	/* Not yet */
640	case AR_SREV_VERSION_9271:
641	default:
642		break;
643	}
644	return false;
645}
646
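/*
 * Select the calibration tables for this chip: AR9280 and later use the
 * single-sample IQ/ADC-gain/ADC-DC calibration data, while AR9160-class
 * parts need the multi-sample versions. AR9100 is handled separately in
 * ath9k_hw_init().
 */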
647static void ath9k_hw_init_cal_settings(struct ath_hw *ah)
648{
649	if (AR_SREV_9160_10_OR_LATER(ah)) {
650		if (AR_SREV_9280_10_OR_LATER(ah)) {
651			ah->iq_caldata.calData = &iq_cal_single_sample;
652			ah->adcgain_caldata.calData =
653				&adc_gain_cal_single_sample;
654			ah->adcdc_caldata.calData =
655				&adc_dc_cal_single_sample;
656			ah->adcdc_calinitdata.calData =
657				&adc_init_dc_cal;
658		} else {
659			ah->iq_caldata.calData = &iq_cal_multi_sample;
660			ah->adcgain_caldata.calData =
661				&adc_gain_cal_multi_sample;
662			ah->adcdc_caldata.calData =
663				&adc_dc_cal_multi_sample;
664			ah->adcdc_calinitdata.calData =
665				&adc_init_dc_cal;
666		}
667		ah->supp_cals = ADC_GAIN_CAL | ADC_DC_CAL | IQ_MISMATCH_CAL;
668	}
669}
670
671static void ath9k_hw_init_mode_regs(struct ath_hw *ah)
672{
673	if (AR_SREV_9271(ah)) {
674		INIT_INI_ARRAY(&ah->iniModes, ar9271Modes_9271_1_0,
675			       ARRAY_SIZE(ar9271Modes_9271_1_0), 6);
676		INIT_INI_ARRAY(&ah->iniCommon, ar9271Common_9271_1_0,
677			       ARRAY_SIZE(ar9271Common_9271_1_0), 2);
678		return;
679	}
680
681	if (AR_SREV_9287_11_OR_LATER(ah)) {
682		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_1,
683				ARRAY_SIZE(ar9287Modes_9287_1_1), 6);
684		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_1,
685				ARRAY_SIZE(ar9287Common_9287_1_1), 2);
686		if (ah->config.pcie_clock_req)
687			INIT_INI_ARRAY(&ah->iniPcieSerdes,
688			ar9287PciePhy_clkreq_off_L1_9287_1_1,
689			ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_1), 2);
690		else
691			INIT_INI_ARRAY(&ah->iniPcieSerdes,
692			ar9287PciePhy_clkreq_always_on_L1_9287_1_1,
693			ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_1),
694					2);
695	} else if (AR_SREV_9287_10_OR_LATER(ah)) {
696		INIT_INI_ARRAY(&ah->iniModes, ar9287Modes_9287_1_0,
697				ARRAY_SIZE(ar9287Modes_9287_1_0), 6);
698		INIT_INI_ARRAY(&ah->iniCommon, ar9287Common_9287_1_0,
699				ARRAY_SIZE(ar9287Common_9287_1_0), 2);
700
701		if (ah->config.pcie_clock_req)
702			INIT_INI_ARRAY(&ah->iniPcieSerdes,
703			ar9287PciePhy_clkreq_off_L1_9287_1_0,
704			ARRAY_SIZE(ar9287PciePhy_clkreq_off_L1_9287_1_0), 2);
705		else
706			INIT_INI_ARRAY(&ah->iniPcieSerdes,
707			ar9287PciePhy_clkreq_always_on_L1_9287_1_0,
708			ARRAY_SIZE(ar9287PciePhy_clkreq_always_on_L1_9287_1_0),
709				  2);
710	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
713		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285_1_2,
714			       ARRAY_SIZE(ar9285Modes_9285_1_2), 6);
715		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285_1_2,
716			       ARRAY_SIZE(ar9285Common_9285_1_2), 2);
717
718		if (ah->config.pcie_clock_req) {
719			INIT_INI_ARRAY(&ah->iniPcieSerdes,
720			ar9285PciePhy_clkreq_off_L1_9285_1_2,
721			ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285_1_2), 2);
722		} else {
723			INIT_INI_ARRAY(&ah->iniPcieSerdes,
724			ar9285PciePhy_clkreq_always_on_L1_9285_1_2,
725			ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285_1_2),
726				  2);
727		}
728	} else if (AR_SREV_9285_10_OR_LATER(ah)) {
729		INIT_INI_ARRAY(&ah->iniModes, ar9285Modes_9285,
730			       ARRAY_SIZE(ar9285Modes_9285), 6);
731		INIT_INI_ARRAY(&ah->iniCommon, ar9285Common_9285,
732			       ARRAY_SIZE(ar9285Common_9285), 2);
733
734		if (ah->config.pcie_clock_req) {
735			INIT_INI_ARRAY(&ah->iniPcieSerdes,
736			ar9285PciePhy_clkreq_off_L1_9285,
737			ARRAY_SIZE(ar9285PciePhy_clkreq_off_L1_9285), 2);
738		} else {
739			INIT_INI_ARRAY(&ah->iniPcieSerdes,
740			ar9285PciePhy_clkreq_always_on_L1_9285,
741			ARRAY_SIZE(ar9285PciePhy_clkreq_always_on_L1_9285), 2);
742		}
743	} else if (AR_SREV_9280_20_OR_LATER(ah)) {
744		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280_2,
745			       ARRAY_SIZE(ar9280Modes_9280_2), 6);
746		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280_2,
747			       ARRAY_SIZE(ar9280Common_9280_2), 2);
748
749		if (ah->config.pcie_clock_req) {
750			INIT_INI_ARRAY(&ah->iniPcieSerdes,
751			       ar9280PciePhy_clkreq_off_L1_9280,
752			       ARRAY_SIZE(ar9280PciePhy_clkreq_off_L1_9280),2);
753		} else {
754			INIT_INI_ARRAY(&ah->iniPcieSerdes,
755			       ar9280PciePhy_clkreq_always_on_L1_9280,
756			       ARRAY_SIZE(ar9280PciePhy_clkreq_always_on_L1_9280), 2);
757		}
758		INIT_INI_ARRAY(&ah->iniModesAdditional,
759			       ar9280Modes_fast_clock_9280_2,
760			       ARRAY_SIZE(ar9280Modes_fast_clock_9280_2), 3);
761	} else if (AR_SREV_9280_10_OR_LATER(ah)) {
762		INIT_INI_ARRAY(&ah->iniModes, ar9280Modes_9280,
763			       ARRAY_SIZE(ar9280Modes_9280), 6);
764		INIT_INI_ARRAY(&ah->iniCommon, ar9280Common_9280,
765			       ARRAY_SIZE(ar9280Common_9280), 2);
766	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
767		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9160,
768			       ARRAY_SIZE(ar5416Modes_9160), 6);
769		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9160,
770			       ARRAY_SIZE(ar5416Common_9160), 2);
771		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9160,
772			       ARRAY_SIZE(ar5416Bank0_9160), 2);
773		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9160,
774			       ARRAY_SIZE(ar5416BB_RfGain_9160), 3);
775		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9160,
776			       ARRAY_SIZE(ar5416Bank1_9160), 2);
777		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9160,
778			       ARRAY_SIZE(ar5416Bank2_9160), 2);
779		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9160,
780			       ARRAY_SIZE(ar5416Bank3_9160), 3);
781		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9160,
782			       ARRAY_SIZE(ar5416Bank6_9160), 3);
783		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9160,
784			       ARRAY_SIZE(ar5416Bank6TPC_9160), 3);
785		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9160,
786			       ARRAY_SIZE(ar5416Bank7_9160), 2);
787		if (AR_SREV_9160_11(ah)) {
788			INIT_INI_ARRAY(&ah->iniAddac,
789				       ar5416Addac_91601_1,
790				       ARRAY_SIZE(ar5416Addac_91601_1), 2);
791		} else {
792			INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9160,
793				       ARRAY_SIZE(ar5416Addac_9160), 2);
794		}
795	} else if (AR_SREV_9100_OR_LATER(ah)) {
796		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes_9100,
797			       ARRAY_SIZE(ar5416Modes_9100), 6);
798		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common_9100,
799			       ARRAY_SIZE(ar5416Common_9100), 2);
800		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0_9100,
801			       ARRAY_SIZE(ar5416Bank0_9100), 2);
802		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain_9100,
803			       ARRAY_SIZE(ar5416BB_RfGain_9100), 3);
804		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1_9100,
805			       ARRAY_SIZE(ar5416Bank1_9100), 2);
806		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2_9100,
807			       ARRAY_SIZE(ar5416Bank2_9100), 2);
808		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3_9100,
809			       ARRAY_SIZE(ar5416Bank3_9100), 3);
810		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6_9100,
811			       ARRAY_SIZE(ar5416Bank6_9100), 3);
812		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC_9100,
813			       ARRAY_SIZE(ar5416Bank6TPC_9100), 3);
814		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7_9100,
815			       ARRAY_SIZE(ar5416Bank7_9100), 2);
816		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac_9100,
817			       ARRAY_SIZE(ar5416Addac_9100), 2);
818	} else {
819		INIT_INI_ARRAY(&ah->iniModes, ar5416Modes,
820			       ARRAY_SIZE(ar5416Modes), 6);
821		INIT_INI_ARRAY(&ah->iniCommon, ar5416Common,
822			       ARRAY_SIZE(ar5416Common), 2);
823		INIT_INI_ARRAY(&ah->iniBank0, ar5416Bank0,
824			       ARRAY_SIZE(ar5416Bank0), 2);
825		INIT_INI_ARRAY(&ah->iniBB_RfGain, ar5416BB_RfGain,
826			       ARRAY_SIZE(ar5416BB_RfGain), 3);
827		INIT_INI_ARRAY(&ah->iniBank1, ar5416Bank1,
828			       ARRAY_SIZE(ar5416Bank1), 2);
829		INIT_INI_ARRAY(&ah->iniBank2, ar5416Bank2,
830			       ARRAY_SIZE(ar5416Bank2), 2);
831		INIT_INI_ARRAY(&ah->iniBank3, ar5416Bank3,
832			       ARRAY_SIZE(ar5416Bank3), 3);
833		INIT_INI_ARRAY(&ah->iniBank6, ar5416Bank6,
834			       ARRAY_SIZE(ar5416Bank6), 3);
835		INIT_INI_ARRAY(&ah->iniBank6TPC, ar5416Bank6TPC,
836			       ARRAY_SIZE(ar5416Bank6TPC), 3);
837		INIT_INI_ARRAY(&ah->iniBank7, ar5416Bank7,
838			       ARRAY_SIZE(ar5416Bank7), 2);
839		INIT_INI_ARRAY(&ah->iniAddac, ar5416Addac,
840			       ARRAY_SIZE(ar5416Addac), 2);
841	}
842}
843
844static void ath9k_hw_init_mode_gain_regs(struct ath_hw *ah)
845{
846	if (AR_SREV_9287_11_OR_LATER(ah))
847		INIT_INI_ARRAY(&ah->iniModesRxGain,
848		ar9287Modes_rx_gain_9287_1_1,
849		ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_1), 6);
850	else if (AR_SREV_9287_10(ah))
851		INIT_INI_ARRAY(&ah->iniModesRxGain,
852		ar9287Modes_rx_gain_9287_1_0,
853		ARRAY_SIZE(ar9287Modes_rx_gain_9287_1_0), 6);
854	else if (AR_SREV_9280_20(ah))
855		ath9k_hw_init_rxgain_ini(ah);
856
857	if (AR_SREV_9287_11_OR_LATER(ah)) {
858		INIT_INI_ARRAY(&ah->iniModesTxGain,
859		ar9287Modes_tx_gain_9287_1_1,
860		ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_1), 6);
861	} else if (AR_SREV_9287_10(ah)) {
862		INIT_INI_ARRAY(&ah->iniModesTxGain,
863		ar9287Modes_tx_gain_9287_1_0,
864		ARRAY_SIZE(ar9287Modes_tx_gain_9287_1_0), 6);
865	} else if (AR_SREV_9280_20(ah)) {
866		ath9k_hw_init_txgain_ini(ah);
867	} else if (AR_SREV_9285_12_OR_LATER(ah)) {
868		u32 txgain_type = ah->eep_ops->get_eeprom(ah, EEP_TXGAIN_TYPE);
869
870		/* txgain table */
871		if (txgain_type == AR5416_EEP_TXGAIN_HIGH_POWER) {
872			INIT_INI_ARRAY(&ah->iniModesTxGain,
873			ar9285Modes_high_power_tx_gain_9285_1_2,
874			ARRAY_SIZE(ar9285Modes_high_power_tx_gain_9285_1_2), 6);
875		} else {
876			INIT_INI_ARRAY(&ah->iniModesTxGain,
877			ar9285Modes_original_tx_gain_9285_1_2,
878			ARRAY_SIZE(ar9285Modes_original_tx_gain_9285_1_2), 6);
879		}
880
881	}
882}
883
884static void ath9k_hw_init_11a_eeprom_fix(struct ath_hw *ah)
885{
886	u32 i, j;
887
888	if ((ah->hw_version.devid == AR9280_DEVID_PCI) &&
889	    test_bit(ATH9K_MODE_11A, ah->caps.wireless_modes)) {
890
891		/* EEPROM Fixup */
892		for (i = 0; i < ah->iniModes.ia_rows; i++) {
893			u32 reg = INI_RA(&ah->iniModes, i, 0);
894
895			for (j = 1; j < ah->iniModes.ia_columns; j++) {
896				u32 val = INI_RA(&ah->iniModes, i, j);
897
898				INI_RA(&ah->iniModes, i, j) =
899					ath9k_hw_ini_fixup(ah,
900							   &ah->eeprom.def,
901							   reg, val);
902			}
903		}
904	}
905}
906
907int ath9k_hw_init(struct ath_hw *ah)
908{
909	int r = 0;
910
911	if (!ath9k_hw_devid_supported(ah->hw_version.devid))
912		return -EOPNOTSUPP;
913
914	ath9k_hw_init_defaults(ah);
915	ath9k_hw_init_config(ah);
916
917	if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON)) {
918		DPRINTF(ah, ATH_DBG_FATAL, "Couldn't reset chip\n");
919		return -EIO;
920	}
921
922	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE)) {
923		DPRINTF(ah, ATH_DBG_FATAL, "Couldn't wakeup chip\n");
924		return -EIO;
925	}
926
927	if (ah->config.serialize_regmode == SER_REG_MODE_AUTO) {
928		if (ah->hw_version.macVersion == AR_SREV_VERSION_5416_PCI ||
929		    (AR_SREV_9280(ah) && !ah->is_pciexpress)) {
930			ah->config.serialize_regmode =
931				SER_REG_MODE_ON;
932		} else {
933			ah->config.serialize_regmode =
934				SER_REG_MODE_OFF;
935		}
936	}
937
938	DPRINTF(ah, ATH_DBG_RESET, "serialize_regmode is %d\n",
939		ah->config.serialize_regmode);
940
941	if (!ath9k_hw_macversion_supported(ah->hw_version.macVersion)) {
942		DPRINTF(ah, ATH_DBG_FATAL,
943			"Mac Chip Rev 0x%02x.%x is not supported by "
944			"this driver\n", ah->hw_version.macVersion,
945			ah->hw_version.macRev);
946		return -EOPNOTSUPP;
947	}
948
949	if (AR_SREV_9100(ah)) {
950		ah->iq_caldata.calData = &iq_cal_multi_sample;
951		ah->supp_cals = IQ_MISMATCH_CAL;
952		ah->is_pciexpress = false;
953	}
954
955	if (AR_SREV_9271(ah))
956		ah->is_pciexpress = false;
957
958	ah->hw_version.phyRev = REG_READ(ah, AR_PHY_CHIP_ID);
959
960	ath9k_hw_init_cal_settings(ah);
961
962	ah->ani_function = ATH9K_ANI_ALL;
963	if (AR_SREV_9280_10_OR_LATER(ah))
964		ah->ani_function &= ~ATH9K_ANI_NOISE_IMMUNITY_LEVEL;
965
966	ath9k_hw_init_mode_regs(ah);
967
968	if (ah->is_pciexpress)
969		ath9k_hw_configpcipowersave(ah, 0, 0);
970	else
971		ath9k_hw_disablepcie(ah);
972
973	r = ath9k_hw_post_init(ah);
974	if (r)
975		return r;
976
977	ath9k_hw_init_mode_gain_regs(ah);
978	ath9k_hw_fill_cap_info(ah);
979	ath9k_hw_init_11a_eeprom_fix(ah);
980
981	r = ath9k_hw_init_macaddr(ah);
982	if (r) {
983		DPRINTF(ah, ATH_DBG_FATAL,
984			"Failed to initialize MAC address\n");
985		return r;
986	}
987
988	if (AR_SREV_9285(ah) || AR_SREV_9271(ah))
989		ah->tx_trig_level = (AR_FTRIG_256B >> AR_FTRIG_S);
990	else
991		ah->tx_trig_level = (AR_FTRIG_512B >> AR_FTRIG_S);
992
993	ath9k_init_nfcal_hist_buffer(ah);
994
995	return 0;
996}
997
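/*
 * Enable the baseband and wait for the synthesizer to settle. The delay
 * is derived from AR_PHY_RX_DELAY, scaled differently for CCK-only (11b)
 * channels, plus a fixed activation delay.
 */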
998static void ath9k_hw_init_bb(struct ath_hw *ah,
999			     struct ath9k_channel *chan)
1000{
1001	u32 synthDelay;
1002
1003	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1004	if (IS_CHAN_B(chan))
1005		synthDelay = (4 * synthDelay) / 22;
1006	else
1007		synthDelay /= 10;
1008
1009	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_EN);
1010
1011	udelay(synthDelay + BASE_ACTIVATE_DELAY);
1012}
1013
1014static void ath9k_hw_init_qos(struct ath_hw *ah)
1015{
1016	REG_WRITE(ah, AR_MIC_QOS_CONTROL, 0x100aa);
1017	REG_WRITE(ah, AR_MIC_QOS_SELECT, 0x3210);
1018
1019	REG_WRITE(ah, AR_QOS_NO_ACK,
1020		  SM(2, AR_QOS_NO_ACK_TWO_BIT) |
1021		  SM(5, AR_QOS_NO_ACK_BIT_OFF) |
1022		  SM(0, AR_QOS_NO_ACK_BYTE_OFF));
1023
1024	REG_WRITE(ah, AR_TXOP_X, AR_TXOP_X_VAL);
1025	REG_WRITE(ah, AR_TXOP_0_3, 0xFFFFFFFF);
1026	REG_WRITE(ah, AR_TXOP_4_7, 0xFFFFFFFF);
1027	REG_WRITE(ah, AR_TXOP_8_11, 0xFFFFFFFF);
1028	REG_WRITE(ah, AR_TXOP_12_15, 0xFFFFFFFF);
1029}
1030
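/*
 * Program the RTC PLL for the target band and channel width; half- and
 * quarter-rate channels select a slower PLL clock. Once the PLL has had
 * time to settle, select the force-derived sleep clock.
 */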
1031static void ath9k_hw_init_pll(struct ath_hw *ah,
1032			      struct ath9k_channel *chan)
1033{
1034	u32 pll;
1035
1036	if (AR_SREV_9100(ah)) {
1037		if (chan && IS_CHAN_5GHZ(chan))
1038			pll = 0x1450;
1039		else
1040			pll = 0x1458;
1041	} else {
1042		if (AR_SREV_9280_10_OR_LATER(ah)) {
1043			pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1044
1045			if (chan && IS_CHAN_HALF_RATE(chan))
1046				pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1047			else if (chan && IS_CHAN_QUARTER_RATE(chan))
1048				pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1049
1050			if (chan && IS_CHAN_5GHZ(chan)) {
1051				pll |= SM(0x28, AR_RTC_9160_PLL_DIV);
1054				if (AR_SREV_9280_20(ah)) {
1055					if (((chan->channel % 20) == 0)
1056					    || ((chan->channel % 10) == 0))
1057						pll = 0x2850;
1058					else
1059						pll = 0x142c;
1060				}
1061			} else {
1062				pll |= SM(0x2c, AR_RTC_9160_PLL_DIV);
1063			}
1064
1065		} else if (AR_SREV_9160_10_OR_LATER(ah)) {
1066
1067			pll = SM(0x5, AR_RTC_9160_PLL_REFDIV);
1068
1069			if (chan && IS_CHAN_HALF_RATE(chan))
1070				pll |= SM(0x1, AR_RTC_9160_PLL_CLKSEL);
1071			else if (chan && IS_CHAN_QUARTER_RATE(chan))
1072				pll |= SM(0x2, AR_RTC_9160_PLL_CLKSEL);
1073
1074			if (chan && IS_CHAN_5GHZ(chan))
1075				pll |= SM(0x50, AR_RTC_9160_PLL_DIV);
1076			else
1077				pll |= SM(0x58, AR_RTC_9160_PLL_DIV);
1078		} else {
1079			pll = AR_RTC_PLL_REFDIV_5 | AR_RTC_PLL_DIV2;
1080
1081			if (chan && IS_CHAN_HALF_RATE(chan))
1082				pll |= SM(0x1, AR_RTC_PLL_CLKSEL);
1083			else if (chan && IS_CHAN_QUARTER_RATE(chan))
1084				pll |= SM(0x2, AR_RTC_PLL_CLKSEL);
1085
1086			if (chan && IS_CHAN_5GHZ(chan))
1087				pll |= SM(0xa, AR_RTC_PLL_DIV);
1088			else
1089				pll |= SM(0xb, AR_RTC_PLL_DIV);
1090		}
1091	}
1092	REG_WRITE(ah, AR_RTC_PLL_CONTROL, pll);
1093
1094	udelay(RTC_PLL_SETTLE_DELAY);
1095
1096	REG_WRITE(ah, AR_RTC_SLEEP_CLK, AR_RTC_FORCE_DERIVED_CLK);
1097}
1098
1099static void ath9k_hw_init_chain_masks(struct ath_hw *ah)
1100{
1101	int rx_chainmask, tx_chainmask;
1102
1103	rx_chainmask = ah->rxchainmask;
1104	tx_chainmask = ah->txchainmask;
1105
1106	switch (rx_chainmask) {
	case 0x5:
		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
			    AR_PHY_SWAP_ALT_CHAIN);
		/* fall through */
	case 0x3:
		if (ah->hw_version.macVersion <= AR_SREV_VERSION_9160) {
			REG_WRITE(ah, AR_PHY_RX_CHAINMASK, 0x7);
			REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, 0x7);
			break;
		}
		/* fall through */
	case 0x1:
1117	case 0x2:
1118	case 0x7:
1119		REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
1120		REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
1121		break;
1122	default:
1123		break;
1124	}
1125
1126	REG_WRITE(ah, AR_SELFGEN_MASK, tx_chainmask);
1127	if (tx_chainmask == 0x5) {
1128		REG_SET_BIT(ah, AR_PHY_ANALOG_SWAP,
1129			    AR_PHY_SWAP_ALT_CHAIN);
1130	}
1131	if (AR_SREV_9100(ah))
1132		REG_WRITE(ah, AR_PHY_ANALOG_SWAP,
1133			  REG_READ(ah, AR_PHY_ANALOG_SWAP) | 0x00000001);
1134}
1135
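/*
 * Build the initial interrupt mask: TX/RX error conditions and beacon
 * misc always, the RX mitigation timers (or plain RX-OK when mitigation
 * is off), TX-OK, and MIB interrupts when operating as an AP.
 */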
1136static void ath9k_hw_init_interrupt_masks(struct ath_hw *ah,
1137					  enum nl80211_iftype opmode)
1138{
1139	ah->mask_reg = AR_IMR_TXERR |
1140		AR_IMR_TXURN |
1141		AR_IMR_RXERR |
1142		AR_IMR_RXORN |
1143		AR_IMR_BCNMISC;
1144
1145	if (ah->config.intr_mitigation)
1146		ah->mask_reg |= AR_IMR_RXINTM | AR_IMR_RXMINTR;
1147	else
1148		ah->mask_reg |= AR_IMR_RXOK;
1149
1150	ah->mask_reg |= AR_IMR_TXOK;
1151
1152	if (opmode == NL80211_IFTYPE_AP)
1153		ah->mask_reg |= AR_IMR_MIB;
1154
1155	REG_WRITE(ah, AR_IMR, ah->mask_reg);
1156	REG_WRITE(ah, AR_IMR_S2, REG_READ(ah, AR_IMR_S2) | AR_IMR_S2_GTT);
1157
1158	if (!AR_SREV_9100(ah)) {
1159		REG_WRITE(ah, AR_INTR_SYNC_CAUSE, 0xFFFFFFFF);
1160		REG_WRITE(ah, AR_INTR_SYNC_ENABLE, AR_INTR_SYNC_DEFAULT);
1161		REG_WRITE(ah, AR_INTR_SYNC_MASK, 0);
1162	}
1163}
1164
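/*
 * The ACK/CTS timeout setters below range-check 'us' against the width
 * of the corresponding AR_TIME_OUT field and then program it in MAC
 * clocks.
 */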
1165static bool ath9k_hw_set_ack_timeout(struct ath_hw *ah, u32 us)
1166{
1167	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_ACK))) {
1168		DPRINTF(ah, ATH_DBG_RESET, "bad ack timeout %u\n", us);
1169		ah->acktimeout = (u32) -1;
1170		return false;
1171	} else {
1172		REG_RMW_FIELD(ah, AR_TIME_OUT,
1173			      AR_TIME_OUT_ACK, ath9k_hw_mac_to_clks(ah, us));
1174		ah->acktimeout = us;
1175		return true;
1176	}
1177}
1178
1179static bool ath9k_hw_set_cts_timeout(struct ath_hw *ah, u32 us)
1180{
1181	if (us > ath9k_hw_mac_to_usec(ah, MS(0xffffffff, AR_TIME_OUT_CTS))) {
1182		DPRINTF(ah, ATH_DBG_RESET, "bad cts timeout %u\n", us);
1183		ah->ctstimeout = (u32) -1;
1184		return false;
1185	} else {
1186		REG_RMW_FIELD(ah, AR_TIME_OUT,
1187			      AR_TIME_OUT_CTS, ath9k_hw_mac_to_clks(ah, us));
1188		ah->ctstimeout = us;
1189		return true;
1190	}
1191}
1192
1193static bool ath9k_hw_set_global_txtimeout(struct ath_hw *ah, u32 tu)
1194{
1195	if (tu > 0xFFFF) {
1196		DPRINTF(ah, ATH_DBG_XMIT,
1197			"bad global tx timeout %u\n", tu);
1198		ah->globaltxtimeout = (u32) -1;
1199		return false;
1200	} else {
1201		REG_RMW_FIELD(ah, AR_GTXTO, AR_GTXTO_TIMEOUT_LIMIT, tu);
1202		ah->globaltxtimeout = tu;
1203		return true;
1204	}
1205}
1206
1207static void ath9k_hw_init_user_settings(struct ath_hw *ah)
1208{
1209	DPRINTF(ah, ATH_DBG_RESET, "ah->misc_mode 0x%x\n",
1210		ah->misc_mode);
1211
1212	if (ah->misc_mode != 0)
1213		REG_WRITE(ah, AR_PCU_MISC,
1214			  REG_READ(ah, AR_PCU_MISC) | ah->misc_mode);
1215	if (ah->slottime != (u32) -1)
1216		ath9k_hw_setslottime(ah, ah->slottime);
1217	if (ah->acktimeout != (u32) -1)
1218		ath9k_hw_set_ack_timeout(ah, ah->acktimeout);
1219	if (ah->ctstimeout != (u32) -1)
1220		ath9k_hw_set_cts_timeout(ah, ah->ctstimeout);
1221	if (ah->globaltxtimeout != (u32) -1)
1222		ath9k_hw_set_global_txtimeout(ah, ah->globaltxtimeout);
1223}
1224
1225const char *ath9k_hw_probe(u16 vendorid, u16 devid)
1226{
1227	return vendorid == ATHEROS_VENDOR_ID ?
1228		ath9k_hw_devname(devid) : NULL;
1229}
1230
1231void ath9k_hw_detach(struct ath_hw *ah)
1232{
1233	if (!AR_SREV_9100(ah))
1234		ath9k_hw_ani_disable(ah);
1235
1236	ath9k_hw_rf_free(ah);
1237	ath9k_hw_setpower(ah, ATH9K_PM_FULL_SLEEP);
1238	kfree(ah);
1239	ah = NULL;
1240}
1241
1242/*******/
1243/* INI */
1244/*******/
1245
1246static void ath9k_hw_override_ini(struct ath_hw *ah,
1247				  struct ath9k_channel *chan)
1248{
1249	u32 val;
1250
1251	if (AR_SREV_9271(ah)) {
		/*
		 * Enable spectral scan as a workaround for the stuck-beacon
		 * issue on AR9271 1.0. The issue is not seen on AR9271 1.1.
		 */
		if (AR_SREV_9271_10(ah)) {
			val = REG_READ(ah, AR_PHY_SPECTRAL_SCAN) |
			      AR_PHY_SPECTRAL_SCAN_ENABLE;
			REG_WRITE(ah, AR_PHY_SPECTRAL_SCAN, val);
		} else if (AR_SREV_9271_11(ah))
1262			/*
1263			 * change AR_PHY_RF_CTL3 setting to fix MAC issue
1264			 * present on AR9271 1.1
1265			 */
1266			REG_WRITE(ah, AR_PHY_RF_CTL3, 0x3a020001);
1267		return;
1268	}
1269
	/*
	 * Set RX_ABORT and RX_DIS, and clear them only after RXE is
	 * set for the MAC. This prevents frames with corrupted
	 * descriptor status.
	 */
1275	REG_SET_BIT(ah, AR_DIAG_SW, (AR_DIAG_RX_DIS | AR_DIAG_RX_ABORT));
1276
1277	if (AR_SREV_9280_10_OR_LATER(ah)) {
1278		val = REG_READ(ah, AR_PCU_MISC_MODE2) &
1279			       (~AR_PCU_MISC_MODE2_HWWAR1);
1280
1281		if (AR_SREV_9287_10_OR_LATER(ah))
1282			val = val & (~AR_PCU_MISC_MODE2_HWWAR2);
1283
1284		REG_WRITE(ah, AR_PCU_MISC_MODE2, val);
1285	}
1286
1287	if (!AR_SREV_5416_20_OR_LATER(ah) ||
1288	    AR_SREV_9280_10_OR_LATER(ah))
1289		return;
1290	/*
1291	 * Disable BB clock gating
1292	 * Necessary to avoid issues on AR5416 2.0
1293	 */
1294	REG_WRITE(ah, 0x9800 + (651 << 2), 0x11);
1295}
1296
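/*
 * EEPROM-driven fixup of a single INI value: on AR9280 PCI cards with a
 * recent enough EEPROM, the PWDCLKIND field overrides the matching bits
 * of register 0x7894 (AR_AN_TOP2).
 */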
1297static u32 ath9k_hw_def_ini_fixup(struct ath_hw *ah,
1298			      struct ar5416_eeprom_def *pEepData,
1299			      u32 reg, u32 value)
1300{
1301	struct base_eep_header *pBase = &(pEepData->baseEepHeader);
1302
1303	switch (ah->hw_version.devid) {
1304	case AR9280_DEVID_PCI:
1305		if (reg == 0x7894) {
1306			DPRINTF(ah, ATH_DBG_EEPROM,
1307				"ini VAL: %x  EEPROM: %x\n", value,
1308				(pBase->version & 0xff));
1309
1310			if ((pBase->version & 0xff) > 0x0a) {
1311				DPRINTF(ah, ATH_DBG_EEPROM,
1312					"PWDCLKIND: %d\n",
1313					pBase->pwdclkind);
1314				value &= ~AR_AN_TOP2_PWDCLKIND;
1315				value |= AR_AN_TOP2_PWDCLKIND &
1316					(pBase->pwdclkind << AR_AN_TOP2_PWDCLKIND_S);
1317			} else {
1318				DPRINTF(ah, ATH_DBG_EEPROM,
1319					"PWDCLKIND Earlier Rev\n");
1320			}
1321
1322			DPRINTF(ah, ATH_DBG_EEPROM,
1323				"final ini VAL: %x\n", value);
1324		}
1325		break;
1326	}
1327
1328	return value;
1329}
1330
1331static u32 ath9k_hw_ini_fixup(struct ath_hw *ah,
1332			      struct ar5416_eeprom_def *pEepData,
1333			      u32 reg, u32 value)
1334{
1335	if (ah->eep_map == EEP_MAP_4KBITS)
1336		return value;
1337	else
1338		return ath9k_hw_def_ini_fixup(ah, pEepData, reg, value);
1339}
1340
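/*
 * Open-loop power control init: AR9287-class OLC parts enable DC removal
 * and switch the analog TX power control to temperature-sense mode;
 * earlier OLC parts (AR9280) snapshot the TX gain table and clear the
 * PDADC delta.
 */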
1341static void ath9k_olc_init(struct ath_hw *ah)
1342{
1343	u32 i;
1344
1345	if (OLC_FOR_AR9287_10_LATER) {
1346		REG_SET_BIT(ah, AR_PHY_TX_PWRCTRL9,
1347				AR_PHY_TX_PWRCTRL9_RES_DC_REMOVAL);
1348		ath9k_hw_analog_shift_rmw(ah, AR9287_AN_TXPC0,
1349				AR9287_AN_TXPC0_TXPCMODE,
1350				AR9287_AN_TXPC0_TXPCMODE_S,
1351				AR9287_AN_TXPC0_TXPCMODE_TEMPSENSE);
1352		udelay(100);
1353	} else {
1354		for (i = 0; i < AR9280_TX_GAIN_TABLE_SIZE; i++)
1355			ah->originalGain[i] =
1356				MS(REG_READ(ah, AR_PHY_TX_GAIN_TBL1 + i * 4),
1357						AR_PHY_TX_GAIN);
1358		ah->PDADCdelta = 0;
1359	}
1360}
1361
1362static u32 ath9k_regd_get_ctl(struct ath_regulatory *reg,
1363			      struct ath9k_channel *chan)
1364{
1365	u32 ctl = ath_regd_get_band_ctl(reg, chan->chan->band);
1366
1367	if (IS_CHAN_B(chan))
1368		ctl |= CTL_11B;
1369	else if (IS_CHAN_G(chan))
1370		ctl |= CTL_11G;
1371	else
1372		ctl |= CTL_11A;
1373
1374	return ctl;
1375}
1376
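/*
 * Write the INI tables for the target channel: the ADDAC and mode tables
 * (modesIndex picks the column for A/HT20, A/HT40, G/HT40, G/HT20/B),
 * the RX/TX gain tables where applicable, the common table, and finally
 * the board-specific overrides, chainmasks and TX power.
 */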
1377static int ath9k_hw_process_ini(struct ath_hw *ah,
1378				struct ath9k_channel *chan,
1379				enum ath9k_ht_macmode macmode)
1380{
1381	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1382	int i, regWrites = 0;
1383	struct ieee80211_channel *channel = chan->chan;
1384	u32 modesIndex, freqIndex;
1385
1386	switch (chan->chanmode) {
1387	case CHANNEL_A:
1388	case CHANNEL_A_HT20:
1389		modesIndex = 1;
1390		freqIndex = 1;
1391		break;
1392	case CHANNEL_A_HT40PLUS:
1393	case CHANNEL_A_HT40MINUS:
1394		modesIndex = 2;
1395		freqIndex = 1;
1396		break;
1397	case CHANNEL_G:
1398	case CHANNEL_G_HT20:
1399	case CHANNEL_B:
1400		modesIndex = 4;
1401		freqIndex = 2;
1402		break;
1403	case CHANNEL_G_HT40PLUS:
1404	case CHANNEL_G_HT40MINUS:
1405		modesIndex = 3;
1406		freqIndex = 2;
1407		break;
1408
1409	default:
1410		return -EINVAL;
1411	}
1412
1413	REG_WRITE(ah, AR_PHY(0), 0x00000007);
1414	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_EXTERNAL_RADIO);
1415	ah->eep_ops->set_addac(ah, chan);
1416
1417	if (AR_SREV_5416_22_OR_LATER(ah)) {
1418		REG_WRITE_ARRAY(&ah->iniAddac, 1, regWrites);
1419	} else {
1420		struct ar5416IniArray temp;
1421		u32 addacSize =
1422			sizeof(u32) * ah->iniAddac.ia_rows *
1423			ah->iniAddac.ia_columns;
1424
1425		memcpy(ah->addac5416_21,
1426		       ah->iniAddac.ia_array, addacSize);
1427
1428		(ah->addac5416_21)[31 * ah->iniAddac.ia_columns + 1] = 0;
1429
1430		temp.ia_array = ah->addac5416_21;
1431		temp.ia_columns = ah->iniAddac.ia_columns;
1432		temp.ia_rows = ah->iniAddac.ia_rows;
1433		REG_WRITE_ARRAY(&temp, 1, regWrites);
1434	}
1435
1436	REG_WRITE(ah, AR_PHY_ADC_SERIAL_CTL, AR_PHY_SEL_INTERNAL_ADDAC);
1437
1438	for (i = 0; i < ah->iniModes.ia_rows; i++) {
1439		u32 reg = INI_RA(&ah->iniModes, i, 0);
1440		u32 val = INI_RA(&ah->iniModes, i, modesIndex);
1441
1442		REG_WRITE(ah, reg, val);
1443
1444		if (reg >= 0x7800 && reg < 0x78a0
1445		    && ah->config.analog_shiftreg) {
1446			udelay(100);
1447		}
1448
1449		DO_DELAY(regWrites);
1450	}
1451
1452	if (AR_SREV_9280(ah) || AR_SREV_9287_10_OR_LATER(ah))
1453		REG_WRITE_ARRAY(&ah->iniModesRxGain, modesIndex, regWrites);
1454
1455	if (AR_SREV_9280(ah) || AR_SREV_9285_12_OR_LATER(ah) ||
1456	    AR_SREV_9287_10_OR_LATER(ah))
1457		REG_WRITE_ARRAY(&ah->iniModesTxGain, modesIndex, regWrites);
1458
1459	for (i = 0; i < ah->iniCommon.ia_rows; i++) {
1460		u32 reg = INI_RA(&ah->iniCommon, i, 0);
1461		u32 val = INI_RA(&ah->iniCommon, i, 1);
1462
1463		REG_WRITE(ah, reg, val);
1464
1465		if (reg >= 0x7800 && reg < 0x78a0
1466		    && ah->config.analog_shiftreg) {
1467			udelay(100);
1468		}
1469
1470		DO_DELAY(regWrites);
1471	}
1472
1473	ath9k_hw_write_regs(ah, modesIndex, freqIndex, regWrites);
1474
1475	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan)) {
1476		REG_WRITE_ARRAY(&ah->iniModesAdditional, modesIndex,
1477				regWrites);
1478	}
1479
1480	ath9k_hw_override_ini(ah, chan);
1481	ath9k_hw_set_regs(ah, chan, macmode);
1482	ath9k_hw_init_chain_masks(ah);
1483
1484	if (OLC_FOR_AR9280_20_LATER)
1485		ath9k_olc_init(ah);
1486
1487	ah->eep_ops->set_txpower(ah, chan,
1488				 ath9k_regd_get_ctl(regulatory, chan),
1489				 channel->max_antenna_gain * 2,
1490				 channel->max_power * 2,
1491				 min((u32) MAX_RATE_POWER,
1492				 (u32) regulatory->power_limit));
1493
1494	if (!ath9k_hw_set_rf_regs(ah, chan, freqIndex)) {
1495		DPRINTF(ah, ATH_DBG_FATAL,
1496			"ar5416SetRfRegs failed\n");
1497		return -EIO;
1498	}
1499
1500	return 0;
1501}
1502
1503/****************************************/
1504/* Reset and Channel Switching Routines */
1505/****************************************/
1506
1507static void ath9k_hw_set_rfmode(struct ath_hw *ah, struct ath9k_channel *chan)
1508{
1509	u32 rfMode = 0;
1510
1511	if (chan == NULL)
1512		return;
1513
1514	rfMode |= (IS_CHAN_B(chan) || IS_CHAN_G(chan))
1515		? AR_PHY_MODE_DYNAMIC : AR_PHY_MODE_OFDM;
1516
1517	if (!AR_SREV_9280_10_OR_LATER(ah))
1518		rfMode |= (IS_CHAN_5GHZ(chan)) ?
1519			AR_PHY_MODE_RF5GHZ : AR_PHY_MODE_RF2GHZ;
1520
1521	if (AR_SREV_9280_20(ah) && IS_CHAN_A_5MHZ_SPACED(chan))
1522		rfMode |= (AR_PHY_MODE_DYNAMIC | AR_PHY_MODE_DYN_CCK_DISABLE);
1523
1524	REG_WRITE(ah, AR_PHY_MODE, rfMode);
1525}
1526
1527static void ath9k_hw_mark_phy_inactive(struct ath_hw *ah)
1528{
1529	REG_WRITE(ah, AR_PHY_ACTIVE, AR_PHY_ACTIVE_DIS);
1530}
1531
1532static inline void ath9k_hw_set_dma(struct ath_hw *ah)
1533{
1534	u32 regval;
1535
	/*
	 * Set AHB_MODE not to do cacheline prefetches.
	 */
1539	regval = REG_READ(ah, AR_AHB_MODE);
1540	REG_WRITE(ah, AR_AHB_MODE, regval | AR_AHB_PREFETCH_RD_EN);
1541
1542	/*
1543	 * let mac dma reads be in 128 byte chunks
1544	 */
1545	regval = REG_READ(ah, AR_TXCFG) & ~AR_TXCFG_DMASZ_MASK;
1546	REG_WRITE(ah, AR_TXCFG, regval | AR_TXCFG_DMASZ_128B);
1547
1548	/*
1549	 * Restore TX Trigger Level to its pre-reset value.
1550	 * The initial value depends on whether aggregation is enabled, and is
1551	 * adjusted whenever underruns are detected.
1552	 */
1553	REG_RMW_FIELD(ah, AR_TXCFG, AR_FTRIG, ah->tx_trig_level);
1554
1555	/*
1556	 * let mac dma writes be in 128 byte chunks
1557	 */
1558	regval = REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_DMASZ_MASK;
1559	REG_WRITE(ah, AR_RXCFG, regval | AR_RXCFG_DMASZ_128B);
1560
1561	/*
1562	 * Setup receive FIFO threshold to hold off TX activities
1563	 */
1564	REG_WRITE(ah, AR_RXFIFO_CFG, 0x200);
1565
1566	/*
1567	 * reduce the number of usable entries in PCU TXBUF to avoid
1568	 * wrap around issues.
1569	 */
1570	if (AR_SREV_9285(ah)) {
		/*
		 * On AR9285 the number of FIFOs is reduced to half,
		 * so also halve the usable TX buffer size to avoid
		 * data/delimiter underruns.
		 */
1575		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1576			  AR_9285_PCU_TXBUF_CTRL_USABLE_SIZE);
1577	} else if (!AR_SREV_9271(ah)) {
1578		REG_WRITE(ah, AR_PCU_TXBUF_CTRL,
1579			  AR_PCU_TXBUF_CTRL_USABLE_SIZE);
1580	}
1581}
1582
1583static void ath9k_hw_set_operating_mode(struct ath_hw *ah, int opmode)
1584{
1585	u32 val;
1586
1587	val = REG_READ(ah, AR_STA_ID1);
1588	val &= ~(AR_STA_ID1_STA_AP | AR_STA_ID1_ADHOC);
1589	switch (opmode) {
1590	case NL80211_IFTYPE_AP:
1591		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_STA_AP
1592			  | AR_STA_ID1_KSRCH_MODE);
1593		REG_CLR_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1594		break;
1595	case NL80211_IFTYPE_ADHOC:
1596	case NL80211_IFTYPE_MESH_POINT:
1597		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_ADHOC
1598			  | AR_STA_ID1_KSRCH_MODE);
1599		REG_SET_BIT(ah, AR_CFG, AR_CFG_AP_ADHOC_INDICATION);
1600		break;
1601	case NL80211_IFTYPE_STATION:
1602	case NL80211_IFTYPE_MONITOR:
1603		REG_WRITE(ah, AR_STA_ID1, val | AR_STA_ID1_KSRCH_MODE);
1604		break;
1605	}
1606}
1607
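/*
 * Split a fixed-point delta-slope coefficient into the mantissa/exponent
 * representation expected by the PHY timing registers, rounding before
 * the final shift.
 */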
1608static inline void ath9k_hw_get_delta_slope_vals(struct ath_hw *ah,
1609						 u32 coef_scaled,
1610						 u32 *coef_mantissa,
1611						 u32 *coef_exponent)
1612{
1613	u32 coef_exp, coef_man;
1614
1615	for (coef_exp = 31; coef_exp > 0; coef_exp--)
1616		if ((coef_scaled >> coef_exp) & 0x1)
1617			break;
1618
1619	coef_exp = 14 - (coef_exp - COEF_SCALE_S);
1620
1621	coef_man = coef_scaled + (1 << (COEF_SCALE_S - coef_exp - 1));
1622
1623	*coef_mantissa = coef_man >> (COEF_SCALE_S - coef_exp);
1624	*coef_exponent = coef_exp - 16;
1625}
1626
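/*
 * The delta-slope coefficient is the baseband clock divided by the
 * synthesizer centre frequency: 0x64000000 is 100 MHz in 2^24 fixed
 * point, halved or quartered for half- and quarter-rate channels. The
 * half-GI registers are programmed with 90% of that value.
 */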
1627static void ath9k_hw_set_delta_slope(struct ath_hw *ah,
1628				     struct ath9k_channel *chan)
1629{
1630	u32 coef_scaled, ds_coef_exp, ds_coef_man;
1631	u32 clockMhzScaled = 0x64000000;
1632	struct chan_centers centers;
1633
1634	if (IS_CHAN_HALF_RATE(chan))
1635		clockMhzScaled = clockMhzScaled >> 1;
1636	else if (IS_CHAN_QUARTER_RATE(chan))
1637		clockMhzScaled = clockMhzScaled >> 2;
1638
1639	ath9k_hw_get_channel_centers(ah, chan, &centers);
1640	coef_scaled = clockMhzScaled / centers.synth_center;
1641
1642	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1643				      &ds_coef_exp);
1644
1645	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1646		      AR_PHY_TIMING3_DSC_MAN, ds_coef_man);
1647	REG_RMW_FIELD(ah, AR_PHY_TIMING3,
1648		      AR_PHY_TIMING3_DSC_EXP, ds_coef_exp);
1649
1650	coef_scaled = (9 * coef_scaled) / 10;
1651
1652	ath9k_hw_get_delta_slope_vals(ah, coef_scaled, &ds_coef_man,
1653				      &ds_coef_exp);
1654
1655	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1656		      AR_PHY_HALFGI_DSC_MAN, ds_coef_man);
1657	REG_RMW_FIELD(ah, AR_PHY_HALFGI,
1658		      AR_PHY_HALFGI_DSC_EXP, ds_coef_exp);
1659}
1660
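/*
 * Warm or cold MAC reset through the RTC reset control register: assert
 * the requested reset bits, wait, release them and poll until the RTC
 * reports the reset as complete, then reinitialize the PLL.
 */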
1661static bool ath9k_hw_set_reset(struct ath_hw *ah, int type)
1662{
1663	u32 rst_flags;
1664	u32 tmpReg;
1665
1666	if (AR_SREV_9100(ah)) {
1667		u32 val = REG_READ(ah, AR_RTC_DERIVED_CLK);
1668		val &= ~AR_RTC_DERIVED_CLK_PERIOD;
1669		val |= SM(1, AR_RTC_DERIVED_CLK_PERIOD);
1670		REG_WRITE(ah, AR_RTC_DERIVED_CLK, val);
1671		(void)REG_READ(ah, AR_RTC_DERIVED_CLK);
1672	}
1673
1674	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1675		  AR_RTC_FORCE_WAKE_ON_INT);
1676
1677	if (AR_SREV_9100(ah)) {
1678		rst_flags = AR_RTC_RC_MAC_WARM | AR_RTC_RC_MAC_COLD |
1679			AR_RTC_RC_COLD_RESET | AR_RTC_RC_WARM_RESET;
1680	} else {
1681		tmpReg = REG_READ(ah, AR_INTR_SYNC_CAUSE);
1682		if (tmpReg &
1683		    (AR_INTR_SYNC_LOCAL_TIMEOUT |
1684		     AR_INTR_SYNC_RADM_CPL_TIMEOUT)) {
1685			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
1686			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
1687		} else {
1688			REG_WRITE(ah, AR_RC, AR_RC_AHB);
1689		}
1690
1691		rst_flags = AR_RTC_RC_MAC_WARM;
1692		if (type == ATH9K_RESET_COLD)
1693			rst_flags |= AR_RTC_RC_MAC_COLD;
1694	}
1695
1696	REG_WRITE(ah, AR_RTC_RC, rst_flags);
1697	udelay(50);
1698
1699	REG_WRITE(ah, AR_RTC_RC, 0);
1700	if (!ath9k_hw_wait(ah, AR_RTC_RC, AR_RTC_RC_M, 0, AH_WAIT_TIMEOUT)) {
1701		DPRINTF(ah, ATH_DBG_RESET,
1702			"RTC stuck in MAC reset\n");
1703		return false;
1704	}
1705
1706	if (!AR_SREV_9100(ah))
1707		REG_WRITE(ah, AR_RC, 0);
1708
1709	ath9k_hw_init_pll(ah, NULL);
1710
1711	if (AR_SREV_9100(ah))
1712		udelay(50);
1713
1714	return true;
1715}
1716
1717static bool ath9k_hw_set_reset_power_on(struct ath_hw *ah)
1718{
1719	REG_WRITE(ah, AR_RTC_FORCE_WAKE, AR_RTC_FORCE_WAKE_EN |
1720		  AR_RTC_FORCE_WAKE_ON_INT);
1721
1722	if (!AR_SREV_9100(ah))
1723		REG_WRITE(ah, AR_RC, AR_RC_AHB);
1724
1725	REG_WRITE(ah, AR_RTC_RESET, 0);
1726	udelay(2);
1727
1728	if (!AR_SREV_9100(ah))
1729		REG_WRITE(ah, AR_RC, 0);
1730
1731	REG_WRITE(ah, AR_RTC_RESET, 1);
1732
1733	if (!ath9k_hw_wait(ah,
1734			   AR_RTC_STATUS,
1735			   AR_RTC_STATUS_M,
1736			   AR_RTC_STATUS_ON,
1737			   AH_WAIT_TIMEOUT)) {
1738		DPRINTF(ah, ATH_DBG_RESET, "RTC not waking up\n");
1739		return false;
1740	}
1741
1742	ath9k_hw_read_revisions(ah);
1743
1744	return ath9k_hw_set_reset(ah, ATH9K_RESET_WARM);
1745}
1746
1747static bool ath9k_hw_set_reset_reg(struct ath_hw *ah, u32 type)
1748{
1749	REG_WRITE(ah, AR_RTC_FORCE_WAKE,
1750		  AR_RTC_FORCE_WAKE_EN | AR_RTC_FORCE_WAKE_ON_INT);
1751
1752	switch (type) {
1753	case ATH9K_RESET_POWER_ON:
1754		return ath9k_hw_set_reset_power_on(ah);
1755	case ATH9K_RESET_WARM:
1756	case ATH9K_RESET_COLD:
1757		return ath9k_hw_set_reset(ah, type);
1758	default:
1759		return false;
1760	}
1761}
1762
1763static void ath9k_hw_set_regs(struct ath_hw *ah, struct ath9k_channel *chan,
1764			      enum ath9k_ht_macmode macmode)
1765{
1766	u32 phymode;
1767	u32 enableDacFifo = 0;
1768
1769	if (AR_SREV_9285_10_OR_LATER(ah))
1770		enableDacFifo = (REG_READ(ah, AR_PHY_TURBO) &
1771					 AR_PHY_FC_ENABLE_DAC_FIFO);
1772
1773	phymode = AR_PHY_FC_HT_EN | AR_PHY_FC_SHORT_GI_40
1774		| AR_PHY_FC_SINGLE_HT_LTF1 | AR_PHY_FC_WALSH | enableDacFifo;
1775
1776	if (IS_CHAN_HT40(chan)) {
1777		phymode |= AR_PHY_FC_DYN2040_EN;
1778
1779		if ((chan->chanmode == CHANNEL_A_HT40PLUS) ||
1780		    (chan->chanmode == CHANNEL_G_HT40PLUS))
1781			phymode |= AR_PHY_FC_DYN2040_PRI_CH;
1782
1783		if (ah->extprotspacing == ATH9K_HT_EXTPROTSPACING_25)
1784			phymode |= AR_PHY_FC_DYN2040_EXT_CH;
1785	}
1786	REG_WRITE(ah, AR_PHY_TURBO, phymode);
1787
1788	ath9k_hw_set11nmac2040(ah, macmode);
1789
1790	REG_WRITE(ah, AR_GTXTO, 25 << AR_GTXTO_TIMEOUT_LIMIT_S);
1791	REG_WRITE(ah, AR_CST, 0xF << AR_CST_TIMEOUT_LIMIT_S);
1792}
1793
1794static bool ath9k_hw_chip_reset(struct ath_hw *ah,
1795				struct ath9k_channel *chan)
1796{
1797	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL)) {
1798		if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_POWER_ON))
1799			return false;
1800	} else if (!ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM))
1801		return false;
1802
1803	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
1804		return false;
1805
1806	ah->chip_fullsleep = false;
1807	ath9k_hw_init_pll(ah, chan);
1808	ath9k_hw_set_rfmode(ah, chan);
1809
1810	return true;
1811}
1812
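/*
 * Fast channel change: only legal when no frames are pending on any TX
 * queue. Request the RF bus, retune the synthesizer, reprogram TX power,
 * wait for the synth to settle, then redo delta slope and spur
 * mitigation for the new channel.
 */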
1813static bool ath9k_hw_channel_change(struct ath_hw *ah,
1814				    struct ath9k_channel *chan,
1815				    enum ath9k_ht_macmode macmode)
1816{
1817	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
1818	struct ieee80211_channel *channel = chan->chan;
1819	u32 synthDelay, qnum;
1820
1821	for (qnum = 0; qnum < AR_NUM_QCU; qnum++) {
1822		if (ath9k_hw_numtxpending(ah, qnum)) {
1823			DPRINTF(ah, ATH_DBG_QUEUE,
1824				"Transmit frames pending on queue %d\n", qnum);
1825			return false;
1826		}
1827	}
1828
1829	REG_WRITE(ah, AR_PHY_RFBUS_REQ, AR_PHY_RFBUS_REQ_EN);
1830	if (!ath9k_hw_wait(ah, AR_PHY_RFBUS_GRANT, AR_PHY_RFBUS_GRANT_EN,
1831			   AR_PHY_RFBUS_GRANT_EN, AH_WAIT_TIMEOUT)) {
1832		DPRINTF(ah, ATH_DBG_FATAL,
1833			"Could not kill baseband RX\n");
1834		return false;
1835	}
1836
1837	ath9k_hw_set_regs(ah, chan, macmode);
1838
1839	if (AR_SREV_9280_10_OR_LATER(ah)) {
1840		ath9k_hw_ar9280_set_channel(ah, chan);
1841	} else {
1842		if (!(ath9k_hw_set_channel(ah, chan))) {
1843			DPRINTF(ah, ATH_DBG_FATAL,
1844				"Failed to set channel\n");
1845			return false;
1846		}
1847	}
1848
1849	ah->eep_ops->set_txpower(ah, chan,
1850			     ath9k_regd_get_ctl(regulatory, chan),
1851			     channel->max_antenna_gain * 2,
1852			     channel->max_power * 2,
1853			     min((u32) MAX_RATE_POWER,
1854			     (u32) regulatory->power_limit));
1855
1856	synthDelay = REG_READ(ah, AR_PHY_RX_DELAY) & AR_PHY_RX_DELAY_DELAY;
1857	if (IS_CHAN_B(chan))
1858		synthDelay = (4 * synthDelay) / 22;
1859	else
1860		synthDelay /= 10;
1861
1862	udelay(synthDelay + BASE_ACTIVATE_DELAY);
1863
1864	REG_WRITE(ah, AR_PHY_RFBUS_REQ, 0);
1865
1866	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
1867		ath9k_hw_set_delta_slope(ah, chan);
1868
1869	if (AR_SREV_9280_10_OR_LATER(ah))
1870		ath9k_hw_9280_spur_mitigate(ah, chan);
1871	else
1872		ath9k_hw_spur_mitigate(ah, chan);
1873
1874	if (!chan->oneTimeCalsDone)
1875		chan->oneTimeCalsDone = true;
1876
1877	return true;
1878}
1879
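/*
 * Spur mitigation for AR9280 and later: scan the EEPROM spur list for a
 * spur close enough to the tuned centre frequency, convert it to a
 * baseband bin and program the spur RSSI/filter controls along with the
 * pilot and channel mask registers around that bin.
 */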
1880static void ath9k_hw_9280_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
1881{
1882	int bb_spur = AR_NO_SPUR;
1883	int freq;
1884	int bin, cur_bin;
1885	int bb_spur_off, spur_subchannel_sd;
1886	int spur_freq_sd;
1887	int spur_delta_phase;
1888	int denominator;
1889	int upper, lower, cur_vit_mask;
1890	int tmp, newVal;
1891	int i;
1892	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
1893			  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
1894	};
1895	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
1896			 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
1897	};
1898	int inc[4] = { 0, 100, 0, 0 };
1899	struct chan_centers centers;
1900
1901	int8_t mask_m[123];
1902	int8_t mask_p[123];
1903	int8_t mask_amt;
1904	int tmp_mask;
1905	int cur_bb_spur;
1906	bool is2GHz = IS_CHAN_2GHZ(chan);
1907
1908	memset(&mask_m, 0, sizeof(int8_t) * 123);
1909	memset(&mask_p, 0, sizeof(int8_t) * 123);
1910
1911	ath9k_hw_get_channel_centers(ah, chan, &centers);
1912	freq = centers.synth_center;
1913
1914	ah->config.spurmode = SPUR_ENABLE_EEPROM;
1915	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
1916		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
1917
1918		if (is2GHz)
1919			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_2GHZ;
1920		else
1921			cur_bb_spur = (cur_bb_spur / 10) + AR_BASE_FREQ_5GHZ;
1922
1923		if (AR_NO_SPUR == cur_bb_spur)
1924			break;
1925		cur_bb_spur = cur_bb_spur - freq;
1926
1927		if (IS_CHAN_HT40(chan)) {
1928			if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT40) &&
1929			    (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT40)) {
1930				bb_spur = cur_bb_spur;
1931				break;
1932			}
1933		} else if ((cur_bb_spur > -AR_SPUR_FEEQ_BOUND_HT20) &&
1934			   (cur_bb_spur < AR_SPUR_FEEQ_BOUND_HT20)) {
1935			bb_spur = cur_bb_spur;
1936			break;
1937		}
1938	}
1939
1940	if (AR_NO_SPUR == bb_spur) {
1941		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1942			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1943		return;
1944	} else {
1945		REG_CLR_BIT(ah, AR_PHY_FORCE_CLKEN_CCK,
1946			    AR_PHY_FORCE_CLKEN_CCK_MRC_MUX);
1947	}
1948
1949	bin = bb_spur * 320;
1950
1951	tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
1952
1953	newVal = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
1954			AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
1955			AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
1956			AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
1957	REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), newVal);
1958
1959	newVal = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
1960		  AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
1961		  AR_PHY_SPUR_REG_MASK_RATE_SELECT |
1962		  AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
1963		  SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
1964	REG_WRITE(ah, AR_PHY_SPUR_REG, newVal);
1965
1966	if (IS_CHAN_HT40(chan)) {
1967		if (bb_spur < 0) {
1968			spur_subchannel_sd = 1;
1969			bb_spur_off = bb_spur + 10;
1970		} else {
1971			spur_subchannel_sd = 0;
1972			bb_spur_off = bb_spur - 10;
1973		}
1974	} else {
1975		spur_subchannel_sd = 0;
1976		bb_spur_off = bb_spur;
1977	}
1978
1979	if (IS_CHAN_HT40(chan))
1980		spur_delta_phase =
1981			((bb_spur * 262144) /
1982			 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1983	else
1984		spur_delta_phase =
1985			((bb_spur * 524288) /
1986			 10) & AR_PHY_TIMING11_SPUR_DELTA_PHASE;
1987
1988	denominator = IS_CHAN_2GHZ(chan) ? 44 : 40;
1989	spur_freq_sd = ((bb_spur_off * 2048) / denominator) & 0x3ff;
1990
1991	newVal = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
1992		  SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
1993		  SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
1994	REG_WRITE(ah, AR_PHY_TIMING11, newVal);
1995
1996	newVal = spur_subchannel_sd << AR_PHY_SFCORR_SPUR_SUBCHNL_SD_S;
1997	REG_WRITE(ah, AR_PHY_SFCORR_EXT, newVal);
1998
1999	cur_bin = -6000;
2000	upper = bin + 100;
2001	lower = bin - 100;
2002
2003	for (i = 0; i < 4; i++) {
2004		int pilot_mask = 0;
2005		int chan_mask = 0;
2006		int bp = 0;
2007		for (bp = 0; bp < 30; bp++) {
2008			if ((cur_bin > lower) && (cur_bin < upper)) {
2009				pilot_mask = pilot_mask | 0x1 << bp;
2010				chan_mask = chan_mask | 0x1 << bp;
2011			}
2012			cur_bin += 100;
2013		}
2014		cur_bin += inc[i];
2015		REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2016		REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2017	}
2018
2019	cur_vit_mask = 6100;
2020	upper = bin + 120;
2021	lower = bin - 120;
2022
2023	for (i = 0; i < 123; i++) {
2024		if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2025
2026			/* workaround for gcc bug #37014 */
2027			volatile int tmp_v = abs(cur_vit_mask - bin);
2028
2029			if (tmp_v < 75)
2030				mask_amt = 1;
2031			else
2032				mask_amt = 0;
2033			if (cur_vit_mask < 0)
2034				mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2035			else
2036				mask_p[cur_vit_mask / 100] = mask_amt;
2037		}
2038		cur_vit_mask -= 100;
2039	}
2040
2041	tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2042		| (mask_m[48] << 26) | (mask_m[49] << 24)
2043		| (mask_m[50] << 22) | (mask_m[51] << 20)
2044		| (mask_m[52] << 18) | (mask_m[53] << 16)
2045		| (mask_m[54] << 14) | (mask_m[55] << 12)
2046		| (mask_m[56] << 10) | (mask_m[57] << 8)
2047		| (mask_m[58] << 6) | (mask_m[59] << 4)
2048		| (mask_m[60] << 2) | (mask_m[61] << 0);
2049	REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2050	REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2051
2052	tmp_mask = (mask_m[31] << 28)
2053		| (mask_m[32] << 26) | (mask_m[33] << 24)
2054		| (mask_m[34] << 22) | (mask_m[35] << 20)
2055		| (mask_m[36] << 18) | (mask_m[37] << 16)
2056		| (mask_m[48] << 14) | (mask_m[39] << 12)
2057		| (mask_m[40] << 10) | (mask_m[41] << 8)
2058		| (mask_m[42] << 6) | (mask_m[43] << 4)
2059		| (mask_m[44] << 2) | (mask_m[45] << 0);
2060	REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2061	REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2062
2063	tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2064		| (mask_m[18] << 26) | (mask_m[18] << 24)
2065		| (mask_m[20] << 22) | (mask_m[20] << 20)
2066		| (mask_m[22] << 18) | (mask_m[22] << 16)
2067		| (mask_m[24] << 14) | (mask_m[24] << 12)
2068		| (mask_m[25] << 10) | (mask_m[26] << 8)
2069		| (mask_m[27] << 6) | (mask_m[28] << 4)
2070		| (mask_m[29] << 2) | (mask_m[30] << 0);
2071	REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2072	REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2073
2074	tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2075		| (mask_m[2] << 26) | (mask_m[3] << 24)
2076		| (mask_m[4] << 22) | (mask_m[5] << 20)
2077		| (mask_m[6] << 18) | (mask_m[7] << 16)
2078		| (mask_m[8] << 14) | (mask_m[9] << 12)
2079		| (mask_m[10] << 10) | (mask_m[11] << 8)
2080		| (mask_m[12] << 6) | (mask_m[13] << 4)
2081		| (mask_m[14] << 2) | (mask_m[15] << 0);
2082	REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2083	REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2084
2085	tmp_mask = (mask_p[15] << 28)
2086		| (mask_p[14] << 26) | (mask_p[13] << 24)
2087		| (mask_p[12] << 22) | (mask_p[11] << 20)
2088		| (mask_p[10] << 18) | (mask_p[9] << 16)
2089		| (mask_p[8] << 14) | (mask_p[7] << 12)
2090		| (mask_p[6] << 10) | (mask_p[5] << 8)
2091		| (mask_p[4] << 6) | (mask_p[3] << 4)
2092		| (mask_p[2] << 2) | (mask_p[1] << 0);
2093	REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2094	REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2095
2096	tmp_mask = (mask_p[30] << 28)
2097		| (mask_p[29] << 26) | (mask_p[28] << 24)
2098		| (mask_p[27] << 22) | (mask_p[26] << 20)
2099		| (mask_p[25] << 18) | (mask_p[24] << 16)
2100		| (mask_p[23] << 14) | (mask_p[22] << 12)
2101		| (mask_p[21] << 10) | (mask_p[20] << 8)
2102		| (mask_p[19] << 6) | (mask_p[18] << 4)
2103		| (mask_p[17] << 2) | (mask_p[16] << 0);
2104	REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2105	REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2106
2107	tmp_mask = (mask_p[45] << 28)
2108		| (mask_p[44] << 26) | (mask_p[43] << 24)
2109		| (mask_p[42] << 22) | (mask_p[41] << 20)
2110		| (mask_p[40] << 18) | (mask_p[39] << 16)
2111		| (mask_p[38] << 14) | (mask_p[37] << 12)
2112		| (mask_p[36] << 10) | (mask_p[35] << 8)
2113		| (mask_p[34] << 6) | (mask_p[33] << 4)
2114		| (mask_p[32] << 2) | (mask_p[31] << 0);
2115	REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2116	REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2117
2118	tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2119		| (mask_p[59] << 26) | (mask_p[58] << 24)
2120		| (mask_p[57] << 22) | (mask_p[56] << 20)
2121		| (mask_p[55] << 18) | (mask_p[54] << 16)
2122		| (mask_p[53] << 14) | (mask_p[52] << 12)
2123		| (mask_p[51] << 10) | (mask_p[50] << 8)
2124		| (mask_p[49] << 6) | (mask_p[48] << 4)
2125		| (mask_p[47] << 2) | (mask_p[46] << 0);
2126	REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2127	REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2128}
2129
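/*
 * Spur mitigation for pre-AR9280 chips: same idea as the AR9280 version
 * above, but with the legacy bin spacing and without the HT40 sub-channel
 * handling.
 */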
2130static void ath9k_hw_spur_mitigate(struct ath_hw *ah, struct ath9k_channel *chan)
2131{
2132	int bb_spur = AR_NO_SPUR;
2133	int bin, cur_bin;
2134	int spur_freq_sd;
2135	int spur_delta_phase;
2136	int denominator;
2137	int upper, lower, cur_vit_mask;
2138	int tmp, new;
2139	int i;
2140	int pilot_mask_reg[4] = { AR_PHY_TIMING7, AR_PHY_TIMING8,
2141			  AR_PHY_PILOT_MASK_01_30, AR_PHY_PILOT_MASK_31_60
2142	};
2143	int chan_mask_reg[4] = { AR_PHY_TIMING9, AR_PHY_TIMING10,
2144			 AR_PHY_CHANNEL_MASK_01_30, AR_PHY_CHANNEL_MASK_31_60
2145	};
2146	int inc[4] = { 0, 100, 0, 0 };
2147
2148	int8_t mask_m[123];
2149	int8_t mask_p[123];
2150	int8_t mask_amt;
2151	int tmp_mask;
2152	int cur_bb_spur;
2153	bool is2GHz = IS_CHAN_2GHZ(chan);
2154
2155	memset(&mask_m, 0, sizeof(int8_t) * 123);
2156	memset(&mask_p, 0, sizeof(int8_t) * 123);
2157
2158	for (i = 0; i < AR_EEPROM_MODAL_SPURS; i++) {
2159		cur_bb_spur = ah->eep_ops->get_spur_channel(ah, i, is2GHz);
2160		if (AR_NO_SPUR == cur_bb_spur)
2161			break;
2162		cur_bb_spur = cur_bb_spur - (chan->channel * 10);
2163		if ((cur_bb_spur > -95) && (cur_bb_spur < 95)) {
2164			bb_spur = cur_bb_spur;
2165			break;
2166		}
2167	}
2168
2169	if (AR_NO_SPUR == bb_spur)
2170		return;
2171
2172	bin = bb_spur * 32;
2173
2174	tmp = REG_READ(ah, AR_PHY_TIMING_CTRL4(0));
2175	new = tmp | (AR_PHY_TIMING_CTRL4_ENABLE_SPUR_RSSI |
2176		     AR_PHY_TIMING_CTRL4_ENABLE_SPUR_FILTER |
2177		     AR_PHY_TIMING_CTRL4_ENABLE_CHAN_MASK |
2178		     AR_PHY_TIMING_CTRL4_ENABLE_PILOT_MASK);
2179
2180	REG_WRITE(ah, AR_PHY_TIMING_CTRL4(0), new);
2181
2182	new = (AR_PHY_SPUR_REG_MASK_RATE_CNTL |
2183	       AR_PHY_SPUR_REG_ENABLE_MASK_PPM |
2184	       AR_PHY_SPUR_REG_MASK_RATE_SELECT |
2185	       AR_PHY_SPUR_REG_ENABLE_VIT_SPUR_RSSI |
2186	       SM(SPUR_RSSI_THRESH, AR_PHY_SPUR_REG_SPUR_RSSI_THRESH));
2187	REG_WRITE(ah, AR_PHY_SPUR_REG, new);
2188
2189	spur_delta_phase = ((bb_spur * 524288) / 100) &
2190		AR_PHY_TIMING11_SPUR_DELTA_PHASE;
2191
2192	denominator = IS_CHAN_2GHZ(chan) ? 440 : 400;
2193	spur_freq_sd = ((bb_spur * 2048) / denominator) & 0x3ff;
2194
2195	new = (AR_PHY_TIMING11_USE_SPUR_IN_AGC |
2196	       SM(spur_freq_sd, AR_PHY_TIMING11_SPUR_FREQ_SD) |
2197	       SM(spur_delta_phase, AR_PHY_TIMING11_SPUR_DELTA_PHASE));
2198	REG_WRITE(ah, AR_PHY_TIMING11, new);
2199
2200	cur_bin = -6000;
2201	upper = bin + 100;
2202	lower = bin - 100;
2203
2204	for (i = 0; i < 4; i++) {
2205		int pilot_mask = 0;
2206		int chan_mask = 0;
2207		int bp = 0;
2208		for (bp = 0; bp < 30; bp++) {
2209			if ((cur_bin > lower) && (cur_bin < upper)) {
2210				pilot_mask = pilot_mask | 0x1 << bp;
2211				chan_mask = chan_mask | 0x1 << bp;
2212			}
2213			cur_bin += 100;
2214		}
2215		cur_bin += inc[i];
2216		REG_WRITE(ah, pilot_mask_reg[i], pilot_mask);
2217		REG_WRITE(ah, chan_mask_reg[i], chan_mask);
2218	}
2219
2220	cur_vit_mask = 6100;
2221	upper = bin + 120;
2222	lower = bin - 120;
2223
2224	for (i = 0; i < 123; i++) {
2225		if ((cur_vit_mask > lower) && (cur_vit_mask < upper)) {
2226
2227			/* workaround for gcc bug #37014 */
2228			volatile int tmp_v = abs(cur_vit_mask - bin);
2229
2230			if (tmp_v < 75)
2231				mask_amt = 1;
2232			else
2233				mask_amt = 0;
2234			if (cur_vit_mask < 0)
2235				mask_m[abs(cur_vit_mask / 100)] = mask_amt;
2236			else
2237				mask_p[cur_vit_mask / 100] = mask_amt;
2238		}
2239		cur_vit_mask -= 100;
2240	}
2241
2242	tmp_mask = (mask_m[46] << 30) | (mask_m[47] << 28)
2243		| (mask_m[48] << 26) | (mask_m[49] << 24)
2244		| (mask_m[50] << 22) | (mask_m[51] << 20)
2245		| (mask_m[52] << 18) | (mask_m[53] << 16)
2246		| (mask_m[54] << 14) | (mask_m[55] << 12)
2247		| (mask_m[56] << 10) | (mask_m[57] << 8)
2248		| (mask_m[58] << 6) | (mask_m[59] << 4)
2249		| (mask_m[60] << 2) | (mask_m[61] << 0);
2250	REG_WRITE(ah, AR_PHY_BIN_MASK_1, tmp_mask);
2251	REG_WRITE(ah, AR_PHY_VIT_MASK2_M_46_61, tmp_mask);
2252
2253	tmp_mask = (mask_m[31] << 28)
2254		| (mask_m[32] << 26) | (mask_m[33] << 24)
2255		| (mask_m[34] << 22) | (mask_m[35] << 20)
2256		| (mask_m[36] << 18) | (mask_m[37] << 16)
2257		| (mask_m[48] << 14) | (mask_m[39] << 12)
2258		| (mask_m[40] << 10) | (mask_m[41] << 8)
2259		| (mask_m[42] << 6) | (mask_m[43] << 4)
2260		| (mask_m[44] << 2) | (mask_m[45] << 0);
2261	REG_WRITE(ah, AR_PHY_BIN_MASK_2, tmp_mask);
2262	REG_WRITE(ah, AR_PHY_MASK2_M_31_45, tmp_mask);
2263
2264	tmp_mask = (mask_m[16] << 30) | (mask_m[16] << 28)
2265		| (mask_m[18] << 26) | (mask_m[18] << 24)
2266		| (mask_m[20] << 22) | (mask_m[20] << 20)
2267		| (mask_m[22] << 18) | (mask_m[22] << 16)
2268		| (mask_m[24] << 14) | (mask_m[24] << 12)
2269		| (mask_m[25] << 10) | (mask_m[26] << 8)
2270		| (mask_m[27] << 6) | (mask_m[28] << 4)
2271		| (mask_m[29] << 2) | (mask_m[30] << 0);
2272	REG_WRITE(ah, AR_PHY_BIN_MASK_3, tmp_mask);
2273	REG_WRITE(ah, AR_PHY_MASK2_M_16_30, tmp_mask);
2274
2275	tmp_mask = (mask_m[0] << 30) | (mask_m[1] << 28)
2276		| (mask_m[2] << 26) | (mask_m[3] << 24)
2277		| (mask_m[4] << 22) | (mask_m[5] << 20)
2278		| (mask_m[6] << 18) | (mask_m[7] << 16)
2279		| (mask_m[8] << 14) | (mask_m[9] << 12)
2280		| (mask_m[10] << 10) | (mask_m[11] << 8)
2281		| (mask_m[12] << 6) | (mask_m[13] << 4)
2282		| (mask_m[14] << 2) | (mask_m[15] << 0);
2283	REG_WRITE(ah, AR_PHY_MASK_CTL, tmp_mask);
2284	REG_WRITE(ah, AR_PHY_MASK2_M_00_15, tmp_mask);
2285
2286	tmp_mask = (mask_p[15] << 28)
2287		| (mask_p[14] << 26) | (mask_p[13] << 24)
2288		| (mask_p[12] << 22) | (mask_p[11] << 20)
2289		| (mask_p[10] << 18) | (mask_p[9] << 16)
2290		| (mask_p[8] << 14) | (mask_p[7] << 12)
2291		| (mask_p[6] << 10) | (mask_p[5] << 8)
2292		| (mask_p[4] << 6) | (mask_p[3] << 4)
2293		| (mask_p[2] << 2) | (mask_p[1] << 0);
2294	REG_WRITE(ah, AR_PHY_BIN_MASK2_1, tmp_mask);
2295	REG_WRITE(ah, AR_PHY_MASK2_P_15_01, tmp_mask);
2296
2297	tmp_mask = (mask_p[30] << 28)
2298		| (mask_p[29] << 26) | (mask_p[28] << 24)
2299		| (mask_p[27] << 22) | (mask_p[26] << 20)
2300		| (mask_p[25] << 18) | (mask_p[24] << 16)
2301		| (mask_p[23] << 14) | (mask_p[22] << 12)
2302		| (mask_p[21] << 10) | (mask_p[20] << 8)
2303		| (mask_p[19] << 6) | (mask_p[18] << 4)
2304		| (mask_p[17] << 2) | (mask_p[16] << 0);
2305	REG_WRITE(ah, AR_PHY_BIN_MASK2_2, tmp_mask);
2306	REG_WRITE(ah, AR_PHY_MASK2_P_30_16, tmp_mask);
2307
2308	tmp_mask = (mask_p[45] << 28)
2309		| (mask_p[44] << 26) | (mask_p[43] << 24)
2310		| (mask_p[42] << 22) | (mask_p[41] << 20)
2311		| (mask_p[40] << 18) | (mask_p[39] << 16)
2312		| (mask_p[38] << 14) | (mask_p[37] << 12)
2313		| (mask_p[36] << 10) | (mask_p[35] << 8)
2314		| (mask_p[34] << 6) | (mask_p[33] << 4)
2315		| (mask_p[32] << 2) | (mask_p[31] << 0);
2316	REG_WRITE(ah, AR_PHY_BIN_MASK2_3, tmp_mask);
2317	REG_WRITE(ah, AR_PHY_MASK2_P_45_31, tmp_mask);
2318
2319	tmp_mask = (mask_p[61] << 30) | (mask_p[60] << 28)
2320		| (mask_p[59] << 26) | (mask_p[58] << 24)
2321		| (mask_p[57] << 22) | (mask_p[56] << 20)
2322		| (mask_p[55] << 18) | (mask_p[54] << 16)
2323		| (mask_p[53] << 14) | (mask_p[52] << 12)
2324		| (mask_p[51] << 10) | (mask_p[50] << 8)
2325		| (mask_p[49] << 6) | (mask_p[48] << 4)
2326		| (mask_p[47] << 2) | (mask_p[46] << 0);
2327	REG_WRITE(ah, AR_PHY_BIN_MASK2_4, tmp_mask);
2328	REG_WRITE(ah, AR_PHY_MASK2_P_61_45, tmp_mask);
2329}
2330
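/*
 * Route the RF-silent (rfkill) GPIO to the baseband so the radio is shut
 * off in hardware when the switch is asserted.
 */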
2331static void ath9k_enable_rfkill(struct ath_hw *ah)
2332{
2333	REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL,
2334		    AR_GPIO_INPUT_EN_VAL_RFSILENT_BB);
2335
2336	REG_CLR_BIT(ah, AR_GPIO_INPUT_MUX2,
2337		    AR_GPIO_INPUT_MUX2_RFSILENT);
2338
2339	ath9k_hw_cfg_gpio_input(ah, ah->rfkill_gpio);
2340	REG_SET_BIT(ah, AR_PHY_TEST, RFSILENT_BB);
2341}
2342
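/*
 * Full hardware reset.  When only the channel changes and the fast channel
 * change conditions hold, ath9k_hw_channel_change() is used instead.
 * Otherwise the chip is reset, the INI tables are reloaded for the new
 * channel, the MAC/DMA/QCU/interrupt state is reprogrammed and calibration
 * is restarted.
 */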
2343int ath9k_hw_reset(struct ath_hw *ah, struct ath9k_channel *chan,
2344		    bool bChannelChange)
2345{
2346	u32 saveLedState;
2347	struct ath_softc *sc = ah->ah_sc;
2348	struct ath9k_channel *curchan = ah->curchan;
2349	u32 saveDefAntenna;
2350	u32 macStaId1;
2351	u64 tsf = 0;
2352	int i, rx_chainmask, r;
2353
2354	ah->extprotspacing = sc->ht_extprotspacing;
2355	ah->txchainmask = sc->tx_chainmask;
2356	ah->rxchainmask = sc->rx_chainmask;
2357
2358	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
2359		return -EIO;
2360
2361	if (curchan && !ah->chip_fullsleep)
2362		ath9k_hw_getnf(ah, curchan);
2363
2364	if (bChannelChange &&
2365	    !ah->chip_fullsleep &&
2366	    (ah->curchan != NULL) &&
2367	    (chan->channel != ah->curchan->channel) &&
2368	    ((chan->channelFlags & CHANNEL_ALL) ==
2369	     (ah->curchan->channelFlags & CHANNEL_ALL)) &&
2370	     !(AR_SREV_9280(ah) || IS_CHAN_A_5MHZ_SPACED(chan) ||
2371	     IS_CHAN_A_5MHZ_SPACED(ah->curchan))) {
2372
2373		if (ath9k_hw_channel_change(ah, chan, sc->tx_chan_width)) {
2374			ath9k_hw_loadnf(ah, ah->curchan);
2375			ath9k_hw_start_nfcal(ah);
2376			return 0;
2377		}
2378	}
2379
2380	saveDefAntenna = REG_READ(ah, AR_DEF_ANTENNA);
2381	if (saveDefAntenna == 0)
2382		saveDefAntenna = 1;
2383
2384	macStaId1 = REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_BASE_RATE_11B;
2385
2386	/* For chips on which RTC reset is done, save TSF before it gets cleared */
2387	if (AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2388		tsf = ath9k_hw_gettsf64(ah);
2389
2390	saveLedState = REG_READ(ah, AR_CFG_LED) &
2391		(AR_CFG_LED_ASSOC_CTL | AR_CFG_LED_MODE_SEL |
2392		 AR_CFG_LED_BLINK_THRESH_SEL | AR_CFG_LED_BLINK_SLOW);
2393
2394	ath9k_hw_mark_phy_inactive(ah);
2395
2396	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2397		REG_WRITE(ah,
2398			  AR9271_RESET_POWER_DOWN_CONTROL,
2399			  AR9271_RADIO_RF_RST);
2400		udelay(50);
2401	}
2402
2403	if (!ath9k_hw_chip_reset(ah, chan)) {
2404		DPRINTF(ah, ATH_DBG_FATAL, "Chip reset failed\n");
2405		return -EINVAL;
2406	}
2407
2408	if (AR_SREV_9271(ah) && ah->htc_reset_init) {
2409		ah->htc_reset_init = false;
2410		REG_WRITE(ah,
2411			  AR9271_RESET_POWER_DOWN_CONTROL,
2412			  AR9271_GATE_MAC_CTL);
2413		udelay(50);
2414	}
2415
2416	/* Restore TSF */
2417	if (tsf && AR_SREV_9280(ah) && ah->eep_ops->get_eeprom(ah, EEP_OL_PWRCTRL))
2418		ath9k_hw_settsf64(ah, tsf);
2419
2420	if (AR_SREV_9280_10_OR_LATER(ah))
2421		REG_SET_BIT(ah, AR_GPIO_INPUT_EN_VAL, AR_GPIO_JTAG_DISABLE);
2422
2423	if (AR_SREV_9287_12_OR_LATER(ah)) {
2424		/* Enable ASYNC FIFO */
2425		REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2426				AR_MAC_PCU_ASYNC_FIFO_REG3_DATAPATH_SEL);
2427		REG_SET_BIT(ah, AR_PHY_MODE, AR_PHY_MODE_ASYNCFIFO);
2428		REG_CLR_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2429				AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2430		REG_SET_BIT(ah, AR_MAC_PCU_ASYNC_FIFO_REG3,
2431				AR_MAC_PCU_ASYNC_FIFO_REG3_SOFT_RESET);
2432	}
2433	r = ath9k_hw_process_ini(ah, chan, sc->tx_chan_width);
2434	if (r)
2435		return r;
2436
2437	/* Setup MFP options for CCMP */
2438	if (AR_SREV_9280_20_OR_LATER(ah)) {
2439		/* Mask Retry(b11), PwrMgt(b12), MoreData(b13) to 0 in mgmt
2440		 * frames when constructing CCMP AAD. */
2441		REG_RMW_FIELD(ah, AR_AES_MUTE_MASK1, AR_AES_MUTE_MASK1_FC_MGMT,
2442			      0xc7ff);
2443		ah->sw_mgmt_crypto = false;
2444	} else if (AR_SREV_9160_10_OR_LATER(ah)) {
2445		/* Disable hardware crypto for management frames */
2446		REG_CLR_BIT(ah, AR_PCU_MISC_MODE2,
2447			    AR_PCU_MISC_MODE2_MGMT_CRYPTO_ENABLE);
2448		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2449			    AR_PCU_MISC_MODE2_NO_CRYPTO_FOR_NON_DATA_PKT);
2450		ah->sw_mgmt_crypto = true;
2451	} else
2452		ah->sw_mgmt_crypto = true;
2453
2454	if (IS_CHAN_OFDM(chan) || IS_CHAN_HT(chan))
2455		ath9k_hw_set_delta_slope(ah, chan);
2456
2457	if (AR_SREV_9280_10_OR_LATER(ah))
2458		ath9k_hw_9280_spur_mitigate(ah, chan);
2459	else
2460		ath9k_hw_spur_mitigate(ah, chan);
2461
2462	ah->eep_ops->set_board_values(ah, chan);
2463
2464	ath9k_hw_decrease_chain_power(ah, chan);
2465
2466	REG_WRITE(ah, AR_STA_ID0, get_unaligned_le32(ah->macaddr));
2467	REG_WRITE(ah, AR_STA_ID1, get_unaligned_le16(ah->macaddr + 4)
2468		  | macStaId1
2469		  | AR_STA_ID1_RTS_USE_DEF
2470		  | (ah->config.ack_6mb ? AR_STA_ID1_ACKCTS_6MB : 0)
2472		  | ah->sta_id1_defaults);
2473	ath9k_hw_set_operating_mode(ah, ah->opmode);
2474
2475	ath9k_hw_setbssidmask(ah);
2476
2477	REG_WRITE(ah, AR_DEF_ANTENNA, saveDefAntenna);
2478
2479	ath9k_hw_write_associd(ah);
2480
2481	REG_WRITE(ah, AR_ISR, ~0);
2482
2483	REG_WRITE(ah, AR_RSSI_THR, INIT_RSSI_THR);
2484
2485	if (AR_SREV_9280_10_OR_LATER(ah))
2486		ath9k_hw_ar9280_set_channel(ah, chan);
2487	else
2488		if (!(ath9k_hw_set_channel(ah, chan)))
2489			return -EIO;
2490
2491	for (i = 0; i < AR_NUM_DCU; i++)
2492		REG_WRITE(ah, AR_DQCUMASK(i), 1 << i);
2493
2494	ah->intr_txqs = 0;
2495	for (i = 0; i < ah->caps.total_queues; i++)
2496		ath9k_hw_resettxqueue(ah, i);
2497
2498	ath9k_hw_init_interrupt_masks(ah, ah->opmode);
2499	ath9k_hw_init_qos(ah);
2500
2501	if (ah->caps.hw_caps & ATH9K_HW_CAP_RFSILENT)
2502		ath9k_enable_rfkill(ah);
2503
2504	ath9k_hw_init_user_settings(ah);
2505
2506	if (AR_SREV_9287_12_OR_LATER(ah)) {
2507		REG_WRITE(ah, AR_D_GBL_IFS_SIFS,
2508			  AR_D_GBL_IFS_SIFS_ASYNC_FIFO_DUR);
2509		REG_WRITE(ah, AR_D_GBL_IFS_SLOT,
2510			  AR_D_GBL_IFS_SLOT_ASYNC_FIFO_DUR);
2511		REG_WRITE(ah, AR_D_GBL_IFS_EIFS,
2512			  AR_D_GBL_IFS_EIFS_ASYNC_FIFO_DUR);
2513
2514		REG_WRITE(ah, AR_TIME_OUT, AR_TIME_OUT_ACK_CTS_ASYNC_FIFO_DUR);
2515		REG_WRITE(ah, AR_USEC, AR_USEC_ASYNC_FIFO_DUR);
2516
2517		REG_SET_BIT(ah, AR_MAC_PCU_LOGIC_ANALYZER,
2518			    AR_MAC_PCU_LOGIC_ANALYZER_DISBUG20768);
2519		REG_RMW_FIELD(ah, AR_AHB_MODE, AR_AHB_CUSTOM_BURST_EN,
2520			      AR_AHB_CUSTOM_BURST_ASYNC_FIFO_VAL);
2521	}
2522	if (AR_SREV_9287_12_OR_LATER(ah)) {
2523		REG_SET_BIT(ah, AR_PCU_MISC_MODE2,
2524				AR_PCU_MISC_MODE2_ENABLE_AGGWEP);
2525	}
2526
2527	REG_WRITE(ah, AR_STA_ID1,
2528		  REG_READ(ah, AR_STA_ID1) | AR_STA_ID1_PRESERVE_SEQNUM);
2529
2530	ath9k_hw_set_dma(ah);
2531
2532	REG_WRITE(ah, AR_OBS, 8);
2533
2534	if (ah->config.intr_mitigation) {
2535		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_LAST, 500);
2536		REG_RMW_FIELD(ah, AR_RIMT, AR_RIMT_FIRST, 2000);
2537	}
2538
2539	ath9k_hw_init_bb(ah, chan);
2540
2541	if (!ath9k_hw_init_cal(ah, chan))
2542		return -EIO;
2543
2544	rx_chainmask = ah->rxchainmask;
2545	if ((rx_chainmask == 0x5) || (rx_chainmask == 0x3)) {
2546		REG_WRITE(ah, AR_PHY_RX_CHAINMASK, rx_chainmask);
2547		REG_WRITE(ah, AR_PHY_CAL_CHAINMASK, rx_chainmask);
2548	}
2549
2550	REG_WRITE(ah, AR_CFG_LED, saveLedState | AR_CFG_SCLK_32KHZ);
2551
2552	/*
2553 * For big-endian systems, turn on swapping for descriptors
2554	 */
2555	if (AR_SREV_9100(ah)) {
2556		u32 mask;
2557		mask = REG_READ(ah, AR_CFG);
2558		if (mask & (AR_CFG_SWRB | AR_CFG_SWTB | AR_CFG_SWRG)) {
2559			DPRINTF(ah, ATH_DBG_RESET,
2560				"CFG Byte Swap Set 0x%x\n", mask);
2561		} else {
2562			mask =
2563				INIT_CONFIG_STATUS | AR_CFG_SWRB | AR_CFG_SWTB;
2564			REG_WRITE(ah, AR_CFG, mask);
2565			DPRINTF(ah, ATH_DBG_RESET,
2566				"Setting CFG 0x%x\n", REG_READ(ah, AR_CFG));
2567		}
2568	} else {
2569		/* Configure AR9271 target WLAN */
2570		if (AR_SREV_9271(ah))
2571			REG_WRITE(ah, AR_CFG, AR_CFG_SWRB | AR_CFG_SWTB);
2572#ifdef __BIG_ENDIAN
2573		else
2574			REG_WRITE(ah, AR_CFG, AR_CFG_SWTD | AR_CFG_SWRD);
2575#endif
2576	}
2577
2578	if (ah->btcoex_hw.enabled)
2579		ath9k_hw_btcoex_enable(ah);
2580
2581	return 0;
2582}
2583
2584/************************/
2585/* Key Cache Management */
2586/************************/
2587
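/*
 * Clear a key cache entry, including the companion MIC entry (index + 64)
 * used when TKIP with hardware MIC is enabled.
 */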
2588bool ath9k_hw_keyreset(struct ath_hw *ah, u16 entry)
2589{
2590	u32 keyType;
2591
2592	if (entry >= ah->caps.keycache_size) {
2593		DPRINTF(ah, ATH_DBG_FATAL,
2594			"keychache entry %u out of range\n", entry);
2595		return false;
2596	}
2597
2598	keyType = REG_READ(ah, AR_KEYTABLE_TYPE(entry));
2599
2600	REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), 0);
2601	REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), 0);
2602	REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), 0);
2603	REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), 0);
2604	REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), 0);
2605	REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), AR_KEYTABLE_TYPE_CLR);
2606	REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), 0);
2607	REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), 0);
2608
2609	if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2610		u16 micentry = entry + 64;
2611
2612		REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), 0);
2613		REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2614		REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), 0);
2615		REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2616
2617	}
2618
2619	return true;
2620}
2621
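/*
 * Program the MAC address of a key cache entry.  The hardware stores the
 * address shifted right by one bit, and the valid bit is kept in the MAC1
 * register.
 */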
2622bool ath9k_hw_keysetmac(struct ath_hw *ah, u16 entry, const u8 *mac)
2623{
2624	u32 macHi, macLo;
2625
2626	if (entry >= ah->caps.keycache_size) {
2627		DPRINTF(ah, ATH_DBG_FATAL,
2628			"keychache entry %u out of range\n", entry);
2629		return false;
2630	}
2631
2632	if (mac != NULL) {
2633		macHi = (mac[5] << 8) | mac[4];
2634		macLo = (mac[3] << 24) |
2635			(mac[2] << 16) |
2636			(mac[1] << 8) |
2637			mac[0];
2638		macLo >>= 1;
2639		macLo |= (macHi & 1) << 31;
2640		macHi >>= 1;
2641	} else {
2642		macLo = macHi = 0;
2643	}
2644	REG_WRITE(ah, AR_KEYTABLE_MAC0(entry), macLo);
2645	REG_WRITE(ah, AR_KEYTABLE_MAC1(entry), macHi | AR_KEYTABLE_VALID);
2646
2647	return true;
2648}
2649
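/*
 * Install a key into the key cache.  TKIP keys also program the Michael
 * MIC entry (or entries); the order of the paired 32-bit register writes
 * matters, see the note below.
 */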
2650bool ath9k_hw_set_keycache_entry(struct ath_hw *ah, u16 entry,
2651				 const struct ath9k_keyval *k,
2652				 const u8 *mac)
2653{
2654	const struct ath9k_hw_capabilities *pCap = &ah->caps;
2655	u32 key0, key1, key2, key3, key4;
2656	u32 keyType;
2657
2658	if (entry >= pCap->keycache_size) {
2659		DPRINTF(ah, ATH_DBG_FATAL,
2660			"keycache entry %u out of range\n", entry);
2661		return false;
2662	}
2663
2664	switch (k->kv_type) {
2665	case ATH9K_CIPHER_AES_OCB:
2666		keyType = AR_KEYTABLE_TYPE_AES;
2667		break;
2668	case ATH9K_CIPHER_AES_CCM:
2669		if (!(pCap->hw_caps & ATH9K_HW_CAP_CIPHER_AESCCM)) {
2670			DPRINTF(ah, ATH_DBG_ANY,
2671				"AES-CCM not supported by mac rev 0x%x\n",
2672				ah->hw_version.macRev);
2673			return false;
2674		}
2675		keyType = AR_KEYTABLE_TYPE_CCM;
2676		break;
2677	case ATH9K_CIPHER_TKIP:
2678		keyType = AR_KEYTABLE_TYPE_TKIP;
2679		if (ATH9K_IS_MIC_ENABLED(ah)
2680		    && entry + 64 >= pCap->keycache_size) {
2681			DPRINTF(ah, ATH_DBG_ANY,
2682				"entry %u inappropriate for TKIP\n", entry);
2683			return false;
2684		}
2685		break;
2686	case ATH9K_CIPHER_WEP:
2687		if (k->kv_len < WLAN_KEY_LEN_WEP40) {
2688			DPRINTF(ah, ATH_DBG_ANY,
2689				"WEP key length %u too small\n", k->kv_len);
2690			return false;
2691		}
2692		if (k->kv_len <= WLAN_KEY_LEN_WEP40)
2693			keyType = AR_KEYTABLE_TYPE_40;
2694		else if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2695			keyType = AR_KEYTABLE_TYPE_104;
2696		else
2697			keyType = AR_KEYTABLE_TYPE_128;
2698		break;
2699	case ATH9K_CIPHER_CLR:
2700		keyType = AR_KEYTABLE_TYPE_CLR;
2701		break;
2702	default:
2703		DPRINTF(ah, ATH_DBG_FATAL,
2704			"cipher %u not supported\n", k->kv_type);
2705		return false;
2706	}
2707
2708	key0 = get_unaligned_le32(k->kv_val + 0);
2709	key1 = get_unaligned_le16(k->kv_val + 4);
2710	key2 = get_unaligned_le32(k->kv_val + 6);
2711	key3 = get_unaligned_le16(k->kv_val + 10);
2712	key4 = get_unaligned_le32(k->kv_val + 12);
2713	if (k->kv_len <= WLAN_KEY_LEN_WEP104)
2714		key4 &= 0xff;
2715
2716	/*
2717	 * Note: Key cache registers access a special memory area that requires
2718	 * two 32-bit writes to actually update the values in the internal
2719	 * memory. Consequently, the exact order and pairs used here must be
2720	 * maintained.
2721	 */
2722
2723	if (keyType == AR_KEYTABLE_TYPE_TKIP && ATH9K_IS_MIC_ENABLED(ah)) {
2724		u16 micentry = entry + 64;
2725
2726		/*
2727		 * Write inverted key[47:0] first to avoid Michael MIC errors
2728		 * on frames that could be sent or received at the same time.
2729		 * The correct key will be written in the end once everything
2730		 * else is ready.
2731		 */
2732		REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), ~key0);
2733		REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), ~key1);
2734
2735		/* Write key[95:48] */
2736		REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2737		REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2738
2739		/* Write key[127:96] and key type */
2740		REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2741		REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2742
2743		/* Write MAC address for the entry */
2744		(void) ath9k_hw_keysetmac(ah, entry, mac);
2745
2746		if (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) {
2747			/*
2748			 * TKIP uses two key cache entries:
2749			 * Michael MIC TX/RX keys in the same key cache entry
2750			 * (idx = main index + 64):
2751			 * key0 [31:0] = RX key [31:0]
2752			 * key1 [15:0] = TX key [31:16]
2753			 * key1 [31:16] = reserved
2754			 * key2 [31:0] = RX key [63:32]
2755			 * key3 [15:0] = TX key [15:0]
2756			 * key3 [31:16] = reserved
2757			 * key4 [31:0] = TX key [63:32]
2758			 */
2759			u32 mic0, mic1, mic2, mic3, mic4;
2760
2761			mic0 = get_unaligned_le32(k->kv_mic + 0);
2762			mic2 = get_unaligned_le32(k->kv_mic + 4);
2763			mic1 = get_unaligned_le16(k->kv_txmic + 2) & 0xffff;
2764			mic3 = get_unaligned_le16(k->kv_txmic + 0) & 0xffff;
2765			mic4 = get_unaligned_le32(k->kv_txmic + 4);
2766
2767			/* Write RX[31:0] and TX[31:16] */
2768			REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2769			REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), mic1);
2770
2771			/* Write RX[63:32] and TX[15:0] */
2772			REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2773			REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), mic3);
2774
2775			/* Write TX[63:32] and keyType(reserved) */
2776			REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), mic4);
2777			REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2778				  AR_KEYTABLE_TYPE_CLR);
2779
2780		} else {
2781			/*
2782			 * TKIP uses four key cache entries (two for group
2783			 * keys):
2784			 * Michael MIC TX/RX keys are in different key cache
2785			 * entries (idx = main index + 64 for TX and
2786			 * main index + 32 + 96 for RX):
2787			 * key0 [31:0] = TX/RX MIC key [31:0]
2788			 * key1 [31:0] = reserved
2789			 * key2 [31:0] = TX/RX MIC key [63:32]
2790			 * key3 [31:0] = reserved
2791			 * key4 [31:0] = reserved
2792			 *
2793			 * Upper layer code will call this function separately
2794			 * for TX and RX keys when these registers offsets are
2795			 * used.
2796			 */
2797			u32 mic0, mic2;
2798
2799			mic0 = get_unaligned_le32(k->kv_mic + 0);
2800			mic2 = get_unaligned_le32(k->kv_mic + 4);
2801
2802			/* Write MIC key[31:0] */
2803			REG_WRITE(ah, AR_KEYTABLE_KEY0(micentry), mic0);
2804			REG_WRITE(ah, AR_KEYTABLE_KEY1(micentry), 0);
2805
2806			/* Write MIC key[63:32] */
2807			REG_WRITE(ah, AR_KEYTABLE_KEY2(micentry), mic2);
2808			REG_WRITE(ah, AR_KEYTABLE_KEY3(micentry), 0);
2809
2810			/* Write TX[63:32] and keyType(reserved) */
2811			REG_WRITE(ah, AR_KEYTABLE_KEY4(micentry), 0);
2812			REG_WRITE(ah, AR_KEYTABLE_TYPE(micentry),
2813				  AR_KEYTABLE_TYPE_CLR);
2814		}
2815
2816		/* MAC address registers are reserved for the MIC entry */
2817		REG_WRITE(ah, AR_KEYTABLE_MAC0(micentry), 0);
2818		REG_WRITE(ah, AR_KEYTABLE_MAC1(micentry), 0);
2819
2820		/*
2821		 * Write the correct (un-inverted) key[47:0] last to enable
2822		 * TKIP now that all other registers are set with correct
2823		 * values.
2824		 */
2825		REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2826		REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2827	} else {
2828		/* Write key[47:0] */
2829		REG_WRITE(ah, AR_KEYTABLE_KEY0(entry), key0);
2830		REG_WRITE(ah, AR_KEYTABLE_KEY1(entry), key1);
2831
2832		/* Write key[95:48] */
2833		REG_WRITE(ah, AR_KEYTABLE_KEY2(entry), key2);
2834		REG_WRITE(ah, AR_KEYTABLE_KEY3(entry), key3);
2835
2836		/* Write key[127:96] and key type */
2837		REG_WRITE(ah, AR_KEYTABLE_KEY4(entry), key4);
2838		REG_WRITE(ah, AR_KEYTABLE_TYPE(entry), keyType);
2839
2840		/* Write MAC address for the entry */
2841		(void) ath9k_hw_keysetmac(ah, entry, mac);
2842	}
2843
2844	return true;
2845}
2846
2847bool ath9k_hw_keyisvalid(struct ath_hw *ah, u16 entry)
2848{
2849	if (entry < ah->caps.keycache_size) {
2850		u32 val = REG_READ(ah, AR_KEYTABLE_MAC1(entry));
2851		if (val & AR_KEYTABLE_VALID)
2852			return true;
2853	}
2854	return false;
2855}
2856
2857/******************************/
2858/* Power Management (Chipset) */
2859/******************************/
2860
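/*
 * Full-sleep entry: enable MAC power save, drop the RTC force-wake,
 * reset the AHB/host interface (except on AR9100) and disable the RTC.
 */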
2861static void ath9k_set_power_sleep(struct ath_hw *ah, int setChip)
2862{
2863	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2864	if (setChip) {
2865		REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2866			    AR_RTC_FORCE_WAKE_EN);
2867		if (!AR_SREV_9100(ah))
2868			REG_WRITE(ah, AR_RC, AR_RC_AHB | AR_RC_HOSTIF);
2869
2870		REG_CLR_BIT(ah, (AR_RTC_RESET),
2871			    AR_RTC_RESET_EN);
2872	}
2873}
2874
2875static void ath9k_set_power_network_sleep(struct ath_hw *ah, int setChip)
2876{
2877	REG_SET_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2878	if (setChip) {
2879		struct ath9k_hw_capabilities *pCap = &ah->caps;
2880
2881		if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
2882			REG_WRITE(ah, AR_RTC_FORCE_WAKE,
2883				  AR_RTC_FORCE_WAKE_ON_INT);
2884		} else {
2885			REG_CLR_BIT(ah, AR_RTC_FORCE_WAKE,
2886				    AR_RTC_FORCE_WAKE_EN);
2887		}
2888	}
2889}
2890
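/*
 * Wake the chip up: issue a power-on reset first if the RTC is shut down,
 * then assert force-wake and poll the RTC status until it reports ON
 * (bounded by POWER_UP_TIME).
 */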
2891static bool ath9k_hw_set_power_awake(struct ath_hw *ah, int setChip)
2892{
2893	u32 val;
2894	int i;
2895
2896	if (setChip) {
2897		if ((REG_READ(ah, AR_RTC_STATUS) &
2898		     AR_RTC_STATUS_M) == AR_RTC_STATUS_SHUTDOWN) {
2899			if (!ath9k_hw_set_reset_reg(ah,
2900						    ATH9K_RESET_POWER_ON)) {
2901				return false;
2902			}
2903		}
2904		if (AR_SREV_9100(ah))
2905			REG_SET_BIT(ah, AR_RTC_RESET,
2906				    AR_RTC_RESET_EN);
2907
2908		REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2909			    AR_RTC_FORCE_WAKE_EN);
2910		udelay(50);
2911
2912		for (i = POWER_UP_TIME / 50; i > 0; i--) {
2913			val = REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M;
2914			if (val == AR_RTC_STATUS_ON)
2915				break;
2916			udelay(50);
2917			REG_SET_BIT(ah, AR_RTC_FORCE_WAKE,
2918				    AR_RTC_FORCE_WAKE_EN);
2919		}
2920		if (i == 0) {
2921			DPRINTF(ah, ATH_DBG_FATAL,
2922				"Failed to wakeup in %uus\n", POWER_UP_TIME / 20);
2923			return false;
2924		}
2925	}
2926
2927	REG_CLR_BIT(ah, AR_STA_ID1, AR_STA_ID1_PWR_SAV);
2928
2929	return true;
2930}
2931
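/* Switch between the AWAKE, FULL-SLEEP and NETWORK SLEEP power states. */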
2932bool ath9k_hw_setpower(struct ath_hw *ah, enum ath9k_power_mode mode)
2933{
2934	int status = true, setChip = true;
2935	static const char *modes[] = {
2936		"AWAKE",
2937		"FULL-SLEEP",
2938		"NETWORK SLEEP",
2939		"UNDEFINED"
2940	};
2941
2942	if (ah->power_mode == mode)
2943		return status;
2944
2945	DPRINTF(ah, ATH_DBG_RESET, "%s -> %s\n",
2946		modes[ah->power_mode], modes[mode]);
2947
2948	switch (mode) {
2949	case ATH9K_PM_AWAKE:
2950		status = ath9k_hw_set_power_awake(ah, setChip);
2951		break;
2952	case ATH9K_PM_FULL_SLEEP:
2953		ath9k_set_power_sleep(ah, setChip);
2954		ah->chip_fullsleep = true;
2955		break;
2956	case ATH9K_PM_NETWORK_SLEEP:
2957		ath9k_set_power_network_sleep(ah, setChip);
2958		break;
2959	default:
2960		DPRINTF(ah, ATH_DBG_FATAL,
2961			"Unknown power mode %u\n", mode);
2962		return false;
2963	}
2964	ah->power_mode = mode;
2965
2966	return status;
2967}
2968
2969/*
2970 * Helper for ASPM support.
2971 *
2972 * Disable PLL when in L0s as well as receiver clock when in L1.
2973 * This power saving option must be enabled through the SerDes.
2974 *
2975 * Programming the SerDes must go through the same 288 bit serial shift
2976 * register as the other analog registers.  Hence the 9 writes.
2977 */
2978void ath9k_hw_configpcipowersave(struct ath_hw *ah, int restore, int power_off)
2979{
2980	u8 i;
2981	u32 val;
2982
2983	if (!ah->is_pciexpress)
2984		return;
2985
2986	/* Do not touch SerDes registers */
2987	if (ah->config.pcie_powersave_enable == 2)
2988		return;
2989
2990	/* Nothing to do on restore for 11N */
2991	if (!restore) {
2992		if (AR_SREV_9280_20_OR_LATER(ah)) {
2993			/*
2994			 * AR9280 2.0 or later chips use the SerDes values from
2995			 * initvals.h, selected for the chipset during
2996			 * ath9k_hw_init()
2997			 */
2998			for (i = 0; i < ah->iniPcieSerdes.ia_rows; i++) {
2999				REG_WRITE(ah, INI_RA(&ah->iniPcieSerdes, i, 0),
3000					  INI_RA(&ah->iniPcieSerdes, i, 1));
3001			}
3002		} else if (AR_SREV_9280(ah) &&
3003			   (ah->hw_version.macRev == AR_SREV_REVISION_9280_10)) {
3004			REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fd00);
3005			REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3006
3007			/* RX shut off when elecidle is asserted */
3008			REG_WRITE(ah, AR_PCIE_SERDES, 0xa8000019);
3009			REG_WRITE(ah, AR_PCIE_SERDES, 0x13160820);
3010			REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980560);
3011
3012			/* Shut off CLKREQ active in L1 */
3013			if (ah->config.pcie_clock_req)
3014				REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffc);
3015			else
3016				REG_WRITE(ah, AR_PCIE_SERDES, 0x401deffd);
3017
3018			REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3019			REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3020			REG_WRITE(ah, AR_PCIE_SERDES, 0x00043007);
3021
3022			/* Load the new settings */
3023			REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3024
3025		} else {
3026			REG_WRITE(ah, AR_PCIE_SERDES, 0x9248fc00);
3027			REG_WRITE(ah, AR_PCIE_SERDES, 0x24924924);
3028
3029			/* RX shut off when elecidle is asserted */
3030			REG_WRITE(ah, AR_PCIE_SERDES, 0x28000039);
3031			REG_WRITE(ah, AR_PCIE_SERDES, 0x53160824);
3032			REG_WRITE(ah, AR_PCIE_SERDES, 0xe5980579);
3033
3034			/*
3035			 * Ignore ah->ah_config.pcie_clock_req setting for
3036			 * pre-AR9280 11n
3037			 */
3038			REG_WRITE(ah, AR_PCIE_SERDES, 0x001defff);
3039
3040			REG_WRITE(ah, AR_PCIE_SERDES, 0x1aaabe40);
3041			REG_WRITE(ah, AR_PCIE_SERDES, 0xbe105554);
3042			REG_WRITE(ah, AR_PCIE_SERDES, 0x000e3007);
3043
3044			/* Load the new settings */
3045			REG_WRITE(ah, AR_PCIE_SERDES2, 0x00000000);
3046		}
3047
3048		udelay(1000);
3049
3050		/* set bit 19 to allow forcing of pcie core into L1 state */
3051		REG_SET_BIT(ah, AR_PCIE_PM_CTRL, AR_PCIE_PM_CTRL_ENA);
3052
3053		/* Several PCIe workaround settings to ensure proper behaviour */
3054		if (ah->config.pcie_waen) {
3055			val = ah->config.pcie_waen;
3056			if (!power_off)
3057				val &= (~AR_WA_D3_L1_DISABLE);
3058		} else {
3059			if (AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3060			    AR_SREV_9287(ah)) {
3061				val = AR9285_WA_DEFAULT;
3062				if (!power_off)
3063					val &= (~AR_WA_D3_L1_DISABLE);
3064			} else if (AR_SREV_9280(ah)) {
3065				/*
3066				 * On AR9280 chips bit 22 of 0x4004 needs to be
3067				 * set, otherwise the card may disappear.
3068				 */
3069				val = AR9280_WA_DEFAULT;
3070				if (!power_off)
3071					val &= (~AR_WA_D3_L1_DISABLE);
3072			} else
3073				val = AR_WA_DEFAULT;
3074		}
3075
3076		REG_WRITE(ah, AR_WA, val);
3077	}
3078
3079	if (power_off) {
3080		/*
3081		 * Set PCIe workaround bits
3082		 * bit 14 in WA register (disable L1) should only
3083		 * be set when device enters D3 and be cleared
3084		 * when device comes back to D0.
3085		 */
3086		if (ah->config.pcie_waen) {
3087			if (ah->config.pcie_waen & AR_WA_D3_L1_DISABLE)
3088				REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3089		} else {
3090			if (((AR_SREV_9285(ah) || AR_SREV_9271(ah) ||
3091			      AR_SREV_9287(ah)) &&
3092			     (AR9285_WA_DEFAULT & AR_WA_D3_L1_DISABLE)) ||
3093			    (AR_SREV_9280(ah) &&
3094			     (AR9280_WA_DEFAULT & AR_WA_D3_L1_DISABLE))) {
3095				REG_SET_BIT(ah, AR_WA, AR_WA_D3_L1_DISABLE);
3096			}
3097		}
3098	}
3099}
3100
3101/**********************/
3102/* Interrupt Handling */
3103/**********************/
3104
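/*
 * Quick check whether this device raised an interrupt, based on the async
 * and sync cause registers.  AR9100 has no such registers and always
 * reports an interrupt as pending.
 */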
3105bool ath9k_hw_intrpend(struct ath_hw *ah)
3106{
3107	u32 host_isr;
3108
3109	if (AR_SREV_9100(ah))
3110		return true;
3111
3112	host_isr = REG_READ(ah, AR_INTR_ASYNC_CAUSE);
3113	if ((host_isr & AR_INTR_MAC_IRQ) && (host_isr != AR_INTR_SPURIOUS))
3114		return true;
3115
3116	host_isr = REG_READ(ah, AR_INTR_SYNC_CAUSE);
3117	if ((host_isr & AR_INTR_SYNC_DEFAULT)
3118	    && (host_isr != AR_INTR_SPURIOUS))
3119		return true;
3120
3121	return false;
3122}
3123
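/*
 * Read and decode the interrupt status registers into ATH9K_INT_* bits,
 * recording per-queue TX status and flagging fatal PCI/sync errors.
 */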
3124bool ath9k_hw_getisr(struct ath_hw *ah, enum ath9k_int *masked)
3125{
3126	u32 isr = 0;
3127	u32 mask2 = 0;
3128	struct ath9k_hw_capabilities *pCap = &ah->caps;
3129	u32 sync_cause = 0;
3130	bool fatal_int = false;
3131
3132	if (!AR_SREV_9100(ah)) {
3133		if (REG_READ(ah, AR_INTR_ASYNC_CAUSE) & AR_INTR_MAC_IRQ) {
3134			if ((REG_READ(ah, AR_RTC_STATUS) & AR_RTC_STATUS_M)
3135			    == AR_RTC_STATUS_ON) {
3136				isr = REG_READ(ah, AR_ISR);
3137			}
3138		}
3139
3140		sync_cause = REG_READ(ah, AR_INTR_SYNC_CAUSE) &
3141			AR_INTR_SYNC_DEFAULT;
3142
3143		*masked = 0;
3144
3145		if (!isr && !sync_cause)
3146			return false;
3147	} else {
3148		*masked = 0;
3149		isr = REG_READ(ah, AR_ISR);
3150	}
3151
3152	if (isr) {
3153		if (isr & AR_ISR_BCNMISC) {
3154			u32 isr2;
3155			isr2 = REG_READ(ah, AR_ISR_S2);
3156			if (isr2 & AR_ISR_S2_TIM)
3157				mask2 |= ATH9K_INT_TIM;
3158			if (isr2 & AR_ISR_S2_DTIM)
3159				mask2 |= ATH9K_INT_DTIM;
3160			if (isr2 & AR_ISR_S2_DTIMSYNC)
3161				mask2 |= ATH9K_INT_DTIMSYNC;
3162			if (isr2 & (AR_ISR_S2_CABEND))
3163				mask2 |= ATH9K_INT_CABEND;
3164			if (isr2 & AR_ISR_S2_GTT)
3165				mask2 |= ATH9K_INT_GTT;
3166			if (isr2 & AR_ISR_S2_CST)
3167				mask2 |= ATH9K_INT_CST;
3168			if (isr2 & AR_ISR_S2_TSFOOR)
3169				mask2 |= ATH9K_INT_TSFOOR;
3170		}
3171
3172		isr = REG_READ(ah, AR_ISR_RAC);
3173		if (isr == 0xffffffff) {
3174			*masked = 0;
3175			return false;
3176		}
3177
3178		*masked = isr & ATH9K_INT_COMMON;
3179
3180		if (ah->config.intr_mitigation) {
3181			if (isr & (AR_ISR_RXMINTR | AR_ISR_RXINTM))
3182				*masked |= ATH9K_INT_RX;
3183		}
3184
3185		if (isr & (AR_ISR_RXOK | AR_ISR_RXERR))
3186			*masked |= ATH9K_INT_RX;
3187		if (isr &
3188		    (AR_ISR_TXOK | AR_ISR_TXDESC | AR_ISR_TXERR |
3189		     AR_ISR_TXEOL)) {
3190			u32 s0_s, s1_s;
3191
3192			*masked |= ATH9K_INT_TX;
3193
3194			s0_s = REG_READ(ah, AR_ISR_S0_S);
3195			ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXOK);
3196			ah->intr_txqs |= MS(s0_s, AR_ISR_S0_QCU_TXDESC);
3197
3198			s1_s = REG_READ(ah, AR_ISR_S1_S);
3199			ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXERR);
3200			ah->intr_txqs |= MS(s1_s, AR_ISR_S1_QCU_TXEOL);
3201		}
3202
3203		if (isr & AR_ISR_RXORN) {
3204			DPRINTF(ah, ATH_DBG_INTERRUPT,
3205				"receive FIFO overrun interrupt\n");
3206		}
3207
3208		if (!AR_SREV_9100(ah)) {
3209			if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3210				u32 isr5 = REG_READ(ah, AR_ISR_S5_S);
3211				if (isr5 & AR_ISR_S5_TIM_TIMER)
3212					*masked |= ATH9K_INT_TIM_TIMER;
3213			}
3214		}
3215
3216		*masked |= mask2;
3217	}
3218
3219	if (AR_SREV_9100(ah))
3220		return true;
3221
3222	if (isr & AR_ISR_GENTMR) {
3223		u32 s5_s;
3224
3225		s5_s = REG_READ(ah, AR_ISR_S5_S);
3226		if (isr & AR_ISR_GENTMR) {
3227			ah->intr_gen_timer_trigger =
3228				MS(s5_s, AR_ISR_S5_GENTIMER_TRIG);
3229
3230			ah->intr_gen_timer_thresh =
3231				MS(s5_s, AR_ISR_S5_GENTIMER_THRESH);
3232
3233			if (ah->intr_gen_timer_trigger)
3234				*masked |= ATH9K_INT_GENTIMER;
3235
3236		}
3237	}
3238
3239	if (sync_cause) {
3240		fatal_int =
3241			(sync_cause &
3242			 (AR_INTR_SYNC_HOST1_FATAL | AR_INTR_SYNC_HOST1_PERR))
3243			? true : false;
3244
3245		if (fatal_int) {
3246			if (sync_cause & AR_INTR_SYNC_HOST1_FATAL) {
3247				DPRINTF(ah, ATH_DBG_ANY,
3248					"received PCI FATAL interrupt\n");
3249			}
3250			if (sync_cause & AR_INTR_SYNC_HOST1_PERR) {
3251				DPRINTF(ah, ATH_DBG_ANY,
3252					"received PCI PERR interrupt\n");
3253			}
3254			*masked |= ATH9K_INT_FATAL;
3255		}
3256		if (sync_cause & AR_INTR_SYNC_RADM_CPL_TIMEOUT) {
3257			DPRINTF(ah, ATH_DBG_INTERRUPT,
3258				"AR_INTR_SYNC_RADM_CPL_TIMEOUT\n");
3259			REG_WRITE(ah, AR_RC, AR_RC_HOSTIF);
3260			REG_WRITE(ah, AR_RC, 0);
3261			*masked |= ATH9K_INT_FATAL;
3262		}
3263		if (sync_cause & AR_INTR_SYNC_LOCAL_TIMEOUT) {
3264			DPRINTF(ah, ATH_DBG_INTERRUPT,
3265				"AR_INTR_SYNC_LOCAL_TIMEOUT\n");
3266		}
3267
3268		REG_WRITE(ah, AR_INTR_SYNC_CAUSE_CLR, sync_cause);
3269		(void) REG_READ(ah, AR_INTR_SYNC_CAUSE_CLR);
3270	}
3271
3272	return true;
3273}
3274
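/*
 * Program the interrupt mask registers from the requested ATH9K_INT_*
 * bits and return the previously configured mask.
 */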
3275enum ath9k_int ath9k_hw_set_interrupts(struct ath_hw *ah, enum ath9k_int ints)
3276{
3277	u32 omask = ah->mask_reg;
3278	u32 mask, mask2;
3279	struct ath9k_hw_capabilities *pCap = &ah->caps;
3280
3281	DPRINTF(ah, ATH_DBG_INTERRUPT, "0x%x => 0x%x\n", omask, ints);
3282
3283	if (omask & ATH9K_INT_GLOBAL) {
3284		DPRINTF(ah, ATH_DBG_INTERRUPT, "disable IER\n");
3285		REG_WRITE(ah, AR_IER, AR_IER_DISABLE);
3286		(void) REG_READ(ah, AR_IER);
3287		if (!AR_SREV_9100(ah)) {
3288			REG_WRITE(ah, AR_INTR_ASYNC_ENABLE, 0);
3289			(void) REG_READ(ah, AR_INTR_ASYNC_ENABLE);
3290
3291			REG_WRITE(ah, AR_INTR_SYNC_ENABLE, 0);
3292			(void) REG_READ(ah, AR_INTR_SYNC_ENABLE);
3293		}
3294	}
3295
3296	mask = ints & ATH9K_INT_COMMON;
3297	mask2 = 0;
3298
3299	if (ints & ATH9K_INT_TX) {
3300		if (ah->txok_interrupt_mask)
3301			mask |= AR_IMR_TXOK;
3302		if (ah->txdesc_interrupt_mask)
3303			mask |= AR_IMR_TXDESC;
3304		if (ah->txerr_interrupt_mask)
3305			mask |= AR_IMR_TXERR;
3306		if (ah->txeol_interrupt_mask)
3307			mask |= AR_IMR_TXEOL;
3308	}
3309	if (ints & ATH9K_INT_RX) {
3310		mask |= AR_IMR_RXERR;
3311		if (ah->config.intr_mitigation)
3312			mask |= AR_IMR_RXMINTR | AR_IMR_RXINTM;
3313		else
3314			mask |= AR_IMR_RXOK | AR_IMR_RXDESC;
3315		if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP))
3316			mask |= AR_IMR_GENTMR;
3317	}
3318
3319	if (ints & (ATH9K_INT_BMISC)) {
3320		mask |= AR_IMR_BCNMISC;
3321		if (ints & ATH9K_INT_TIM)
3322			mask2 |= AR_IMR_S2_TIM;
3323		if (ints & ATH9K_INT_DTIM)
3324			mask2 |= AR_IMR_S2_DTIM;
3325		if (ints & ATH9K_INT_DTIMSYNC)
3326			mask2 |= AR_IMR_S2_DTIMSYNC;
3327		if (ints & ATH9K_INT_CABEND)
3328			mask2 |= AR_IMR_S2_CABEND;
3329		if (ints & ATH9K_INT_TSFOOR)
3330			mask2 |= AR_IMR_S2_TSFOOR;
3331	}
3332
3333	if (ints & (ATH9K_INT_GTT | ATH9K_INT_CST)) {
3334		mask |= AR_IMR_BCNMISC;
3335		if (ints & ATH9K_INT_GTT)
3336			mask2 |= AR_IMR_S2_GTT;
3337		if (ints & ATH9K_INT_CST)
3338			mask2 |= AR_IMR_S2_CST;
3339	}
3340
3341	DPRINTF(ah, ATH_DBG_INTERRUPT, "new IMR 0x%x\n", mask);
3342	REG_WRITE(ah, AR_IMR, mask);
3343	mask = REG_READ(ah, AR_IMR_S2) & ~(AR_IMR_S2_TIM |
3344					   AR_IMR_S2_DTIM |
3345					   AR_IMR_S2_DTIMSYNC |
3346					   AR_IMR_S2_CABEND |
3347					   AR_IMR_S2_CABTO |
3348					   AR_IMR_S2_TSFOOR |
3349					   AR_IMR_S2_GTT | AR_IMR_S2_CST);
3350	REG_WRITE(ah, AR_IMR_S2, mask | mask2);
3351	ah->mask_reg = ints;
3352
3353	if (!(pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)) {
3354		if (ints & ATH9K_INT_TIM_TIMER)
3355			REG_SET_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3356		else
3357			REG_CLR_BIT(ah, AR_IMR_S5, AR_IMR_S5_TIM_TIMER);
3358	}
3359
3360	if (ints & ATH9K_INT_GLOBAL) {
3361		DPRINTF(ah, ATH_DBG_INTERRUPT, "enable IER\n");
3362		REG_WRITE(ah, AR_IER, AR_IER_ENABLE);
3363		if (!AR_SREV_9100(ah)) {
3364			REG_WRITE(ah, AR_INTR_ASYNC_ENABLE,
3365				  AR_INTR_MAC_IRQ);
3366			REG_WRITE(ah, AR_INTR_ASYNC_MASK, AR_INTR_MAC_IRQ);
3367
3369			REG_WRITE(ah, AR_INTR_SYNC_ENABLE,
3370				  AR_INTR_SYNC_DEFAULT);
3371			REG_WRITE(ah, AR_INTR_SYNC_MASK,
3372				  AR_INTR_SYNC_DEFAULT);
3373		}
3374		DPRINTF(ah, ATH_DBG_INTERRUPT, "AR_IMR 0x%x IER 0x%x\n",
3375			 REG_READ(ah, AR_IMR), REG_READ(ah, AR_IER));
3376	}
3377
3378	return omask;
3379}
3380
3381/*******************/
3382/* Beacon Handling */
3383/*******************/
3384
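/*
 * Program the beacon timers for the current operating mode (station/
 * monitor, adhoc/mesh or AP), given the next beacon time and the beacon
 * period in TU.
 */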
3385void ath9k_hw_beaconinit(struct ath_hw *ah, u32 next_beacon, u32 beacon_period)
3386{
3387	int flags = 0;
3388
3389	ah->beacon_interval = beacon_period;
3390
3391	switch (ah->opmode) {
3392	case NL80211_IFTYPE_STATION:
3393	case NL80211_IFTYPE_MONITOR:
3394		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3395		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT, 0xffff);
3396		REG_WRITE(ah, AR_NEXT_SWBA, 0x7ffff);
3397		flags |= AR_TBTT_TIMER_EN;
3398		break;
3399	case NL80211_IFTYPE_ADHOC:
3400	case NL80211_IFTYPE_MESH_POINT:
3401		REG_SET_BIT(ah, AR_TXCFG,
3402			    AR_TXCFG_ADHOC_BEACON_ATIM_TX_POLICY);
3403		REG_WRITE(ah, AR_NEXT_NDP_TIMER,
3404			  TU_TO_USEC(next_beacon +
3405				     (ah->atim_window ?
3406				      ah->atim_window : 1)));
3407		flags |= AR_NDP_TIMER_EN;
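		/* fall through - adhoc/mesh also programs the AP TBTT/SWBA timers */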
3408	case NL80211_IFTYPE_AP:
3409		REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(next_beacon));
3410		REG_WRITE(ah, AR_NEXT_DMA_BEACON_ALERT,
3411			  TU_TO_USEC(next_beacon -
3412				     ah->config.
3413				     dma_beacon_response_time));
3414		REG_WRITE(ah, AR_NEXT_SWBA,
3415			  TU_TO_USEC(next_beacon -
3416				     ah->config.
3417				     sw_beacon_response_time));
3418		flags |=
3419			AR_TBTT_TIMER_EN | AR_DBA_TIMER_EN | AR_SWBA_TIMER_EN;
3420		break;
3421	default:
3422		DPRINTF(ah, ATH_DBG_BEACON,
3423			"%s: unsupported opmode: %d\n",
3424			__func__, ah->opmode);
3425		return;
3427	}
3428
3429	REG_WRITE(ah, AR_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3430	REG_WRITE(ah, AR_DMA_BEACON_PERIOD, TU_TO_USEC(beacon_period));
3431	REG_WRITE(ah, AR_SWBA_PERIOD, TU_TO_USEC(beacon_period));
3432	REG_WRITE(ah, AR_NDP_PERIOD, TU_TO_USEC(beacon_period));
3433
3434	beacon_period &= ~ATH9K_BEACON_ENA;
3435	if (beacon_period & ATH9K_BEACON_RESET_TSF) {
3436		beacon_period &= ~ATH9K_BEACON_RESET_TSF;
3437		ath9k_hw_reset_tsf(ah);
3438	}
3439
3440	REG_SET_BIT(ah, AR_TIMER_MODE, flags);
3441}
3442
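/*
 * Program the station-mode sleep timers (TBTT, TIM, DTIM and the
 * beacon-miss threshold) from the beacon state supplied by the caller.
 */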
3443void ath9k_hw_set_sta_beacon_timers(struct ath_hw *ah,
3444				    const struct ath9k_beacon_state *bs)
3445{
3446	u32 nextTbtt, beaconintval, dtimperiod, beacontimeout;
3447	struct ath9k_hw_capabilities *pCap = &ah->caps;
3448
3449	REG_WRITE(ah, AR_NEXT_TBTT_TIMER, TU_TO_USEC(bs->bs_nexttbtt));
3450
3451	REG_WRITE(ah, AR_BEACON_PERIOD,
3452		  TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3453	REG_WRITE(ah, AR_DMA_BEACON_PERIOD,
3454		  TU_TO_USEC(bs->bs_intval & ATH9K_BEACON_PERIOD));
3455
3456	REG_RMW_FIELD(ah, AR_RSSI_THR,
3457		      AR_RSSI_THR_BM_THR, bs->bs_bmissthreshold);
3458
3459	beaconintval = bs->bs_intval & ATH9K_BEACON_PERIOD;
3460
3461	if (bs->bs_sleepduration > beaconintval)
3462		beaconintval = bs->bs_sleepduration;
3463
3464	dtimperiod = bs->bs_dtimperiod;
3465	if (bs->bs_sleepduration > dtimperiod)
3466		dtimperiod = bs->bs_sleepduration;
3467
3468	if (beaconintval == dtimperiod)
3469		nextTbtt = bs->bs_nextdtim;
3470	else
3471		nextTbtt = bs->bs_nexttbtt;
3472
3473	DPRINTF(ah, ATH_DBG_BEACON, "next DTIM %d\n", bs->bs_nextdtim);
3474	DPRINTF(ah, ATH_DBG_BEACON, "next beacon %d\n", nextTbtt);
3475	DPRINTF(ah, ATH_DBG_BEACON, "beacon period %d\n", beaconintval);
3476	DPRINTF(ah, ATH_DBG_BEACON, "DTIM period %d\n", dtimperiod);
3477
3478	REG_WRITE(ah, AR_NEXT_DTIM,
3479		  TU_TO_USEC(bs->bs_nextdtim - SLEEP_SLOP));
3480	REG_WRITE(ah, AR_NEXT_TIM, TU_TO_USEC(nextTbtt - SLEEP_SLOP));
3481
3482	REG_WRITE(ah, AR_SLEEP1,
3483		  SM((CAB_TIMEOUT_VAL << 3), AR_SLEEP1_CAB_TIMEOUT)
3484		  | AR_SLEEP1_ASSUME_DTIM);
3485
3486	if (pCap->hw_caps & ATH9K_HW_CAP_AUTOSLEEP)
3487		beacontimeout = (BEACON_TIMEOUT_VAL << 3);
3488	else
3489		beacontimeout = MIN_BEACON_TIMEOUT_VAL;
3490
3491	REG_WRITE(ah, AR_SLEEP2,
3492		  SM(beacontimeout, AR_SLEEP2_BEACON_TIMEOUT));
3493
3494	REG_WRITE(ah, AR_TIM_PERIOD, TU_TO_USEC(beaconintval));
3495	REG_WRITE(ah, AR_DTIM_PERIOD, TU_TO_USEC(dtimperiod));
3496
3497	REG_SET_BIT(ah, AR_TIMER_MODE,
3498		    AR_TBTT_TIMER_EN | AR_TIM_TIMER_EN |
3499		    AR_DTIM_TIMER_EN);
3500
3501	/* TSF Out of Range Threshold */
3502	REG_WRITE(ah, AR_TSFOOR_THRESHOLD, bs->bs_tsfoor_threshold);
3503}
3504
3505/*******************/
3506/* HW Capabilities */
3507/*******************/
3508
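/*
 * Fill in the hardware capability structure from the EEPROM and the chip
 * revision: supported bands and HT modes, chainmasks, key cache size,
 * queue and GPIO counts, rfkill and bluetooth coexistence configuration.
 */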
3509void ath9k_hw_fill_cap_info(struct ath_hw *ah)
3510{
3511	struct ath9k_hw_capabilities *pCap = &ah->caps;
3512	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3513	struct ath_btcoex_hw *btcoex_hw = &ah->btcoex_hw;
3514
3515	u16 capField = 0, eeval;
3516
3517	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_0);
3518	regulatory->current_rd = eeval;
3519
3520	eeval = ah->eep_ops->get_eeprom(ah, EEP_REG_1);
3521	if (AR_SREV_9285_10_OR_LATER(ah))
3522		eeval |= AR9285_RDEXT_DEFAULT;
3523	regulatory->current_rd_ext = eeval;
3524
3525	capField = ah->eep_ops->get_eeprom(ah, EEP_OP_CAP);
3526
3527	if (ah->opmode != NL80211_IFTYPE_AP &&
3528	    ah->hw_version.subvendorid == AR_SUBVENDOR_ID_NEW_A) {
3529		if (regulatory->current_rd == 0x64 ||
3530		    regulatory->current_rd == 0x65)
3531			regulatory->current_rd += 5;
3532		else if (regulatory->current_rd == 0x41)
3533			regulatory->current_rd = 0x43;
3534		DPRINTF(ah, ATH_DBG_REGULATORY,
3535			"regdomain mapped to 0x%x\n", regulatory->current_rd);
3536	}
3537
3538	eeval = ah->eep_ops->get_eeprom(ah, EEP_OP_MODE);
3539	bitmap_zero(pCap->wireless_modes, ATH9K_MODE_MAX);
3540
3541	if (eeval & AR5416_OPFLAGS_11A) {
3542		set_bit(ATH9K_MODE_11A, pCap->wireless_modes);
3543		if (ah->config.ht_enable) {
3544			if (!(eeval & AR5416_OPFLAGS_N_5G_HT20))
3545				set_bit(ATH9K_MODE_11NA_HT20,
3546					pCap->wireless_modes);
3547			if (!(eeval & AR5416_OPFLAGS_N_5G_HT40)) {
3548				set_bit(ATH9K_MODE_11NA_HT40PLUS,
3549					pCap->wireless_modes);
3550				set_bit(ATH9K_MODE_11NA_HT40MINUS,
3551					pCap->wireless_modes);
3552			}
3553		}
3554	}
3555
3556	if (eeval & AR5416_OPFLAGS_11G) {
3557		set_bit(ATH9K_MODE_11G, pCap->wireless_modes);
3558		if (ah->config.ht_enable) {
3559			if (!(eeval & AR5416_OPFLAGS_N_2G_HT20))
3560				set_bit(ATH9K_MODE_11NG_HT20,
3561					pCap->wireless_modes);
3562			if (!(eeval & AR5416_OPFLAGS_N_2G_HT40)) {
3563				set_bit(ATH9K_MODE_11NG_HT40PLUS,
3564					pCap->wireless_modes);
3565				set_bit(ATH9K_MODE_11NG_HT40MINUS,
3566					pCap->wireless_modes);
3567			}
3568		}
3569	}
3570
3571	pCap->tx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_TX_MASK);
3572	/*
3573	 * For AR9271 we will temporarily use the rx chainmask as read from
3574	 * the EEPROM.
3575	 */
3576	if ((ah->hw_version.devid == AR5416_DEVID_PCI) &&
3577	    !(eeval & AR5416_OPFLAGS_11A) &&
3578	    !(AR_SREV_9271(ah)))
3579		/* CB71: GPIO 0 is pulled down to indicate 3 rx chains */
3580		pCap->rx_chainmask = ath9k_hw_gpio_get(ah, 0) ? 0x5 : 0x7;
3581	else
3582		/* Use rx_chainmask from EEPROM. */
3583		pCap->rx_chainmask = ah->eep_ops->get_eeprom(ah, EEP_RX_MASK);
3584
3585	if (!(AR_SREV_9280(ah) && (ah->hw_version.macRev == 0)))
3586		ah->misc_mode |= AR_PCU_MIC_NEW_LOC_ENA;
3587
3588	pCap->low_2ghz_chan = 2312;
3589	pCap->high_2ghz_chan = 2732;
3590
3591	pCap->low_5ghz_chan = 4920;
3592	pCap->high_5ghz_chan = 6100;
3593
3594	pCap->hw_caps &= ~ATH9K_HW_CAP_CIPHER_CKIP;
3595	pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_TKIP;
3596	pCap->hw_caps |= ATH9K_HW_CAP_CIPHER_AESCCM;
3597
3598	pCap->hw_caps &= ~ATH9K_HW_CAP_MIC_CKIP;
3599	pCap->hw_caps |= ATH9K_HW_CAP_MIC_TKIP;
3600	pCap->hw_caps |= ATH9K_HW_CAP_MIC_AESCCM;
3601
3602	if (ah->config.ht_enable)
3603		pCap->hw_caps |= ATH9K_HW_CAP_HT;
3604	else
3605		pCap->hw_caps &= ~ATH9K_HW_CAP_HT;
3606
3607	pCap->hw_caps |= ATH9K_HW_CAP_GTT;
3608	pCap->hw_caps |= ATH9K_HW_CAP_VEOL;
3609	pCap->hw_caps |= ATH9K_HW_CAP_BSSIDMASK;
3610	pCap->hw_caps &= ~ATH9K_HW_CAP_MCAST_KEYSEARCH;
3611
3612	if (capField & AR_EEPROM_EEPCAP_MAXQCU)
3613		pCap->total_queues =
3614			MS(capField, AR_EEPROM_EEPCAP_MAXQCU);
3615	else
3616		pCap->total_queues = ATH9K_NUM_TX_QUEUES;
3617
3618	if (capField & AR_EEPROM_EEPCAP_KC_ENTRIES)
3619		pCap->keycache_size =
3620			1 << MS(capField, AR_EEPROM_EEPCAP_KC_ENTRIES);
3621	else
3622		pCap->keycache_size = AR_KEYTABLE_SIZE;
3623
3624	pCap->hw_caps |= ATH9K_HW_CAP_FASTCC;
3625	pCap->tx_triglevel_max = MAX_TX_FIFO_THRESHOLD;
3626
3627	if (AR_SREV_9285_10_OR_LATER(ah))
3628		pCap->num_gpio_pins = AR9285_NUM_GPIO;
3629	else if (AR_SREV_9280_10_OR_LATER(ah))
3630		pCap->num_gpio_pins = AR928X_NUM_GPIO;
3631	else
3632		pCap->num_gpio_pins = AR_NUM_GPIO;
3633
3634	if (AR_SREV_9160_10_OR_LATER(ah) || AR_SREV_9100(ah)) {
3635		pCap->hw_caps |= ATH9K_HW_CAP_CST;
3636		pCap->rts_aggr_limit = ATH_AMPDU_LIMIT_MAX;
3637	} else {
3638		pCap->rts_aggr_limit = (8 * 1024);
3639	}
3640
3641	pCap->hw_caps |= ATH9K_HW_CAP_ENHANCEDPM;
3642
3643#if defined(CONFIG_RFKILL) || defined(CONFIG_RFKILL_MODULE)
3644	ah->rfsilent = ah->eep_ops->get_eeprom(ah, EEP_RF_SILENT);
3645	if (ah->rfsilent & EEP_RFSILENT_ENABLED) {
3646		ah->rfkill_gpio =
3647			MS(ah->rfsilent, EEP_RFSILENT_GPIO_SEL);
3648		ah->rfkill_polarity =
3649			MS(ah->rfsilent, EEP_RFSILENT_POLARITY);
3650
3651		pCap->hw_caps |= ATH9K_HW_CAP_RFSILENT;
3652	}
3653#endif
3654
3655	pCap->hw_caps &= ~ATH9K_HW_CAP_AUTOSLEEP;
3656
3657	if (AR_SREV_9280(ah) || AR_SREV_9285(ah))
3658		pCap->hw_caps &= ~ATH9K_HW_CAP_4KB_SPLITTRANS;
3659	else
3660		pCap->hw_caps |= ATH9K_HW_CAP_4KB_SPLITTRANS;
3661
3662	if (regulatory->current_rd_ext & (1 << REG_EXT_JAPAN_MIDBAND)) {
3663		pCap->reg_cap =
3664			AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3665			AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN |
3666			AR_EEPROM_EEREGCAP_EN_KK_U2 |
3667			AR_EEPROM_EEREGCAP_EN_KK_MIDBAND;
3668	} else {
3669		pCap->reg_cap =
3670			AR_EEPROM_EEREGCAP_EN_KK_NEW_11A |
3671			AR_EEPROM_EEREGCAP_EN_KK_U1_EVEN;
3672	}
3673
3674	pCap->reg_cap |= AR_EEPROM_EEREGCAP_EN_FCC_MIDBAND;
3675
3676	pCap->num_antcfg_5ghz =
3677		ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_5GHZ);
3678	pCap->num_antcfg_2ghz =
3679		ah->eep_ops->get_num_ant_config(ah, ATH9K_HAL_FREQ_BAND_2GHZ);
3680
3681	if (AR_SREV_9280_10_OR_LATER(ah) &&
3682	    ath9k_hw_btcoex_supported(ah)) {
3683		btcoex_hw->btactive_gpio = ATH_BTACTIVE_GPIO;
3684		btcoex_hw->wlanactive_gpio = ATH_WLANACTIVE_GPIO;
3685
3686		if (AR_SREV_9285(ah)) {
3687			btcoex_hw->scheme = ATH_BTCOEX_CFG_3WIRE;
3688			btcoex_hw->btpriority_gpio = ATH_BTPRIORITY_GPIO;
3689		} else {
3690			btcoex_hw->scheme = ATH_BTCOEX_CFG_2WIRE;
3691		}
3692	} else {
3693		btcoex_hw->scheme = ATH_BTCOEX_CFG_NONE;
3694	}
3695}
3696
3697bool ath9k_hw_getcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3698			    u32 capability, u32 *result)
3699{
3700	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3701	switch (type) {
3702	case ATH9K_CAP_CIPHER:
3703		switch (capability) {
3704		case ATH9K_CIPHER_AES_CCM:
3705		case ATH9K_CIPHER_AES_OCB:
3706		case ATH9K_CIPHER_TKIP:
3707		case ATH9K_CIPHER_WEP:
3708		case ATH9K_CIPHER_MIC:
3709		case ATH9K_CIPHER_CLR:
3710			return true;
3711		default:
3712			return false;
3713		}
3714	case ATH9K_CAP_TKIP_MIC:
3715		switch (capability) {
3716		case 0:
3717			return true;
3718		case 1:
3719			return (ah->sta_id1_defaults &
3720				AR_STA_ID1_CRPT_MIC_ENABLE) ? true :
3721			false;
3722		}
3723	case ATH9K_CAP_TKIP_SPLIT:
3724		return (ah->misc_mode & AR_PCU_MIC_NEW_LOC_ENA) ?
3725			false : true;
3726	case ATH9K_CAP_DIVERSITY:
3727		return (REG_READ(ah, AR_PHY_CCK_DETECT) &
3728			AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV) ?
3729			true : false;
3730	case ATH9K_CAP_MCAST_KEYSRCH:
3731		switch (capability) {
3732		case 0:
3733			return true;
3734		case 1:
3735			if (REG_READ(ah, AR_STA_ID1) & AR_STA_ID1_ADHOC) {
3736				return false;
3737			} else {
3738				return (ah->sta_id1_defaults &
3739					AR_STA_ID1_MCAST_KSRCH) ? true :
3740					false;
3741			}
3742		}
3743		return false;
3744	case ATH9K_CAP_TXPOW:
3745		switch (capability) {
3746		case 0:
3747			return 0;
3748		case 1:
3749			*result = regulatory->power_limit;
3750			return 0;
3751		case 2:
3752			*result = regulatory->max_power_level;
3753			return 0;
3754		case 3:
3755			*result = regulatory->tp_scale;
3756			return 0;
3757		}
3758		return false;
3759	case ATH9K_CAP_DS:
3760		return (AR_SREV_9280_20_OR_LATER(ah) &&
3761			(ah->eep_ops->get_eeprom(ah, EEP_RC_CHAIN_MASK) == 1))
3762			? false : true;
3763	default:
3764		return false;
3765	}
3766}
3767
3768bool ath9k_hw_setcapability(struct ath_hw *ah, enum ath9k_capability_type type,
3769			    u32 capability, u32 setting, int *status)
3770{
3771	u32 v;
3772
3773	switch (type) {
3774	case ATH9K_CAP_TKIP_MIC:
3775		if (setting)
3776			ah->sta_id1_defaults |=
3777				AR_STA_ID1_CRPT_MIC_ENABLE;
3778		else
3779			ah->sta_id1_defaults &=
3780				~AR_STA_ID1_CRPT_MIC_ENABLE;
3781		return true;
3782	case ATH9K_CAP_DIVERSITY:
3783		v = REG_READ(ah, AR_PHY_CCK_DETECT);
3784		if (setting)
3785			v |= AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3786		else
3787			v &= ~AR_PHY_CCK_DETECT_BB_ENABLE_ANT_FAST_DIV;
3788		REG_WRITE(ah, AR_PHY_CCK_DETECT, v);
3789		return true;
3790	case ATH9K_CAP_MCAST_KEYSRCH:
3791		if (setting)
3792			ah->sta_id1_defaults |= AR_STA_ID1_MCAST_KSRCH;
3793		else
3794			ah->sta_id1_defaults &= ~AR_STA_ID1_MCAST_KSRCH;
3795		return true;
3796	default:
3797		return false;
3798	}
3799}
3800
3801/****************************/
3802/* GPIO / RFKILL / Antennae */
3803/****************************/
3804
3805static void ath9k_hw_gpio_cfg_output_mux(struct ath_hw *ah,
3806					 u32 gpio, u32 type)
3807{
3808	int addr;
3809	u32 gpio_shift, tmp;
3810
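	/*
	 * Each GPIO output mux register packs six 5-bit signal-type fields,
	 * so pick the register by GPIO number and compute the field offset
	 * within it.
	 */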
3811	if (gpio > 11)
3812		addr = AR_GPIO_OUTPUT_MUX3;
3813	else if (gpio > 5)
3814		addr = AR_GPIO_OUTPUT_MUX2;
3815	else
3816		addr = AR_GPIO_OUTPUT_MUX1;
3817
3818	gpio_shift = (gpio % 6) * 5;
3819
3820	if (AR_SREV_9280_20_OR_LATER(ah)
3821	    || (addr != AR_GPIO_OUTPUT_MUX1)) {
3822		REG_RMW(ah, addr, (type << gpio_shift),
3823			(0x1f << gpio_shift));
3824	} else {
3825		tmp = REG_READ(ah, addr);
3826		tmp = ((tmp & 0x1F0) << 1) | (tmp & ~0x1F0);
3827		tmp &= ~(0x1f << gpio_shift);
3828		tmp |= (type << gpio_shift);
3829		REG_WRITE(ah, addr, tmp);
3830	}
3831}
3832
3833void ath9k_hw_cfg_gpio_input(struct ath_hw *ah, u32 gpio)
3834{
3835	u32 gpio_shift;
3836
3837	ASSERT(gpio < ah->caps.num_gpio_pins);
3838
3839	gpio_shift = gpio << 1;
3840
3841	REG_RMW(ah,
3842		AR_GPIO_OE_OUT,
3843		(AR_GPIO_OE_OUT_DRV_NO << gpio_shift),
3844		(AR_GPIO_OE_OUT_DRV << gpio_shift));
3845}
3846
3847u32 ath9k_hw_gpio_get(struct ath_hw *ah, u32 gpio)
3848{
3849#define MS_REG_READ(x, y) \
3850	(MS(REG_READ(ah, AR_GPIO_IN_OUT), x##_GPIO_IN_VAL) & (AR_GPIO_BIT(y)))
3851
3852	if (gpio >= ah->caps.num_gpio_pins)
3853		return 0xffffffff;
3854
3855	if (AR_SREV_9287_10_OR_LATER(ah))
3856		return MS_REG_READ(AR9287, gpio) != 0;
3857	else if (AR_SREV_9285_10_OR_LATER(ah))
3858		return MS_REG_READ(AR9285, gpio) != 0;
3859	else if (AR_SREV_9280_10_OR_LATER(ah))
3860		return MS_REG_READ(AR928X, gpio) != 0;
3861	else
3862		return MS_REG_READ(AR, gpio) != 0;
3863}
3864
3865void ath9k_hw_cfg_output(struct ath_hw *ah, u32 gpio,
3866			 u32 ah_signal_type)
3867{
3868	u32 gpio_shift;
3869
3870	ath9k_hw_gpio_cfg_output_mux(ah, gpio, ah_signal_type);
3871
3872	gpio_shift = 2 * gpio;
3873
3874	REG_RMW(ah,
3875		AR_GPIO_OE_OUT,
3876		(AR_GPIO_OE_OUT_DRV_ALL << gpio_shift),
3877		(AR_GPIO_OE_OUT_DRV << gpio_shift));
3878}
3879
3880void ath9k_hw_set_gpio(struct ath_hw *ah, u32 gpio, u32 val)
3881{
3882	REG_RMW(ah, AR_GPIO_IN_OUT, ((val & 1) << gpio),
3883		AR_GPIO_BIT(gpio));
3884}
3885
3886u32 ath9k_hw_getdefantenna(struct ath_hw *ah)
3887{
3888	return REG_READ(ah, AR_DEF_ANTENNA) & 0x7;
3889}
3890
3891void ath9k_hw_setantenna(struct ath_hw *ah, u32 antenna)
3892{
3893	REG_WRITE(ah, AR_DEF_ANTENNA, (antenna & 0x7));
3894}
3895
3896bool ath9k_hw_setantennaswitch(struct ath_hw *ah,
3897			       enum ath9k_ant_setting settings,
3898			       struct ath9k_channel *chan,
3899			       u8 *tx_chainmask,
3900			       u8 *rx_chainmask,
3901			       u8 *antenna_cfgd)
3902{
3903	static u8 tx_chainmask_cfg, rx_chainmask_cfg;
3904
3905	if (AR_SREV_9280(ah)) {
3906		if (!tx_chainmask_cfg) {
3907
3908			tx_chainmask_cfg = *tx_chainmask;
3909			rx_chainmask_cfg = *rx_chainmask;
3910		}
3911
3912		switch (settings) {
3913		case ATH9K_ANT_FIXED_A:
3914			*tx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3915			*rx_chainmask = ATH9K_ANTENNA0_CHAINMASK;
3916			*antenna_cfgd = true;
3917			break;
3918		case ATH9K_ANT_FIXED_B:
3919			if (ah->caps.tx_chainmask >
3920			    ATH9K_ANTENNA1_CHAINMASK) {
3921				*tx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3922			}
3923			*rx_chainmask = ATH9K_ANTENNA1_CHAINMASK;
3924			*antenna_cfgd = true;
3925			break;
3926		case ATH9K_ANT_VARIABLE:
3927			*tx_chainmask = tx_chainmask_cfg;
3928			*rx_chainmask = rx_chainmask_cfg;
3929			*antenna_cfgd = true;
3930			break;
3931		default:
3932			break;
3933		}
3934	} else {
3935		ah->config.diversity_control = settings;
3936	}
3937
3938	return true;
3939}
3940
3941/*********************/
3942/* General Operation */
3943/*********************/
3944
3945u32 ath9k_hw_getrxfilter(struct ath_hw *ah)
3946{
3947	u32 bits = REG_READ(ah, AR_RX_FILTER);
3948	u32 phybits = REG_READ(ah, AR_PHY_ERR);
3949
3950	if (phybits & AR_PHY_ERR_RADAR)
3951		bits |= ATH9K_RX_FILTER_PHYRADAR;
3952	if (phybits & (AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING))
3953		bits |= ATH9K_RX_FILTER_PHYERR;
3954
3955	return bits;
3956}
3957
3958void ath9k_hw_setrxfilter(struct ath_hw *ah, u32 bits)
3959{
3960	u32 phybits;
3961
3962	REG_WRITE(ah, AR_RX_FILTER, bits);
3963
3964	phybits = 0;
3965	if (bits & ATH9K_RX_FILTER_PHYRADAR)
3966		phybits |= AR_PHY_ERR_RADAR;
3967	if (bits & ATH9K_RX_FILTER_PHYERR)
3968		phybits |= AR_PHY_ERR_OFDM_TIMING | AR_PHY_ERR_CCK_TIMING;
3969	REG_WRITE(ah, AR_PHY_ERR, phybits);
3970
3971	if (phybits)
3972		REG_WRITE(ah, AR_RXCFG,
3973			  REG_READ(ah, AR_RXCFG) | AR_RXCFG_ZLFDMA);
3974	else
3975		REG_WRITE(ah, AR_RXCFG,
3976			  REG_READ(ah, AR_RXCFG) & ~AR_RXCFG_ZLFDMA);
3977}
3978
3979bool ath9k_hw_phy_disable(struct ath_hw *ah)
3980{
3981	return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_WARM);
3982}
3983
3984bool ath9k_hw_disable(struct ath_hw *ah)
3985{
3986	if (!ath9k_hw_setpower(ah, ATH9K_PM_AWAKE))
3987		return false;
3988
3989	return ath9k_hw_set_reset_reg(ah, ATH9K_RESET_COLD);
3990}
3991
3992void ath9k_hw_set_txpowerlimit(struct ath_hw *ah, u32 limit)
3993{
3994	struct ath_regulatory *regulatory = ath9k_hw_regulatory(ah);
3995	struct ath9k_channel *chan = ah->curchan;
3996	struct ieee80211_channel *channel = chan->chan;
3997
3998	regulatory->power_limit = min(limit, (u32) MAX_RATE_POWER);
3999
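	/*
	 * Note: mac80211 reports antenna gain and max power in dBm, while the
	 * EEPROM ops appear to work in half-dBm steps, hence the doubling.
	 */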
4000	ah->eep_ops->set_txpower(ah, chan,
4001				 ath9k_regd_get_ctl(regulatory, chan),
4002				 channel->max_antenna_gain * 2,
4003				 channel->max_power * 2,
4004				 min((u32) MAX_RATE_POWER,
4005				 (u32) regulatory->power_limit));
4006}
4007
4008void ath9k_hw_setmac(struct ath_hw *ah, const u8 *mac)
4009{
4010	memcpy(ah->macaddr, mac, ETH_ALEN);
4011}
4012
4013void ath9k_hw_setopmode(struct ath_hw *ah)
4014{
4015	ath9k_hw_set_operating_mode(ah, ah->opmode);
4016}
4017
4018void ath9k_hw_setmcastfilter(struct ath_hw *ah, u32 filter0, u32 filter1)
4019{
4020	REG_WRITE(ah, AR_MCAST_FIL0, filter0);
4021	REG_WRITE(ah, AR_MCAST_FIL1, filter1);
4022}
4023
4024void ath9k_hw_setbssidmask(struct ath_hw *ah)
4025{
4026	REG_WRITE(ah, AR_BSSMSKL, get_unaligned_le32(ah->ah_sc->bssidmask));
4027	REG_WRITE(ah, AR_BSSMSKU, get_unaligned_le16(ah->ah_sc->bssidmask + 4));
4028}
4029
4030void ath9k_hw_write_associd(struct ath_hw *ah)
4031{
4032	REG_WRITE(ah, AR_BSS_ID0, get_unaligned_le32(ah->ah_sc->curbssid));
4033	REG_WRITE(ah, AR_BSS_ID1, get_unaligned_le16(ah->ah_sc->curbssid + 4) |
4034		  ((ah->ah_sc->curaid & 0x3fff) << AR_BSS_ID1_AID_S));
4035}
4036
4037u64 ath9k_hw_gettsf64(struct ath_hw *ah)
4038{
4039	u64 tsf;
4040
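	/*
	 * Assemble the 64-bit TSF from two 32-bit reads (upper word first).
	 * A low-word rollover between the two reads could, in principle,
	 * skew the result.
	 */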
4041	tsf = REG_READ(ah, AR_TSF_U32);
4042	tsf = (tsf << 32) | REG_READ(ah, AR_TSF_L32);
4043
4044	return tsf;
4045}
4046
4047void ath9k_hw_settsf64(struct ath_hw *ah, u64 tsf64)
4048{
4049	REG_WRITE(ah, AR_TSF_L32, tsf64 & 0xffffffff);
4050	REG_WRITE(ah, AR_TSF_U32, (tsf64 >> 32) & 0xffffffff);
4051}
4052
4053void ath9k_hw_reset_tsf(struct ath_hw *ah)
4054{
4055	if (!ath9k_hw_wait(ah, AR_SLP32_MODE, AR_SLP32_TSF_WRITE_STATUS, 0,
4056			   AH_TSF_WRITE_TIMEOUT))
4057		DPRINTF(ah, ATH_DBG_RESET,
4058			"AR_SLP32_TSF_WRITE_STATUS limit exceeded\n");
4059
4060	REG_WRITE(ah, AR_RESET_TSF, AR_RESET_TSF_ONCE);
4061}
4062
4063void ath9k_hw_set_tsfadjust(struct ath_hw *ah, u32 setting)
4064{
4065	if (setting)
4066		ah->misc_mode |= AR_PCU_TX_ADD_TSF;
4067	else
4068		ah->misc_mode &= ~AR_PCU_TX_ADD_TSF;
4069}
4070
4071bool ath9k_hw_setslottime(struct ath_hw *ah, u32 us)
4072{
4073	if (us < ATH9K_SLOT_TIME_9 || us > ath9k_hw_mac_to_usec(ah, 0xffff)) {
4074		DPRINTF(ah, ATH_DBG_RESET, "bad slot time %u\n", us);
4075		ah->slottime = (u32) -1;
4076		return false;
4077	} else {
4078		REG_WRITE(ah, AR_D_GBL_IFS_SLOT, ath9k_hw_mac_to_clks(ah, us));
4079		ah->slottime = us;
4080		return true;
4081	}
4082}
4083
4084void ath9k_hw_set11nmac2040(struct ath_hw *ah, enum ath9k_ht_macmode mode)
4085{
4086	u32 macmode;
4087
4088	if (mode == ATH9K_HT_MACMODE_2040 &&
4089	    !ah->config.cwm_ignore_extcca)
4090		macmode = AR_2040_JOINED_RX_CLEAR;
4091	else
4092		macmode = 0;
4093
4094	REG_WRITE(ah, AR_2040_MODE, macmode);
4095}
4096
4097/* HW Generic timers configuration */
4098
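/*
 * One entry per hardware generic timer slot: the "next TSF" register, the
 * period register, the mode register and the enable bit within it. The
 * identical leading entries presumably just pad the slots below
 * AR_FIRST_NDP_TIMER, which ath_gen_timer_stop() refuses to touch.
 */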
4099static const struct ath_gen_timer_configuration gen_tmr_configuration[] =
4100{
4101	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4102	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4103	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4104	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4105	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4106	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4107	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4108	{AR_NEXT_NDP_TIMER, AR_NDP_PERIOD, AR_TIMER_MODE, 0x0080},
4109	{AR_NEXT_NDP2_TIMER, AR_NDP2_PERIOD, AR_NDP2_TIMER_MODE, 0x0001},
4110	{AR_NEXT_NDP2_TIMER + 1*4, AR_NDP2_PERIOD + 1*4,
4111				AR_NDP2_TIMER_MODE, 0x0002},
4112	{AR_NEXT_NDP2_TIMER + 2*4, AR_NDP2_PERIOD + 2*4,
4113				AR_NDP2_TIMER_MODE, 0x0004},
4114	{AR_NEXT_NDP2_TIMER + 3*4, AR_NDP2_PERIOD + 3*4,
4115				AR_NDP2_TIMER_MODE, 0x0008},
4116	{AR_NEXT_NDP2_TIMER + 4*4, AR_NDP2_PERIOD + 4*4,
4117				AR_NDP2_TIMER_MODE, 0x0010},
4118	{AR_NEXT_NDP2_TIMER + 5*4, AR_NDP2_PERIOD + 5*4,
4119				AR_NDP2_TIMER_MODE, 0x0020},
4120	{AR_NEXT_NDP2_TIMER + 6*4, AR_NDP2_PERIOD + 6*4,
4121				AR_NDP2_TIMER_MODE, 0x0040},
4122	{AR_NEXT_NDP2_TIMER + 7*4, AR_NDP2_PERIOD + 7*4,
4123				AR_NDP2_TIMER_MODE, 0x0080}
4124};
4125
4126/* HW generic timer primitives */
4127
4128/* Compute the index of the rightmost set bit in *mask and clear that bit. */
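/*
 * This is the classic de Bruijn bit-scan: b & (0 - b) isolates the lowest
 * set bit, multiplying by the 32-bit de Bruijn constant and keeping the top
 * five bits of the product yields a unique key, and the gen_timer_index[]
 * lookup table maps that key back to the bit position, i.e. the timer index.
 */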
4129static u32 rightmost_index(struct ath_gen_timer_table *timer_table, u32 *mask)
4130{
4131	u32 b;
4132
4133	b = *mask;
4134	b &= (0-b);
4135	*mask &= ~b;
4136	b *= debruijn32;
4137	b >>= 27;
4138
4139	return timer_table->gen_timer_index[b];
4140}
4141
4142u32 ath9k_hw_gettsf32(struct ath_hw *ah)
4143{
4144	return REG_READ(ah, AR_TSF_L32);
4145}
4146
4147struct ath_gen_timer *ath_gen_timer_alloc(struct ath_hw *ah,
4148					  void (*trigger)(void *),
4149					  void (*overflow)(void *),
4150					  void *arg,
4151					  u8 timer_index)
4152{
4153	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4154	struct ath_gen_timer *timer;
4155
4156	timer = kzalloc(sizeof(struct ath_gen_timer), GFP_KERNEL);
4157
4158	if (timer == NULL) {
4159		printk(KERN_DEBUG "Failed to allocate memory "
4160		       "for hw timer[%d]\n", timer_index);
4161		return NULL;
4162	}
4163
4164	/* allocate a hardware generic timer slot */
4165	timer_table->timers[timer_index] = timer;
4166	timer->index = timer_index;
4167	timer->trigger = trigger;
4168	timer->overflow = overflow;
4169	timer->arg = arg;
4170
4171	return timer;
4172}
4173
4174void ath_gen_timer_start(struct ath_hw *ah,
4175			 struct ath_gen_timer *timer,
4176			 u32 timer_next, u32 timer_period)
4177{
4178	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4179	u32 tsf;
4180
4181	BUG_ON(!timer_period);
4182
4183	set_bit(timer->index, &timer_table->timer_mask.timer_bits);
4184
4185	tsf = ath9k_hw_gettsf32(ah);
4186
4187	DPRINTF(ah, ATH_DBG_HWTIMER, "current tsf %x period %x "
4188		"timer_next %x\n", tsf, timer_period, timer_next);
4189
4190	/*
4191	 * Pull timer_next forward if the current TSF already passed it
4192	 * because of software latency
4193	 */
4194	if (timer_next < tsf)
4195		timer_next = tsf + timer_period;
4196
4197	/*
4198	 * Program generic timer registers
4199	 */
4200	REG_WRITE(ah, gen_tmr_configuration[timer->index].next_addr,
4201		 timer_next);
4202	REG_WRITE(ah, gen_tmr_configuration[timer->index].period_addr,
4203		  timer_period);
4204	REG_SET_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4205		    gen_tmr_configuration[timer->index].mode_mask);
4206
4207	/* Enable both trigger and thresh interrupt masks */
4208	REG_SET_BIT(ah, AR_IMR_S5,
4209		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4210		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4211
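	/* Lazily enable the generic-timer interrupt the first time any timer is started. */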
4212	if ((ah->ah_sc->imask & ATH9K_INT_GENTIMER) == 0) {
4213		ath9k_hw_set_interrupts(ah, 0);
4214		ah->ah_sc->imask |= ATH9K_INT_GENTIMER;
4215		ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
4216	}
4217}
4218
4219void ath_gen_timer_stop(struct ath_hw *ah, struct ath_gen_timer *timer)
4220{
4221	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4222
4223	if ((timer->index < AR_FIRST_NDP_TIMER) ||
4224		(timer->index >= ATH_MAX_GEN_TIMER)) {
4225		return;
4226	}
4227
4228	/* Clear generic timer enable bits. */
4229	REG_CLR_BIT(ah, gen_tmr_configuration[timer->index].mode_addr,
4230			gen_tmr_configuration[timer->index].mode_mask);
4231
4232	/* Disable both trigger and thresh interrupt masks */
4233	REG_CLR_BIT(ah, AR_IMR_S5,
4234		(SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_THRESH) |
4235		SM(AR_GENTMR_BIT(timer->index), AR_IMR_S5_GENTIMER_TRIG)));
4236
4237	clear_bit(timer->index, &timer_table->timer_mask.timer_bits);
4238
4239	/* if no timer is enabled, turn off interrupt mask */
4240	if (timer_table->timer_mask.val == 0) {
4241		ath9k_hw_set_interrupts(ah, 0);
4242		ah->ah_sc->imask &= ~ATH9K_INT_GENTIMER;
4243		ath9k_hw_set_interrupts(ah, ah->ah_sc->imask);
4244	}
4245}
4246
4247void ath_gen_timer_free(struct ath_hw *ah, struct ath_gen_timer *timer)
4248{
4249	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4250
4251	/* free the hardware generic timer slot */
4252	timer_table->timers[timer->index] = NULL;
4253	kfree(timer);
4254}
4255
4256/*
4257 * Generic timer interrupt handling
4258 */
4259void ath_gen_timer_isr(struct ath_hw *ah)
4260{
4261	struct ath_gen_timer_table *timer_table = &ah->hw_gen_timers;
4262	struct ath_gen_timer *timer;
4263	u32 trigger_mask, thresh_mask, index;
4264
4265	/* get hardware generic timer interrupt status */
4266	trigger_mask = ah->intr_gen_timer_trigger;
4267	thresh_mask = ah->intr_gen_timer_thresh;
4268	trigger_mask &= timer_table->timer_mask.val;
4269	thresh_mask &= timer_table->timer_mask.val;
4270
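	/* A timer that raised both events gets only its overflow callback. */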
4271	trigger_mask &= ~thresh_mask;
4272
4273	while (thresh_mask) {
4274		index = rightmost_index(timer_table, &thresh_mask);
4275		timer = timer_table->timers[index];
4276		BUG_ON(!timer);
4277		DPRINTF(ah, ATH_DBG_HWTIMER,
4278			"TSF overflow for Gen timer %d\n", index);
4279		timer->overflow(timer->arg);
4280	}
4281
4282	while (trigger_mask) {
4283		index = rightmost_index(timer_table, &trigger_mask);
4284		timer = timer_table->timers[index];
4285		BUG_ON(!timer);
4286		DPRINTF(ah, ATH_DBG_HWTIMER,
4287			"Gen timer[%d] trigger\n", index);
4288		timer->trigger(timer->arg);
4289	}
4290}
4291
4292/*
4293 * Primitive to disable ASPM
4294 */
4295void ath_pcie_aspm_disable(struct ath_softc *sc)
4296{
4297	struct pci_dev *pdev = to_pci_dev(sc->dev);
4298	u8 aspm;
4299
4300	pci_read_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, &aspm);
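	/*
	 * Read the link control byte, clear the L0s and L1 ASPM enable bits,
	 * and write it back.
	 */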
4301	aspm &= ~(ATH_PCIE_CAP_LINK_L0S | ATH_PCIE_CAP_LINK_L1);
4302	pci_write_config_byte(pdev, ATH_PCIE_CAP_LINK_CTRL, aspm);
4303}
4304