/* ni_dpm.c revision bf0936e196ec21b604106578043d4c14831f99e7 */
1/*
2 * Copyright 2012 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 */
23
24#include "drmP.h"
25#include "radeon.h"
26#include "nid.h"
27#include "r600_dpm.h"
28#include "ni_dpm.h"
29#include "atom.h"
30#include <linux/math64.h>
31#include <linux/seq_file.h>
32
33#define MC_CG_ARB_FREQ_F0           0x0a
34#define MC_CG_ARB_FREQ_F1           0x0b
35#define MC_CG_ARB_FREQ_F2           0x0c
36#define MC_CG_ARB_FREQ_F3           0x0d
37
38#define SMC_RAM_END 0xC000
39
40static const struct ni_cac_weights cac_weights_cayman_xt =
41{
42	0x15,
43	0x2,
44	0x19,
45	0x2,
46	0x8,
47	0x14,
48	0x2,
49	0x16,
50	0xE,
51	0x17,
52	0x13,
53	0x2B,
54	0x10,
55	0x7,
56	0x5,
57	0x5,
58	0x5,
59	0x2,
60	0x3,
61	0x9,
62	0x10,
63	0x10,
64	0x2B,
65	0xA,
66	0x9,
67	0x4,
68	0xD,
69	0xD,
70	0x3E,
71	0x18,
72	0x14,
73	0,
74	0x3,
75	0x3,
76	0x5,
77	0,
78	0x2,
79	0,
80	0,
81	0,
82	0,
83	0,
84	0,
85	0,
86	0,
87	0,
88	0x1CC,
89	0,
90	0x164,
91	1,
92	1,
93	1,
94	1,
95	12,
96	12,
97	12,
98	0x12,
99	0x1F,
100	132,
101	5,
102	7,
103	0,
104	{ 0, 0, 0, 0, 0, 0, 0, 0 },
105	{ 0, 0, 0, 0 },
106	true
107};
108
/*
 * CAC weight table for Cayman PRO boards.
 * Positional initializer matching struct ni_cac_weights (ni_dpm.h) —
 * verify member order against the header before editing. Values are
 * opaque per-SKU tuning constants.
 */
static const struct ni_cac_weights cac_weights_cayman_pro =
{
	0x16,
	0x4,
	0x10,
	0x2,
	0xA,
	0x16,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x16,
	0x2D,
	0x12,
	0xA,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0xB,
	0x11,
	0x11,
	0x2D,
	0xC,
	0xC,
	0x7,
	0x10,
	0x10,
	0x3F,
	0x1A,
	0x16,
	0,
	0x7,
	0x4,
	0x6,
	1,
	0x2,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x30,
	0,
	0x1CF,
	0,
	0x166,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x1F,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
177
/*
 * CAC weight table for Cayman LE boards.
 * Positional initializer matching struct ni_cac_weights (ni_dpm.h) —
 * verify member order against the header before editing. Values are
 * opaque per-SKU tuning constants.
 */
static const struct ni_cac_weights cac_weights_cayman_le =
{
	0x7,
	0xE,
	0x1,
	0xA,
	0x1,
	0x3F,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x1,
	0x3F,
	0x1,
	0xE,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0x9,
	0x1A,
	0x1A,
	0x2C,
	0xA,
	0x11,
	0x8,
	0x19,
	0x19,
	0x1,
	0x1,
	0x1A,
	0,
	0x8,
	0x5,
	0x8,
	0x1,
	0x3,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x38,
	0x38,
	0x239,
	0x3,
	0x18A,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x22,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
246
247#define NISLANDS_MGCG_SEQUENCE  300
248
249static const u32 cayman_cgcg_cgls_default[] =
250{
251	0x000008f8, 0x00000010, 0xffffffff,
252	0x000008fc, 0x00000000, 0xffffffff,
253	0x000008f8, 0x00000011, 0xffffffff,
254	0x000008fc, 0x00000000, 0xffffffff,
255	0x000008f8, 0x00000012, 0xffffffff,
256	0x000008fc, 0x00000000, 0xffffffff,
257	0x000008f8, 0x00000013, 0xffffffff,
258	0x000008fc, 0x00000000, 0xffffffff,
259	0x000008f8, 0x00000014, 0xffffffff,
260	0x000008fc, 0x00000000, 0xffffffff,
261	0x000008f8, 0x00000015, 0xffffffff,
262	0x000008fc, 0x00000000, 0xffffffff,
263	0x000008f8, 0x00000016, 0xffffffff,
264	0x000008fc, 0x00000000, 0xffffffff,
265	0x000008f8, 0x00000017, 0xffffffff,
266	0x000008fc, 0x00000000, 0xffffffff,
267	0x000008f8, 0x00000018, 0xffffffff,
268	0x000008fc, 0x00000000, 0xffffffff,
269	0x000008f8, 0x00000019, 0xffffffff,
270	0x000008fc, 0x00000000, 0xffffffff,
271	0x000008f8, 0x0000001a, 0xffffffff,
272	0x000008fc, 0x00000000, 0xffffffff,
273	0x000008f8, 0x0000001b, 0xffffffff,
274	0x000008fc, 0x00000000, 0xffffffff,
275	0x000008f8, 0x00000020, 0xffffffff,
276	0x000008fc, 0x00000000, 0xffffffff,
277	0x000008f8, 0x00000021, 0xffffffff,
278	0x000008fc, 0x00000000, 0xffffffff,
279	0x000008f8, 0x00000022, 0xffffffff,
280	0x000008fc, 0x00000000, 0xffffffff,
281	0x000008f8, 0x00000023, 0xffffffff,
282	0x000008fc, 0x00000000, 0xffffffff,
283	0x000008f8, 0x00000024, 0xffffffff,
284	0x000008fc, 0x00000000, 0xffffffff,
285	0x000008f8, 0x00000025, 0xffffffff,
286	0x000008fc, 0x00000000, 0xffffffff,
287	0x000008f8, 0x00000026, 0xffffffff,
288	0x000008fc, 0x00000000, 0xffffffff,
289	0x000008f8, 0x00000027, 0xffffffff,
290	0x000008fc, 0x00000000, 0xffffffff,
291	0x000008f8, 0x00000028, 0xffffffff,
292	0x000008fc, 0x00000000, 0xffffffff,
293	0x000008f8, 0x00000029, 0xffffffff,
294	0x000008fc, 0x00000000, 0xffffffff,
295	0x000008f8, 0x0000002a, 0xffffffff,
296	0x000008fc, 0x00000000, 0xffffffff,
297	0x000008f8, 0x0000002b, 0xffffffff,
298	0x000008fc, 0x00000000, 0xffffffff
299};
300#define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
301
302static const u32 cayman_cgcg_cgls_disable[] =
303{
304	0x000008f8, 0x00000010, 0xffffffff,
305	0x000008fc, 0xffffffff, 0xffffffff,
306	0x000008f8, 0x00000011, 0xffffffff,
307	0x000008fc, 0xffffffff, 0xffffffff,
308	0x000008f8, 0x00000012, 0xffffffff,
309	0x000008fc, 0xffffffff, 0xffffffff,
310	0x000008f8, 0x00000013, 0xffffffff,
311	0x000008fc, 0xffffffff, 0xffffffff,
312	0x000008f8, 0x00000014, 0xffffffff,
313	0x000008fc, 0xffffffff, 0xffffffff,
314	0x000008f8, 0x00000015, 0xffffffff,
315	0x000008fc, 0xffffffff, 0xffffffff,
316	0x000008f8, 0x00000016, 0xffffffff,
317	0x000008fc, 0xffffffff, 0xffffffff,
318	0x000008f8, 0x00000017, 0xffffffff,
319	0x000008fc, 0xffffffff, 0xffffffff,
320	0x000008f8, 0x00000018, 0xffffffff,
321	0x000008fc, 0xffffffff, 0xffffffff,
322	0x000008f8, 0x00000019, 0xffffffff,
323	0x000008fc, 0xffffffff, 0xffffffff,
324	0x000008f8, 0x0000001a, 0xffffffff,
325	0x000008fc, 0xffffffff, 0xffffffff,
326	0x000008f8, 0x0000001b, 0xffffffff,
327	0x000008fc, 0xffffffff, 0xffffffff,
328	0x000008f8, 0x00000020, 0xffffffff,
329	0x000008fc, 0x00000000, 0xffffffff,
330	0x000008f8, 0x00000021, 0xffffffff,
331	0x000008fc, 0x00000000, 0xffffffff,
332	0x000008f8, 0x00000022, 0xffffffff,
333	0x000008fc, 0x00000000, 0xffffffff,
334	0x000008f8, 0x00000023, 0xffffffff,
335	0x000008fc, 0x00000000, 0xffffffff,
336	0x000008f8, 0x00000024, 0xffffffff,
337	0x000008fc, 0x00000000, 0xffffffff,
338	0x000008f8, 0x00000025, 0xffffffff,
339	0x000008fc, 0x00000000, 0xffffffff,
340	0x000008f8, 0x00000026, 0xffffffff,
341	0x000008fc, 0x00000000, 0xffffffff,
342	0x000008f8, 0x00000027, 0xffffffff,
343	0x000008fc, 0x00000000, 0xffffffff,
344	0x000008f8, 0x00000028, 0xffffffff,
345	0x000008fc, 0x00000000, 0xffffffff,
346	0x000008f8, 0x00000029, 0xffffffff,
347	0x000008fc, 0x00000000, 0xffffffff,
348	0x000008f8, 0x0000002a, 0xffffffff,
349	0x000008fc, 0x00000000, 0xffffffff,
350	0x000008f8, 0x0000002b, 0xffffffff,
351	0x000008fc, 0x00000000, 0xffffffff,
352	0x00000644, 0x000f7902, 0x001f4180,
353	0x00000644, 0x000f3802, 0x001f4180
354};
355#define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
356
357static const u32 cayman_cgcg_cgls_enable[] =
358{
359	0x00000644, 0x000f7882, 0x001f4080,
360	0x000008f8, 0x00000010, 0xffffffff,
361	0x000008fc, 0x00000000, 0xffffffff,
362	0x000008f8, 0x00000011, 0xffffffff,
363	0x000008fc, 0x00000000, 0xffffffff,
364	0x000008f8, 0x00000012, 0xffffffff,
365	0x000008fc, 0x00000000, 0xffffffff,
366	0x000008f8, 0x00000013, 0xffffffff,
367	0x000008fc, 0x00000000, 0xffffffff,
368	0x000008f8, 0x00000014, 0xffffffff,
369	0x000008fc, 0x00000000, 0xffffffff,
370	0x000008f8, 0x00000015, 0xffffffff,
371	0x000008fc, 0x00000000, 0xffffffff,
372	0x000008f8, 0x00000016, 0xffffffff,
373	0x000008fc, 0x00000000, 0xffffffff,
374	0x000008f8, 0x00000017, 0xffffffff,
375	0x000008fc, 0x00000000, 0xffffffff,
376	0x000008f8, 0x00000018, 0xffffffff,
377	0x000008fc, 0x00000000, 0xffffffff,
378	0x000008f8, 0x00000019, 0xffffffff,
379	0x000008fc, 0x00000000, 0xffffffff,
380	0x000008f8, 0x0000001a, 0xffffffff,
381	0x000008fc, 0x00000000, 0xffffffff,
382	0x000008f8, 0x0000001b, 0xffffffff,
383	0x000008fc, 0x00000000, 0xffffffff,
384	0x000008f8, 0x00000020, 0xffffffff,
385	0x000008fc, 0xffffffff, 0xffffffff,
386	0x000008f8, 0x00000021, 0xffffffff,
387	0x000008fc, 0xffffffff, 0xffffffff,
388	0x000008f8, 0x00000022, 0xffffffff,
389	0x000008fc, 0xffffffff, 0xffffffff,
390	0x000008f8, 0x00000023, 0xffffffff,
391	0x000008fc, 0xffffffff, 0xffffffff,
392	0x000008f8, 0x00000024, 0xffffffff,
393	0x000008fc, 0xffffffff, 0xffffffff,
394	0x000008f8, 0x00000025, 0xffffffff,
395	0x000008fc, 0xffffffff, 0xffffffff,
396	0x000008f8, 0x00000026, 0xffffffff,
397	0x000008fc, 0xffffffff, 0xffffffff,
398	0x000008f8, 0x00000027, 0xffffffff,
399	0x000008fc, 0xffffffff, 0xffffffff,
400	0x000008f8, 0x00000028, 0xffffffff,
401	0x000008fc, 0xffffffff, 0xffffffff,
402	0x000008f8, 0x00000029, 0xffffffff,
403	0x000008fc, 0xffffffff, 0xffffffff,
404	0x000008f8, 0x0000002a, 0xffffffff,
405	0x000008fc, 0xffffffff, 0xffffffff,
406	0x000008f8, 0x0000002b, 0xffffffff,
407	0x000008fc, 0xffffffff, 0xffffffff
408};
409#define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
410
411static const u32 cayman_mgcg_default[] =
412{
413	0x0000802c, 0xc0000000, 0xffffffff,
414	0x00003fc4, 0xc0000000, 0xffffffff,
415	0x00005448, 0x00000100, 0xffffffff,
416	0x000055e4, 0x00000100, 0xffffffff,
417	0x0000160c, 0x00000100, 0xffffffff,
418	0x00008984, 0x06000100, 0xffffffff,
419	0x0000c164, 0x00000100, 0xffffffff,
420	0x00008a18, 0x00000100, 0xffffffff,
421	0x0000897c, 0x06000100, 0xffffffff,
422	0x00008b28, 0x00000100, 0xffffffff,
423	0x00009144, 0x00800200, 0xffffffff,
424	0x00009a60, 0x00000100, 0xffffffff,
425	0x00009868, 0x00000100, 0xffffffff,
426	0x00008d58, 0x00000100, 0xffffffff,
427	0x00009510, 0x00000100, 0xffffffff,
428	0x0000949c, 0x00000100, 0xffffffff,
429	0x00009654, 0x00000100, 0xffffffff,
430	0x00009030, 0x00000100, 0xffffffff,
431	0x00009034, 0x00000100, 0xffffffff,
432	0x00009038, 0x00000100, 0xffffffff,
433	0x0000903c, 0x00000100, 0xffffffff,
434	0x00009040, 0x00000100, 0xffffffff,
435	0x0000a200, 0x00000100, 0xffffffff,
436	0x0000a204, 0x00000100, 0xffffffff,
437	0x0000a208, 0x00000100, 0xffffffff,
438	0x0000a20c, 0x00000100, 0xffffffff,
439	0x00009744, 0x00000100, 0xffffffff,
440	0x00003f80, 0x00000100, 0xffffffff,
441	0x0000a210, 0x00000100, 0xffffffff,
442	0x0000a214, 0x00000100, 0xffffffff,
443	0x000004d8, 0x00000100, 0xffffffff,
444	0x00009664, 0x00000100, 0xffffffff,
445	0x00009698, 0x00000100, 0xffffffff,
446	0x000004d4, 0x00000200, 0xffffffff,
447	0x000004d0, 0x00000000, 0xffffffff,
448	0x000030cc, 0x00000104, 0xffffffff,
449	0x0000d0c0, 0x00000100, 0xffffffff,
450	0x0000d8c0, 0x00000100, 0xffffffff,
451	0x0000802c, 0x40000000, 0xffffffff,
452	0x00003fc4, 0x40000000, 0xffffffff,
453	0x0000915c, 0x00010000, 0xffffffff,
454	0x00009160, 0x00030002, 0xffffffff,
455	0x00009164, 0x00050004, 0xffffffff,
456	0x00009168, 0x00070006, 0xffffffff,
457	0x00009178, 0x00070000, 0xffffffff,
458	0x0000917c, 0x00030002, 0xffffffff,
459	0x00009180, 0x00050004, 0xffffffff,
460	0x0000918c, 0x00010006, 0xffffffff,
461	0x00009190, 0x00090008, 0xffffffff,
462	0x00009194, 0x00070000, 0xffffffff,
463	0x00009198, 0x00030002, 0xffffffff,
464	0x0000919c, 0x00050004, 0xffffffff,
465	0x000091a8, 0x00010006, 0xffffffff,
466	0x000091ac, 0x00090008, 0xffffffff,
467	0x000091b0, 0x00070000, 0xffffffff,
468	0x000091b4, 0x00030002, 0xffffffff,
469	0x000091b8, 0x00050004, 0xffffffff,
470	0x000091c4, 0x00010006, 0xffffffff,
471	0x000091c8, 0x00090008, 0xffffffff,
472	0x000091cc, 0x00070000, 0xffffffff,
473	0x000091d0, 0x00030002, 0xffffffff,
474	0x000091d4, 0x00050004, 0xffffffff,
475	0x000091e0, 0x00010006, 0xffffffff,
476	0x000091e4, 0x00090008, 0xffffffff,
477	0x000091e8, 0x00000000, 0xffffffff,
478	0x000091ec, 0x00070000, 0xffffffff,
479	0x000091f0, 0x00030002, 0xffffffff,
480	0x000091f4, 0x00050004, 0xffffffff,
481	0x00009200, 0x00010006, 0xffffffff,
482	0x00009204, 0x00090008, 0xffffffff,
483	0x00009208, 0x00070000, 0xffffffff,
484	0x0000920c, 0x00030002, 0xffffffff,
485	0x00009210, 0x00050004, 0xffffffff,
486	0x0000921c, 0x00010006, 0xffffffff,
487	0x00009220, 0x00090008, 0xffffffff,
488	0x00009224, 0x00070000, 0xffffffff,
489	0x00009228, 0x00030002, 0xffffffff,
490	0x0000922c, 0x00050004, 0xffffffff,
491	0x00009238, 0x00010006, 0xffffffff,
492	0x0000923c, 0x00090008, 0xffffffff,
493	0x00009240, 0x00070000, 0xffffffff,
494	0x00009244, 0x00030002, 0xffffffff,
495	0x00009248, 0x00050004, 0xffffffff,
496	0x00009254, 0x00010006, 0xffffffff,
497	0x00009258, 0x00090008, 0xffffffff,
498	0x0000925c, 0x00070000, 0xffffffff,
499	0x00009260, 0x00030002, 0xffffffff,
500	0x00009264, 0x00050004, 0xffffffff,
501	0x00009270, 0x00010006, 0xffffffff,
502	0x00009274, 0x00090008, 0xffffffff,
503	0x00009278, 0x00070000, 0xffffffff,
504	0x0000927c, 0x00030002, 0xffffffff,
505	0x00009280, 0x00050004, 0xffffffff,
506	0x0000928c, 0x00010006, 0xffffffff,
507	0x00009290, 0x00090008, 0xffffffff,
508	0x000092a8, 0x00070000, 0xffffffff,
509	0x000092ac, 0x00030002, 0xffffffff,
510	0x000092b0, 0x00050004, 0xffffffff,
511	0x000092bc, 0x00010006, 0xffffffff,
512	0x000092c0, 0x00090008, 0xffffffff,
513	0x000092c4, 0x00070000, 0xffffffff,
514	0x000092c8, 0x00030002, 0xffffffff,
515	0x000092cc, 0x00050004, 0xffffffff,
516	0x000092d8, 0x00010006, 0xffffffff,
517	0x000092dc, 0x00090008, 0xffffffff,
518	0x00009294, 0x00000000, 0xffffffff,
519	0x0000802c, 0x40010000, 0xffffffff,
520	0x00003fc4, 0x40010000, 0xffffffff,
521	0x0000915c, 0x00010000, 0xffffffff,
522	0x00009160, 0x00030002, 0xffffffff,
523	0x00009164, 0x00050004, 0xffffffff,
524	0x00009168, 0x00070006, 0xffffffff,
525	0x00009178, 0x00070000, 0xffffffff,
526	0x0000917c, 0x00030002, 0xffffffff,
527	0x00009180, 0x00050004, 0xffffffff,
528	0x0000918c, 0x00010006, 0xffffffff,
529	0x00009190, 0x00090008, 0xffffffff,
530	0x00009194, 0x00070000, 0xffffffff,
531	0x00009198, 0x00030002, 0xffffffff,
532	0x0000919c, 0x00050004, 0xffffffff,
533	0x000091a8, 0x00010006, 0xffffffff,
534	0x000091ac, 0x00090008, 0xffffffff,
535	0x000091b0, 0x00070000, 0xffffffff,
536	0x000091b4, 0x00030002, 0xffffffff,
537	0x000091b8, 0x00050004, 0xffffffff,
538	0x000091c4, 0x00010006, 0xffffffff,
539	0x000091c8, 0x00090008, 0xffffffff,
540	0x000091cc, 0x00070000, 0xffffffff,
541	0x000091d0, 0x00030002, 0xffffffff,
542	0x000091d4, 0x00050004, 0xffffffff,
543	0x000091e0, 0x00010006, 0xffffffff,
544	0x000091e4, 0x00090008, 0xffffffff,
545	0x000091e8, 0x00000000, 0xffffffff,
546	0x000091ec, 0x00070000, 0xffffffff,
547	0x000091f0, 0x00030002, 0xffffffff,
548	0x000091f4, 0x00050004, 0xffffffff,
549	0x00009200, 0x00010006, 0xffffffff,
550	0x00009204, 0x00090008, 0xffffffff,
551	0x00009208, 0x00070000, 0xffffffff,
552	0x0000920c, 0x00030002, 0xffffffff,
553	0x00009210, 0x00050004, 0xffffffff,
554	0x0000921c, 0x00010006, 0xffffffff,
555	0x00009220, 0x00090008, 0xffffffff,
556	0x00009224, 0x00070000, 0xffffffff,
557	0x00009228, 0x00030002, 0xffffffff,
558	0x0000922c, 0x00050004, 0xffffffff,
559	0x00009238, 0x00010006, 0xffffffff,
560	0x0000923c, 0x00090008, 0xffffffff,
561	0x00009240, 0x00070000, 0xffffffff,
562	0x00009244, 0x00030002, 0xffffffff,
563	0x00009248, 0x00050004, 0xffffffff,
564	0x00009254, 0x00010006, 0xffffffff,
565	0x00009258, 0x00090008, 0xffffffff,
566	0x0000925c, 0x00070000, 0xffffffff,
567	0x00009260, 0x00030002, 0xffffffff,
568	0x00009264, 0x00050004, 0xffffffff,
569	0x00009270, 0x00010006, 0xffffffff,
570	0x00009274, 0x00090008, 0xffffffff,
571	0x00009278, 0x00070000, 0xffffffff,
572	0x0000927c, 0x00030002, 0xffffffff,
573	0x00009280, 0x00050004, 0xffffffff,
574	0x0000928c, 0x00010006, 0xffffffff,
575	0x00009290, 0x00090008, 0xffffffff,
576	0x000092a8, 0x00070000, 0xffffffff,
577	0x000092ac, 0x00030002, 0xffffffff,
578	0x000092b0, 0x00050004, 0xffffffff,
579	0x000092bc, 0x00010006, 0xffffffff,
580	0x000092c0, 0x00090008, 0xffffffff,
581	0x000092c4, 0x00070000, 0xffffffff,
582	0x000092c8, 0x00030002, 0xffffffff,
583	0x000092cc, 0x00050004, 0xffffffff,
584	0x000092d8, 0x00010006, 0xffffffff,
585	0x000092dc, 0x00090008, 0xffffffff,
586	0x00009294, 0x00000000, 0xffffffff,
587	0x0000802c, 0xc0000000, 0xffffffff,
588	0x00003fc4, 0xc0000000, 0xffffffff,
589	0x000008f8, 0x00000010, 0xffffffff,
590	0x000008fc, 0x00000000, 0xffffffff,
591	0x000008f8, 0x00000011, 0xffffffff,
592	0x000008fc, 0x00000000, 0xffffffff,
593	0x000008f8, 0x00000012, 0xffffffff,
594	0x000008fc, 0x00000000, 0xffffffff,
595	0x000008f8, 0x00000013, 0xffffffff,
596	0x000008fc, 0x00000000, 0xffffffff,
597	0x000008f8, 0x00000014, 0xffffffff,
598	0x000008fc, 0x00000000, 0xffffffff,
599	0x000008f8, 0x00000015, 0xffffffff,
600	0x000008fc, 0x00000000, 0xffffffff,
601	0x000008f8, 0x00000016, 0xffffffff,
602	0x000008fc, 0x00000000, 0xffffffff,
603	0x000008f8, 0x00000017, 0xffffffff,
604	0x000008fc, 0x00000000, 0xffffffff,
605	0x000008f8, 0x00000018, 0xffffffff,
606	0x000008fc, 0x00000000, 0xffffffff,
607	0x000008f8, 0x00000019, 0xffffffff,
608	0x000008fc, 0x00000000, 0xffffffff,
609	0x000008f8, 0x0000001a, 0xffffffff,
610	0x000008fc, 0x00000000, 0xffffffff,
611	0x000008f8, 0x0000001b, 0xffffffff,
612	0x000008fc, 0x00000000, 0xffffffff
613};
614#define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
615
616static const u32 cayman_mgcg_disable[] =
617{
618	0x0000802c, 0xc0000000, 0xffffffff,
619	0x000008f8, 0x00000000, 0xffffffff,
620	0x000008fc, 0xffffffff, 0xffffffff,
621	0x000008f8, 0x00000001, 0xffffffff,
622	0x000008fc, 0xffffffff, 0xffffffff,
623	0x000008f8, 0x00000002, 0xffffffff,
624	0x000008fc, 0xffffffff, 0xffffffff,
625	0x000008f8, 0x00000003, 0xffffffff,
626	0x000008fc, 0xffffffff, 0xffffffff,
627	0x00009150, 0x00600000, 0xffffffff
628};
629#define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
630
631static const u32 cayman_mgcg_enable[] =
632{
633	0x0000802c, 0xc0000000, 0xffffffff,
634	0x000008f8, 0x00000000, 0xffffffff,
635	0x000008fc, 0x00000000, 0xffffffff,
636	0x000008f8, 0x00000001, 0xffffffff,
637	0x000008fc, 0x00000000, 0xffffffff,
638	0x000008f8, 0x00000002, 0xffffffff,
639	0x000008fc, 0x00600000, 0xffffffff,
640	0x000008f8, 0x00000003, 0xffffffff,
641	0x000008fc, 0x00000000, 0xffffffff,
642	0x00009150, 0x96944200, 0xffffffff
643};
644
645#define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
646
647#define NISLANDS_SYSLS_SEQUENCE  100
648
649static const u32 cayman_sysls_default[] =
650{
651	/* Register,   Value,     Mask bits */
652	0x000055e8, 0x00000000, 0xffffffff,
653	0x0000d0bc, 0x00000000, 0xffffffff,
654	0x0000d8bc, 0x00000000, 0xffffffff,
655	0x000015c0, 0x000c1401, 0xffffffff,
656	0x0000264c, 0x000c0400, 0xffffffff,
657	0x00002648, 0x000c0400, 0xffffffff,
658	0x00002650, 0x000c0400, 0xffffffff,
659	0x000020b8, 0x000c0400, 0xffffffff,
660	0x000020bc, 0x000c0400, 0xffffffff,
661	0x000020c0, 0x000c0c80, 0xffffffff,
662	0x0000f4a0, 0x000000c0, 0xffffffff,
663	0x0000f4a4, 0x00680fff, 0xffffffff,
664	0x00002f50, 0x00000404, 0xffffffff,
665	0x000004c8, 0x00000001, 0xffffffff,
666	0x000064ec, 0x00000000, 0xffffffff,
667	0x00000c7c, 0x00000000, 0xffffffff,
668	0x00008dfc, 0x00000000, 0xffffffff
669};
670#define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
671
672static const u32 cayman_sysls_disable[] =
673{
674	/* Register,   Value,     Mask bits */
675	0x0000d0c0, 0x00000000, 0xffffffff,
676	0x0000d8c0, 0x00000000, 0xffffffff,
677	0x000055e8, 0x00000000, 0xffffffff,
678	0x0000d0bc, 0x00000000, 0xffffffff,
679	0x0000d8bc, 0x00000000, 0xffffffff,
680	0x000015c0, 0x00041401, 0xffffffff,
681	0x0000264c, 0x00040400, 0xffffffff,
682	0x00002648, 0x00040400, 0xffffffff,
683	0x00002650, 0x00040400, 0xffffffff,
684	0x000020b8, 0x00040400, 0xffffffff,
685	0x000020bc, 0x00040400, 0xffffffff,
686	0x000020c0, 0x00040c80, 0xffffffff,
687	0x0000f4a0, 0x000000c0, 0xffffffff,
688	0x0000f4a4, 0x00680000, 0xffffffff,
689	0x00002f50, 0x00000404, 0xffffffff,
690	0x000004c8, 0x00000001, 0xffffffff,
691	0x000064ec, 0x00007ffd, 0xffffffff,
692	0x00000c7c, 0x0000ff00, 0xffffffff,
693	0x00008dfc, 0x0000007f, 0xffffffff
694};
695#define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
696
697static const u32 cayman_sysls_enable[] =
698{
699	/* Register,   Value,     Mask bits */
700	0x000055e8, 0x00000001, 0xffffffff,
701	0x0000d0bc, 0x00000100, 0xffffffff,
702	0x0000d8bc, 0x00000100, 0xffffffff,
703	0x000015c0, 0x000c1401, 0xffffffff,
704	0x0000264c, 0x000c0400, 0xffffffff,
705	0x00002648, 0x000c0400, 0xffffffff,
706	0x00002650, 0x000c0400, 0xffffffff,
707	0x000020b8, 0x000c0400, 0xffffffff,
708	0x000020bc, 0x000c0400, 0xffffffff,
709	0x000020c0, 0x000c0c80, 0xffffffff,
710	0x0000f4a0, 0x000000c0, 0xffffffff,
711	0x0000f4a4, 0x00680fff, 0xffffffff,
712	0x00002f50, 0x00000903, 0xffffffff,
713	0x000004c8, 0x00000000, 0xffffffff,
714	0x000064ec, 0x00000000, 0xffffffff,
715	0x00000c7c, 0x00000000, 0xffffffff,
716	0x00008dfc, 0x00000000, 0xffffffff
717};
718#define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
719
720struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
721struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
722
723struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
724{
725        struct ni_power_info *pi = rdev->pm.dpm.priv;
726
727        return pi;
728}
729
730struct ni_ps *ni_get_ps(struct radeon_ps *rps)
731{
732	struct ni_ps *ps = rps->ps_priv;
733
734	return ps;
735}
736
/*
 * Evaluate the leakage-power formula in 32.32 fixed point:
 *
 *   leakage = I_leakage * kt * kv * V
 *   kt = at * e^(bt * T),  kv = av * e^(bv * V)
 *
 * All inputs and coefficients arrive pre-scaled by 1000, hence the
 * div64_s64(..., 1000) normalizations; the result is multiplied by
 * 1000 again before converting back to an integer in *leakage.
 * NOTE(review): units of *leakage depend on the caller's scaling of
 * ileakage/coefficients — confirm against the callers in this file.
 */
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
	s64 kt, kv, leakage_w, i_leakage, vddc, temperature;

	/* Convert the milli-scaled integer inputs to fixed point. */
	i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
	vddc = div64_s64(drm_int2fixp(v), 1000);
	temperature = div64_s64(drm_int2fixp(t), 1000);

	/* kt = at * e^(bt * T) */
	kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
	/* kv = av * e^(bv * V) */
	kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
			  drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));

	leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);

	/* Scale back up by 1000 before truncating the fixed-point value. */
	*leakage = drm_fixp2int(leakage_w * 1000);
}
757
/*
 * Thin wrapper around ni_calculate_leakage_for_v_and_t_formula().
 * rdev is currently unused; it is kept in the signature so callers do
 * not need to change if per-device data becomes necessary.
 */
static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}
767
/*
 * Clamp and adjust a requested power state so it satisfies the current
 * display configuration, power-source limits, and voltage dependency
 * tables.  The adjustment order below is deliberate (clamp to limits,
 * force monotonic levels, then apply dependency/delta rules) — do not
 * reorder the passes.
 */
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk, sclk;
	u16 vddc, vddci;
	int i;

	/* mclk switching glitches with more than one active display */
	if (rdev->pm.dpm.new_active_crtc_count > 1)
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	/* on battery, clamp every performance level to the DC limits */
	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* XXX validate the min clocks required for display */

	/*
	 * When mclk switching is disabled, pin mclk/vddci to the highest
	 * level's values so the memory clock never changes at runtime.
	 */
	if (disable_mclk_switching) {
		mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
		sclk = ps->performance_levels[0].sclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
	} else {
		sclk = ps->performance_levels[0].sclk;
		mclk = ps->performance_levels[0].mclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[0].vddci;
	}

	/* adjusted low state */
	ps->performance_levels[0].sclk = sclk;
	ps->performance_levels[0].mclk = mclk;
	ps->performance_levels[0].vddc = vddc;
	ps->performance_levels[0].vddci = vddci;

	/* skip known-bad clock combinations for the low state */
	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	/* force sclk/vddc to be monotonically non-decreasing across levels */
	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	if (disable_mclk_switching) {
		/* every level gets the single highest mclk and the pinned vddci */
		mclk = ps->performance_levels[0].mclk;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		/* otherwise just keep mclk/vddci monotonically non-decreasing */
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	/* re-check blacklisted combinations after the monotonic fixups */
	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	/* bump voltages to satisfy the clock/voltage dependency tables */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
						   rdev->clock.current_dispclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
	}

	/* enforce the allowed vddc/vddci delta */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	/* mark the state DC-compatible only if every level fits the DC vddc limit */
	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		/* PCIe gen2 needs a minimum vddc */
		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}
891
892static void ni_cg_clockgating_default(struct radeon_device *rdev)
893{
894	u32 count;
895	const u32 *ps = NULL;
896
897	ps = (const u32 *)&cayman_cgcg_cgls_default;
898	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
899
900	btc_program_mgcg_hw_sequence(rdev, ps, count);
901}
902
903static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
904				      bool enable)
905{
906	u32 count;
907	const u32 *ps = NULL;
908
909	if (enable) {
910		ps = (const u32 *)&cayman_cgcg_cgls_enable;
911		count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
912	} else {
913		ps = (const u32 *)&cayman_cgcg_cgls_disable;
914		count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
915	}
916
917	btc_program_mgcg_hw_sequence(rdev, ps, count);
918}
919
920static void ni_mg_clockgating_default(struct radeon_device *rdev)
921{
922	u32 count;
923	const u32 *ps = NULL;
924
925	ps = (const u32 *)&cayman_mgcg_default;
926	count = CAYMAN_MGCG_DEFAULT_LENGTH;
927
928	btc_program_mgcg_hw_sequence(rdev, ps, count);
929}
930
931static void ni_mg_clockgating_enable(struct radeon_device *rdev,
932				     bool enable)
933{
934	u32 count;
935	const u32 *ps = NULL;
936
937	if (enable) {
938		ps = (const u32 *)&cayman_mgcg_enable;
939		count = CAYMAN_MGCG_ENABLE_LENGTH;
940	} else {
941		ps = (const u32 *)&cayman_mgcg_disable;
942		count = CAYMAN_MGCG_DISABLE_LENGTH;
943	}
944
945	btc_program_mgcg_hw_sequence(rdev, ps, count);
946}
947
948static void ni_ls_clockgating_default(struct radeon_device *rdev)
949{
950	u32 count;
951	const u32 *ps = NULL;
952
953	ps = (const u32 *)&cayman_sysls_default;
954	count = CAYMAN_SYSLS_DEFAULT_LENGTH;
955
956	btc_program_mgcg_hw_sequence(rdev, ps, count);
957}
958
959static void ni_ls_clockgating_enable(struct radeon_device *rdev,
960				     bool enable)
961{
962	u32 count;
963	const u32 *ps = NULL;
964
965	if (enable) {
966		ps = (const u32 *)&cayman_sysls_enable;
967		count = CAYMAN_SYSLS_ENABLE_LENGTH;
968	} else {
969		ps = (const u32 *)&cayman_sysls_disable;
970		count = CAYMAN_SYSLS_DISABLE_LENGTH;
971	}
972
973	btc_program_mgcg_hw_sequence(rdev, ps, count);
974
975}
976
977static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
978							     struct radeon_clock_voltage_dependency_table *table)
979{
980	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
981	u32 i;
982
983	if (table) {
984		for (i = 0; i < table->count; i++) {
985			if (0xff01 == table->entries[i].v) {
986				if (pi->max_vddc == 0)
987					return -EINVAL;
988				table->entries[i].v = pi->max_vddc;
989			}
990		}
991	}
992	return 0;
993}
994
995static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
996{
997	int ret = 0;
998
999	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1000								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1001
1002	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1003								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1004	return ret;
1005}
1006
/* Disable dynamic power management by clearing GLOBAL_PWRMGT_EN. */
static void ni_stop_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}
1011
#if 0
/*
 * Compiled out: tell the SMC we are running on AC power.  Returns 0 on
 * success or when on battery (no message is sent in that case), -EINVAL
 * if the SMC rejects PPSMC_MSG_RunningOnAC.  Kept for future use.
 */
static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
					bool ac_power)
{
	if (ac_power)
		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
			0 : -EINVAL;

	return 0;
}
#endif
1023
/*
 * Send an SMC message that carries a 32-bit argument.  The SMC firmware
 * picks the argument up from the SMC_SCRATCH0 register, so it must be
 * written before the message itself is posted.
 */
static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
						      PPSMC_Msg msg, u32 parameter)
{
	WREG32(SMC_SCRATCH0, parameter);
	return rv770_send_msg_to_smc(rdev, msg);
}
1030
1031static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1032{
1033	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1034		return -EINVAL;
1035
1036	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1037		0 : -EINVAL;
1038}
1039
1040static int ni_unrestrict_performance_levels_after_switch(struct radeon_device *rdev)
1041{
1042	if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1043		return -EINVAL;
1044
1045	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) == PPSMC_Result_OK) ?
1046		0 : -EINVAL;
1047}
1048
/*
 * Stop the SMC: wait (bounded by usec_timeout) for LB_SYNC_RESET_SEL to
 * leave state 1, settle briefly, then perform the common r7xx SMC stop.
 */
static void ni_stop_smc(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
		if (tmp != 1)
			break;
		udelay(1);
	}

	/* extra settle time before stopping; proceeds even if the wait timed out */
	udelay(100);

	r7xx_stop_smc(rdev);
}
1065
1066static int ni_process_firmware_header(struct radeon_device *rdev)
1067{
1068        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1069        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1070        struct ni_power_info *ni_pi = ni_get_pi(rdev);
1071	u32 tmp;
1072	int ret;
1073
1074	ret = rv770_read_smc_sram_dword(rdev,
1075					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1076					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1077					&tmp, pi->sram_end);
1078
1079	if (ret)
1080		return ret;
1081
1082	pi->state_table_start = (u16)tmp;
1083
1084	ret = rv770_read_smc_sram_dword(rdev,
1085					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1086					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1087					&tmp, pi->sram_end);
1088
1089	if (ret)
1090		return ret;
1091
1092	pi->soft_regs_start = (u16)tmp;
1093
1094	ret = rv770_read_smc_sram_dword(rdev,
1095					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1096					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1097					&tmp, pi->sram_end);
1098
1099	if (ret)
1100		return ret;
1101
1102	eg_pi->mc_reg_table_start = (u16)tmp;
1103
1104	ret = rv770_read_smc_sram_dword(rdev,
1105					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1106					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1107					&tmp, pi->sram_end);
1108
1109	if (ret)
1110		return ret;
1111
1112	ni_pi->fan_table_start = (u16)tmp;
1113
1114	ret = rv770_read_smc_sram_dword(rdev,
1115					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1116					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1117					&tmp, pi->sram_end);
1118
1119	if (ret)
1120		return ret;
1121
1122	ni_pi->arb_table_start = (u16)tmp;
1123
1124	ret = rv770_read_smc_sram_dword(rdev,
1125					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1126					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1127					&tmp, pi->sram_end);
1128
1129	if (ret)
1130		return ret;
1131
1132	ni_pi->cac_table_start = (u16)tmp;
1133
1134	ret = rv770_read_smc_sram_dword(rdev,
1135					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1136					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1137					&tmp, pi->sram_end);
1138
1139	if (ret)
1140		return ret;
1141
1142	ni_pi->spll_table_start = (u16)tmp;
1143
1144
1145	return ret;
1146}
1147
/*
 * Snapshot the current SPLL/MPLL/MCLK clock control registers so later
 * SMC state programming can start from the hardware's boot-up values.
 */
static void ni_read_clock_registers(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
}
1167
#if 0
/*
 * Currently unused: request the SMC switch to minimum power (ULP).
 * When gfx clock gating is active, the graphics clock is briefly forced
 * on (with a readback to flush the writes) before posting the message.
 */
static int ni_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);
        }

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
                 ~HOST_SMC_MSG_MASK);

	/* give the SMC time to act on the message */
	udelay(25000);

	return 0;
}
#endif
1188
/*
 * Program SMC soft registers with the delays the firmware uses when
 * switching voltages/clocks.  Inputs come from the power table (with
 * fallbacks) and are converted from time to reference-clock ticks.
 */
static void ni_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
	u32 reference_clock;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	/* fall back to a default when the power table does not specify a time */
	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	/* convert times to reference-clock ticks; /1600 presumably folds the
	 * time unit and clock unit together — TODO confirm exact units */
	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
	bb_dly   = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly  = (vbi_time_out * reference_clock) / 1600;

	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
}
1225
/*
 * Copy one voltage table's SMIO patterns into the SMC state table.
 * lowSMIO is OR-accumulated (the state table is zeroed by the caller)
 * so multiple voltage rails can share the same entries.
 */
static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
					  struct atom_voltage_table *voltage_table,
					  NISLANDS_SMC_STATETABLE *table)
{
	unsigned int i;

	for (i = 0; i < voltage_table->count; i++) {
		table->highSMIO[i] = 0;
		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
	}
}
1237
1238static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1239					   NISLANDS_SMC_STATETABLE *table)
1240{
1241	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1242	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1243	unsigned char i;
1244
1245	if (eg_pi->vddc_voltage_table.count) {
1246		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1247		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1248		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1249			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1250
1251		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1252			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1253				table->maxVDDCIndexInPPTable = i;
1254				break;
1255			}
1256		}
1257	}
1258
1259	if (eg_pi->vddci_voltage_table.count) {
1260		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1261
1262		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1263		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1264			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1265	}
1266}
1267
1268static int ni_populate_voltage_value(struct radeon_device *rdev,
1269				     struct atom_voltage_table *table,
1270				     u16 value,
1271				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1272{
1273	unsigned int i;
1274
1275	for (i = 0; i < table->count; i++) {
1276		if (value <= table->entries[i].value) {
1277			voltage->index = (u8)i;
1278			voltage->value = cpu_to_be16(table->entries[i].value);
1279			break;
1280		}
1281	}
1282
1283	if (i >= table->count)
1284		return -EINVAL;
1285
1286	return 0;
1287}
1288
1289static void ni_populate_mvdd_value(struct radeon_device *rdev,
1290				   u32 mclk,
1291				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1292{
1293        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1294	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1295
1296	if (!pi->mvdd_control) {
1297		voltage->index = eg_pi->mvdd_high_index;
1298                voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1299		return;
1300	}
1301
1302	if (mclk <= pi->mvdd_split_frequency) {
1303		voltage->index = eg_pi->mvdd_low_index;
1304		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1305	} else {
1306		voltage->index = eg_pi->mvdd_high_index;
1307		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1308	}
1309}
1310
1311static int ni_get_std_voltage_value(struct radeon_device *rdev,
1312				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1313				    u16 *std_voltage)
1314{
1315	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1316	    ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1317		*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1318	else
1319		*std_voltage = be16_to_cpu(voltage->value);
1320
1321	return 0;
1322}
1323
/* Fill an SMC voltage entry with an explicit index and (big-endian) value. */
static void ni_populate_std_voltage_value(struct radeon_device *rdev,
					  u16 value, u8 index,
					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	voltage->index = index;
	voltage->value = cpu_to_be16(value);
}
1331
/*
 * Derive the SMC power scaling factor from the CAC TID count scaled by
 * the reference-clock period.  Note the two successive integer divides
 * are kept separate; folding them into one divide would round differently.
 */
static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
{
	u32 xclk_period;
	u32 xclk = radeon_get_xclk(rdev);
	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;

	xclk_period = (1000000000UL / xclk);
	xclk_period /= 10000UL;

	return tmp * xclk_period;
}
1343
1344static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1345{
1346	return (power_in_watts * scaling_factor) << 2;
1347}
1348
/*
 * Compute the DPM2 power boost limit for a state:
 * near_tdp_limit scaled by (V_med / V_high)^2 * 0.9, where V_med/V_high
 * are the standard VDDC values of the two highest performance levels.
 * Returns 0 (no boost) when power containment/boost is disabled, the
 * state has fewer than 3 levels, any lookup fails, or the result
 * overflows 32 bits.
 */
static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
					  struct radeon_ps *radeon_state,
					  u32 near_tdp_limit)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 power_boost_limit = 0;
	int ret;

	if (ni_pi->enable_power_containment &&
	    ni_pi->use_power_boost_limit) {
		NISLANDS_SMC_VOLTAGE_VALUE vddc;
		u16 std_vddc_med;
		u16 std_vddc_high;
		u64 tmp, n, d;

		if (state->performance_level_count < 3)
			return 0;

		/* second-highest level's VDDC */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 2].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
		if (ret)
			return 0;

		/* highest level's VDDC */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 1].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
		if (ret)
			return 0;

		/* 64-bit math: near_tdp * V_med^2 * 90 / (V_high^2 * 100) */
		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
		tmp = div64_u64(n, d);

		if (tmp >> 32)
			return 0;
		power_boost_limit = (u32)tmp;
	}

	return power_boost_limit;
}
1400
1401static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1402					    bool adjust_polarity,
1403					    u32 tdp_adjustment,
1404					    u32 *tdp_limit,
1405					    u32 *near_tdp_limit)
1406{
1407	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1408		return -EINVAL;
1409
1410	if (adjust_polarity) {
1411		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1412		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1413	} else {
1414		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1415		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1416	}
1417
1418	return 0;
1419}
1420
/*
 * Compute and upload the DPM2 TDP parameters (TDP, near-TDP, safe and
 * boost limits) to the SMC.  No-op (returns 0) when power containment
 * is disabled.
 */
static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
				      struct radeon_ps *radeon_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	if (ni_pi->enable_power_containment) {
		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
		u32 tdp_limit;
		u32 near_tdp_limit;
		u32 power_boost_limit;
		int ret;

		if (scaling_factor == 0)
			return -EINVAL;

		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));

		ret = ni_calculate_adjusted_tdp_limits(rdev,
						       false, /* ??? */
						       rdev->pm.dpm.tdp_adjustment,
						       &tdp_limit,
						       &near_tdp_limit);
		if (ret)
			return ret;

		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
								   near_tdp_limit);

		smc_table->dpm2Params.TDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
		smc_table->dpm2Params.NearTDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
		smc_table->dpm2Params.SafePowerLimit =
			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
							   scaling_factor));
		smc_table->dpm2Params.PowerBoostLimit =
			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));

		/* copies the 4 consecutive u32 fields starting at TDPLimit —
		 * relies on PP_NIslands_DPM2Parameters field layout */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
					      sizeof(u32) * 4, pi->sram_end);
		if (ret)
			return ret;
	}

	return 0;
}
1472
1473int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1474				u32 arb_freq_src, u32 arb_freq_dest)
1475{
1476	u32 mc_arb_dram_timing;
1477	u32 mc_arb_dram_timing2;
1478	u32 burst_time;
1479	u32 mc_cg_config;
1480
1481	switch (arb_freq_src) {
1482        case MC_CG_ARB_FREQ_F0:
1483		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1484		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1485		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1486		break;
1487        case MC_CG_ARB_FREQ_F1:
1488		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1489		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1490		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1491		break;
1492        case MC_CG_ARB_FREQ_F2:
1493		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1494		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1495		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1496		break;
1497        case MC_CG_ARB_FREQ_F3:
1498		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1499		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1500		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1501		break;
1502        default:
1503		return -EINVAL;
1504	}
1505
1506	switch (arb_freq_dest) {
1507        case MC_CG_ARB_FREQ_F0:
1508		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1509		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1510		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1511		break;
1512        case MC_CG_ARB_FREQ_F1:
1513		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1514		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1515		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1516		break;
1517        case MC_CG_ARB_FREQ_F2:
1518		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1519		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1520		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1521		break;
1522        case MC_CG_ARB_FREQ_F3:
1523		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1524		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1525		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1526		break;
1527	default:
1528		return -EINVAL;
1529	}
1530
1531	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1532	WREG32(MC_CG_CONFIG, mc_cg_config);
1533	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1534
1535	return 0;
1536}
1537
1538static int ni_init_arb_table_index(struct radeon_device *rdev)
1539{
1540	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1541	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1542	u32 tmp;
1543	int ret;
1544
1545	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1546					&tmp, pi->sram_end);
1547	if (ret)
1548		return ret;
1549
1550	tmp &= 0x00FFFFFF;
1551	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1552
1553	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1554					  tmp, pi->sram_end);
1555}
1556
/* Move the MC arbiter from the boot set (F0) to the driver set (F1). */
static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
{
	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
}
1561
1562static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1563{
1564	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1565	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1566	u32 tmp;
1567	int ret;
1568
1569	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1570					&tmp, pi->sram_end);
1571	if (ret)
1572		return ret;
1573
1574	tmp = (tmp >> 24) & 0xff;
1575
1576	if (tmp == MC_CG_ARB_FREQ_F0)
1577		return 0;
1578
1579	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1580}
1581
1582static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1583						struct rv7xx_pl *pl,
1584						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1585{
1586	u32 dram_timing;
1587	u32 dram_timing2;
1588
1589	arb_regs->mc_arb_rfsh_rate =
1590		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1591
1592
1593	radeon_atom_set_engine_dram_timings(rdev,
1594                                            pl->sclk,
1595                                            pl->mclk);
1596
1597	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1598	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1599
1600	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1601	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1602
1603	return 0;
1604}
1605
/*
 * Build and upload one MC arbiter register set per performance level of
 * the state, starting at arbiter slot first_arb_set in the SMC's arb
 * table.  Stops at the first failure and returns that error.
 */
static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
						  struct radeon_ps *radeon_state,
						  unsigned int first_arb_set)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
	int i, ret = 0;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
		if (ret)
			break;

		/* destination: arb table base + data[] offset + slot index */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(ni_pi->arb_table_start +
						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
					      (u8 *)&arb_regs,
					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
					      pi->sram_end);
		if (ret)
			break;
	}
	return ret;
}
1633
/* Upload the new state's MC arbiter sets starting at the driver-state slot. */
static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
					       struct radeon_ps *radeon_new_state)
{
	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
						      NISLANDS_DRIVER_STATE_ARB_INDEX);
}
1640
/* The initial (boot) state always uses the high MVDD level. */
static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	voltage->index = eg_pi->mvdd_high_index;
	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
}
1649
/*
 * Fill the SMC state table's initialState from the boot state: the
 * cached MPLL/SPLL register snapshots, level-0 clocks and voltages,
 * PCIe gen, GDDR5 strobe/EDC flags, and the DPM2 throttling defaults.
 */
static int ni_populate_smc_initial_state(struct radeon_device *rdev,
					 struct radeon_ps *radeon_initial_state,
					 NISLANDS_SMC_STATETABLE *table)
{
	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;
	int ret;

	/* memory clock registers come from the snapshot taken at init */
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.vDLL_CNTL =
		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
	table->initialState.levels[0].mclk.vMPLL_SS =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
	table->initialState.levels[0].mclk.vMPLL_SS2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
	table->initialState.levels[0].mclk.mclk_value =
		cpu_to_be32(initial_state->performance_levels[0].mclk);

	/* engine clock registers, likewise from the snapshot */
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->performance_levels[0].sclk);
	table->initialState.levels[0].arbRefreshState =
		NISLANDS_INITIAL_STATE_ARB_INDEX;

	table->initialState.levels[0].ACIndex = 0;

	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					initial_state->performance_levels[0].vddc,
					&table->initialState.levels[0].vddc);
	if (!ret) {
		u16 std_vddc;

		ret = ni_get_std_voltage_value(rdev,
					       &table->initialState.levels[0].vddc,
					       &std_vddc);
		if (!ret)
			ni_populate_std_voltage_value(rdev, std_vddc,
						      table->initialState.levels[0].vddc.index,
						      &table->initialState.levels[0].std_vddc);
	}

	if (eg_pi->vddci_control)
		ni_populate_voltage_value(rdev,
					  &eg_pi->vddci_voltage_table,
					  initial_state->performance_levels[0].vddci,
					  &table->initialState.levels[0].vddci);

	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);

	reg = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(reg);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;

	if (pi->mem_gddr5) {
		table->initialState.levels[0].strobeMode =
			cypress_get_strobe_mode_settings(rdev,
							 initial_state->performance_levels[0].mclk);

		/* EDC read/write only above the enable threshold */
		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
		else
			table->initialState.levels[0].mcFlags =  0;
	}

	table->initialState.levelCount = 1;

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	/* DPM2 throttling disabled for the initial state */
	table->initialState.levels[0].dpm2.MaxPS = 0;
	table->initialState.levels[0].dpm2.NearTDPDec = 0;
	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
	table->initialState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1760
/*
 * Build the SMC ACPI (lowest-power) state: start from the initial
 * state, then force minimum voltages, power down the MPLLs, bypass the
 * memory DLLs and park the engine clock mux.  Clocks are zeroed so the
 * SMC treats them as "off".
 */
static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
				      NISLANDS_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
	u32 reg;
	int ret;

	table->ACPIState = table->initialState;

	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	if (pi->acpi_vddc) {
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc, &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}

		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else {
			table->ACPIState.levels[0].gen2PCIE = 0;
		}
	} else {
		/* no dedicated ACPI voltage: fall back to the table minimum */
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->min_vddc_in_table,
						&table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc,
						       &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}
		table->ACPIState.levels[0].gen2PCIE = 0;
	}

	if (eg_pi->acpi_vddci) {
		if (eg_pi->vddci_control)
			ni_populate_voltage_value(rdev,
						  &eg_pi->vddci_voltage_table,
						  eg_pi->acpi_vddci,
						  &table->ACPIState.levels[0].vddci);
	}


	/* power down the MPLL AD/DQ paths */
	mpll_ad_func_cntl &= ~PDNB;

	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

        if (pi->mem_gddr5)
                mpll_dq_func_cntl &= ~PDNB;
        mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;


	/* hold all memory DLL clocks in reset ... */
	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
			     MRDCKA1_RESET |
			     MRDCKB0_RESET |
			     MRDCKB1_RESET |
			     MRDCKC0_RESET |
			     MRDCKC1_RESET |
			     MRDCKD0_RESET |
			     MRDCKD1_RESET);

	/* ... and powered down */
	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
			      MRDCKA1_PDNB |
			      MRDCKB0_PDNB |
			      MRDCKB1_PDNB |
			      MRDCKC0_PDNB |
			      MRDCKC1_PDNB |
			      MRDCKD0_PDNB |
			      MRDCKD1_PDNB);

	dll_cntl |= (MRDCKA0_BYPASS |
                     MRDCKA1_BYPASS |
                     MRDCKB0_BYPASS |
                     MRDCKB1_BYPASS |
                     MRDCKC0_BYPASS |
                     MRDCKC1_BYPASS |
                     MRDCKD0_BYPASS |
                     MRDCKD1_BYPASS);

        spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);

	table->ACPIState.levels[0].mclk.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	if (eg_pi->dynamic_ac_timing)
		table->ACPIState.levels[0].ACIndex = 1;

	/* DPM2 throttling disabled for the ACPI state */
	table->ACPIState.levels[0].dpm2.MaxPS = 0;
	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1908
/*
 * Build the full SMC state table (voltages, thermal/system flags,
 * initial + ACPI states, initial arbiter set) from the boot state and
 * upload it to SMC SRAM.
 */
static int ni_init_smc_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;

	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));

	ni_populate_smc_voltage_tables(rdev, table);

	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_NI:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	ret = ni_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	/* driver and ULV states start out as copies of the initial state */
	table->driverState = table->initialState;

	table->ULVState = table->initialState;

	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
						     NISLANDS_INITIAL_STATE_ARB_INDEX);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
}
1966
1967static int ni_calculate_sclk_params(struct radeon_device *rdev,
1968				    u32 engine_clock,
1969				    NISLANDS_SMC_SCLK_VALUE *sclk)
1970{
1971	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1972	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1973	struct atom_clock_dividers dividers;
1974	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
1975	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
1976	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
1977	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
1978	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
1979	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
1980	u64 tmp;
1981	u32 reference_clock = rdev->clock.spll.reference_freq;
1982	u32 reference_divider;
1983	u32 fbdiv;
1984	int ret;
1985
1986	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1987					     engine_clock, false, &dividers);
1988	if (ret)
1989		return ret;
1990
1991	reference_divider = 1 + dividers.ref_div;
1992
1993
1994	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
1995	do_div(tmp, reference_clock);
1996	fbdiv = (u32) tmp;
1997
1998	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
1999	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2000	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2001
2002	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2003	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2004
2005	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2006	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2007	spll_func_cntl_3 |= SPLL_DITHEN;
2008
2009	if (pi->sclk_ss) {
2010		struct radeon_atom_ss ss;
2011		u32 vco_freq = engine_clock * dividers.post_div;
2012
2013		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2014						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2015			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2016			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2017
2018			cg_spll_spread_spectrum &= ~CLK_S_MASK;
2019			cg_spll_spread_spectrum |= CLK_S(clk_s);
2020			cg_spll_spread_spectrum |= SSEN;
2021
2022			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2023			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2024		}
2025	}
2026
2027	sclk->sclk_value = engine_clock;
2028	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2029	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2030	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2031	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2032	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2033	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2034
2035	return 0;
2036}
2037
2038static int ni_populate_sclk_value(struct radeon_device *rdev,
2039				  u32 engine_clock,
2040				  NISLANDS_SMC_SCLK_VALUE *sclk)
2041{
2042	NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2043	int ret;
2044
2045	ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2046	if (!ret) {
2047		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2048		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2049		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2050		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2051		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2052		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2053		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2054	}
2055
2056	return ret;
2057}
2058
2059static int ni_init_smc_spll_table(struct radeon_device *rdev)
2060{
2061        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2062	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2063	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2064	NISLANDS_SMC_SCLK_VALUE sclk_params;
2065	u32 fb_div;
2066	u32 p_div;
2067	u32 clk_s;
2068	u32 clk_v;
2069	u32 sclk = 0;
2070	int i, ret;
2071	u32 tmp;
2072
2073	if (ni_pi->spll_table_start == 0)
2074		return -EINVAL;
2075
2076	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2077	if (spll_table == NULL)
2078		return -ENOMEM;
2079
2080	for (i = 0; i < 256; i++) {
2081		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2082		if (ret)
2083			break;
2084
2085		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2086		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2087		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2088		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2089
2090		fb_div &= ~0x00001FFF;
2091		fb_div >>= 1;
2092		clk_v >>= 6;
2093
2094		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2095			ret = -EINVAL;
2096
2097		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2098			ret = -EINVAL;
2099
2100		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2101			ret = -EINVAL;
2102
2103		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2104			ret = -EINVAL;
2105
2106		if (ret)
2107			break;
2108
2109		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2110			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2111		spll_table->freq[i] = cpu_to_be32(tmp);
2112
2113		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2114			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2115		spll_table->ss[i] = cpu_to_be32(tmp);
2116
2117		sclk += 512;
2118	}
2119
2120	if (!ret)
2121		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2122					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2123
2124	kfree(spll_table);
2125
2126	return ret;
2127}
2128
/*
 * ni_populate_mclk_value - compute the MPLL/DLL register settings for a
 * memory clock and store them big-endian in @mclk.
 *
 * @engine_clock:  engine clock passed by callers (not used in the body)
 * @memory_clock:  target memory clock
 * @strobe_mode:   request strobe-mode dividers from the atom tables
 * @dll_state_on:  whether the memory read-clock DLLs stay powered up
 *
 * Returns 0 on success or the error from the atom divider lookup.
 */
static int ni_populate_mclk_value(struct radeon_device *rdev,
				  u32 engine_clock,
				  u32 memory_clock,
				  NISLANDS_SMC_MCLK_VALUE *mclk,
				  bool strobe_mode,
				  bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	/* start from the register values captured at driver init */
	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		/* when this MC strap bit is set, force post divider to 1
		 * (NOTE(review): bit semantics inferred from use -- confirm
		 * against the MC register spec) */
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	/* program the address/command (AD) side of the MPLL */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	if (pi->mem_gddr5) {
		/* GDDR5 has a separate data (DQ) PLL; mirror the AD setup */
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		/* program memory spread spectrum only if the VBIOS has SS
		 * parameters for this VCO frequency */
		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			u32 clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	/* set DLL speed and power the read-clock DLLs up or down for all
	 * four memory channels */
	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);


	/* byte-swap everything for the SMC */
	mclk->mclk_value = cpu_to_be32(memory_clock);
	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}
2262
2263static void ni_populate_smc_sp(struct radeon_device *rdev,
2264			       struct radeon_ps *radeon_state,
2265			       NISLANDS_SMC_SWSTATE *smc_state)
2266{
2267	struct ni_ps *ps = ni_get_ps(radeon_state);
2268	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2269	int i;
2270
2271	for (i = 0; i < ps->performance_level_count - 1; i++)
2272		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2273
2274	smc_state->levels[ps->performance_level_count - 1].bSP =
2275		cpu_to_be32(pi->psp);
2276}
2277
/*
 * ni_convert_power_level_to_smc - translate one driver performance level
 * (@pl) into the SMC hardware level structure (@level).
 *
 * Fills in PCIE gen, sclk/mclk register settings, memory controller
 * flags (stutter, EDC read/write, strobe, RTT) and the vddc/vddci/mvdd
 * voltage entries.
 *
 * Returns 0 on success or the first error from a helper.
 */
static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
					 struct rv7xx_pl *pl,
					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
        struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	bool dll_state_on;
	u16 std_vddc;
	u32 tmp = RREG32(DC_STUTTER_CNTL);

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;

	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	level->mcFlags =  0;
	/* memory self-refresh stutter: only below the threshold, with UVD
	 * idle and both display stutter enables set */
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled &&
	    (tmp & DC_STUTTER_ENABLE_A) &&
	    (tmp & DC_STUTTER_ENABLE_B))
		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;

	if (pi->mem_gddr5) {
		/* enable EDC read/write above their respective thresholds */
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
			/* pick the DLL-on strap bit based on whether the
			 * mclk frequency ratio reaches the MC_SEQ_MISC7
			 * threshold field */
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else {
			dll_state_on = false;
			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
		}

		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
					     &level->mclk,
					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
					     dll_state_on);
	} else
		/* non-GDDR5: strobe mode and DLLs always on */
		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);

	if (ret)
		return ret;

	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					pl->vddc, &level->vddc);
	if (ret)
		return ret;

	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
	if (ret)
		return ret;

	ni_populate_std_voltage_value(rdev, std_vddc,
				      level->vddc.index, &level->std_vddc);

	if (eg_pi->vddci_control) {
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
						pl->vddci, &level->vddci);
		if (ret)
			return ret;
	}

	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
2358
/*
 * ni_populate_smc_t - fill in the aT (CG_R/CG_L transition timing)
 * field for every performance level of @smc_state.
 *
 * For each adjacent pair of levels, r600_calculate_at() derives the
 * low/high thresholds (t_l/t_h) from the two sclks; these are scaled by
 * the (p)bsp values and packed into the CG_R/CG_L fields.
 *
 * Returns 0 on success, -EINVAL when the state has 9 or more levels.
 */
static int ni_populate_smc_t(struct radeon_device *rdev,
			     struct radeon_ps *radeon_state,
			     NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 a_t;
	u32 t_l, t_h;
	u32 high_bsp;
	int i, ret;

	if (state->performance_level_count >= 9)
		return -EINVAL;

	if (state->performance_level_count < 2) {
		/* single level: nothing to transition to */
		a_t = CG_R(0xffff) | CG_L(0);
		smc_state->levels[0].aT = cpu_to_be32(a_t);
		return 0;
	}

	smc_state->levels[0].aT = cpu_to_be32(0);

	for (i = 0; i <= state->performance_level_count - 2; i++) {
		/* UVD states use a different time base for the thresholds */
		if (eg_pi->uvd_enabled)
			ret = r600_calculate_at(
				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);
		else
			ret = r600_calculate_at(
				1000 * (i + 1),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);

		if (ret) {
			/* fall back to defaults around the level boundary */
			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
		}

		/* merge the new CG_R into whatever CG_L the previous
		 * iteration stored for this level */
		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
		a_t |= CG_R(t_l * pi->bsp / 20000);
		smc_state->levels[i].aT = cpu_to_be32(a_t);

		high_bsp = (i == state->performance_level_count - 2) ?
			pi->pbsp : pi->bsp;

		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
	}

	return 0;
}
2418
/*
 * ni_populate_power_containment_values - fill in the DPM2 power
 * containment (TDP limiting) fields for every level of @smc_state.
 *
 * Level 0 is left at zeros (no containment); higher levels get MaxPS
 * pulse-skip percentages derived from the sclk span between adjacent
 * levels, plus the near-TDP inc/dec constants.  The power boost limit
 * is also written to the SMC's DPM2 parameter block.
 *
 * Returns 0 on success (or when containment is disabled), -EINVAL on an
 * inconsistent state, or an error from the TDP calculation.
 */
static int ni_populate_power_containment_values(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 prev_sclk;
	u32 max_sclk;
	u32 min_sclk;
	int i, ret;
	u32 tdp_limit;
	u32 near_tdp_limit;
	u32 power_boost_limit;
	u8 max_ps_percent;

	if (ni_pi->enable_power_containment == false)
		return 0;

	if (state->performance_level_count == 0)
		return -EINVAL;

	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	ret = ni_calculate_adjusted_tdp_limits(rdev,
					       false, /* ??? */
					       rdev->pm.dpm.tdp_adjustment,
					       &tdp_limit,
					       &near_tdp_limit);
	if (ret)
		return ret;

	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);

	ret = rv770_write_smc_sram_dword(rdev,
					 pi->state_table_start +
					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
					 pi->sram_end);
	if (ret)
		/* if the limit could not be written, disable boost flags */
		power_boost_limit = 0;

	smc_state->levels[0].dpm2.MaxPS = 0;
	smc_state->levels[0].dpm2.NearTDPDec = 0;
	smc_state->levels[0].dpm2.AboveSafeInc = 0;
	smc_state->levels[0].dpm2.BelowSafeInc = 0;
	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;

	for (i = 1; i < state->performance_level_count; i++) {
		prev_sclk = state->performance_levels[i-1].sclk;
		max_sclk  = state->performance_levels[i].sclk;
		/* the highest level may be throttled harder than the rest */
		max_ps_percent = (i != (state->performance_level_count - 1)) ?
			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;

		/* levels must be sorted by ascending sclk */
		if (max_sclk < prev_sclk)
			return -EINVAL;

		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
			min_sclk = max_sclk;
		else if (1 == i)
			min_sclk = prev_sclk;
		else
			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;

		/* never drop below the lowest level's sclk */
		if (min_sclk < state->performance_levels[0].sclk)
			min_sclk = state->performance_levels[0].sclk;

		if (min_sclk == 0)
			return -EINVAL;

		smc_state->levels[i].dpm2.MaxPS =
			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
		smc_state->levels[i].stateFlags |=
			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
			PPSMC_STATEFLAG_POWERBOOST : 0;
	}

	return 0;
}
2504
2505static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2506					 struct radeon_ps *radeon_state,
2507					 NISLANDS_SMC_SWSTATE *smc_state)
2508{
2509	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2510	struct ni_ps *state = ni_get_ps(radeon_state);
2511	u32 sq_power_throttle;
2512	u32 sq_power_throttle2;
2513	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2514	int i;
2515
2516	if (state->performance_level_count == 0)
2517		return -EINVAL;
2518
2519	if (smc_state->levelCount != state->performance_level_count)
2520		return -EINVAL;
2521
2522	if (rdev->pm.dpm.sq_ramping_threshold == 0)
2523		return -EINVAL;
2524
2525	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2526		enable_sq_ramping = false;
2527
2528	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2529		enable_sq_ramping = false;
2530
2531	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2532		enable_sq_ramping = false;
2533
2534	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2535		enable_sq_ramping = false;
2536
2537	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2538		enable_sq_ramping = false;
2539
2540	for (i = 0; i < state->performance_level_count; i++) {
2541		sq_power_throttle  = 0;
2542		sq_power_throttle2 = 0;
2543
2544		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2545		    enable_sq_ramping) {
2546			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2547			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2548			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2549			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2550			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2551		} else {
2552			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2553			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2554		}
2555
2556		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2557		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2558	}
2559
2560	return 0;
2561}
2562
2563static int ni_enable_power_containment(struct radeon_device *rdev,
2564				       struct radeon_ps *radeon_new_state,
2565				       bool enable)
2566{
2567        struct ni_power_info *ni_pi = ni_get_pi(rdev);
2568	PPSMC_Result smc_result;
2569	int ret = 0;
2570
2571	if (ni_pi->enable_power_containment) {
2572		if (enable) {
2573			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2574				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2575				if (smc_result != PPSMC_Result_OK) {
2576					ret = -EINVAL;
2577					ni_pi->pc_enabled = false;
2578				} else {
2579					ni_pi->pc_enabled = true;
2580				}
2581			}
2582		} else {
2583			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2584			if (smc_result != PPSMC_Result_OK)
2585				ret = -EINVAL;
2586			ni_pi->pc_enabled = false;
2587		}
2588	}
2589
2590	return ret;
2591}
2592
/*
 * ni_convert_power_state_to_smc - translate a full driver power state
 * into its SMC software-state representation.
 *
 * Converts each performance level, assigns arbitration/AC-timing
 * indices and display watermarks, writes the watermark threshold soft
 * register, then fills in the sampling-period, power-containment and
 * SQ-ramping fields.  On containment/ramping failure the feature is
 * disabled rather than failing the conversion.
 *
 * Returns 0 on success or a negative error code.
 */
static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	int i, ret;
	/* threshold = 100% of the highest level's sclk; the explicit
	 * "* 100 / 100" keeps the percentage factor visible/tunable */
	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	smc_state->levelCount = 0;

	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
		return -EINVAL;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
						    &smc_state->levels[i]);
		smc_state->levels[i].arbRefreshState =
			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);

		if (ret)
			return ret;

		if (ni_pi->enable_power_containment)
			smc_state->levels[i].displayWatermark =
				(state->performance_levels[i].sclk < threshold) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
		else
			smc_state->levels[i].displayWatermark = (i < 2) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;

		if (eg_pi->dynamic_ac_timing)
			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
		else
			smc_state->levels[i].ACIndex = 0;

		smc_state->levelCount++;
	}

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
				      cpu_to_be32(threshold / 512));

	ni_populate_smc_sp(rdev, radeon_state, smc_state);

	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_power_containment = false;

	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_sq_ramping = false;

	return ni_populate_smc_t(rdev, radeon_state, smc_state);
}
2651
2652static int ni_upload_sw_state(struct radeon_device *rdev,
2653			      struct radeon_ps *radeon_new_state)
2654{
2655	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2656	u16 address = pi->state_table_start +
2657		offsetof(NISLANDS_SMC_STATETABLE, driverState);
2658	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2659		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2660	int ret;
2661	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2662
2663	if (smc_state == NULL)
2664		return -ENOMEM;
2665
2666	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2667	if (ret)
2668		goto done;
2669
2670	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2671
2672done:
2673	kfree(smc_state);
2674
2675	return ret;
2676}
2677
2678static int ni_set_mc_special_registers(struct radeon_device *rdev,
2679				       struct ni_mc_reg_table *table)
2680{
2681	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2682	u8 i, j, k;
2683	u32 temp_reg;
2684
2685	for (i = 0, j = table->last; i < table->last; i++) {
2686		switch (table->mc_reg_address[i].s1) {
2687		case MC_SEQ_MISC1 >> 2:
2688			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2689				return -EINVAL;
2690			temp_reg = RREG32(MC_PMG_CMD_EMRS);
2691			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2692			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2693			for (k = 0; k < table->num_entries; k++)
2694				table->mc_reg_table_entry[k].mc_data[j] =
2695					((temp_reg & 0xffff0000)) |
2696					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2697			j++;
2698			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2699				return -EINVAL;
2700
2701			temp_reg = RREG32(MC_PMG_CMD_MRS);
2702			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2703			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2704			for(k = 0; k < table->num_entries; k++) {
2705				table->mc_reg_table_entry[k].mc_data[j] =
2706					(temp_reg & 0xffff0000) |
2707					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2708				if (!pi->mem_gddr5)
2709					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2710			}
2711			j++;
2712			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2713				return -EINVAL;
2714			break;
2715		case MC_SEQ_RESERVE_M >> 2:
2716			temp_reg = RREG32(MC_PMG_CMD_MRS1);
2717			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2718			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2719			for (k = 0; k < table->num_entries; k++)
2720				table->mc_reg_table_entry[k].mc_data[j] =
2721					(temp_reg & 0xffff0000) |
2722					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2723			j++;
2724			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2725				return -EINVAL;
2726			break;
2727		default:
2728			break;
2729		}
2730	}
2731
2732	table->last = j;
2733
2734	return 0;
2735}
2736
2737static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2738{
2739	bool result = true;
2740
2741	switch (in_reg) {
2742        case  MC_SEQ_RAS_TIMING >> 2:
2743		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2744		break;
2745        case MC_SEQ_CAS_TIMING >> 2:
2746		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2747		break;
2748        case MC_SEQ_MISC_TIMING >> 2:
2749		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2750		break;
2751        case MC_SEQ_MISC_TIMING2 >> 2:
2752		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2753		break;
2754        case MC_SEQ_RD_CTL_D0 >> 2:
2755		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2756		break;
2757        case MC_SEQ_RD_CTL_D1 >> 2:
2758		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2759		break;
2760        case MC_SEQ_WR_CTL_D0 >> 2:
2761		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2762		break;
2763        case MC_SEQ_WR_CTL_D1 >> 2:
2764		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2765		break;
2766        case MC_PMG_CMD_EMRS >> 2:
2767		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2768		break;
2769        case MC_PMG_CMD_MRS >> 2:
2770		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2771		break;
2772        case MC_PMG_CMD_MRS1 >> 2:
2773		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2774		break;
2775        case MC_SEQ_PMG_TIMING >> 2:
2776		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2777		break;
2778        case MC_PMG_CMD_MRS2 >> 2:
2779		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2780		break;
2781        default:
2782		result = false;
2783		break;
2784	}
2785
2786	return result;
2787}
2788
2789static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2790{
2791	u8 i, j;
2792
2793	for (i = 0; i < table->last; i++) {
2794		for (j = 1; j < table->num_entries; j++) {
2795			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2796				table->valid_flag |= 1 << i;
2797				break;
2798			}
2799		}
2800	}
2801}
2802
2803static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2804{
2805	u32 i;
2806	u16 address;
2807
2808	for (i = 0; i < table->last; i++)
2809		table->mc_reg_address[i].s0 =
2810			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2811			address : table->mc_reg_address[i].s1;
2812}
2813
2814static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2815				      struct ni_mc_reg_table *ni_table)
2816{
2817	u8 i, j;
2818
2819	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2820		return -EINVAL;
2821	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2822		return -EINVAL;
2823
2824	for (i = 0; i < table->last; i++)
2825		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2826	ni_table->last = table->last;
2827
2828	for (i = 0; i < table->num_entries; i++) {
2829		ni_table->mc_reg_table_entry[i].mclk_max =
2830			table->mc_reg_table_entry[i].mclk_max;
2831		for (j = 0; j < table->last; j++)
2832			ni_table->mc_reg_table_entry[i].mc_data[j] =
2833				table->mc_reg_table_entry[i].mc_data[j];
2834	}
2835	ni_table->num_entries = table->num_entries;
2836
2837	return 0;
2838}
2839
2840static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2841{
2842	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2843	int ret;
2844	struct atom_mc_reg_table *table;
2845	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2846	u8 module_index = rv770_get_memory_module_index(rdev);
2847
2848        table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2849        if (!table)
2850                return -ENOMEM;
2851
2852	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2853	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2854	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2855	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2856	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2857	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2858	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2859	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2860	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2861	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2862	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2863	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2864	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2865
2866	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2867
2868        if (ret)
2869                goto init_mc_done;
2870
2871	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2872
2873        if (ret)
2874                goto init_mc_done;
2875
2876	ni_set_s0_mc_reg_index(ni_table);
2877
2878	ret = ni_set_mc_special_registers(rdev, ni_table);
2879
2880        if (ret)
2881                goto init_mc_done;
2882
2883	ni_set_valid_flag(ni_table);
2884
2885init_mc_done:
2886        kfree(table);
2887
2888	return ret;
2889}
2890
2891static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2892					 SMC_NIslands_MCRegisters *mc_reg_table)
2893{
2894	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2895	u32 i, j;
2896
2897	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2898		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2899			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2900				break;
2901			mc_reg_table->address[i].s0 =
2902				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2903			mc_reg_table->address[i].s1 =
2904				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2905			i++;
2906		}
2907	}
2908	mc_reg_table->last = (u8)i;
2909}
2910
2911
2912static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2913				    SMC_NIslands_MCRegisterSet *data,
2914				    u32 num_entries, u32 valid_flag)
2915{
2916	u32 i, j;
2917
2918	for (i = 0, j = 0; j < num_entries; j++) {
2919		if (valid_flag & (1 << j)) {
2920			data->value[i] = cpu_to_be32(entry->mc_data[j]);
2921			i++;
2922		}
2923	}
2924}
2925
2926static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2927						 struct rv7xx_pl *pl,
2928						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2929{
2930	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2931	u32 i = 0;
2932
2933	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2934		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2935			break;
2936	}
2937
2938	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2939		--i;
2940
2941	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2942				mc_reg_table_data,
2943				ni_pi->mc_reg_table.last,
2944				ni_pi->mc_reg_table.valid_flag);
2945}
2946
2947static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2948					   struct radeon_ps *radeon_state,
2949					   SMC_NIslands_MCRegisters *mc_reg_table)
2950{
2951	struct ni_ps *state = ni_get_ps(radeon_state);
2952	int i;
2953
2954	for (i = 0; i < state->performance_level_count; i++) {
2955		ni_convert_mc_reg_table_entry_to_smc(rdev,
2956						     &state->performance_levels[i],
2957						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2958	}
2959}
2960
2961static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2962				    struct radeon_ps *radeon_boot_state)
2963{
2964	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2965	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2966        struct ni_power_info *ni_pi = ni_get_pi(rdev);
2967	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
2968	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
2969
2970	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
2971
2972	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
2973
2974	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
2975
2976	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
2977					     &mc_reg_table->data[0]);
2978
2979	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
2980				&mc_reg_table->data[1],
2981				ni_pi->mc_reg_table.last,
2982				ni_pi->mc_reg_table.valid_flag);
2983
2984	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
2985
2986	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
2987				       (u8 *)mc_reg_table,
2988				       sizeof(SMC_NIslands_MCRegisters),
2989				       pi->sram_end);
2990}
2991
2992static int ni_upload_mc_reg_table(struct radeon_device *rdev,
2993				  struct radeon_ps *radeon_new_state)
2994{
2995	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2996	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2997        struct ni_power_info *ni_pi = ni_get_pi(rdev);
2998	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
2999	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3000	u16 address;
3001
3002	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3003
3004	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3005
3006	address = eg_pi->mc_reg_table_start +
3007		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3008
3009	return rv770_copy_bytes_to_smc(rdev, address,
3010				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3011				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3012				       pi->sram_end);
3013}
3014
3015static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3016						   PP_NIslands_CACTABLES *cac_tables)
3017{
3018	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3019	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3020	u32 leakage = 0;
3021	unsigned int i, j, table_size;
3022	s32 t;
3023	u32 smc_leakage, max_leakage = 0;
3024	u32 scaling_factor;
3025
3026	table_size = eg_pi->vddc_voltage_table.count;
3027
3028	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3029		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3030
3031	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3032
3033	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3034		for (j = 0; j < table_size; j++) {
3035			t = (1000 * ((i + 1) * 8));
3036
3037			if (t < ni_pi->cac_data.leakage_minimum_temperature)
3038				t = ni_pi->cac_data.leakage_minimum_temperature;
3039
3040			ni_calculate_leakage_for_v_and_t(rdev,
3041							 &ni_pi->cac_data.leakage_coefficients,
3042							 eg_pi->vddc_voltage_table.entries[j].value,
3043							 t,
3044							 ni_pi->cac_data.i_leakage,
3045							 &leakage);
3046
3047			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3048			if (smc_leakage > max_leakage)
3049				max_leakage = smc_leakage;
3050
3051			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3052		}
3053	}
3054
3055	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3056		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3057			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3058	}
3059	return 0;
3060}
3061
3062static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3063					    PP_NIslands_CACTABLES *cac_tables)
3064{
3065	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3066	struct radeon_cac_leakage_table *leakage_table =
3067		&rdev->pm.dpm.dyn_state.cac_leakage_table;
3068	u32 i, j, table_size;
3069	u32 smc_leakage, max_leakage = 0;
3070	u32 scaling_factor;
3071
3072	if (!leakage_table)
3073		return -EINVAL;
3074
3075	table_size = leakage_table->count;
3076
3077	if (eg_pi->vddc_voltage_table.count != table_size)
3078		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3079			eg_pi->vddc_voltage_table.count : leakage_table->count;
3080
3081	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3082		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3083
3084	if (table_size == 0)
3085		return -EINVAL;
3086
3087	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3088
3089	for (j = 0; j < table_size; j++) {
3090		smc_leakage = leakage_table->entries[j].leakage;
3091
3092		if (smc_leakage > max_leakage)
3093			max_leakage = smc_leakage;
3094
3095		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3096			cac_tables->cac_lkge_lut[i][j] =
3097				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3098	}
3099
3100	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3101		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3102			cac_tables->cac_lkge_lut[i][j] =
3103				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3104	}
3105	return 0;
3106}
3107
/* Build and upload the CAC (capacitance/leakage) tables to the SMC.
 * No-op when CAC is disabled. On any failure, CAC and power containment
 * are disabled and 0 is still returned — errors are deliberately absorbed
 * so DPM init can continue without CAC.
 */
static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	PP_NIslands_CACTABLES *cac_tables = NULL;
	int i, ret;
        u32 reg;

	if (ni_pi->enable_cac == false)
		return 0;

	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
	if (!cac_tables)
		return -ENOMEM;

	/* Program the TID count/unit fields from the per-asic weight table. */
	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
		TID_UNIT(ni_pi->cac_weights->tid_unit));
	WREG32(CG_CAC_CTRL, reg);

	/* Cache the DC CAC levels and seed the PCIE (BIF) lookup table. */
	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];

	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];

	/* Populate the driver-side CAC data used to fill the SMC struct. */
	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
	ni_pi->cac_data.pwr_const = 0;
	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
	ni_pi->cac_data.bif_cac_value = 0;
	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
	ni_pi->cac_data.allow_ovrflw = 0;
	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
	ni_pi->cac_data.num_win_tdp = 0;
	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;

	/* Leakage LUT: computed from coefficients, or taken from the vbios
	 * table, depending on the asic configuration.
	 */
	if (ni_pi->driver_calculate_cac_leakage)
		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
	else
		ret = ni_init_simplified_leakage_table(rdev, cac_tables);

	if (ret)
		goto done_free;

	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;

	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);

done_free:
	/* On failure: degrade gracefully instead of failing DPM init. */
	if (ret) {
		ni_pi->enable_cac = false;
		ni_pi->enable_power_containment = false;
	}

	kfree(cac_tables);

	return 0;
}
3176
/* Program the per-block CAC weight registers from the per-asic weight
 * table (cac_weights). Each step is a read-modify-write that clears the
 * relevant fields and inserts the table values. No-op unless CAC is
 * enabled and the asic requires explicit CAC configuration.
 *
 * Returns 0 on success, -EINVAL if no weight table was selected.
 */
static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;

	if (!ni_pi->enable_cac ||
	    !ni_pi->cac_configuration_required)
		return 0;

	if (ni_pi->cac_weights == NULL)
		return -EINVAL;

	/* CAC region 1: TCP/TA and TCC weights. */
	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
						      WEIGHT_TCP_SIG1_MASK |
						      WEIGHT_TA_SIG_MASK);
	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
						      WEIGHT_TCC_EN1_MASK |
						      WEIGHT_TCC_EN2_MASK);
	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);

	/* CAC region 2: CB/DB/SXM/SXS weights. */
	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
						      WEIGHT_CB_EN1_MASK |
						      WEIGHT_CB_EN2_MASK |
						      WEIGHT_CB_EN3_MASK);
	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
						      WEIGHT_DB_SIG1_MASK |
						      WEIGHT_DB_SIG2_MASK |
						      WEIGHT_DB_SIG3_MASK);
	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
						      WEIGHT_SXM_SIG1_MASK |
						      WEIGHT_SXM_SIG2_MASK |
						      WEIGHT_SXS_SIG0_MASK |
						      WEIGHT_SXS_SIG1_MASK);
	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);

	/* CAC region 3: crossbar and SPI weights. */
	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
						      WEIGHT_XBR_1_MASK |
						      WEIGHT_XBR_2_MASK |
						      WEIGHT_SPI_SIG0_MASK);
	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
						      WEIGHT_SPI_SIG2_MASK |
						      WEIGHT_SPI_SIG3_MASK |
						      WEIGHT_SPI_SIG4_MASK |
						      WEIGHT_SPI_SIG5_MASK);
	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);

	/* CAC region 4: LDS/SC/BIF/CP/PA/VGT/DC/UVD/spare weights. */
	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
						      WEIGHT_LDS_SIG1_MASK |
						      WEIGHT_SC_MASK);
	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
						      WEIGHT_CP_MASK |
						      WEIGHT_PA_SIG0_MASK |
						      WEIGHT_PA_SIG1_MASK |
						      WEIGHT_VGT_SIG0_MASK);
	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
						      WEIGHT_VGT_SIG2_MASK |
						      WEIGHT_DC_SIG0_MASK |
						      WEIGHT_DC_SIG1_MASK |
						      WEIGHT_DC_SIG2_MASK);
	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
						      WEIGHT_UVD_SIG0_MASK |
						      WEIGHT_UVD_SIG1_MASK |
						      WEIGHT_SPARE0_MASK |
						      WEIGHT_SPARE1_MASK);
	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);

	/* CAC region 5: SQ weights. */
	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
						      WEIGHT_SQ_VSP0_MASK);
	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);

	/* Spare-signal overrides for region 4. */
	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
							OVR_VAL_SPARE_0_MASK |
							OVR_MODE_SPARE_1_MASK |
							OVR_VAL_SPARE_1_MASK);
	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);

	/* SQ CAC thresholds. */
	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
					   VSP0_MASK |
					   GPR_MASK);
	reg |= (VSP(ni_pi->cac_weights->vsp) |
		VSP0(ni_pi->cac_weights->vsp0) |
		GPR(ni_pi->cac_weights->gpr));
	WREG32(SQ_CAC_THRESHOLD, reg);

	/* MC CAC config: broadcast write to all MCD channels at the given
	 * index, then program the read/write weights.
	 * NOTE(review): 0x09D4 appears to be a fixed MC index — not
	 * documented here; confirm against the register spec before change.
	 */
	reg = (MCDW_WR_ENABLE |
	       MCDX_WR_ENABLE |
	       MCDY_WR_ENABLE |
	       MCDZ_WR_ENABLE |
	       INDEX(0x09D4));
	WREG32(MC_CG_CONFIG, reg);

	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
	       ALLOW_OVERFLOW);
	WREG32(MC_CG_DATAPORT, reg);

	return 0;
}
3345
3346static int ni_enable_smc_cac(struct radeon_device *rdev,
3347			     struct radeon_ps *radeon_new_state,
3348			     bool enable)
3349{
3350	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3351	int ret = 0;
3352	PPSMC_Result smc_result;
3353
3354	if (ni_pi->enable_cac) {
3355		if (enable) {
3356			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3357				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3358
3359				if (ni_pi->support_cac_long_term_average) {
3360					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3361					if (PPSMC_Result_OK != smc_result)
3362						ni_pi->support_cac_long_term_average = false;
3363				}
3364
3365				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3366				if (PPSMC_Result_OK != smc_result)
3367					ret = -EINVAL;
3368
3369				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3370			}
3371		} else if (ni_pi->cac_enabled) {
3372			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3373
3374			ni_pi->cac_enabled = false;
3375
3376			if (ni_pi->support_cac_long_term_average) {
3377				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3378				if (PPSMC_Result_OK != smc_result)
3379					ni_pi->support_cac_long_term_average = false;
3380			}
3381		}
3382	}
3383
3384	return ret;
3385}
3386
3387static int ni_pcie_performance_request(struct radeon_device *rdev,
3388				       u8 perf_req, bool advertise)
3389{
3390	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3391
3392#if defined(CONFIG_ACPI)
3393	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3394            (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3395		if (eg_pi->pcie_performance_request_registered == false)
3396			radeon_acpi_pcie_notify_device_ready(rdev);
3397		eg_pi->pcie_performance_request_registered = true;
3398		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3399	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3400                   eg_pi->pcie_performance_request_registered) {
3401		eg_pi->pcie_performance_request_registered = false;
3402		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3403	}
3404#endif
3405	return 0;
3406}
3407
3408static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3409{
3410	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3411	u32 tmp;
3412
3413        tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3414
3415        if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3416            (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3417                pi->pcie_gen2 = true;
3418        else
3419		pi->pcie_gen2 = false;
3420
3421	if (!pi->pcie_gen2)
3422		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3423
3424	return 0;
3425}
3426
3427static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3428					    bool enable)
3429{
3430        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3431        u32 tmp, bif;
3432
3433	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3434
3435	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3436	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3437		if (enable) {
3438			if (!pi->boot_in_gen2) {
3439				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3440				bif |= CG_CLIENT_REQ(0xd);
3441				WREG32(CG_BIF_REQ_AND_RSP, bif);
3442			}
3443			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3444			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3445			tmp |= LC_GEN2_EN_STRAP;
3446
3447			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3448			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3449			udelay(10);
3450			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3451			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3452		} else {
3453			if (!pi->boot_in_gen2) {
3454				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3455				bif |= CG_CLIENT_REQ(0xd);
3456				WREG32(CG_BIF_REQ_AND_RSP, bif);
3457
3458				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3459				tmp &= ~LC_GEN2_EN_STRAP;
3460			}
3461			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3462		}
3463	}
3464}
3465
3466static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3467					bool enable)
3468{
3469	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3470
3471	if (enable)
3472		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3473	else
3474                WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3475}
3476
3477void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3478					   struct radeon_ps *new_ps,
3479					   struct radeon_ps *old_ps)
3480{
3481	struct ni_ps *new_state = ni_get_ps(new_ps);
3482	struct ni_ps *current_state = ni_get_ps(old_ps);
3483
3484	if ((new_ps->vclk == old_ps->vclk) &&
3485	    (new_ps->dclk == old_ps->dclk))
3486		return;
3487
3488	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3489	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3490		return;
3491
3492	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3493}
3494
3495void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3496					  struct radeon_ps *new_ps,
3497					  struct radeon_ps *old_ps)
3498{
3499	struct ni_ps *new_state = ni_get_ps(new_ps);
3500	struct ni_ps *current_state = ni_get_ps(old_ps);
3501
3502	if ((new_ps->vclk == old_ps->vclk) &&
3503	    (new_ps->dclk == old_ps->dclk))
3504		return;
3505
3506	if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3507	    current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3508		return;
3509
3510	radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3511}
3512
/* One-time asic setup for DPM: cache clock/arbiter registers, detect the
 * memory type and PCIE gen2 status, optionally advertise gen2 to the
 * platform, and enable ACPI power management.
 */
void ni_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	ni_read_clock_registers(rdev);
	btc_read_arb_registers(rdev);
	rv770_get_memory_type(rdev);
	if (eg_pi->pcie_performance_request)
		ni_advertise_gen2_capability(rdev);
	rv770_get_pcie_gen2_status(rdev);
	rv770_enable_acpi_pm(rdev);
}
3525
3526void ni_update_current_ps(struct radeon_device *rdev,
3527			  struct radeon_ps *rps)
3528{
3529	struct ni_ps *new_ps = ni_get_ps(rps);
3530	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3531        struct ni_power_info *ni_pi = ni_get_pi(rdev);
3532
3533	eg_pi->current_rps = *rps;
3534	ni_pi->current_ps = *new_ps;
3535	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3536}
3537
3538void ni_update_requested_ps(struct radeon_device *rdev,
3539			    struct radeon_ps *rps)
3540{
3541	struct ni_ps *new_ps = ni_get_ps(rps);
3542	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3543        struct ni_power_info *ni_pi = ni_get_pi(rdev);
3544
3545	eg_pi->requested_rps = *rps;
3546	ni_pi->requested_ps = *new_ps;
3547	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3548}
3549
/* Bring up dynamic power management: program clock gating defaults,
 * build voltage/MC/CAC tables, upload SMC firmware and tables, start the
 * SMC and DPM, then enable clock gating and thermal interrupts.
 * The sequence is order-sensitive; steps are not reordered.
 * Returns 0 on success or a negative error code from the failing step.
 */
int ni_dpm_enable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
	int ret;

	if (pi->gfx_clock_gating)
		ni_cg_clockgating_default(rdev);
	/* Bail out if DPM is already running. */
        if (btc_dpm_enabled(rdev))
                return -EINVAL;
	if (pi->mg_clock_gating)
		ni_mg_clockgating_default(rdev);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_default(rdev);
	if (pi->voltage_control) {
		rv770_enable_voltage_control(rdev, true);
		ret = cypress_construct_voltage_tables(rdev);
		if (ret) {
			DRM_ERROR("cypress_construct_voltage_tables failed\n");
			return ret;
		}
	}
	/* MC register table failure only disables dynamic AC timing. */
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_initialize_mc_reg_table(rdev);
		if (ret)
			eg_pi->dynamic_ac_timing = false;
	}
	if (pi->dynamic_ss)
		cypress_enable_spread_spectrum(rdev, true);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, true);
	rv770_setup_bsp(rdev);
	rv770_program_git(rdev);
	rv770_program_tp(rdev);
	rv770_program_tpp(rdev);
	rv770_program_sstp(rdev);
	cypress_enable_display_gap(rdev);
	rv770_program_vc(rdev);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, true);
	/* Upload firmware and all SMC-side tables before starting the SMC. */
	ret = rv770_upload_firmware(rdev);
	if (ret) {
		DRM_ERROR("rv770_upload_firmware failed\n");
		return ret;
	}
	ret = ni_process_firmware_header(rdev);
	if (ret) {
		DRM_ERROR("ni_process_firmware_header failed\n");
		return ret;
	}
	ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
	if (ret) {
		DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
		return ret;
	}
	ret = ni_init_smc_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_table failed\n");
		return ret;
	}
	ret = ni_init_smc_spll_table(rdev);
	if (ret) {
		DRM_ERROR("ni_init_smc_spll_table failed\n");
		return ret;
	}
	ret = ni_init_arb_table_index(rdev);
	if (ret) {
		DRM_ERROR("ni_init_arb_table_index failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_populate_mc_reg_table(rdev, boot_ps);
		if (ret) {
			DRM_ERROR("ni_populate_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_initialize_smc_cac_tables(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
		return ret;
	}
	ret = ni_initialize_hardware_cac_manager(rdev);
	if (ret) {
		DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
		return ret;
	}
	ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
	if (ret) {
		DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
		return ret;
	}
	ni_program_response_times(rdev);
	r7xx_start_smc(rdev);
	ret = cypress_notify_smc_display_change(rdev, false);
	if (ret) {
		DRM_ERROR("cypress_notify_smc_display_change failed\n");
		return ret;
	}
	cypress_enable_sclk_control(rdev, true);
	if (eg_pi->memory_transition)
		cypress_enable_mclk_control(rdev, true);
	cypress_start_dpm(rdev);
	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, true);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, true);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, true);

	/* Wire up the thermal interrupt if an internal sensor is present. */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		PPSMC_Result result;

		ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
		if (ret)
			return ret;
		rdev->irq.dpm_thermal = true;
		radeon_irq_set(rdev);
		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);

		if (result != PPSMC_Result_OK)
			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
	}

	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);

	ni_update_current_ps(rdev, boot_ps);

	return 0;
}
3682
/* Tear down dynamic power management: disable CAC/power containment and
 * the throttle sources, mask the thermal interrupt, turn off clock
 * gating, stop DPM and the SMC, and fall back to the boot arbiter set.
 * No-op if DPM is not currently enabled.
 */
void ni_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!btc_dpm_enabled(rdev))
		return;
	rv770_clear_vc(rdev);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);
	ni_enable_power_containment(rdev, boot_ps, false);
	ni_enable_smc_cac(rdev, boot_ps, false);
	cypress_enable_spread_spectrum(rdev, false);
	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, false);

	/* Mask the thermal interrupt before stopping the SMC. */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, false);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, false);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, false);
	ni_stop_dpm(rdev);
	btc_reset_to_default(rdev);
	ni_stop_smc(rdev);
	ni_force_switch_to_arb_f0(rdev);

	ni_update_current_ps(rdev, boot_ps);
}
3720
3721static int ni_power_control_set_level(struct radeon_device *rdev)
3722{
3723	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3724	int ret;
3725
3726	ret = ni_restrict_performance_levels_before_switch(rdev);
3727	if (ret)
3728		return ret;
3729	ret = rv770_halt_smc(rdev);
3730	if (ret)
3731		return ret;
3732	ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3733	if (ret)
3734		return ret;
3735	ret = rv770_resume_smc(rdev);
3736	if (ret)
3737		return ret;
3738	ret = rv770_set_sw_state(rdev);
3739	if (ret)
3740		return ret;
3741
3742	return 0;
3743}
3744
3745int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3746{
3747	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3748	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3749	struct radeon_ps *new_ps = &requested_ps;
3750
3751	ni_update_requested_ps(rdev, new_ps);
3752
3753	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3754
3755	return 0;
3756}
3757
/**
 * ni_dpm_set_power_state - switch the hardware to the requested power state
 * @rdev: radeon device
 *
 * Performs the full, order-sensitive state-switch sequence: disable
 * CAC/power containment for the old configuration, halt the SMC, upload
 * the new state (and MC registers / memory timings where applicable),
 * resume the SMC, then re-enable CAC/power containment for the new state
 * and refresh the TDP limits.  Returns 0 on success, negative error code
 * on the first failing step (later steps are skipped).
 */
int ni_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;
	struct radeon_ps *old_ps = &eg_pi->current_rps;
	int ret;

	ret = ni_restrict_performance_levels_before_switch(rdev);
	if (ret) {
		DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
		return ret;
	}
	/* raise UVD clocks first if the new state needs faster eng clock */
	ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
	/* containment and CAC must be off while the state tables change */
	ret = ni_enable_power_containment(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}
	ret = ni_enable_smc_cac(rdev, new_ps, false);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = rv770_halt_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_halt_smc failed\n");
		return ret;
	}
	if (eg_pi->smu_uvd_hs)
		btc_notify_uvd_to_smc(rdev, new_ps);
	ret = ni_upload_sw_state(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_upload_sw_state failed\n");
		return ret;
	}
	if (eg_pi->dynamic_ac_timing) {
		ret = ni_upload_mc_reg_table(rdev, new_ps);
		if (ret) {
			DRM_ERROR("ni_upload_mc_reg_table failed\n");
			return ret;
		}
	}
	ret = ni_program_memory_timing_parameters(rdev, new_ps);
	if (ret) {
		DRM_ERROR("ni_program_memory_timing_parameters failed\n");
		return ret;
	}
	ret = rv770_resume_smc(rdev);
	if (ret) {
		DRM_ERROR("rv770_resume_smc failed\n");
		return ret;
	}
	ret = rv770_set_sw_state(rdev);
	if (ret) {
		DRM_ERROR("rv770_set_sw_state failed\n");
		return ret;
	}
	/* drop UVD clocks last if the new state runs them slower */
	ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
	ret = ni_enable_smc_cac(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_smc_cac failed\n");
		return ret;
	}
	ret = ni_enable_power_containment(rdev, new_ps, true);
	if (ret) {
		DRM_ERROR("ni_enable_power_containment failed\n");
		return ret;
	}

	/* update tdp */
	ret = ni_power_control_set_level(rdev);
	if (ret) {
		DRM_ERROR("ni_power_control_set_level failed\n");
		return ret;
	}

	ret = ni_unrestrict_performance_levels_after_switch(rdev);
	if (ret) {
		DRM_ERROR("ni_unrestrict_performance_levels_after_switch failed\n");
		return ret;
	}

	return 0;
}
3842
3843void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3844{
3845	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3846	struct radeon_ps *new_ps = &eg_pi->requested_rps;
3847
3848	ni_update_current_ps(rdev, new_ps);
3849}
3850
/**
 * ni_dpm_reset_asic - force the asic back to its boot power state
 * @rdev: radeon device
 *
 * Used around asic reset: clamp the performance levels, then tell the
 * SMC to switch to the boot state.
 */
void ni_dpm_reset_asic(struct radeon_device *rdev)
{
	ni_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}
3856
/* Overlay views of the AtomBIOS PowerPlay data blob; the table revision
 * (frev/crev) decides which member is actually valid. */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

/* Per-performance-level clock entry; NI uses the evergreen layout. */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

/* Power state entry; v1 is the layout indexed in ni_parse_power_table(). */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
3877
/**
 * ni_parse_pplib_non_clock_info - fill a radeon_ps from a non-clock entry
 * @rdev: radeon device
 * @rps: state being populated
 * @non_clock_info: AtomBIOS non-clock info entry for this state
 * @table_rev: entry revision/size byte (caller passes ucNonClockSize;
 *             NOTE(review): compared against ATOM_PPLIB_NONCLOCKINFO_VER1,
 *             which appears to distinguish entry layouts — confirm)
 *
 * Copies classification/caps, picks UVD clocks (explicit in newer entries,
 * defaults for UVD states in older ones, zero otherwise), and records the
 * boot and UVD state pointers in rdev->pm.dpm.
 */
static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
					  struct radeon_ps *rps,
					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
					  u8 table_rev)
{
	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
	rps->class = le16_to_cpu(non_clock_info->usClassification);
	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);

	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
		/* newer entries carry explicit UVD clocks */
		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
	} else {
		rps->vclk = 0;
		rps->dclk = 0;
	}

	/* remember the special states for later lookup */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
		rdev->pm.dpm.boot_ps = rps;
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
		rdev->pm.dpm.uvd_ps = rps;
}
3903
/**
 * ni_parse_pplib_clock_info - fill one performance level from a clock entry
 * @rdev: radeon device
 * @rps: state being populated
 * @index: performance level index within @rps
 * @clock_info: AtomBIOS clock info entry (evergreen layout on NI)
 *
 * Decodes sclk/mclk/voltages for level @index, patches leakage voltage
 * markers and the boot state, and records ACPI/ULV levels and min/max vddc
 * bookkeeping in the power-info structs.
 */
static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
				      struct radeon_ps *rps, int index,
				      union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(rps);
	u16 vddc;
	struct rv7xx_pl *pl = &ps->performance_levels[index];

	/* levels are parsed in order, so index + 1 is the running count */
	ps->performance_level_count = index + 1;

	/* clocks are split into a 16-bit low and 8-bit high part */
	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);

	/* patch up vddc if necessary */
	if (pl->vddc == 0xff01) {
		/* 0xff01 is a marker meaning "use the board's max vddc" */
		if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
			pl->vddc = vddc;
	}

	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		eg_pi->acpi_vddci = pl->vddci;
		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		eg_pi->ulv.supported = true;
		eg_pi->ulv.pl = pl;
	}

	/* track the vddc range seen across all parsed levels */
	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		/* NOTE: this vddc shadows the outer one above, intentionally
		 * scoped to the boot-state patch-up */
		u16 vddc, vddci, mvdd;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* the performance-UI state defines the max AC clocks/voltages */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
3969
3970static int ni_parse_power_table(struct radeon_device *rdev)
3971{
3972	struct radeon_mode_info *mode_info = &rdev->mode_info;
3973	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
3974	union pplib_power_state *power_state;
3975	int i, j;
3976	union pplib_clock_info *clock_info;
3977	union power_info *power_info;
3978	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
3979        u16 data_offset;
3980	u8 frev, crev;
3981	struct ni_ps *ps;
3982
3983	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
3984				   &frev, &crev, &data_offset))
3985		return -EINVAL;
3986	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
3987
3988	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
3989				  power_info->pplib.ucNumStates, GFP_KERNEL);
3990	if (!rdev->pm.dpm.ps)
3991		return -ENOMEM;
3992	rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
3993	rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
3994	rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
3995
3996	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
3997		power_state = (union pplib_power_state *)
3998			(mode_info->atom_context->bios + data_offset +
3999			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4000			 i * power_info->pplib.ucStateEntrySize);
4001		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4002			(mode_info->atom_context->bios + data_offset +
4003			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4004			 (power_state->v1.ucNonClockStateIndex *
4005			  power_info->pplib.ucNonClockSize));
4006		if (power_info->pplib.ucStateEntrySize - 1) {
4007			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4008			if (ps == NULL) {
4009				kfree(rdev->pm.dpm.ps);
4010				return -ENOMEM;
4011			}
4012			rdev->pm.dpm.ps[i].ps_priv = ps;
4013			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4014							 non_clock_info,
4015							 power_info->pplib.ucNonClockSize);
4016			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4017				clock_info = (union pplib_clock_info *)
4018					(mode_info->atom_context->bios + data_offset +
4019					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4020					 (power_state->v1.ucClockStateIndices[j] *
4021					  power_info->pplib.ucClockInfoSize));
4022				ni_parse_pplib_clock_info(rdev,
4023							  &rdev->pm.dpm.ps[i], j,
4024							  clock_info);
4025			}
4026		}
4027	}
4028	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4029	return 0;
4030}
4031
/**
 * ni_dpm_init - allocate and initialize the NI dpm state
 * @rdev: radeon device
 *
 * Allocates the layered power-info struct (ni -> evergreen -> rv7xx),
 * parses the BIOS power tables, builds the display-clock voltage table,
 * and sets the per-asic dpm policy defaults and CAC weights.
 * Returns 0 on success, negative error code on failure.
 *
 * NOTE(review): on parse failures below, rdev->pm.dpm.priv and any
 * partially parsed tables are not freed here — teardown relies on
 * ni_dpm_fini(); verify the caller invokes it on error.
 */
int ni_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct evergreen_power_info *eg_pi;
	struct ni_power_info *ni_pi;
	int index = GetIndexIntoMasterTable(DATA, ASIC_InternalSS_Info);
	u16 data_offset, size;
	u8 frev, crev;
	struct atom_clock_dividers dividers;
	int ret;

	/* ni_power_info embeds the evergreen and rv7xx infos, so a single
	 * allocation backs all three layers */
	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
	if (ni_pi == NULL)
		return -ENOMEM;
	rdev->pm.dpm.priv = ni_pi;
	eg_pi = &ni_pi->eg;
	pi = &eg_pi->rv7xx;

	rv770_get_max_vddc(rdev);

	/* reset fields that table parsing below fills in */
	eg_pi->ulv.supported = false;
	pi->acpi_vddc = 0;
	eg_pi->acpi_vddci = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	ret = ni_parse_power_table(rdev);
	if (ret)
		return ret;
	ret = r600_parse_extended_power_table(rdev);
	if (ret)
		return ret;

	/* fixed 4-entry vddc-vs-dispclk table (clk in 10 kHz, v in mV) */
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
		kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
	if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
		r600_free_extended_power_table(rdev);
		return -ENOMEM;
	}
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
	rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;

	ni_patch_dependency_tables_based_on_leakage(rdev);

	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	/* NOTE(review): this condition looks inverted — dividers is only
	 * valid when ret == 0, yet it is read on the error path and the
	 * default is used on success.  The same pattern appears in other
	 * radeon dpm files, so it is left as-is; confirm before changing. */
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	/* arbitration/throttle defaults: ats[0] = normal, ats[1] = UVD */
	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
	eg_pi->ats[0].lmp = RV770_LMP_DFLT;

	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;

	eg_pi->smu_uvd_hs = true;

	/* per-device mclk thresholds (device 0x6707 uses higher limits) */
	if (rdev->pdev->device == 0x6707) {
		pi->mclk_strobe_mode_threshold = 55000;
		pi->mclk_edc_enable_threshold = 55000;
		eg_pi->mclk_edc_wr_enable_threshold = 55000;
	} else {
		pi->mclk_strobe_mode_threshold = 40000;
		pi->mclk_edc_enable_threshold = 40000;
		eg_pi->mclk_edc_wr_enable_threshold = 40000;
	}
	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;

	/* voltage control method depends on what the board wires via GPIO */
	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);

	eg_pi->vddci_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);

	/* enable spread spectrum only if the BIOS carries an SS table */
	if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size,
                                   &frev, &crev, &data_offset)) {
		pi->sclk_ss = true;
		pi->mclk_ss = true;
		pi->dynamic_ss = true;
	} else {
		pi->sclk_ss = false;
		pi->mclk_ss = false;
		pi->dynamic_ss = true;
	}

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = CYPRESS_HASI_DFLT;
	pi->vrc = CYPRESS_VRC_DFLT;

	/* feature policy defaults for Cayman */
	pi->power_gating = false;

	pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;
	eg_pi->ls_clock_gating = false;
	eg_pi->sclk_deep_sleep = false;

	pi->dynamic_pcie_gen2 = true;

	/* thermal protection needs a usable internal sensor */
	if (pi->gfx_clock_gating &&
	    (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	pi->dcodt = true;

	pi->ulps = true;

	eg_pi->dynamic_ac_timing = true;
	eg_pi->abm = true;
	eg_pi->mcls = true;
	eg_pi->light_sleep = true;
	eg_pi->memory_transition = true;
#if defined(CONFIG_ACPI)
	eg_pi->pcie_performance_request =
		radeon_acpi_is_pcie_performance_request_supported(rdev);
#else
	eg_pi->pcie_performance_request = false;
#endif

	eg_pi->dll_default_on = false;

	eg_pi->sclk_deep_sleep = false;

	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;

	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;

	/* leakage model coefficients used by the CAC calculations */
	ni_pi->cac_data.leakage_coefficients.at = 516;
	ni_pi->cac_data.leakage_coefficients.bt = 18;
	ni_pi->cac_data.leakage_coefficients.av = 51;
	ni_pi->cac_data.leakage_coefficients.bv = 2957;

	/* pick the CAC weight table matching the Cayman variant (XT/PRO/LE) */
	switch (rdev->pdev->device) {
	case 0x6700:
	case 0x6701:
	case 0x6702:
	case 0x6703:
	case 0x6718:
		ni_pi->cac_weights = &cac_weights_cayman_xt;
		break;
	case 0x6705:
	case 0x6719:
	case 0x671D:
	case 0x671C:
	default:
		ni_pi->cac_weights = &cac_weights_cayman_pro;
		break;
	case 0x6704:
	case 0x6706:
	case 0x6707:
	case 0x6708:
	case 0x6709:
		ni_pi->cac_weights = &cac_weights_cayman_le;
		break;
	}

	if (ni_pi->cac_weights->enable_power_containment_by_default) {
		ni_pi->enable_power_containment = true;
		ni_pi->enable_cac = true;
		ni_pi->enable_sq_ramping = true;
	} else {
		ni_pi->enable_power_containment = false;
		ni_pi->enable_cac = false;
		ni_pi->enable_sq_ramping = false;
	}

	ni_pi->driver_calculate_cac_leakage = false;
	ni_pi->cac_configuration_required = true;

	if (ni_pi->cac_configuration_required) {
		ni_pi->support_cac_long_term_average = true;
		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
	} else {
		ni_pi->support_cac_long_term_average = false;
		ni_pi->lta_window_size = 0;
		ni_pi->lts_truncate = 0;
	}

	ni_pi->use_power_boost_limit = true;

	return 0;
}
4255
4256void ni_dpm_fini(struct radeon_device *rdev)
4257{
4258	int i;
4259
4260	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4261		kfree(rdev->pm.dpm.ps[i].ps_priv);
4262	}
4263	kfree(rdev->pm.dpm.ps);
4264	kfree(rdev->pm.dpm.priv);
4265	kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4266	r600_free_extended_power_table(rdev);
4267}
4268
4269void ni_dpm_print_power_state(struct radeon_device *rdev,
4270			      struct radeon_ps *rps)
4271{
4272	struct ni_ps *ps = ni_get_ps(rps);
4273	struct rv7xx_pl *pl;
4274	int i;
4275
4276	r600_dpm_print_class_info(rps->class, rps->class2);
4277	r600_dpm_print_cap_info(rps->caps);
4278	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4279	for (i = 0; i < ps->performance_level_count; i++) {
4280		pl = &ps->performance_levels[i];
4281		if (rdev->family >= CHIP_TAHITI)
4282			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4283			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4284		else
4285			printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4286			       i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4287	}
4288	r600_dpm_print_ps_status(rdev, rps);
4289}
4290
4291void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4292						    struct seq_file *m)
4293{
4294	struct radeon_ps *rps = rdev->pm.dpm.current_ps;
4295	struct ni_ps *ps = ni_get_ps(rps);
4296	struct rv7xx_pl *pl;
4297	u32 current_index =
4298		(RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4299		CURRENT_STATE_INDEX_SHIFT;
4300
4301	if (current_index >= ps->performance_level_count) {
4302		seq_printf(m, "invalid dpm profile %d\n", current_index);
4303	} else {
4304		pl = &ps->performance_levels[current_index];
4305		seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4306		seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4307			   current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4308	}
4309}
4310
4311u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4312{
4313	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4314	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4315
4316	if (low)
4317		return requested_state->performance_levels[0].sclk;
4318	else
4319		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4320}
4321
4322u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4323{
4324	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4325	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4326
4327	if (low)
4328		return requested_state->performance_levels[0].mclk;
4329	else
4330		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4331}
4332
4333