ni_dpm.c revision fee3d744bf3a0484f2f3ece587cccdffe33f2a15
1/*
2 * Copyright 2012 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 */
23
24#include "drmP.h"
25#include "radeon.h"
26#include "nid.h"
27#include "r600_dpm.h"
28#include "ni_dpm.h"
29#include "atom.h"
30
31#define MC_CG_ARB_FREQ_F0           0x0a
32#define MC_CG_ARB_FREQ_F1           0x0b
33#define MC_CG_ARB_FREQ_F2           0x0c
34#define MC_CG_ARB_FREQ_F3           0x0d
35
36#define SMC_RAM_END 0xC000
37
/*
 * CAC (capacitance/activity) weight table for Cayman XT boards.
 * NOTE(review): positional initializer — the ordering must track
 * struct ni_cac_weights as declared in ni_dpm.h (not visible in this
 * chunk); values are AMD-supplied SMC tuning constants. Confirm field
 * order against the header before reordering anything here.
 */
static const struct ni_cac_weights cac_weights_cayman_xt =
{
	0x15,
	0x2,
	0x19,
	0x2,
	0x8,
	0x14,
	0x2,
	0x16,
	0xE,
	0x17,
	0x13,
	0x2B,
	0x10,
	0x7,
	0x5,
	0x5,
	0x5,
	0x2,
	0x3,
	0x9,
	0x10,
	0x10,
	0x2B,
	0xA,
	0x9,
	0x4,
	0xD,
	0xD,
	0x3E,
	0x18,
	0x14,
	0,
	0x3,
	0x3,
	0x5,
	0,
	0x2,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0,
	0x1CC,
	0,
	0x164,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x12,
	0x1F,
	132,
	5,
	7,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
106
/*
 * CAC (capacitance/activity) weight table for Cayman PRO boards.
 * NOTE(review): positional initializer — ordering must track
 * struct ni_cac_weights in ni_dpm.h (not visible here); values are
 * AMD-supplied SMC tuning constants for this SKU.
 */
static const struct ni_cac_weights cac_weights_cayman_pro =
{
	0x16,
	0x4,
	0x10,
	0x2,
	0xA,
	0x16,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x16,
	0x2D,
	0x12,
	0xA,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0xB,
	0x11,
	0x11,
	0x2D,
	0xC,
	0xC,
	0x7,
	0x10,
	0x10,
	0x3F,
	0x1A,
	0x16,
	0,
	0x7,
	0x4,
	0x6,
	1,
	0x2,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x30,
	0,
	0x1CF,
	0,
	0x166,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x1F,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
175
/*
 * CAC (capacitance/activity) weight table for Cayman LE boards.
 * NOTE(review): positional initializer — ordering must track
 * struct ni_cac_weights in ni_dpm.h (not visible here); values are
 * AMD-supplied SMC tuning constants for this SKU.
 */
static const struct ni_cac_weights cac_weights_cayman_le =
{
	0x7,
	0xE,
	0x1,
	0xA,
	0x1,
	0x3F,
	0x2,
	0x18,
	0x10,
	0x1A,
	0x1,
	0x3F,
	0x1,
	0xE,
	0x6,
	0x6,
	0x6,
	0x2,
	0x4,
	0x9,
	0x1A,
	0x1A,
	0x2C,
	0xA,
	0x11,
	0x8,
	0x19,
	0x19,
	0x1,
	0x1,
	0x1A,
	0,
	0x8,
	0x5,
	0x8,
	0x1,
	0x3,
	0x1,
	0,
	0,
	0,
	0,
	0,
	0,
	0x38,
	0x38,
	0x239,
	0x3,
	0x18A,
	1,
	1,
	1,
	1,
	12,
	12,
	12,
	0x15,
	0x22,
	132,
	6,
	6,
	0,
	{ 0, 0, 0, 0, 0, 0, 0, 0 },
	{ 0, 0, 0, 0 },
	true
};
244
245#define NISLANDS_MGCG_SEQUENCE  300
246
247static const u32 cayman_cgcg_cgls_default[] =
248{
249	0x000008f8, 0x00000010, 0xffffffff,
250	0x000008fc, 0x00000000, 0xffffffff,
251	0x000008f8, 0x00000011, 0xffffffff,
252	0x000008fc, 0x00000000, 0xffffffff,
253	0x000008f8, 0x00000012, 0xffffffff,
254	0x000008fc, 0x00000000, 0xffffffff,
255	0x000008f8, 0x00000013, 0xffffffff,
256	0x000008fc, 0x00000000, 0xffffffff,
257	0x000008f8, 0x00000014, 0xffffffff,
258	0x000008fc, 0x00000000, 0xffffffff,
259	0x000008f8, 0x00000015, 0xffffffff,
260	0x000008fc, 0x00000000, 0xffffffff,
261	0x000008f8, 0x00000016, 0xffffffff,
262	0x000008fc, 0x00000000, 0xffffffff,
263	0x000008f8, 0x00000017, 0xffffffff,
264	0x000008fc, 0x00000000, 0xffffffff,
265	0x000008f8, 0x00000018, 0xffffffff,
266	0x000008fc, 0x00000000, 0xffffffff,
267	0x000008f8, 0x00000019, 0xffffffff,
268	0x000008fc, 0x00000000, 0xffffffff,
269	0x000008f8, 0x0000001a, 0xffffffff,
270	0x000008fc, 0x00000000, 0xffffffff,
271	0x000008f8, 0x0000001b, 0xffffffff,
272	0x000008fc, 0x00000000, 0xffffffff,
273	0x000008f8, 0x00000020, 0xffffffff,
274	0x000008fc, 0x00000000, 0xffffffff,
275	0x000008f8, 0x00000021, 0xffffffff,
276	0x000008fc, 0x00000000, 0xffffffff,
277	0x000008f8, 0x00000022, 0xffffffff,
278	0x000008fc, 0x00000000, 0xffffffff,
279	0x000008f8, 0x00000023, 0xffffffff,
280	0x000008fc, 0x00000000, 0xffffffff,
281	0x000008f8, 0x00000024, 0xffffffff,
282	0x000008fc, 0x00000000, 0xffffffff,
283	0x000008f8, 0x00000025, 0xffffffff,
284	0x000008fc, 0x00000000, 0xffffffff,
285	0x000008f8, 0x00000026, 0xffffffff,
286	0x000008fc, 0x00000000, 0xffffffff,
287	0x000008f8, 0x00000027, 0xffffffff,
288	0x000008fc, 0x00000000, 0xffffffff,
289	0x000008f8, 0x00000028, 0xffffffff,
290	0x000008fc, 0x00000000, 0xffffffff,
291	0x000008f8, 0x00000029, 0xffffffff,
292	0x000008fc, 0x00000000, 0xffffffff,
293	0x000008f8, 0x0000002a, 0xffffffff,
294	0x000008fc, 0x00000000, 0xffffffff,
295	0x000008f8, 0x0000002b, 0xffffffff,
296	0x000008fc, 0x00000000, 0xffffffff
297};
298#define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
299
300static const u32 cayman_cgcg_cgls_disable[] =
301{
302	0x000008f8, 0x00000010, 0xffffffff,
303	0x000008fc, 0xffffffff, 0xffffffff,
304	0x000008f8, 0x00000011, 0xffffffff,
305	0x000008fc, 0xffffffff, 0xffffffff,
306	0x000008f8, 0x00000012, 0xffffffff,
307	0x000008fc, 0xffffffff, 0xffffffff,
308	0x000008f8, 0x00000013, 0xffffffff,
309	0x000008fc, 0xffffffff, 0xffffffff,
310	0x000008f8, 0x00000014, 0xffffffff,
311	0x000008fc, 0xffffffff, 0xffffffff,
312	0x000008f8, 0x00000015, 0xffffffff,
313	0x000008fc, 0xffffffff, 0xffffffff,
314	0x000008f8, 0x00000016, 0xffffffff,
315	0x000008fc, 0xffffffff, 0xffffffff,
316	0x000008f8, 0x00000017, 0xffffffff,
317	0x000008fc, 0xffffffff, 0xffffffff,
318	0x000008f8, 0x00000018, 0xffffffff,
319	0x000008fc, 0xffffffff, 0xffffffff,
320	0x000008f8, 0x00000019, 0xffffffff,
321	0x000008fc, 0xffffffff, 0xffffffff,
322	0x000008f8, 0x0000001a, 0xffffffff,
323	0x000008fc, 0xffffffff, 0xffffffff,
324	0x000008f8, 0x0000001b, 0xffffffff,
325	0x000008fc, 0xffffffff, 0xffffffff,
326	0x000008f8, 0x00000020, 0xffffffff,
327	0x000008fc, 0x00000000, 0xffffffff,
328	0x000008f8, 0x00000021, 0xffffffff,
329	0x000008fc, 0x00000000, 0xffffffff,
330	0x000008f8, 0x00000022, 0xffffffff,
331	0x000008fc, 0x00000000, 0xffffffff,
332	0x000008f8, 0x00000023, 0xffffffff,
333	0x000008fc, 0x00000000, 0xffffffff,
334	0x000008f8, 0x00000024, 0xffffffff,
335	0x000008fc, 0x00000000, 0xffffffff,
336	0x000008f8, 0x00000025, 0xffffffff,
337	0x000008fc, 0x00000000, 0xffffffff,
338	0x000008f8, 0x00000026, 0xffffffff,
339	0x000008fc, 0x00000000, 0xffffffff,
340	0x000008f8, 0x00000027, 0xffffffff,
341	0x000008fc, 0x00000000, 0xffffffff,
342	0x000008f8, 0x00000028, 0xffffffff,
343	0x000008fc, 0x00000000, 0xffffffff,
344	0x000008f8, 0x00000029, 0xffffffff,
345	0x000008fc, 0x00000000, 0xffffffff,
346	0x000008f8, 0x0000002a, 0xffffffff,
347	0x000008fc, 0x00000000, 0xffffffff,
348	0x000008f8, 0x0000002b, 0xffffffff,
349	0x000008fc, 0x00000000, 0xffffffff,
350	0x00000644, 0x000f7902, 0x001f4180,
351	0x00000644, 0x000f3802, 0x001f4180
352};
353#define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
354
355static const u32 cayman_cgcg_cgls_enable[] =
356{
357	0x00000644, 0x000f7882, 0x001f4080,
358	0x000008f8, 0x00000010, 0xffffffff,
359	0x000008fc, 0x00000000, 0xffffffff,
360	0x000008f8, 0x00000011, 0xffffffff,
361	0x000008fc, 0x00000000, 0xffffffff,
362	0x000008f8, 0x00000012, 0xffffffff,
363	0x000008fc, 0x00000000, 0xffffffff,
364	0x000008f8, 0x00000013, 0xffffffff,
365	0x000008fc, 0x00000000, 0xffffffff,
366	0x000008f8, 0x00000014, 0xffffffff,
367	0x000008fc, 0x00000000, 0xffffffff,
368	0x000008f8, 0x00000015, 0xffffffff,
369	0x000008fc, 0x00000000, 0xffffffff,
370	0x000008f8, 0x00000016, 0xffffffff,
371	0x000008fc, 0x00000000, 0xffffffff,
372	0x000008f8, 0x00000017, 0xffffffff,
373	0x000008fc, 0x00000000, 0xffffffff,
374	0x000008f8, 0x00000018, 0xffffffff,
375	0x000008fc, 0x00000000, 0xffffffff,
376	0x000008f8, 0x00000019, 0xffffffff,
377	0x000008fc, 0x00000000, 0xffffffff,
378	0x000008f8, 0x0000001a, 0xffffffff,
379	0x000008fc, 0x00000000, 0xffffffff,
380	0x000008f8, 0x0000001b, 0xffffffff,
381	0x000008fc, 0x00000000, 0xffffffff,
382	0x000008f8, 0x00000020, 0xffffffff,
383	0x000008fc, 0xffffffff, 0xffffffff,
384	0x000008f8, 0x00000021, 0xffffffff,
385	0x000008fc, 0xffffffff, 0xffffffff,
386	0x000008f8, 0x00000022, 0xffffffff,
387	0x000008fc, 0xffffffff, 0xffffffff,
388	0x000008f8, 0x00000023, 0xffffffff,
389	0x000008fc, 0xffffffff, 0xffffffff,
390	0x000008f8, 0x00000024, 0xffffffff,
391	0x000008fc, 0xffffffff, 0xffffffff,
392	0x000008f8, 0x00000025, 0xffffffff,
393	0x000008fc, 0xffffffff, 0xffffffff,
394	0x000008f8, 0x00000026, 0xffffffff,
395	0x000008fc, 0xffffffff, 0xffffffff,
396	0x000008f8, 0x00000027, 0xffffffff,
397	0x000008fc, 0xffffffff, 0xffffffff,
398	0x000008f8, 0x00000028, 0xffffffff,
399	0x000008fc, 0xffffffff, 0xffffffff,
400	0x000008f8, 0x00000029, 0xffffffff,
401	0x000008fc, 0xffffffff, 0xffffffff,
402	0x000008f8, 0x0000002a, 0xffffffff,
403	0x000008fc, 0xffffffff, 0xffffffff,
404	0x000008f8, 0x0000002b, 0xffffffff,
405	0x000008fc, 0xffffffff, 0xffffffff
406};
407#define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
408
409static const u32 cayman_mgcg_default[] =
410{
411	0x0000802c, 0xc0000000, 0xffffffff,
412	0x00003fc4, 0xc0000000, 0xffffffff,
413	0x00005448, 0x00000100, 0xffffffff,
414	0x000055e4, 0x00000100, 0xffffffff,
415	0x0000160c, 0x00000100, 0xffffffff,
416	0x00008984, 0x06000100, 0xffffffff,
417	0x0000c164, 0x00000100, 0xffffffff,
418	0x00008a18, 0x00000100, 0xffffffff,
419	0x0000897c, 0x06000100, 0xffffffff,
420	0x00008b28, 0x00000100, 0xffffffff,
421	0x00009144, 0x00800200, 0xffffffff,
422	0x00009a60, 0x00000100, 0xffffffff,
423	0x00009868, 0x00000100, 0xffffffff,
424	0x00008d58, 0x00000100, 0xffffffff,
425	0x00009510, 0x00000100, 0xffffffff,
426	0x0000949c, 0x00000100, 0xffffffff,
427	0x00009654, 0x00000100, 0xffffffff,
428	0x00009030, 0x00000100, 0xffffffff,
429	0x00009034, 0x00000100, 0xffffffff,
430	0x00009038, 0x00000100, 0xffffffff,
431	0x0000903c, 0x00000100, 0xffffffff,
432	0x00009040, 0x00000100, 0xffffffff,
433	0x0000a200, 0x00000100, 0xffffffff,
434	0x0000a204, 0x00000100, 0xffffffff,
435	0x0000a208, 0x00000100, 0xffffffff,
436	0x0000a20c, 0x00000100, 0xffffffff,
437	0x00009744, 0x00000100, 0xffffffff,
438	0x00003f80, 0x00000100, 0xffffffff,
439	0x0000a210, 0x00000100, 0xffffffff,
440	0x0000a214, 0x00000100, 0xffffffff,
441	0x000004d8, 0x00000100, 0xffffffff,
442	0x00009664, 0x00000100, 0xffffffff,
443	0x00009698, 0x00000100, 0xffffffff,
444	0x000004d4, 0x00000200, 0xffffffff,
445	0x000004d0, 0x00000000, 0xffffffff,
446	0x000030cc, 0x00000104, 0xffffffff,
447	0x0000d0c0, 0x00000100, 0xffffffff,
448	0x0000d8c0, 0x00000100, 0xffffffff,
449	0x0000802c, 0x40000000, 0xffffffff,
450	0x00003fc4, 0x40000000, 0xffffffff,
451	0x0000915c, 0x00010000, 0xffffffff,
452	0x00009160, 0x00030002, 0xffffffff,
453	0x00009164, 0x00050004, 0xffffffff,
454	0x00009168, 0x00070006, 0xffffffff,
455	0x00009178, 0x00070000, 0xffffffff,
456	0x0000917c, 0x00030002, 0xffffffff,
457	0x00009180, 0x00050004, 0xffffffff,
458	0x0000918c, 0x00010006, 0xffffffff,
459	0x00009190, 0x00090008, 0xffffffff,
460	0x00009194, 0x00070000, 0xffffffff,
461	0x00009198, 0x00030002, 0xffffffff,
462	0x0000919c, 0x00050004, 0xffffffff,
463	0x000091a8, 0x00010006, 0xffffffff,
464	0x000091ac, 0x00090008, 0xffffffff,
465	0x000091b0, 0x00070000, 0xffffffff,
466	0x000091b4, 0x00030002, 0xffffffff,
467	0x000091b8, 0x00050004, 0xffffffff,
468	0x000091c4, 0x00010006, 0xffffffff,
469	0x000091c8, 0x00090008, 0xffffffff,
470	0x000091cc, 0x00070000, 0xffffffff,
471	0x000091d0, 0x00030002, 0xffffffff,
472	0x000091d4, 0x00050004, 0xffffffff,
473	0x000091e0, 0x00010006, 0xffffffff,
474	0x000091e4, 0x00090008, 0xffffffff,
475	0x000091e8, 0x00000000, 0xffffffff,
476	0x000091ec, 0x00070000, 0xffffffff,
477	0x000091f0, 0x00030002, 0xffffffff,
478	0x000091f4, 0x00050004, 0xffffffff,
479	0x00009200, 0x00010006, 0xffffffff,
480	0x00009204, 0x00090008, 0xffffffff,
481	0x00009208, 0x00070000, 0xffffffff,
482	0x0000920c, 0x00030002, 0xffffffff,
483	0x00009210, 0x00050004, 0xffffffff,
484	0x0000921c, 0x00010006, 0xffffffff,
485	0x00009220, 0x00090008, 0xffffffff,
486	0x00009224, 0x00070000, 0xffffffff,
487	0x00009228, 0x00030002, 0xffffffff,
488	0x0000922c, 0x00050004, 0xffffffff,
489	0x00009238, 0x00010006, 0xffffffff,
490	0x0000923c, 0x00090008, 0xffffffff,
491	0x00009240, 0x00070000, 0xffffffff,
492	0x00009244, 0x00030002, 0xffffffff,
493	0x00009248, 0x00050004, 0xffffffff,
494	0x00009254, 0x00010006, 0xffffffff,
495	0x00009258, 0x00090008, 0xffffffff,
496	0x0000925c, 0x00070000, 0xffffffff,
497	0x00009260, 0x00030002, 0xffffffff,
498	0x00009264, 0x00050004, 0xffffffff,
499	0x00009270, 0x00010006, 0xffffffff,
500	0x00009274, 0x00090008, 0xffffffff,
501	0x00009278, 0x00070000, 0xffffffff,
502	0x0000927c, 0x00030002, 0xffffffff,
503	0x00009280, 0x00050004, 0xffffffff,
504	0x0000928c, 0x00010006, 0xffffffff,
505	0x00009290, 0x00090008, 0xffffffff,
506	0x000092a8, 0x00070000, 0xffffffff,
507	0x000092ac, 0x00030002, 0xffffffff,
508	0x000092b0, 0x00050004, 0xffffffff,
509	0x000092bc, 0x00010006, 0xffffffff,
510	0x000092c0, 0x00090008, 0xffffffff,
511	0x000092c4, 0x00070000, 0xffffffff,
512	0x000092c8, 0x00030002, 0xffffffff,
513	0x000092cc, 0x00050004, 0xffffffff,
514	0x000092d8, 0x00010006, 0xffffffff,
515	0x000092dc, 0x00090008, 0xffffffff,
516	0x00009294, 0x00000000, 0xffffffff,
517	0x0000802c, 0x40010000, 0xffffffff,
518	0x00003fc4, 0x40010000, 0xffffffff,
519	0x0000915c, 0x00010000, 0xffffffff,
520	0x00009160, 0x00030002, 0xffffffff,
521	0x00009164, 0x00050004, 0xffffffff,
522	0x00009168, 0x00070006, 0xffffffff,
523	0x00009178, 0x00070000, 0xffffffff,
524	0x0000917c, 0x00030002, 0xffffffff,
525	0x00009180, 0x00050004, 0xffffffff,
526	0x0000918c, 0x00010006, 0xffffffff,
527	0x00009190, 0x00090008, 0xffffffff,
528	0x00009194, 0x00070000, 0xffffffff,
529	0x00009198, 0x00030002, 0xffffffff,
530	0x0000919c, 0x00050004, 0xffffffff,
531	0x000091a8, 0x00010006, 0xffffffff,
532	0x000091ac, 0x00090008, 0xffffffff,
533	0x000091b0, 0x00070000, 0xffffffff,
534	0x000091b4, 0x00030002, 0xffffffff,
535	0x000091b8, 0x00050004, 0xffffffff,
536	0x000091c4, 0x00010006, 0xffffffff,
537	0x000091c8, 0x00090008, 0xffffffff,
538	0x000091cc, 0x00070000, 0xffffffff,
539	0x000091d0, 0x00030002, 0xffffffff,
540	0x000091d4, 0x00050004, 0xffffffff,
541	0x000091e0, 0x00010006, 0xffffffff,
542	0x000091e4, 0x00090008, 0xffffffff,
543	0x000091e8, 0x00000000, 0xffffffff,
544	0x000091ec, 0x00070000, 0xffffffff,
545	0x000091f0, 0x00030002, 0xffffffff,
546	0x000091f4, 0x00050004, 0xffffffff,
547	0x00009200, 0x00010006, 0xffffffff,
548	0x00009204, 0x00090008, 0xffffffff,
549	0x00009208, 0x00070000, 0xffffffff,
550	0x0000920c, 0x00030002, 0xffffffff,
551	0x00009210, 0x00050004, 0xffffffff,
552	0x0000921c, 0x00010006, 0xffffffff,
553	0x00009220, 0x00090008, 0xffffffff,
554	0x00009224, 0x00070000, 0xffffffff,
555	0x00009228, 0x00030002, 0xffffffff,
556	0x0000922c, 0x00050004, 0xffffffff,
557	0x00009238, 0x00010006, 0xffffffff,
558	0x0000923c, 0x00090008, 0xffffffff,
559	0x00009240, 0x00070000, 0xffffffff,
560	0x00009244, 0x00030002, 0xffffffff,
561	0x00009248, 0x00050004, 0xffffffff,
562	0x00009254, 0x00010006, 0xffffffff,
563	0x00009258, 0x00090008, 0xffffffff,
564	0x0000925c, 0x00070000, 0xffffffff,
565	0x00009260, 0x00030002, 0xffffffff,
566	0x00009264, 0x00050004, 0xffffffff,
567	0x00009270, 0x00010006, 0xffffffff,
568	0x00009274, 0x00090008, 0xffffffff,
569	0x00009278, 0x00070000, 0xffffffff,
570	0x0000927c, 0x00030002, 0xffffffff,
571	0x00009280, 0x00050004, 0xffffffff,
572	0x0000928c, 0x00010006, 0xffffffff,
573	0x00009290, 0x00090008, 0xffffffff,
574	0x000092a8, 0x00070000, 0xffffffff,
575	0x000092ac, 0x00030002, 0xffffffff,
576	0x000092b0, 0x00050004, 0xffffffff,
577	0x000092bc, 0x00010006, 0xffffffff,
578	0x000092c0, 0x00090008, 0xffffffff,
579	0x000092c4, 0x00070000, 0xffffffff,
580	0x000092c8, 0x00030002, 0xffffffff,
581	0x000092cc, 0x00050004, 0xffffffff,
582	0x000092d8, 0x00010006, 0xffffffff,
583	0x000092dc, 0x00090008, 0xffffffff,
584	0x00009294, 0x00000000, 0xffffffff,
585	0x0000802c, 0xc0000000, 0xffffffff,
586	0x00003fc4, 0xc0000000, 0xffffffff,
587	0x000008f8, 0x00000010, 0xffffffff,
588	0x000008fc, 0x00000000, 0xffffffff,
589	0x000008f8, 0x00000011, 0xffffffff,
590	0x000008fc, 0x00000000, 0xffffffff,
591	0x000008f8, 0x00000012, 0xffffffff,
592	0x000008fc, 0x00000000, 0xffffffff,
593	0x000008f8, 0x00000013, 0xffffffff,
594	0x000008fc, 0x00000000, 0xffffffff,
595	0x000008f8, 0x00000014, 0xffffffff,
596	0x000008fc, 0x00000000, 0xffffffff,
597	0x000008f8, 0x00000015, 0xffffffff,
598	0x000008fc, 0x00000000, 0xffffffff,
599	0x000008f8, 0x00000016, 0xffffffff,
600	0x000008fc, 0x00000000, 0xffffffff,
601	0x000008f8, 0x00000017, 0xffffffff,
602	0x000008fc, 0x00000000, 0xffffffff,
603	0x000008f8, 0x00000018, 0xffffffff,
604	0x000008fc, 0x00000000, 0xffffffff,
605	0x000008f8, 0x00000019, 0xffffffff,
606	0x000008fc, 0x00000000, 0xffffffff,
607	0x000008f8, 0x0000001a, 0xffffffff,
608	0x000008fc, 0x00000000, 0xffffffff,
609	0x000008f8, 0x0000001b, 0xffffffff,
610	0x000008fc, 0x00000000, 0xffffffff
611};
612#define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
613
614static const u32 cayman_mgcg_disable[] =
615{
616	0x0000802c, 0xc0000000, 0xffffffff,
617	0x000008f8, 0x00000000, 0xffffffff,
618	0x000008fc, 0xffffffff, 0xffffffff,
619	0x000008f8, 0x00000001, 0xffffffff,
620	0x000008fc, 0xffffffff, 0xffffffff,
621	0x000008f8, 0x00000002, 0xffffffff,
622	0x000008fc, 0xffffffff, 0xffffffff,
623	0x000008f8, 0x00000003, 0xffffffff,
624	0x000008fc, 0xffffffff, 0xffffffff,
625	0x00009150, 0x00600000, 0xffffffff
626};
627#define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
628
629static const u32 cayman_mgcg_enable[] =
630{
631	0x0000802c, 0xc0000000, 0xffffffff,
632	0x000008f8, 0x00000000, 0xffffffff,
633	0x000008fc, 0x00000000, 0xffffffff,
634	0x000008f8, 0x00000001, 0xffffffff,
635	0x000008fc, 0x00000000, 0xffffffff,
636	0x000008f8, 0x00000002, 0xffffffff,
637	0x000008fc, 0x00600000, 0xffffffff,
638	0x000008f8, 0x00000003, 0xffffffff,
639	0x000008fc, 0x00000000, 0xffffffff,
640	0x00009150, 0x96944200, 0xffffffff
641};
642
643#define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
644
645#define NISLANDS_SYSLS_SEQUENCE  100
646
647static const u32 cayman_sysls_default[] =
648{
649	/* Register,   Value,     Mask bits */
650	0x000055e8, 0x00000000, 0xffffffff,
651	0x0000d0bc, 0x00000000, 0xffffffff,
652	0x0000d8bc, 0x00000000, 0xffffffff,
653	0x000015c0, 0x000c1401, 0xffffffff,
654	0x0000264c, 0x000c0400, 0xffffffff,
655	0x00002648, 0x000c0400, 0xffffffff,
656	0x00002650, 0x000c0400, 0xffffffff,
657	0x000020b8, 0x000c0400, 0xffffffff,
658	0x000020bc, 0x000c0400, 0xffffffff,
659	0x000020c0, 0x000c0c80, 0xffffffff,
660	0x0000f4a0, 0x000000c0, 0xffffffff,
661	0x0000f4a4, 0x00680fff, 0xffffffff,
662	0x00002f50, 0x00000404, 0xffffffff,
663	0x000004c8, 0x00000001, 0xffffffff,
664	0x000064ec, 0x00000000, 0xffffffff,
665	0x00000c7c, 0x00000000, 0xffffffff,
666	0x00008dfc, 0x00000000, 0xffffffff
667};
668#define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
669
670static const u32 cayman_sysls_disable[] =
671{
672	/* Register,   Value,     Mask bits */
673	0x0000d0c0, 0x00000000, 0xffffffff,
674	0x0000d8c0, 0x00000000, 0xffffffff,
675	0x000055e8, 0x00000000, 0xffffffff,
676	0x0000d0bc, 0x00000000, 0xffffffff,
677	0x0000d8bc, 0x00000000, 0xffffffff,
678	0x000015c0, 0x00041401, 0xffffffff,
679	0x0000264c, 0x00040400, 0xffffffff,
680	0x00002648, 0x00040400, 0xffffffff,
681	0x00002650, 0x00040400, 0xffffffff,
682	0x000020b8, 0x00040400, 0xffffffff,
683	0x000020bc, 0x00040400, 0xffffffff,
684	0x000020c0, 0x00040c80, 0xffffffff,
685	0x0000f4a0, 0x000000c0, 0xffffffff,
686	0x0000f4a4, 0x00680000, 0xffffffff,
687	0x00002f50, 0x00000404, 0xffffffff,
688	0x000004c8, 0x00000001, 0xffffffff,
689	0x000064ec, 0x00007ffd, 0xffffffff,
690	0x00000c7c, 0x0000ff00, 0xffffffff,
691	0x00008dfc, 0x0000007f, 0xffffffff
692};
693#define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
694
695static const u32 cayman_sysls_enable[] =
696{
697	/* Register,   Value,     Mask bits */
698	0x000055e8, 0x00000001, 0xffffffff,
699	0x0000d0bc, 0x00000100, 0xffffffff,
700	0x0000d8bc, 0x00000100, 0xffffffff,
701	0x000015c0, 0x000c1401, 0xffffffff,
702	0x0000264c, 0x000c0400, 0xffffffff,
703	0x00002648, 0x000c0400, 0xffffffff,
704	0x00002650, 0x000c0400, 0xffffffff,
705	0x000020b8, 0x000c0400, 0xffffffff,
706	0x000020bc, 0x000c0400, 0xffffffff,
707	0x000020c0, 0x000c0c80, 0xffffffff,
708	0x0000f4a0, 0x000000c0, 0xffffffff,
709	0x0000f4a4, 0x00680fff, 0xffffffff,
710	0x00002f50, 0x00000903, 0xffffffff,
711	0x000004c8, 0x00000000, 0xffffffff,
712	0x000064ec, 0x00000000, 0xffffffff,
713	0x00000c7c, 0x00000000, 0xffffffff,
714	0x00008dfc, 0x00000000, 0xffffffff
715};
716#define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
717
718struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
719struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
720
721static struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
722{
723        struct ni_power_info *pi = rdev->pm.dpm.priv;
724
725        return pi;
726}
727
728struct ni_ps *ni_get_ps(struct radeon_ps *rps)
729{
730	struct ni_ps *ps = rps->ps_priv;
731
732	return ps;
733}
734
/* XXX: fix for kernel use  */
#if 0
/*
 * Taylor-series approximation of e^x: sums terms y^n/n! until the next
 * term drops below the tolerance, then inverts the result for negative x
 * (e^-x = 1/e^x).  Compiled out: floating point is not usable in kernel
 * context, so this stays disabled until an integer version exists.
 */
static double ni_exp(double x)
{
	int count = 1;
	double sum = 1.0, term, tolerance = 0.000000001, y = x;

	if (x < 0)
		y = -1 * x;
	term  = y;

	while (term >= tolerance) {
		sum = sum + term;
		count = count + 1;
		term  = term * (y / count);
	}

	if (x < 0)
		sum = 1.0 / sum;

	return sum;
}
#endif
758
/*
 * Compute leakage power (mW) for a given voltage @v (mV) and temperature
 * @t from the per-asic coefficients and base leakage current @ileakage,
 * writing the result to @leakage.
 *
 * The formula body is compiled out (floating point is not usable in
 * kernel context), so this currently leaves *leakage untouched — callers
 * must not rely on the output until an integer implementation lands.
 */
static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
						     u16 v, s32 t,
						     u32 ileakage,
						     u32 *leakage)
{
/* XXX: fix for kernel use  */
#if 0
	double kt, kv, leakage_w, i_leakage, vddc, temperature;

	i_leakage   = ((double)ileakage) / 1000;
	vddc        = ((double)v) / 1000;
	temperature = ((double)t) / 1000;

	/* leakage = I * e^(bt*T) * at/1000 * e^(bv*V) * av/1000 * V */
	kt = (((double)(coeff->at)) / 1000) * ni_exp((((double)(coeff->bt)) / 1000) * temperature);
	kv = (((double)(coeff->av)) / 1000) * ni_exp((((double)(coeff->bv)) / 1000) * vddc);

	leakage_w = i_leakage * kt * kv * vddc;

	*leakage = (u32)(leakage_w * 1000);
#endif
}
780
/*
 * Wrapper around ni_calculate_leakage_for_v_and_t_formula(); @rdev is
 * currently unused but kept for interface symmetry with other dpm code.
 */
static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
					     const struct ni_leakage_coeffients *coeff,
					     u16 v,
					     s32 t,
					     u32 i_leakage,
					     u32 *leakage)
{
	ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
}
790
/*
 * Adjust a requested power state in place so every performance level
 * respects the current AC/DC clock+voltage limits, the blacklist of
 * problematic clock combinations, and the display configuration.
 */
static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
					struct radeon_ps *rps)
{
	struct ni_ps *ps = ni_get_ps(rps);
	struct radeon_clock_and_voltage_limits *max_limits;
	bool disable_mclk_switching;
	u32 mclk, sclk;
	u16 vddc, vddci;
	int i;

	/* mclk switching with more than one active CRTC causes display glitches */
	if (rdev->pm.dpm.new_active_crtc_count > 1)
		disable_mclk_switching = true;
	else
		disable_mclk_switching = false;

	if (rdev->pm.dpm.ac_power)
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
	else
		max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;

	/* on battery, clamp every level to the DC limits */
	if (rdev->pm.dpm.ac_power == false) {
		for (i = 0; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk > max_limits->mclk)
				ps->performance_levels[i].mclk = max_limits->mclk;
			if (ps->performance_levels[i].sclk > max_limits->sclk)
				ps->performance_levels[i].sclk = max_limits->sclk;
			if (ps->performance_levels[i].vddc > max_limits->vddc)
				ps->performance_levels[i].vddc = max_limits->vddc;
			if (ps->performance_levels[i].vddci > max_limits->vddci)
				ps->performance_levels[i].vddci = max_limits->vddci;
		}
	}

	/* XXX validate the min clocks required for display */

	/*
	 * Pick the base (low) level values.  With mclk switching disabled the
	 * low level must already carry the highest level's mclk/vddci so the
	 * memory clock never has to change at runtime.
	 */
	if (disable_mclk_switching) {
		mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
		sclk = ps->performance_levels[0].sclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
	} else {
		sclk = ps->performance_levels[0].sclk;
		mclk = ps->performance_levels[0].mclk;
		vddc = ps->performance_levels[0].vddc;
		vddci = ps->performance_levels[0].vddci;
	}

	/* adjusted low state */
	ps->performance_levels[0].sclk = sclk;
	ps->performance_levels[0].mclk = mclk;
	ps->performance_levels[0].vddc = vddc;
	ps->performance_levels[0].vddci = vddci;

	btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
				  &ps->performance_levels[0].sclk,
				  &ps->performance_levels[0].mclk);

	/* enforce monotonically non-decreasing sclk/vddc across levels */
	for (i = 1; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
			ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
		if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
			ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
	}

	if (disable_mclk_switching) {
		/* force a single mclk/vddci across all levels: use the maximum */
		mclk = ps->performance_levels[0].mclk;
		for (i = 1; i < ps->performance_level_count; i++) {
			if (mclk < ps->performance_levels[i].mclk)
				mclk = ps->performance_levels[i].mclk;
		}
		for (i = 0; i < ps->performance_level_count; i++) {
			ps->performance_levels[i].mclk = mclk;
			ps->performance_levels[i].vddci = vddci;
		}
	} else {
		/* enforce monotonically non-decreasing mclk/vddci across levels */
		for (i = 1; i < ps->performance_level_count; i++) {
			if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
				ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
			if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
				ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
		}
	}

	for (i = 1; i < ps->performance_level_count; i++)
		btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
					  &ps->performance_levels[i].sclk,
					  &ps->performance_levels[i].mclk);

	for (i = 0; i < ps->performance_level_count; i++)
		btc_adjust_clock_combinations(rdev, max_limits,
					      &ps->performance_levels[i]);

	/* bump voltages to satisfy the clock-dependent minimums */
	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
						   ps->performance_levels[i].sclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddci, &ps->performance_levels[i].vddci);
		btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
						   ps->performance_levels[i].mclk,
						   max_limits->vddc,  &ps->performance_levels[i].vddc);
		/* XXX validate the voltage required for display */
	}

	for (i = 0; i < ps->performance_level_count; i++) {
		btc_apply_voltage_delta_rules(rdev,
					      max_limits->vddc, max_limits->vddci,
					      &ps->performance_levels[i].vddc,
					      &ps->performance_levels[i].vddci);
	}

	/* the state is DC-compatible only if no level exceeds the DC vddc limit */
	ps->dc_compatible = true;
	for (i = 0; i < ps->performance_level_count; i++) {
		if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
			ps->dc_compatible = false;

		/* pcie gen2 needs a minimum vddc; drop the flag if we can't supply it */
		if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
			ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
	}
}
912
913static void ni_cg_clockgating_default(struct radeon_device *rdev)
914{
915	u32 count;
916	const u32 *ps = NULL;
917
918	ps = (const u32 *)&cayman_cgcg_cgls_default;
919	count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
920
921	btc_program_mgcg_hw_sequence(rdev, ps, count);
922}
923
924static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
925				      bool enable)
926{
927	u32 count;
928	const u32 *ps = NULL;
929
930	if (enable) {
931		ps = (const u32 *)&cayman_cgcg_cgls_enable;
932		count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
933	} else {
934		ps = (const u32 *)&cayman_cgcg_cgls_disable;
935		count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
936	}
937
938	btc_program_mgcg_hw_sequence(rdev, ps, count);
939}
940
941static void ni_mg_clockgating_default(struct radeon_device *rdev)
942{
943	u32 count;
944	const u32 *ps = NULL;
945
946	ps = (const u32 *)&cayman_mgcg_default;
947	count = CAYMAN_MGCG_DEFAULT_LENGTH;
948
949	btc_program_mgcg_hw_sequence(rdev, ps, count);
950}
951
952static void ni_mg_clockgating_enable(struct radeon_device *rdev,
953				     bool enable)
954{
955	u32 count;
956	const u32 *ps = NULL;
957
958	if (enable) {
959		ps = (const u32 *)&cayman_mgcg_enable;
960		count = CAYMAN_MGCG_ENABLE_LENGTH;
961	} else {
962		ps = (const u32 *)&cayman_mgcg_disable;
963		count = CAYMAN_MGCG_DISABLE_LENGTH;
964	}
965
966	btc_program_mgcg_hw_sequence(rdev, ps, count);
967}
968
969static void ni_ls_clockgating_default(struct radeon_device *rdev)
970{
971	u32 count;
972	const u32 *ps = NULL;
973
974	ps = (const u32 *)&cayman_sysls_default;
975	count = CAYMAN_SYSLS_DEFAULT_LENGTH;
976
977	btc_program_mgcg_hw_sequence(rdev, ps, count);
978}
979
980static void ni_ls_clockgating_enable(struct radeon_device *rdev,
981				     bool enable)
982{
983	u32 count;
984	const u32 *ps = NULL;
985
986	if (enable) {
987		ps = (const u32 *)&cayman_sysls_enable;
988		count = CAYMAN_SYSLS_ENABLE_LENGTH;
989	} else {
990		ps = (const u32 *)&cayman_sysls_disable;
991		count = CAYMAN_SYSLS_DISABLE_LENGTH;
992	}
993
994	btc_program_mgcg_hw_sequence(rdev, ps, count);
995
996}
997
998static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
999							     struct radeon_clock_voltage_dependency_table *table)
1000{
1001	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1002	u32 i;
1003
1004	if (table) {
1005		for (i = 0; i < table->count; i++) {
1006			if (0xff01 == table->entries[i].v) {
1007				if (pi->max_vddc == 0)
1008					return -EINVAL;
1009				table->entries[i].v = pi->max_vddc;
1010			}
1011		}
1012	}
1013	return 0;
1014}
1015
1016static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1017{
1018	int ret = 0;
1019
1020	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1021								&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1022
1023	ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1024								&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1025	return ret;
1026}
1027
/* Disable dynamic power management by clearing GLOBAL_PWRMGT_EN. */
static void ni_stop_dpm(struct radeon_device *rdev)
{
	WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
}
1032
#if 0
/*
 * Tell the SMC we are running on AC power (no message is sent for DC).
 * Returns 0 on success, -EINVAL if the SMC rejects the message.
 * Currently unused, hence compiled out.
 */
static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
					bool ac_power)
{
	if (ac_power)
		return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
			0 : -EINVAL;

	return 0;
}
#endif
1044
/*
 * Send a message to the SMC with a single u32 argument.  The argument is
 * passed through the SMC_SCRATCH0 mailbox register before the message.
 */
static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
						      PPSMC_Msg msg, u32 parameter)
{
	WREG32(SMC_SCRATCH0, parameter);
	return rv770_send_msg_to_smc(rdev, msg);
}
1051
/*
 * Before a power state switch, drop any forced performance level and
 * restrict the SMC to a single enabled level.
 *
 * Returns 0 on success, -EINVAL if either SMC message is rejected.
 */
static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
{
	if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
		return -EINVAL;

	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
		0 : -EINVAL;
}
1060
#if 0
/*
 * After a power state switch, clear the forced level and re-enable all
 * performance levels (parameter 0 = no restriction).  Currently unused.
 */
static int ni_unrestrict_performance_levels_after_switch(struct radeon_device *rdev)
{
	if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
		return -EINVAL;

	return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) == PPSMC_Result_OK) ?
		0 : -EINVAL;
}
#endif
1071
/*
 * Halt the SMC.  First poll (up to rdev->usec_timeout iterations) for
 * LB_SYNC_RESET_SEL to leave state 1, settle for 100us, then stop the
 * SMC via the shared r7xx helper.
 *
 * NOTE(review): the poll presumably waits for an in-flight sync/reset to
 * complete before halting — confirm against the register documentation.
 */
static void ni_stop_smc(struct radeon_device *rdev)
{
	u32 tmp;
	int i;

	for (i = 0; i < rdev->usec_timeout; i++) {
		tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
		if (tmp != 1)
			break;
		udelay(1);
	}

	udelay(100);

	r7xx_stop_smc(rdev);
}
1088
1089static int ni_process_firmware_header(struct radeon_device *rdev)
1090{
1091        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1092        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1093        struct ni_power_info *ni_pi = ni_get_pi(rdev);
1094	u32 tmp;
1095	int ret;
1096
1097	ret = rv770_read_smc_sram_dword(rdev,
1098					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1099					NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1100					&tmp, pi->sram_end);
1101
1102	if (ret)
1103		return ret;
1104
1105	pi->state_table_start = (u16)tmp;
1106
1107	ret = rv770_read_smc_sram_dword(rdev,
1108					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1109					NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1110					&tmp, pi->sram_end);
1111
1112	if (ret)
1113		return ret;
1114
1115	pi->soft_regs_start = (u16)tmp;
1116
1117	ret = rv770_read_smc_sram_dword(rdev,
1118					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1119					NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1120					&tmp, pi->sram_end);
1121
1122	if (ret)
1123		return ret;
1124
1125	eg_pi->mc_reg_table_start = (u16)tmp;
1126
1127	ret = rv770_read_smc_sram_dword(rdev,
1128					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1129					NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1130					&tmp, pi->sram_end);
1131
1132	if (ret)
1133		return ret;
1134
1135	ni_pi->fan_table_start = (u16)tmp;
1136
1137	ret = rv770_read_smc_sram_dword(rdev,
1138					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1139					NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1140					&tmp, pi->sram_end);
1141
1142	if (ret)
1143		return ret;
1144
1145	ni_pi->arb_table_start = (u16)tmp;
1146
1147	ret = rv770_read_smc_sram_dword(rdev,
1148					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1149					NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1150					&tmp, pi->sram_end);
1151
1152	if (ret)
1153		return ret;
1154
1155	ni_pi->cac_table_start = (u16)tmp;
1156
1157	ret = rv770_read_smc_sram_dword(rdev,
1158					NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1159					NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1160					&tmp, pi->sram_end);
1161
1162	if (ret)
1163		return ret;
1164
1165	ni_pi->spll_table_start = (u16)tmp;
1166
1167
1168	return ret;
1169}
1170
/*
 * Snapshot the current SPLL (engine PLL), MPLL (memory PLL) and MCLK
 * power-management register values so they can be used later when
 * building SMC state tables.
 */
static void ni_read_clock_registers(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
	ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
	ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
	ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
	ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
	ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
	ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
	ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
	ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
	ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
	ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
	ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
	ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
	ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
}
1190
#if 0
/*
 * Enter ultra-low-power state (currently unused).  If gfx clock gating
 * is enabled, first force the gfx clock on and off again (the GB_ADDR_CONFIG
 * read flushes the writes), then ask the SMC to switch to minimum power
 * and wait 25ms for it to take effect.
 */
static int ni_enter_ulp_state(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);

	if (pi->gfx_clock_gating) {
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
		WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
                WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
		RREG32(GB_ADDR_CONFIG);
        }

	WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
                 ~HOST_SMC_MSG_MASK);

	udelay(25000);

	return 0;
}
#endif
1211
/*
 * Program the SMC soft registers that control how long the SMC waits for
 * voltage regulators, backbias, ACPI transitions and mclk switches.
 * Times come from the power table (with fallbacks) and are converted
 * from time units into reference-clock ticks.
 *
 * NOTE(review): the "/ 1600" conversion factor ties the response times
 * to the xclk reference clock units — confirm the exact unit scaling
 * against the SMC firmware interface documentation.
 */
static void ni_program_response_times(struct radeon_device *rdev)
{
	u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
	u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
	u32 reference_clock;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);

	voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
	backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;

	/* fall back to sane defaults if the power table gave us nothing */
	if (voltage_response_time == 0)
		voltage_response_time = 1000;

	if (backbias_response_time == 0)
		backbias_response_time = 1000;

	acpi_delay_time = 15000;
	vbi_time_out = 100000;

	reference_clock = radeon_get_xclk(rdev);

	vddc_dly = (voltage_response_time  * reference_clock) / 1600;
	bb_dly   = (backbias_response_time * reference_clock) / 1600;
	acpi_dly = (acpi_delay_time * reference_clock) / 1600;
	vbi_dly  = (vbi_time_out * reference_clock) / 1600;

	mclk_switch_limit = (460 * reference_clock) / 100;

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
}
1248
1249static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1250					  struct atom_voltage_table *voltage_table,
1251					  NISLANDS_SMC_STATETABLE *table)
1252{
1253	unsigned int i;
1254
1255	for (i = 0; i < voltage_table->count; i++) {
1256		table->highSMIO[i] = 0;
1257		table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1258	}
1259}
1260
1261static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1262					   NISLANDS_SMC_STATETABLE *table)
1263{
1264	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1265	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1266	unsigned char i;
1267
1268	if (eg_pi->vddc_voltage_table.count) {
1269		ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1270		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1271		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1272			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1273
1274		for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1275			if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1276				table->maxVDDCIndexInPPTable = i;
1277				break;
1278			}
1279		}
1280	}
1281
1282	if (eg_pi->vddci_voltage_table.count) {
1283		ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1284
1285		table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1286		table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1287			cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1288	}
1289}
1290
1291static int ni_populate_voltage_value(struct radeon_device *rdev,
1292				     struct atom_voltage_table *table,
1293				     u16 value,
1294				     NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1295{
1296	unsigned int i;
1297
1298	for (i = 0; i < table->count; i++) {
1299		if (value <= table->entries[i].value) {
1300			voltage->index = (u8)i;
1301			voltage->value = cpu_to_be16(table->entries[i].value);
1302			break;
1303		}
1304	}
1305
1306	if (i >= table->count)
1307		return -EINVAL;
1308
1309	return 0;
1310}
1311
1312static void ni_populate_mvdd_value(struct radeon_device *rdev,
1313				   u32 mclk,
1314				   NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1315{
1316        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1317	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1318
1319	if (!pi->mvdd_control) {
1320		voltage->index = eg_pi->mvdd_high_index;
1321                voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1322		return;
1323	}
1324
1325	if (mclk <= pi->mvdd_split_frequency) {
1326		voltage->index = eg_pi->mvdd_low_index;
1327		voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1328	} else {
1329		voltage->index = eg_pi->mvdd_high_index;
1330		voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1331	}
1332}
1333
1334static int ni_get_std_voltage_value(struct radeon_device *rdev,
1335				    NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1336				    u16 *std_voltage)
1337{
1338	if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1339	    ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1340		*std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1341	else
1342		*std_voltage = be16_to_cpu(voltage->value);
1343
1344	return 0;
1345}
1346
/* Fill an SMC voltage entry from an already-resolved index and value. */
static void ni_populate_std_voltage_value(struct radeon_device *rdev,
					  u16 value, u8 index,
					  NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	voltage->index = index;
	voltage->value = cpu_to_be16(value);
}
1354
/*
 * Derive the SMC power scaling factor: the CAC TID count from
 * CG_CAC_CTRL multiplied by the xclk period (1e9 / xclk, reduced by
 * 10000).
 *
 * NOTE(review): the exact units of the result depend on radeon_get_xclk's
 * units — confirm before reusing this value elsewhere.
 */
static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
{
	u32 xclk_period;
	u32 xclk = radeon_get_xclk(rdev);
	u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;

	xclk_period = (1000000000UL / xclk);
	xclk_period /= 10000UL;

	return tmp * xclk_period;
}
1366
/*
 * Convert a power value in watts to SMC units: apply the scaling factor
 * and multiply by 4 (the << 2 presumably matches the SMC's fixed-point
 * format — confirm against the SMC interface).
 */
static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
{
	return (power_in_watts * scaling_factor) << 2;
}
1371
/*
 * Compute the DPM2 power boost limit for a state:
 *
 *   boost = near_tdp_limit * 0.9 * (std_vddc_med / std_vddc_high)^2
 *
 * where std_vddc_med/high are the standard voltages of the second-highest
 * and highest performance levels.  Returns 0 (no boost) when power
 * containment or the boost limit is disabled, when the state has fewer
 * than three performance levels, or on any lookup/overflow error.
 */
static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
					  struct radeon_ps *radeon_state,
					  u32 near_tdp_limit)
{
	struct ni_ps *state = ni_get_ps(radeon_state);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 power_boost_limit = 0;
	int ret;

	if (ni_pi->enable_power_containment &&
	    ni_pi->use_power_boost_limit) {
		NISLANDS_SMC_VOLTAGE_VALUE vddc;
		u16 std_vddc_med;
		u16 std_vddc_high;
		u64 tmp, n, d;

		if (state->performance_level_count < 3)
			return 0;

		/* standard voltage of the second-highest level */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 2].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
		if (ret)
			return 0;

		/* standard voltage of the highest level */
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
						state->performance_levels[state->performance_level_count - 1].vddc,
						&vddc);
		if (ret)
			return 0;

		ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
		if (ret)
			return 0;

		/* 64-bit math to avoid overflow of the voltage squares */
		n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
		d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
		tmp = div64_u64(n, d);

		if (tmp >> 32)
			return 0;
		power_boost_limit = (u32)tmp;
	}

	return power_boost_limit;
}
1423
/*
 * Compute the TDP and near-TDP limits after applying the user's TDP
 * adjustment (a percentage).  adjust_polarity selects whether the
 * adjustment raises or lowers the limits; the near-TDP limit is shifted
 * by the same absolute amount as the TDP limit.
 *
 * Returns -EINVAL if the requested adjustment exceeds the overdrive cap.
 */
static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
					    bool adjust_polarity,
					    u32 tdp_adjustment,
					    u32 *tdp_limit,
					    u32 *near_tdp_limit)
{
	if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
		return -EINVAL;

	if (adjust_polarity) {
		*tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
	} else {
		*tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
		*near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
	}

	return 0;
}
1443
/*
 * Compute the adjusted TDP / near-TDP / safe / boost power limits for a
 * state and upload them to the SMC's DPM2 parameter block.  No-op when
 * power containment is disabled.
 */
static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
				      struct radeon_ps *radeon_state)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);

	if (ni_pi->enable_power_containment) {
		NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
		u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
		u32 tdp_limit;
		u32 near_tdp_limit;
		u32 power_boost_limit;
		int ret;

		if (scaling_factor == 0)
			return -EINVAL;

		memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));

		ret = ni_calculate_adjusted_tdp_limits(rdev,
						       false, /* ??? */
						       rdev->pm.dpm.tdp_adjustment,
						       &tdp_limit,
						       &near_tdp_limit);
		if (ret)
			return ret;

		power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
								   near_tdp_limit);

		smc_table->dpm2Params.TDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
		smc_table->dpm2Params.NearTDPLimit =
			cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
		smc_table->dpm2Params.SafePowerLimit =
			cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
							   scaling_factor));
		smc_table->dpm2Params.PowerBoostLimit =
			cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));

		/* upload the four consecutive u32 limit fields starting at
		 * TDPLimit in a single transfer
		 */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
						    offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
					      (u8 *)(&smc_table->dpm2Params.TDPLimit),
					      sizeof(u32) * 4, pi->sram_end);
		if (ret)
			return ret;
	}

	return 0;
}
1495
/*
 * Copy the MC arbitration DRAM timing registers (and burst time field)
 * from one arb register set (F0..F3) to another, then ask the memory
 * controller to switch to the destination set.
 *
 * Returns -EINVAL for an invalid source or destination index.
 */
static int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
				       u32 arb_freq_src, u32 arb_freq_dest)
{
	u32 mc_arb_dram_timing;
	u32 mc_arb_dram_timing2;
	u32 burst_time;
	u32 mc_cg_config;

	/* read the timing values of the source set */
	switch (arb_freq_src) {
        case MC_CG_ARB_FREQ_F0:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
		break;
        case MC_CG_ARB_FREQ_F1:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
		break;
        case MC_CG_ARB_FREQ_F2:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
		break;
        case MC_CG_ARB_FREQ_F3:
		mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
		mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
		burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
		break;
        default:
		return -EINVAL;
	}

	/* write them into the destination set */
	switch (arb_freq_dest) {
        case MC_CG_ARB_FREQ_F0:
		WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
		break;
        case MC_CG_ARB_FREQ_F1:
		WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
		break;
        case MC_CG_ARB_FREQ_F2:
		WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
		break;
        case MC_CG_ARB_FREQ_F3:
		WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
		WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
		WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
		break;
	default:
		return -EINVAL;
	}

	/* NOTE(review): 0x0000000F presumably enables all four arb register
	 * sets — confirm against the MC register documentation.
	 */
	mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
	WREG32(MC_CG_CONFIG, mc_cg_config);
	WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);

	return 0;
}
1560
/*
 * Initialize the arb table index in SMC SRAM.  The top byte of the first
 * dword of the arb table holds the current arb frequency set; set it to
 * F1 while preserving the remaining bytes.
 */
static int ni_init_arb_table_index(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 tmp;
	int ret;

	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	tmp &= 0x00FFFFFF;
	tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;

	return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
					  tmp, pi->sram_end);
}
1579
/* At dpm init, copy the boot arb set (F0) into F1 and switch to it. */
static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
{
	return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
}
1584
/*
 * Switch the MC back to arb set F0 (e.g. when disabling dpm).  The
 * currently-active set index is read from the top byte of the arb table
 * in SMC SRAM; nothing is done if F0 is already active.
 */
static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 tmp;
	int ret;

	ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
					&tmp, pi->sram_end);
	if (ret)
		return ret;

	/* extract the current arb set index */
	tmp = (tmp >> 24) & 0xff;

	if (tmp == MC_CG_ARB_FREQ_F0)
		return 0;

	return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
}
1604
/*
 * Build the SMC MC arb timing register set for one performance level:
 * compute the refresh rate for the level's sclk, have the atom tables
 * program DRAM timings for the sclk/mclk pair, then snapshot the
 * resulting timing registers.  Always returns 0.
 */
static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
						struct rv7xx_pl *pl,
						SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
{
	u32 dram_timing;
	u32 dram_timing2;

	arb_regs->mc_arb_rfsh_rate =
		(u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);


	radeon_atom_set_engine_dram_timings(rdev,
                                            pl->sclk,
                                            pl->mclk);

	/* capture the timings atom just programmed */
	dram_timing = RREG32(MC_ARB_DRAM_TIMING);
	dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);

	arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
	arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);

	return 0;
}
1628
/*
 * Upload MC arb timing register sets for every performance level of a
 * state into SMC SRAM, starting at slot @first_arb_set.
 *
 * Returns 0 on success or the first error from building/copying a set.
 */
static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
						  struct radeon_ps *radeon_state,
						  unsigned int first_arb_set)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
	int i, ret = 0;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
		if (ret)
			break;

		/* write this level's set into slot (first_arb_set + i) */
		ret = rv770_copy_bytes_to_smc(rdev,
					      (u16)(ni_pi->arb_table_start +
						    offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
						    sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
					      (u8 *)&arb_regs,
					      (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
					      pi->sram_end);
		if (ret)
			break;
	}
	return ret;
}
1656
/* Program the new state's MC arb timings at the driver state arb slots. */
static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
					       struct radeon_ps *radeon_new_state)
{
	return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
						      NISLANDS_DRIVER_STATE_ARB_INDEX);
}
1663
/* The initial (boot) state always uses the high mvdd value. */
static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
					   struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	voltage->index = eg_pi->mvdd_high_index;
	voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
}
1672
/*
 * Populate the SMC state table's "initial" (boot) state from the boot
 * power state.  Only level 0 is filled in (levelCount = 1); clock
 * register values come from the snapshot taken at init time.
 */
static int ni_populate_smc_initial_state(struct radeon_device *rdev,
					 struct radeon_ps *radeon_initial_state,
					 NISLANDS_SMC_STATETABLE *table)
{
	struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;
	int ret;

	/* memory PLL / mclk registers */
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
	table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
	table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
		cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
	table->initialState.levels[0].mclk.vDLL_CNTL =
		cpu_to_be32(ni_pi->clock_registers.dll_cntl);
	table->initialState.levels[0].mclk.vMPLL_SS =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
	table->initialState.levels[0].mclk.vMPLL_SS2 =
		cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
	table->initialState.levels[0].mclk.mclk_value =
		cpu_to_be32(initial_state->performance_levels[0].mclk);

	/* engine PLL / sclk registers */
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
	table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
	table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
		cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
	table->initialState.levels[0].sclk.sclk_value =
		cpu_to_be32(initial_state->performance_levels[0].sclk);
	table->initialState.levels[0].arbRefreshState =
		NISLANDS_INITIAL_STATE_ARB_INDEX;

	table->initialState.levels[0].ACIndex = 0;

	/* voltages: vddc (with standard-voltage lookup), vddci, mvdd */
	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					initial_state->performance_levels[0].vddc,
					&table->initialState.levels[0].vddc);
	if (!ret) {
		u16 std_vddc;

		ret = ni_get_std_voltage_value(rdev,
					       &table->initialState.levels[0].vddc,
					       &std_vddc);
		if (!ret)
			ni_populate_std_voltage_value(rdev, std_vddc,
						      table->initialState.levels[0].vddc.index,
						      &table->initialState.levels[0].std_vddc);
	}

	if (eg_pi->vddci_control)
		ni_populate_voltage_value(rdev,
					  &eg_pi->vddci_voltage_table,
					  initial_state->performance_levels[0].vddci,
					  &table->initialState.levels[0].vddci);

	ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);

	reg = CG_R(0xffff) | CG_L(0);
	table->initialState.levels[0].aT = cpu_to_be32(reg);

	table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);

	if (pi->boot_in_gen2)
		table->initialState.levels[0].gen2PCIE = 1;
	else
		table->initialState.levels[0].gen2PCIE = 0;

	/* GDDR5: set strobe mode and EDC flags based on the boot mclk */
	if (pi->mem_gddr5) {
		table->initialState.levels[0].strobeMode =
			cypress_get_strobe_mode_settings(rdev,
							 initial_state->performance_levels[0].mclk);

		if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
			table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
		else
			table->initialState.levels[0].mcFlags =  0;
	}

	table->initialState.levelCount = 1;

	table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;

	/* DPM2 power containment parameters are unused for the boot state */
	table->initialState.levels[0].dpm2.MaxPS = 0;
	table->initialState.levels[0].dpm2.NearTDPDec = 0;
	table->initialState.levels[0].dpm2.AboveSafeInc = 0;
	table->initialState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1783
/*
 * Populate the SMC ACPI state: start from the initial state, then zero
 * the clocks and put the memory clock DLLs into reset/bypass so the
 * chip can idle at minimum power.  Always returns 0.
 */
static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
				      NISLANDS_SMC_STATETABLE *table)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
	u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
	u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
	u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
	u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
	u32 reg;
	int ret;

	table->ACPIState = table->initialState;

	table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;

	/* choose the ACPI vddc if the bios provides one, otherwise fall
	 * back to the minimum vddc from the table
	 */
	if (pi->acpi_vddc) {
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc, &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}

		if (pi->pcie_gen2) {
			if (pi->acpi_pcie_gen2)
				table->ACPIState.levels[0].gen2PCIE = 1;
			else
				table->ACPIState.levels[0].gen2PCIE = 0;
		} else {
			table->ACPIState.levels[0].gen2PCIE = 0;
		}
	} else {
		ret = ni_populate_voltage_value(rdev,
						&eg_pi->vddc_voltage_table,
						pi->min_vddc_in_table,
						&table->ACPIState.levels[0].vddc);
		if (!ret) {
			u16 std_vddc;

			ret = ni_get_std_voltage_value(rdev,
						       &table->ACPIState.levels[0].vddc,
						       &std_vddc);
			if (!ret)
				ni_populate_std_voltage_value(rdev, std_vddc,
							      table->ACPIState.levels[0].vddc.index,
							      &table->ACPIState.levels[0].std_vddc);
		}
		table->ACPIState.levels[0].gen2PCIE = 0;
	}

	if (eg_pi->acpi_vddci) {
		if (eg_pi->vddci_control)
			ni_populate_voltage_value(rdev,
						  &eg_pi->vddci_voltage_table,
						  eg_pi->acpi_vddci,
						  &table->ACPIState.levels[0].vddci);
	}


	/* power down / reset the memory PLLs and bypass the DLLs */
	mpll_ad_func_cntl &= ~PDNB;

	mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;

        if (pi->mem_gddr5)
                mpll_dq_func_cntl &= ~PDNB;
        mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;


	mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
			     MRDCKA1_RESET |
			     MRDCKB0_RESET |
			     MRDCKB1_RESET |
			     MRDCKC0_RESET |
			     MRDCKC1_RESET |
			     MRDCKD0_RESET |
			     MRDCKD1_RESET);

	mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
			      MRDCKA1_PDNB |
			      MRDCKB0_PDNB |
			      MRDCKB1_PDNB |
			      MRDCKC0_PDNB |
			      MRDCKC1_PDNB |
			      MRDCKD0_PDNB |
			      MRDCKD1_PDNB);

	dll_cntl |= (MRDCKA0_BYPASS |
                     MRDCKA1_BYPASS |
                     MRDCKB0_BYPASS |
                     MRDCKB1_BYPASS |
                     MRDCKC0_BYPASS |
                     MRDCKC1_BYPASS |
                     MRDCKD0_BYPASS |
                     MRDCKD1_BYPASS);

        spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
	spll_func_cntl_2 |= SCLK_MUX_SEL(4);

	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);

	/* clocks are zero in the ACPI state */
	table->ACPIState.levels[0].mclk.mclk_value = 0;

	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
	table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);

	table->ACPIState.levels[0].sclk.sclk_value = 0;

	ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);

	if (eg_pi->dynamic_ac_timing)
		table->ACPIState.levels[0].ACIndex = 1;

	/* DPM2 power containment parameters are unused for the ACPI state */
	table->ACPIState.levels[0].dpm2.MaxPS = 0;
	table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
	table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
	table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;

	reg = MIN_POWER_MASK | MAX_POWER_MASK;
	table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);

	reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
	table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);

	return 0;
}
1931
/*
 * Build the full SMC state table (voltage tables, system flags, initial,
 * ACPI, driver and ULV states plus the boot state's arb timings) and
 * upload it to SMC SRAM.
 *
 * Returns 0 on success or the first error from a sub-step or the copy.
 */
static int ni_init_smc_table(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
	NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;

	memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));

	ni_populate_smc_voltage_tables(rdev, table);

	/* tell the SMC how thermal protection is wired up */
	switch (rdev->pm.int_thermal_type) {
	case THERMAL_TYPE_NI:
	case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
		break;
	case THERMAL_TYPE_NONE:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
		break;
	default:
		table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
		break;
	}

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
		table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;

	if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
		table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;

	if (pi->mem_gddr5)
		table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;

	ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
	if (ret)
		return ret;

	ret = ni_populate_smc_acpi_state(rdev, table);
	if (ret)
		return ret;

	/* driver and ULV states start out as copies of the initial state */
	table->driverState = table->initialState;

	table->ULVState = table->initialState;

	ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
						     NISLANDS_INITIAL_STATE_ARB_INDEX);
	if (ret)
		return ret;

	return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
				       sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
}
1989
1990static int ni_calculate_sclk_params(struct radeon_device *rdev,
1991				    u32 engine_clock,
1992				    NISLANDS_SMC_SCLK_VALUE *sclk)
1993{
1994	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1995	struct ni_power_info *ni_pi = ni_get_pi(rdev);
1996	struct atom_clock_dividers dividers;
1997	u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
1998	u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
1999	u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2000	u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2001	u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2002	u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2003	u64 tmp;
2004	u32 reference_clock = rdev->clock.spll.reference_freq;
2005	u32 reference_divider;
2006	u32 fbdiv;
2007	int ret;
2008
2009	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2010					     engine_clock, false, &dividers);
2011	if (ret)
2012		return ret;
2013
2014	reference_divider = 1 + dividers.ref_div;
2015
2016
2017	tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
2018	do_div(tmp, reference_clock);
2019	fbdiv = (u32) tmp;
2020
2021	spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2022	spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2023	spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2024
2025	spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2026	spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2027
2028	spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2029	spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2030	spll_func_cntl_3 |= SPLL_DITHEN;
2031
2032	if (pi->sclk_ss) {
2033		struct radeon_atom_ss ss;
2034		u32 vco_freq = engine_clock * dividers.post_div;
2035
2036		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2037						     ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2038			u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2039			u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2040
2041			cg_spll_spread_spectrum &= ~CLK_S_MASK;
2042			cg_spll_spread_spectrum |= CLK_S(clk_s);
2043			cg_spll_spread_spectrum |= SSEN;
2044
2045			cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2046			cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2047		}
2048	}
2049
2050	sclk->sclk_value = engine_clock;
2051	sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2052	sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2053	sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2054	sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2055	sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2056	sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2057
2058	return 0;
2059}
2060
2061static int ni_populate_sclk_value(struct radeon_device *rdev,
2062				  u32 engine_clock,
2063				  NISLANDS_SMC_SCLK_VALUE *sclk)
2064{
2065	NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2066	int ret;
2067
2068	ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2069	if (!ret) {
2070		sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2071		sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2072		sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2073		sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2074		sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2075		sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2076		sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2077	}
2078
2079	return ret;
2080}
2081
2082static int ni_init_smc_spll_table(struct radeon_device *rdev)
2083{
2084        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2085	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2086	SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2087	NISLANDS_SMC_SCLK_VALUE sclk_params;
2088	u32 fb_div;
2089	u32 p_div;
2090	u32 clk_s;
2091	u32 clk_v;
2092	u32 sclk = 0;
2093	int i, ret;
2094	u32 tmp;
2095
2096	if (ni_pi->spll_table_start == 0)
2097		return -EINVAL;
2098
2099	spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2100	if (spll_table == NULL)
2101		return -ENOMEM;
2102
2103	for (i = 0; i < 256; i++) {
2104		ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2105		if (ret)
2106			break;
2107
2108		p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2109		fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2110		clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2111		clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2112
2113		fb_div &= ~0x00001FFF;
2114		fb_div >>= 1;
2115		clk_v >>= 6;
2116
2117		if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2118			ret = -EINVAL;
2119
2120		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2121			ret = -EINVAL;
2122
2123		if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2124			ret = -EINVAL;
2125
2126		if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2127			ret = -EINVAL;
2128
2129		if (ret)
2130			break;
2131
2132		tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2133			((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2134		spll_table->freq[i] = cpu_to_be32(tmp);
2135
2136		tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2137			((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2138		spll_table->ss[i] = cpu_to_be32(tmp);
2139
2140		sclk += 512;
2141	}
2142
2143	if (!ret)
2144		ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2145					      sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2146
2147	kfree(spll_table);
2148
2149	return ret;
2150}
2151
/*
 * ni_populate_mclk_value - compute MPLL register values for a memory clock.
 * @rdev: radeon device
 * @engine_clock: engine clock this memory clock is paired with
 * @memory_clock: target memory clock
 * @mclk: output SMC MCLK register set (stored big-endian for the firmware)
 * @strobe_mode: request strobe-mode dividers from the VBIOS
 * @dll_state_on: keep the memory DLLs (MRDCK*) powered up
 *
 * Derives the AD/DQ MPLL divider settings, optional memory spread spectrum,
 * and DLL power controls for @memory_clock, storing everything byte-swapped
 * in @mclk.  Returns 0 on success or a negative error code.
 */
static int ni_populate_mclk_value(struct radeon_device *rdev,
				  u32 engine_clock,
				  u32 memory_clock,
				  NISLANDS_SMC_MCLK_VALUE *mclk,
				  bool strobe_mode,
				  bool dll_state_on)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
	u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
	u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
	u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
	u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
	u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
	u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
	u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
	struct atom_clock_dividers dividers;
	u32 ibias;
	u32 dll_speed;
	int ret;
	u32 mc_seq_misc7;

	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
					     memory_clock, strobe_mode, &dividers);
	if (ret)
		return ret;

	if (!strobe_mode) {
		/* NOTE(review): bit 27 of MC_SEQ_MISC7 appears to force a
		 * post divider of 1 outside strobe mode — confirm against
		 * the MC sequencer register spec. */
		mc_seq_misc7 = RREG32(MC_SEQ_MISC7);

		if (mc_seq_misc7 & 0x8000000)
			dividers.post_div = 1;
	}

	ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);

	/* Program the address/command MPLL dividers. */
	mpll_ad_func_cntl &= ~(CLKR_MASK |
			       YCLK_POST_DIV_MASK |
			       CLKF_MASK |
			       CLKFRAC_MASK |
			       IBIAS_MASK);
	mpll_ad_func_cntl |= CLKR(dividers.ref_div);
	mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
	mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
	mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
	mpll_ad_func_cntl |= IBIAS(ibias);

	if (dividers.vco_mode)
		mpll_ad_func_cntl_2 |= VCO_MODE;
	else
		mpll_ad_func_cntl_2 &= ~VCO_MODE;

	/* GDDR5 also uses the DQ MPLL; mirror the divider setup there. */
	if (pi->mem_gddr5) {
		mpll_dq_func_cntl &= ~(CLKR_MASK |
				       YCLK_POST_DIV_MASK |
				       CLKF_MASK |
				       CLKFRAC_MASK |
				       IBIAS_MASK);
		mpll_dq_func_cntl |= CLKR(dividers.ref_div);
		mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
		mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
		mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
		mpll_dq_func_cntl |= IBIAS(ibias);

		if (strobe_mode)
			mpll_dq_func_cntl &= ~PDNB;
		else
			mpll_dq_func_cntl |= PDNB;

		if (dividers.vco_mode)
			mpll_dq_func_cntl_2 |= VCO_MODE;
		else
			mpll_dq_func_cntl_2 &= ~VCO_MODE;
	}

	/* Apply memory-clock spread spectrum if the VBIOS provides SS info
	 * for this VCO frequency. */
	if (pi->mclk_ss) {
		struct radeon_atom_ss ss;
		u32 vco_freq = memory_clock * dividers.post_div;

		if (radeon_atombios_get_asic_ss_info(rdev, &ss,
						     ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
			u32 reference_clock = rdev->clock.mpll.reference_freq;
			u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
			u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
			u32 clk_v = ss.percentage *
				(0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);

			mpll_ss1 &= ~CLKV_MASK;
			mpll_ss1 |= CLKV(clk_v);

			mpll_ss2 &= ~CLKS_MASK;
			mpll_ss2 |= CLKS(clk_s);
		}
	}

	dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
					memory_clock);

	/* Set DLL speed and power the per-channel memory DLLs up or down. */
	mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
	mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
	if (dll_state_on)
		mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
				     MRDCKA1_PDNB |
				     MRDCKB0_PDNB |
				     MRDCKB1_PDNB |
				     MRDCKC0_PDNB |
				     MRDCKC1_PDNB |
				     MRDCKD0_PDNB |
				     MRDCKD1_PDNB);
	else
		mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
				      MRDCKA1_PDNB |
				      MRDCKB0_PDNB |
				      MRDCKB1_PDNB |
				      MRDCKC0_PDNB |
				      MRDCKC1_PDNB |
				      MRDCKD0_PDNB |
				      MRDCKD1_PDNB);


	/* Byte-swap everything for the SMC firmware. */
	mclk->mclk_value = cpu_to_be32(memory_clock);
	mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
	mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
	mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
	mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
	mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
	mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
	mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
	mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);

	return 0;
}
2285
2286static void ni_populate_smc_sp(struct radeon_device *rdev,
2287			       struct radeon_ps *radeon_state,
2288			       NISLANDS_SMC_SWSTATE *smc_state)
2289{
2290	struct ni_ps *ps = ni_get_ps(radeon_state);
2291	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2292	int i;
2293
2294	for (i = 0; i < ps->performance_level_count - 1; i++)
2295		smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2296
2297	smc_state->levels[ps->performance_level_count - 1].bSP =
2298		cpu_to_be32(pi->psp);
2299}
2300
/*
 * ni_convert_power_level_to_smc - translate one performance level for the SMC.
 * @rdev: radeon device
 * @pl: driver performance level (sclk/mclk/voltages/flags)
 * @level: output SMC hardware performance level
 *
 * Fills in PCIe gen, SCLK/MCLK PLL settings, memory controller flags
 * (stutter/EDC/RTT/strobe), and the VDDC/VDDCI/MVDD voltage entries.
 * Returns 0 on success or a negative error code.
 */
static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
					 struct rv7xx_pl *pl,
					 NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
        struct ni_power_info *ni_pi = ni_get_pi(rdev);
	int ret;
	bool dll_state_on;
	u16 std_vddc;
	u32 tmp = RREG32(DC_STUTTER_CNTL);

	level->gen2PCIE = pi->pcie_gen2 ?
		((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;

	ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
	if (ret)
		return ret;

	/* Memory self-refresh stutter: only below the threshold, with UVD
	 * idle, and when the display controller has stutter enabled. */
	level->mcFlags =  0;
	if (pi->mclk_stutter_mode_threshold &&
	    (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
	    !eg_pi->uvd_enabled &&
	    (tmp & DC_STUTTER_ENABLE_A) &&
	    (tmp & DC_STUTTER_ENABLE_B))
		level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;

	if (pi->mem_gddr5) {
		/* EDC read/write above their respective mclk thresholds. */
		if (pl->mclk > pi->mclk_edc_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
		if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
			level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;

		level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);

		/* DLL state depends on the strobe-mode frequency ratio vs
		 * the value fused into MC_SEQ_MISC7[19:16]. */
		if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
			if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
			    ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
				dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
			else
				dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
		} else {
			dll_state_on = false;
			if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
				level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
		}

		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
					     &level->mclk,
					     (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
					     dll_state_on);
	} else
		ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);

	if (ret)
		return ret;

	ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
					pl->vddc, &level->vddc);
	if (ret)
		return ret;

	ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
	if (ret)
		return ret;

	ni_populate_std_voltage_value(rdev, std_vddc,
				      level->vddc.index, &level->std_vddc);

	if (eg_pi->vddci_control) {
		ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
						pl->vddci, &level->vddci);
		if (ret)
			return ret;
	}

	ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);

	return ret;
}
2381
/*
 * ni_populate_smc_t - fill the per-level aT transition timing values.
 *
 * Computes the CG_R/CG_L hysteresis terms between adjacent performance
 * levels via r600_calculate_at(), scaled by the configured bsp/pbsp
 * sample periods.  Returns 0 on success, -EINVAL for more than 8 levels.
 */
static int ni_populate_smc_t(struct radeon_device *rdev,
			     struct radeon_ps *radeon_state,
			     NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 a_t;
	u32 t_l, t_h;
	u32 high_bsp;
	int i, ret;

	if (state->performance_level_count >= 9)
		return -EINVAL;

	/* Single level: nothing to transition between. */
	if (state->performance_level_count < 2) {
		a_t = CG_R(0xffff) | CG_L(0);
		smc_state->levels[0].aT = cpu_to_be32(a_t);
		return 0;
	}

	smc_state->levels[0].aT = cpu_to_be32(0);

	for (i = 0; i <= state->performance_level_count - 2; i++) {
		/* UVD states use a different time base per level. */
		if (eg_pi->uvd_enabled)
			ret = r600_calculate_at(
				1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);
		else
			ret = r600_calculate_at(
				1000 * (i + 1),
				100 * R600_AH_DFLT,
				state->performance_levels[i + 1].sclk,
				state->performance_levels[i].sclk,
				&t_l,
				&t_h);

		/* Fall back to fixed hysteresis around the time base. */
		if (ret) {
			t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
			t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
		}

		/* Down-transition threshold (CG_R) for this level... */
		a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
		a_t |= CG_R(t_l * pi->bsp / 20000);
		smc_state->levels[i].aT = cpu_to_be32(a_t);

		high_bsp = (i == state->performance_level_count - 2) ?
			pi->pbsp : pi->bsp;

		/* ...and up-transition threshold (CG_L) for the next one. */
		a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
		smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
	}

	return 0;
}
2441
/*
 * ni_populate_power_containment_values - fill DPM2 power containment fields.
 *
 * Computes the adjusted TDP limits and power boost limit, writes the boost
 * limit into the SMC DPM2 parameter block, then fills each level's dpm2
 * pulse-skip/TDP fields and POWERBOOST state flag.  Level 0 is left with
 * containment disabled.  Returns 0 on success or a negative error code.
 */
static int ni_populate_power_containment_values(struct radeon_device *rdev,
						struct radeon_ps *radeon_state,
						NISLANDS_SMC_SWSTATE *smc_state)
{
        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	u32 prev_sclk;
	u32 max_sclk;
	u32 min_sclk;
	int i, ret;
	u32 tdp_limit;
	u32 near_tdp_limit;
	u32 power_boost_limit;
	u8 max_ps_percent;

	if (ni_pi->enable_power_containment == false)
		return 0;

	if (state->performance_level_count == 0)
		return -EINVAL;

	if (smc_state->levelCount != state->performance_level_count)
		return -EINVAL;

	ret = ni_calculate_adjusted_tdp_limits(rdev,
					       false, /* ??? */
					       rdev->pm.dpm.tdp_adjustment,
					       &tdp_limit,
					       &near_tdp_limit);
	if (ret)
		return ret;

	power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);

	/* If the SMC write fails, just run without power boost. */
	ret = rv770_write_smc_sram_dword(rdev,
					 pi->state_table_start +
					 offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
					 offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
					 ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
					 pi->sram_end);
	if (ret)
		power_boost_limit = 0;

	/* Lowest level: no pulse skipping or TDP stepping. */
	smc_state->levels[0].dpm2.MaxPS = 0;
	smc_state->levels[0].dpm2.NearTDPDec = 0;
	smc_state->levels[0].dpm2.AboveSafeInc = 0;
	smc_state->levels[0].dpm2.BelowSafeInc = 0;
	smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;

	for (i = 1; i < state->performance_level_count; i++) {
		prev_sclk = state->performance_levels[i-1].sclk;
		max_sclk  = state->performance_levels[i].sclk;
		/* Highest level may be throttled further than middle ones. */
		max_ps_percent = (i != (state->performance_level_count - 1)) ?
			NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;

		if (max_sclk < prev_sclk)
			return -EINVAL;

		if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
			min_sclk = max_sclk;
		else if (1 == i)
			min_sclk = prev_sclk;
		else
			min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;

		/* Never throttle below the lowest level's sclk. */
		if (min_sclk < state->performance_levels[0].sclk)
			min_sclk = state->performance_levels[0].sclk;

		if (min_sclk == 0)
			return -EINVAL;

		/* MaxPS = fraction of pulses that may be skipped at this level. */
		smc_state->levels[i].dpm2.MaxPS =
			(u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
		smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
		smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
		smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
		smc_state->levels[i].stateFlags |=
			((i != (state->performance_level_count - 1)) && power_boost_limit) ?
			PPSMC_STATEFLAG_POWERBOOST : 0;
	}

	return 0;
}
2527
2528static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2529					 struct radeon_ps *radeon_state,
2530					 NISLANDS_SMC_SWSTATE *smc_state)
2531{
2532	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2533	struct ni_ps *state = ni_get_ps(radeon_state);
2534	u32 sq_power_throttle;
2535	u32 sq_power_throttle2;
2536	bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2537	int i;
2538
2539	if (state->performance_level_count == 0)
2540		return -EINVAL;
2541
2542	if (smc_state->levelCount != state->performance_level_count)
2543		return -EINVAL;
2544
2545	if (rdev->pm.dpm.sq_ramping_threshold == 0)
2546		return -EINVAL;
2547
2548	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2549		enable_sq_ramping = false;
2550
2551	if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2552		enable_sq_ramping = false;
2553
2554	if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2555		enable_sq_ramping = false;
2556
2557	if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2558		enable_sq_ramping = false;
2559
2560	if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2561		enable_sq_ramping = false;
2562
2563	for (i = 0; i < state->performance_level_count; i++) {
2564		sq_power_throttle  = 0;
2565		sq_power_throttle2 = 0;
2566
2567		if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2568		    enable_sq_ramping) {
2569			sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2570			sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2571			sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2572			sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2573			sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2574		} else {
2575			sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2576			sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2577		}
2578
2579		smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2580		smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2581	}
2582
2583	return 0;
2584}
2585
2586static int ni_enable_power_containment(struct radeon_device *rdev,
2587				       struct radeon_ps *radeon_new_state,
2588				       bool enable)
2589{
2590        struct ni_power_info *ni_pi = ni_get_pi(rdev);
2591	PPSMC_Result smc_result;
2592	int ret = 0;
2593
2594	if (ni_pi->enable_power_containment) {
2595		if (enable) {
2596			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2597				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2598				if (smc_result != PPSMC_Result_OK) {
2599					ret = -EINVAL;
2600					ni_pi->pc_enabled = false;
2601				} else {
2602					ni_pi->pc_enabled = true;
2603				}
2604			}
2605		} else {
2606			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2607			if (smc_result != PPSMC_Result_OK)
2608				ret = -EINVAL;
2609			ni_pi->pc_enabled = false;
2610		}
2611	}
2612
2613	return ret;
2614}
2615
/*
 * ni_convert_power_state_to_smc - translate a full power state for the SMC.
 *
 * Converts every performance level, assigns arbiter/AC-timing indices and
 * display watermarks, sets the DC flag, programs the watermark threshold
 * soft register, then fills in sample periods, power containment and SQ
 * ramping values, and finally the aT transition timings.
 * Returns 0 on success or a negative error code.
 */
static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
					 struct radeon_ps *radeon_state,
					 NISLANDS_SMC_SWSTATE *smc_state)
{
        struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	struct ni_ps *state = ni_get_ps(radeon_state);
	int i, ret;
	/* NOTE(review): "* 100 / 100" looks like a tunable percentage left
	 * at 100% of the top level's sclk — confirm intent before changing. */
	u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;

	if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
		smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;

	smc_state->levelCount = 0;

	if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
		return -EINVAL;

	for (i = 0; i < state->performance_level_count; i++) {
		ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
						    &smc_state->levels[i]);
		smc_state->levels[i].arbRefreshState =
			(u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);

		if (ret)
			return ret;

		if (ni_pi->enable_power_containment)
			smc_state->levels[i].displayWatermark =
				(state->performance_levels[i].sclk < threshold) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
		else
			smc_state->levels[i].displayWatermark = (i < 2) ?
				PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;

		if (eg_pi->dynamic_ac_timing)
			smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
		else
			smc_state->levels[i].ACIndex = 0;

		smc_state->levelCount++;
	}

	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
				      cpu_to_be32(threshold / 512));

	ni_populate_smc_sp(rdev, radeon_state, smc_state);

	/* On failure, fall back to running without the optional feature. */
	ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_power_containment = false;

	ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
	if (ret)
		ni_pi->enable_sq_ramping = false;

	return ni_populate_smc_t(rdev, radeon_state, smc_state);
}
2674
2675static int ni_upload_sw_state(struct radeon_device *rdev,
2676			      struct radeon_ps *radeon_new_state)
2677{
2678	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2679	u16 address = pi->state_table_start +
2680		offsetof(NISLANDS_SMC_STATETABLE, driverState);
2681	u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2682		((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2683	int ret;
2684	NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2685
2686	if (smc_state == NULL)
2687		return -ENOMEM;
2688
2689	ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2690	if (ret)
2691		goto done;
2692
2693	ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2694
2695done:
2696	kfree(smc_state);
2697
2698	return ret;
2699}
2700
2701static int ni_set_mc_special_registers(struct radeon_device *rdev,
2702				       struct ni_mc_reg_table *table)
2703{
2704	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2705	u8 i, j, k;
2706	u32 temp_reg;
2707
2708	for (i = 0, j = table->last; i < table->last; i++) {
2709		switch (table->mc_reg_address[i].s1) {
2710		case MC_SEQ_MISC1 >> 2:
2711			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2712				return -EINVAL;
2713			temp_reg = RREG32(MC_PMG_CMD_EMRS);
2714			table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2715			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2716			for (k = 0; k < table->num_entries; k++)
2717				table->mc_reg_table_entry[k].mc_data[j] =
2718					((temp_reg & 0xffff0000)) |
2719					((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2720			j++;
2721			if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2722				return -EINVAL;
2723
2724			temp_reg = RREG32(MC_PMG_CMD_MRS);
2725			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2726			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2727			for(k = 0; k < table->num_entries; k++) {
2728				table->mc_reg_table_entry[k].mc_data[j] =
2729					(temp_reg & 0xffff0000) |
2730					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2731				if (!pi->mem_gddr5)
2732					table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2733			}
2734			j++;
2735			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2736				return -EINVAL;
2737			break;
2738		case MC_SEQ_RESERVE_M >> 2:
2739			temp_reg = RREG32(MC_PMG_CMD_MRS1);
2740			table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2741			table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2742			for (k = 0; k < table->num_entries; k++)
2743				table->mc_reg_table_entry[k].mc_data[j] =
2744					(temp_reg & 0xffff0000) |
2745					(table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2746			j++;
2747			if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2748				return -EINVAL;
2749			break;
2750		default:
2751			break;
2752		}
2753	}
2754
2755	table->last = j;
2756
2757	return 0;
2758}
2759
2760static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2761{
2762	bool result = true;
2763
2764	switch (in_reg) {
2765        case  MC_SEQ_RAS_TIMING >> 2:
2766		*out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2767		break;
2768        case MC_SEQ_CAS_TIMING >> 2:
2769		*out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2770		break;
2771        case MC_SEQ_MISC_TIMING >> 2:
2772		*out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2773		break;
2774        case MC_SEQ_MISC_TIMING2 >> 2:
2775		*out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2776		break;
2777        case MC_SEQ_RD_CTL_D0 >> 2:
2778		*out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2779		break;
2780        case MC_SEQ_RD_CTL_D1 >> 2:
2781		*out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2782		break;
2783        case MC_SEQ_WR_CTL_D0 >> 2:
2784		*out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2785		break;
2786        case MC_SEQ_WR_CTL_D1 >> 2:
2787		*out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2788		break;
2789        case MC_PMG_CMD_EMRS >> 2:
2790		*out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2791		break;
2792        case MC_PMG_CMD_MRS >> 2:
2793		*out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2794		break;
2795        case MC_PMG_CMD_MRS1 >> 2:
2796		*out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2797		break;
2798        case MC_SEQ_PMG_TIMING >> 2:
2799		*out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2800		break;
2801        case MC_PMG_CMD_MRS2 >> 2:
2802		*out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2803		break;
2804        default:
2805		result = false;
2806		break;
2807	}
2808
2809	return result;
2810}
2811
2812static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2813{
2814	u8 i, j;
2815
2816	for (i = 0; i < table->last; i++) {
2817		for (j = 1; j < table->num_entries; j++) {
2818			if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2819				table->valid_flag |= 1 << i;
2820				break;
2821			}
2822		}
2823	}
2824}
2825
2826static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2827{
2828	u32 i;
2829	u16 address;
2830
2831	for (i = 0; i < table->last; i++)
2832		table->mc_reg_address[i].s0 =
2833			ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2834			address : table->mc_reg_address[i].s1;
2835}
2836
2837static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2838				      struct ni_mc_reg_table *ni_table)
2839{
2840	u8 i, j;
2841
2842	if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2843		return -EINVAL;
2844	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2845		return -EINVAL;
2846
2847	for (i = 0; i < table->last; i++)
2848		ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2849	ni_table->last = table->last;
2850
2851	for (i = 0; i < table->num_entries; i++) {
2852		ni_table->mc_reg_table_entry[i].mclk_max =
2853			table->mc_reg_table_entry[i].mclk_max;
2854		for (j = 0; j < table->last; j++)
2855			ni_table->mc_reg_table_entry[i].mc_data[j] =
2856				table->mc_reg_table_entry[i].mc_data[j];
2857	}
2858	ni_table->num_entries = table->num_entries;
2859
2860	return 0;
2861}
2862
2863static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2864{
2865	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2866	int ret;
2867	struct atom_mc_reg_table *table;
2868	struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2869	u8 module_index = rv770_get_memory_module_index(rdev);
2870
2871        table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2872        if (!table)
2873                return -ENOMEM;
2874
2875	WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2876	WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2877	WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2878	WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2879	WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2880	WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2881	WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2882	WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2883	WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2884	WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2885	WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2886	WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2887	WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2888
2889	ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2890
2891        if (ret)
2892                goto init_mc_done;
2893
2894	ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2895
2896        if (ret)
2897                goto init_mc_done;
2898
2899	ni_set_s0_mc_reg_index(ni_table);
2900
2901	ret = ni_set_mc_special_registers(rdev, ni_table);
2902
2903        if (ret)
2904                goto init_mc_done;
2905
2906	ni_set_valid_flag(ni_table);
2907
2908init_mc_done:
2909        kfree(table);
2910
2911	return ret;
2912}
2913
2914static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2915					 SMC_NIslands_MCRegisters *mc_reg_table)
2916{
2917	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2918	u32 i, j;
2919
2920	for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2921		if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2922			if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2923				break;
2924			mc_reg_table->address[i].s0 =
2925				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2926			mc_reg_table->address[i].s1 =
2927				cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2928			i++;
2929		}
2930	}
2931	mc_reg_table->last = (u8)i;
2932}
2933
2934
2935static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2936				    SMC_NIslands_MCRegisterSet *data,
2937				    u32 num_entries, u32 valid_flag)
2938{
2939	u32 i, j;
2940
2941	for (i = 0, j = 0; j < num_entries; j++) {
2942		if (valid_flag & (1 << j)) {
2943			data->value[i] = cpu_to_be32(entry->mc_data[j]);
2944			i++;
2945		}
2946	}
2947}
2948
2949static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2950						 struct rv7xx_pl *pl,
2951						 SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2952{
2953	struct ni_power_info *ni_pi = ni_get_pi(rdev);
2954	u32 i = 0;
2955
2956	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2957		if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2958			break;
2959	}
2960
2961	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2962		--i;
2963
2964	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2965				mc_reg_table_data,
2966				ni_pi->mc_reg_table.last,
2967				ni_pi->mc_reg_table.valid_flag);
2968}
2969
2970static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2971					   struct radeon_ps *radeon_state,
2972					   SMC_NIslands_MCRegisters *mc_reg_table)
2973{
2974	struct ni_ps *state = ni_get_ps(radeon_state);
2975	int i;
2976
2977	for (i = 0; i < state->performance_level_count; i++) {
2978		ni_convert_mc_reg_table_entry_to_smc(rdev,
2979						     &state->performance_levels[i],
2980						     &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2981	}
2982}
2983
2984static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2985				    struct radeon_ps *radeon_boot_state)
2986{
2987	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2988	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2989        struct ni_power_info *ni_pi = ni_get_pi(rdev);
2990	struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
2991	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
2992
2993	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
2994
2995	rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
2996
2997	ni_populate_mc_reg_addresses(rdev, mc_reg_table);
2998
2999	ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3000					     &mc_reg_table->data[0]);
3001
3002	ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3003				&mc_reg_table->data[1],
3004				ni_pi->mc_reg_table.last,
3005				ni_pi->mc_reg_table.valid_flag);
3006
3007	ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3008
3009	return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3010				       (u8 *)mc_reg_table,
3011				       sizeof(SMC_NIslands_MCRegisters),
3012				       pi->sram_end);
3013}
3014
3015static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3016				  struct radeon_ps *radeon_new_state)
3017{
3018	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3019	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3020        struct ni_power_info *ni_pi = ni_get_pi(rdev);
3021	struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3022	SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3023	u16 address;
3024
3025	memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3026
3027	ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3028
3029	address = eg_pi->mc_reg_table_start +
3030		(u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3031
3032	return rv770_copy_bytes_to_smc(rdev, address,
3033				       (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3034				       sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3035				       pi->sram_end);
3036}
3037
3038static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3039						   PP_NIslands_CACTABLES *cac_tables)
3040{
3041	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3042	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3043	u32 leakage = 0;
3044	unsigned int i, j, table_size;
3045	s32 t;
3046	u32 smc_leakage, max_leakage = 0;
3047	u32 scaling_factor;
3048
3049	table_size = eg_pi->vddc_voltage_table.count;
3050
3051	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3052		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3053
3054	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3055
3056	for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3057		for (j = 0; j < table_size; j++) {
3058			t = (1000 * ((i + 1) * 8));
3059
3060			if (t < ni_pi->cac_data.leakage_minimum_temperature)
3061				t = ni_pi->cac_data.leakage_minimum_temperature;
3062
3063			ni_calculate_leakage_for_v_and_t(rdev,
3064							 &ni_pi->cac_data.leakage_coefficients,
3065							 eg_pi->vddc_voltage_table.entries[j].value,
3066							 t,
3067							 ni_pi->cac_data.i_leakage,
3068							 &leakage);
3069
3070			smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3071			if (smc_leakage > max_leakage)
3072				max_leakage = smc_leakage;
3073
3074			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3075		}
3076	}
3077
3078	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3079		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3080			cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3081	}
3082	return 0;
3083}
3084
3085static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3086					    PP_NIslands_CACTABLES *cac_tables)
3087{
3088	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3089	struct radeon_cac_leakage_table *leakage_table =
3090		&rdev->pm.dpm.dyn_state.cac_leakage_table;
3091	u32 i, j, table_size;
3092	u32 smc_leakage, max_leakage = 0;
3093	u32 scaling_factor;
3094
3095	if (!leakage_table)
3096		return -EINVAL;
3097
3098	table_size = leakage_table->count;
3099
3100	if (eg_pi->vddc_voltage_table.count != table_size)
3101		table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3102			eg_pi->vddc_voltage_table.count : leakage_table->count;
3103
3104	if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3105		table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3106
3107	if (table_size == 0)
3108		return -EINVAL;
3109
3110	scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3111
3112	for (j = 0; j < table_size; j++) {
3113		smc_leakage = leakage_table->entries[j].leakage;
3114
3115		if (smc_leakage > max_leakage)
3116			max_leakage = smc_leakage;
3117
3118		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3119			cac_tables->cac_lkge_lut[i][j] =
3120				cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3121	}
3122
3123	for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3124		for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3125			cac_tables->cac_lkge_lut[i][j] =
3126				cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3127	}
3128	return 0;
3129}
3130
3131static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3132{
3133	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3134	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3135	PP_NIslands_CACTABLES *cac_tables = NULL;
3136	int i, ret;
3137        u32 reg;
3138
3139	if (ni_pi->enable_cac == false)
3140		return 0;
3141
3142	cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3143	if (!cac_tables)
3144		return -ENOMEM;
3145
3146	reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3147	reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3148		TID_UNIT(ni_pi->cac_weights->tid_unit));
3149	WREG32(CG_CAC_CTRL, reg);
3150
3151	for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3152		ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3153
3154	for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3155		cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3156
3157	ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3158	ni_pi->cac_data.pwr_const = 0;
3159	ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3160	ni_pi->cac_data.bif_cac_value = 0;
3161	ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3162	ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3163	ni_pi->cac_data.allow_ovrflw = 0;
3164	ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3165	ni_pi->cac_data.num_win_tdp = 0;
3166	ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3167
3168	if (ni_pi->driver_calculate_cac_leakage)
3169		ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3170	else
3171		ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3172
3173	if (ret)
3174		goto done_free;
3175
3176	cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
3177	cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3178	cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3179	cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
3180	cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
3181	cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
3182	cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
3183	cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
3184	cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3185
3186	ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3187				      sizeof(PP_NIslands_CACTABLES), pi->sram_end);
3188
3189done_free:
3190	if (ret) {
3191		ni_pi->enable_cac = false;
3192		ni_pi->enable_power_containment = false;
3193	}
3194
3195	kfree(cac_tables);
3196
3197	return 0;
3198}
3199
/*
 * Program the per-block CAC weight registers from the chip-specific
 * weight table selected at init time (ni_pi->cac_weights).
 *
 * Returns 0 on success, -EINVAL if no weight table was selected.
 * Register order and read-modify-write masks are kept exactly as the
 * hardware sequence requires.
 */
static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
{
	struct ni_power_info *ni_pi = ni_get_pi(rdev);
	u32 reg;

	/* nothing to do if CAC is off or this chip needs no configuration */
	if (!ni_pi->enable_cac ||
	    !ni_pi->cac_configuration_required)
		return 0;

	if (ni_pi->cac_weights == NULL)
		return -EINVAL;

	/* region 1: texture cache / TA weights */
	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
						      WEIGHT_TCP_SIG1_MASK |
						      WEIGHT_TA_SIG_MASK);
	reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
		WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
		WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
						      WEIGHT_TCC_EN1_MASK |
						      WEIGHT_TCC_EN2_MASK);
	reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
		WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
		WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
	WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);

	/* region 2: CB / DB / SX weights */
	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
						      WEIGHT_CB_EN1_MASK |
						      WEIGHT_CB_EN2_MASK |
						      WEIGHT_CB_EN3_MASK);
	reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
		WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
		WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
		WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
						      WEIGHT_DB_SIG1_MASK |
						      WEIGHT_DB_SIG2_MASK |
						      WEIGHT_DB_SIG3_MASK);
	reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
		WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
		WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
		WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
						      WEIGHT_SXM_SIG1_MASK |
						      WEIGHT_SXM_SIG2_MASK |
						      WEIGHT_SXS_SIG0_MASK |
						      WEIGHT_SXS_SIG1_MASK);
	reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
		WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
		WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
		WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
		WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
	WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);

	/* region 3: crossbar / SPI weights */
	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
						      WEIGHT_XBR_1_MASK |
						      WEIGHT_XBR_2_MASK |
						      WEIGHT_SPI_SIG0_MASK);
	reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
		WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
		WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
		WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
						      WEIGHT_SPI_SIG2_MASK |
						      WEIGHT_SPI_SIG3_MASK |
						      WEIGHT_SPI_SIG4_MASK |
						      WEIGHT_SPI_SIG5_MASK);
	reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
		WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
		WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
		WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
		WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
	WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);

	/* region 4: LDS / SC / BIF / CP / PA / VGT / DC / UVD weights */
	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
						      WEIGHT_LDS_SIG1_MASK |
						      WEIGHT_SC_MASK);
	reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
		WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
		WEIGHT_SC(ni_pi->cac_weights->weight_sc));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
						      WEIGHT_CP_MASK |
						      WEIGHT_PA_SIG0_MASK |
						      WEIGHT_PA_SIG1_MASK |
						      WEIGHT_VGT_SIG0_MASK);
	reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
		WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
		WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
		WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
		WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
						      WEIGHT_VGT_SIG2_MASK |
						      WEIGHT_DC_SIG0_MASK |
						      WEIGHT_DC_SIG1_MASK |
						      WEIGHT_DC_SIG2_MASK);
	reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
		WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
		WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
		WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
		WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);

	reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
						      WEIGHT_UVD_SIG0_MASK |
						      WEIGHT_UVD_SIG1_MASK |
						      WEIGHT_SPARE0_MASK |
						      WEIGHT_SPARE1_MASK);
	reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
		WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
		WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
		WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
		WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
	WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);

	/* region 5: SQ weights */
	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
						      WEIGHT_SQ_VSP0_MASK);
	reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
		WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);

	reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
	reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
	WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);

	/* spare override values */
	reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
							OVR_VAL_SPARE_0_MASK |
							OVR_MODE_SPARE_1_MASK |
							OVR_VAL_SPARE_1_MASK);
	reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
		OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
		OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
		OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
	WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);

	/* SQ thresholds */
	reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
					   VSP0_MASK |
					   GPR_MASK);
	reg |= (VSP(ni_pi->cac_weights->vsp) |
		VSP0(ni_pi->cac_weights->vsp0) |
		GPR(ni_pi->cac_weights->gpr));
	WREG32(SQ_CAC_THRESHOLD, reg);

	/* MC read/write weights via the MC CG dataport */
	reg = (MCDW_WR_ENABLE |
	       MCDX_WR_ENABLE |
	       MCDY_WR_ENABLE |
	       MCDZ_WR_ENABLE |
	       INDEX(0x09D4));
	WREG32(MC_CG_CONFIG, reg);

	reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
	       WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
	       ALLOW_OVERFLOW);
	WREG32(MC_CG_DATAPORT, reg);

	return 0;
}
3368
3369static int ni_enable_smc_cac(struct radeon_device *rdev,
3370			     struct radeon_ps *radeon_new_state,
3371			     bool enable)
3372{
3373	struct ni_power_info *ni_pi = ni_get_pi(rdev);
3374	int ret = 0;
3375	PPSMC_Result smc_result;
3376
3377	if (ni_pi->enable_cac) {
3378		if (enable) {
3379			if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3380				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3381
3382				if (ni_pi->support_cac_long_term_average) {
3383					smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3384					if (PPSMC_Result_OK != smc_result)
3385						ni_pi->support_cac_long_term_average = false;
3386				}
3387
3388				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3389				if (PPSMC_Result_OK != smc_result)
3390					ret = -EINVAL;
3391
3392				ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3393			}
3394		} else if (ni_pi->cac_enabled) {
3395			smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3396
3397			ni_pi->cac_enabled = false;
3398
3399			if (ni_pi->support_cac_long_term_average) {
3400				smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3401				if (PPSMC_Result_OK != smc_result)
3402					ni_pi->support_cac_long_term_average = false;
3403			}
3404		}
3405	}
3406
3407	return ret;
3408}
3409
3410static int ni_pcie_performance_request(struct radeon_device *rdev,
3411				       u8 perf_req, bool advertise)
3412{
3413	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3414
3415#if defined(CONFIG_ACPI)
3416	if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3417            (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3418		if (eg_pi->pcie_performance_request_registered == false)
3419			radeon_acpi_pcie_notify_device_ready(rdev);
3420		eg_pi->pcie_performance_request_registered = true;
3421		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3422	} else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3423                   eg_pi->pcie_performance_request_registered) {
3424		eg_pi->pcie_performance_request_registered = false;
3425		return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3426	}
3427#endif
3428	return 0;
3429}
3430
3431static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3432{
3433	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3434	u32 tmp;
3435
3436        tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3437
3438        if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3439            (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3440                pi->pcie_gen2 = true;
3441        else
3442		pi->pcie_gen2 = false;
3443
3444	if (!pi->pcie_gen2)
3445		ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3446
3447	return 0;
3448}
3449
3450static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3451					    bool enable)
3452{
3453        struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3454        u32 tmp, bif;
3455
3456	tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3457
3458	if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3459	    (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3460		if (enable) {
3461			if (!pi->boot_in_gen2) {
3462				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3463				bif |= CG_CLIENT_REQ(0xd);
3464				WREG32(CG_BIF_REQ_AND_RSP, bif);
3465			}
3466			tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3467			tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3468			tmp |= LC_GEN2_EN_STRAP;
3469
3470			tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3471			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3472			udelay(10);
3473			tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3474			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3475		} else {
3476			if (!pi->boot_in_gen2) {
3477				bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3478				bif |= CG_CLIENT_REQ(0xd);
3479				WREG32(CG_BIF_REQ_AND_RSP, bif);
3480
3481				tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3482				tmp &= ~LC_GEN2_EN_STRAP;
3483			}
3484			WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3485		}
3486	}
3487}
3488
3489static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3490					bool enable)
3491{
3492	ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3493
3494	if (enable)
3495		WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3496	else
3497                WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3498}
3499
/*
 * One-time asic setup for DPM: snapshot clock and arbiter registers,
 * detect the memory type, optionally advertise pcie gen2 capability,
 * record the pcie gen2 link status and enable ACPI power management.
 */
void ni_dpm_setup_asic(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);

	ni_read_clock_registers(rdev);
	btc_read_arb_registers(rdev);
	rv770_get_memory_type(rdev);
	if (eg_pi->pcie_performance_request)
		ni_advertise_gen2_capability(rdev);
	rv770_get_pcie_gen2_status(rdev);
	rv770_enable_acpi_pm(rdev);
}
3512
3513static void ni_update_current_ps(struct radeon_device *rdev,
3514				 struct radeon_ps *rps)
3515{
3516	struct ni_ps *new_ps = ni_get_ps(rps);
3517	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3518        struct ni_power_info *ni_pi = ni_get_pi(rdev);
3519
3520	eg_pi->current_rps = *rps;
3521	ni_pi->current_ps = *new_ps;
3522	eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3523}
3524
3525static void ni_update_requested_ps(struct radeon_device *rdev,
3526				   struct radeon_ps *rps)
3527{
3528	struct ni_ps *new_ps = ni_get_ps(rps);
3529	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3530        struct ni_power_info *ni_pi = ni_get_pi(rdev);
3531
3532	eg_pi->requested_rps = *rps;
3533	ni_pi->requested_ps = *new_ps;
3534	eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3535}
3536
3537int ni_dpm_enable(struct radeon_device *rdev)
3538{
3539	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3540	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3541	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3542
3543	if (pi->gfx_clock_gating)
3544		ni_cg_clockgating_default(rdev);
3545        if (btc_dpm_enabled(rdev))
3546                return -EINVAL;
3547	if (pi->mg_clock_gating)
3548		ni_mg_clockgating_default(rdev);
3549	if (eg_pi->ls_clock_gating)
3550		ni_ls_clockgating_default(rdev);
3551	if (pi->voltage_control) {
3552		rv770_enable_voltage_control(rdev, true);
3553		cypress_construct_voltage_tables(rdev);
3554	}
3555	if (eg_pi->dynamic_ac_timing)
3556		ni_initialize_mc_reg_table(rdev);
3557	if (pi->dynamic_ss)
3558		cypress_enable_spread_spectrum(rdev, true);
3559	if (pi->thermal_protection)
3560		rv770_enable_thermal_protection(rdev, true);
3561	rv770_setup_bsp(rdev);
3562	rv770_program_git(rdev);
3563	rv770_program_tp(rdev);
3564	rv770_program_tpp(rdev);
3565	rv770_program_sstp(rdev);
3566	cypress_enable_display_gap(rdev);
3567	rv770_program_vc(rdev);
3568	if (pi->dynamic_pcie_gen2)
3569		ni_enable_dynamic_pcie_gen2(rdev, true);
3570	if (rv770_upload_firmware(rdev))
3571		return -EINVAL;
3572	ni_process_firmware_header(rdev);
3573	ni_initial_switch_from_arb_f0_to_f1(rdev);
3574	ni_init_smc_table(rdev);
3575	ni_init_smc_spll_table(rdev);
3576	ni_init_arb_table_index(rdev);
3577	if (eg_pi->dynamic_ac_timing)
3578		ni_populate_mc_reg_table(rdev, boot_ps);
3579	ni_initialize_smc_cac_tables(rdev);
3580	ni_initialize_hardware_cac_manager(rdev);
3581	ni_populate_smc_tdp_limits(rdev, boot_ps);
3582	ni_program_response_times(rdev);
3583	r7xx_start_smc(rdev);
3584	cypress_notify_smc_display_change(rdev, false);
3585	cypress_enable_sclk_control(rdev, true);
3586	if (eg_pi->memory_transition)
3587		cypress_enable_mclk_control(rdev, true);
3588	cypress_start_dpm(rdev);
3589	if (pi->gfx_clock_gating)
3590		ni_gfx_clockgating_enable(rdev, true);
3591	if (pi->mg_clock_gating)
3592		ni_mg_clockgating_enable(rdev, true);
3593	if (eg_pi->ls_clock_gating)
3594		ni_ls_clockgating_enable(rdev, true);
3595
3596	if (rdev->irq.installed &&
3597	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3598		PPSMC_Result result;
3599
3600		rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
3601		rdev->irq.dpm_thermal = true;
3602		radeon_irq_set(rdev);
3603		result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3604
3605		if (result != PPSMC_Result_OK)
3606			DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
3607	}
3608
3609	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3610
3611	ni_update_current_ps(rdev, boot_ps);
3612
3613	return 0;
3614}
3615
/*
 * Tear down dynamic power management: disable CAC, power containment,
 * spread spectrum and thermal features, stop DPM and the SMC, restore
 * the default/boot configuration and record the boot state as current.
 * A no-op when DPM is not running.
 */
void ni_dpm_disable(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;

	if (!btc_dpm_enabled(rdev))
		return;
	rv770_clear_vc(rdev);
	if (pi->thermal_protection)
		rv770_enable_thermal_protection(rdev, false);
	ni_enable_power_containment(rdev, boot_ps, false);
	ni_enable_smc_cac(rdev, boot_ps, false);
	cypress_enable_spread_spectrum(rdev, false);
	rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
	if (pi->dynamic_pcie_gen2)
		ni_enable_dynamic_pcie_gen2(rdev, false);

	/* quiesce the thermal interrupt */
	if (rdev->irq.installed &&
	    r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
		rdev->irq.dpm_thermal = false;
		radeon_irq_set(rdev);
	}

	if (pi->gfx_clock_gating)
		ni_gfx_clockgating_enable(rdev, false);
	if (pi->mg_clock_gating)
		ni_mg_clockgating_enable(rdev, false);
	if (eg_pi->ls_clock_gating)
		ni_ls_clockgating_enable(rdev, false);
	ni_stop_dpm(rdev);
	btc_reset_to_default(rdev);
	ni_stop_smc(rdev);
	ni_force_switch_to_arb_f0(rdev);

	ni_update_current_ps(rdev, boot_ps);
}
3653
/*
 * Re-apply the TDP limits for the requested power state; the SMC is
 * halted around the table update and the software state re-asserted
 * afterwards.  Always returns 0.
 */
int ni_power_control_set_level(struct radeon_device *rdev)
{
	struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;

	ni_restrict_performance_levels_before_switch(rdev);
	rv770_halt_smc(rdev);
	ni_populate_smc_tdp_limits(rdev, new_ps);
	rv770_resume_smc(rdev);
	rv770_set_sw_state(rdev);

	return 0;
}
3666
/*
 * Prepare a state switch: copy the requested state into the driver's
 * private storage and apply the chip-specific adjustment rules to that
 * copy (the caller's state is not modified).  Always returns 0.
 */
int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
	struct radeon_ps *new_ps = &requested_ps;

	ni_update_requested_ps(rdev, new_ps);

	ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);

	return 0;
}
3679
/*
 * Switch to the previously prepared (requested) power state.
 *
 * CAC and power containment are dropped before the switch and
 * re-enabled afterwards; the SMC is halted while the new state tables
 * are uploaded.  Returns 0 on success or the error from programming
 * the memory timing parameters.
 */
int ni_dpm_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;
	int ret;

	ni_restrict_performance_levels_before_switch(rdev);
	ni_enable_power_containment(rdev, new_ps, false);
	ni_enable_smc_cac(rdev, new_ps, false);
	rv770_halt_smc(rdev);
	if (eg_pi->smu_uvd_hs)
		btc_notify_uvd_to_smc(rdev, new_ps);
	ni_upload_sw_state(rdev, new_ps);
	if (eg_pi->dynamic_ac_timing)
		ni_upload_mc_reg_table(rdev, new_ps);
	ret = ni_program_memory_timing_parameters(rdev, new_ps);
	if (ret)
		return ret;
	ni_populate_smc_tdp_limits(rdev, new_ps);
	rv770_resume_smc(rdev);
	rv770_set_sw_state(rdev);
	ni_enable_smc_cac(rdev, new_ps, true);
	ni_enable_power_containment(rdev, new_ps, true);

#if 0
	/* XXX */
	ni_unrestrict_performance_levels_after_switch(rdev);
#endif

	return 0;
}
3711
/* Commit the just-applied requested state as the current state. */
void ni_dpm_post_set_power_state(struct radeon_device *rdev)
{
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct radeon_ps *new_ps = &eg_pi->requested_rps;

	ni_update_current_ps(rdev, new_ps);
}
3719
/* Drop to the lowest performance level and force the boot state. */
void ni_dpm_reset_asic(struct radeon_device *rdev)
{
	ni_restrict_performance_levels_before_switch(rdev);
	rv770_set_boot_state(rdev);
}
3725
/* Overlays for the version-dependent ATOM PowerPlay table layouts
 * parsed by the state-table code below.
 */
union power_info {
	struct _ATOM_POWERPLAY_INFO info;
	struct _ATOM_POWERPLAY_INFO_V2 info_2;
	struct _ATOM_POWERPLAY_INFO_V3 info_3;
	struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
	struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
	struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
};

/* Per-asic-family clock info layouts within a pplib state. */
union pplib_clock_info {
	struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
	struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
	struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
	struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
};

/* pplib state record, v1 and v2 layouts. */
union pplib_power_state {
	struct _ATOM_PPLIB_STATE v1;
	struct _ATOM_PPLIB_STATE_V2 v2;
};
3746
3747static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3748					  struct radeon_ps *rps,
3749					  struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3750					  u8 table_rev)
3751{
3752	rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3753	rps->class = le16_to_cpu(non_clock_info->usClassification);
3754	rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3755
3756	if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3757		rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3758		rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3759	} else if (r600_is_uvd_state(rps->class, rps->class2)) {
3760		rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3761		rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3762	} else {
3763		rps->vclk = 0;
3764		rps->dclk = 0;
3765	}
3766
3767	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3768		rdev->pm.dpm.boot_ps = rps;
3769	if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3770		rdev->pm.dpm.uvd_ps = rps;
3771}
3772
/*
 * Fill in performance level @index of @rps from one atom clock-info
 * entry (evergreen layout), tracking asic-wide limits (acpi vddc, ulv
 * level, min/max vddc) as side effects on pi/eg_pi.
 *
 * Statement order is significant here: the min/max vddc bookkeeping
 * runs before the boot-state patch at the end overrides pl->vddc.
 */
static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
				      struct radeon_ps *rps, int index,
				      union pplib_clock_info *clock_info)
{
	struct rv7xx_power_info *pi = rv770_get_pi(rdev);
	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
	struct ni_ps *ps = ni_get_ps(rps);
	u16 vddc;
	struct rv7xx_pl *pl = &ps->performance_levels[index];

	/* levels are parsed in order, so index + 1 is the running count */
	ps->performance_level_count = index + 1;

	/* clocks are stored as a 16-bit low part plus an 8-bit high part */
	pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
	pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
	pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
	pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;

	pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
	pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
	pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);

	/* patch up vddc if necessary: 0xff01 is replaced with the max
	 * vddc reported by atom
	 */
	if (pl->vddc == 0xff01) {
		if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
			pl->vddc = vddc;
	}

	/* the ACPI state's voltages and pcie gen are cached for later use */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
		pi->acpi_vddc = pl->vddc;
		eg_pi->acpi_vddci = pl->vddci;
		if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
			pi->acpi_pcie_gen2 = true;
		else
			pi->acpi_pcie_gen2 = false;
	}

	/* remember the ULV level if the table advertises one */
	if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
		eg_pi->ulv.supported = true;
		eg_pi->ulv.pl = pl;
	}

	/* track the table-wide vddc range (before any boot-state patch) */
	if (pi->min_vddc_in_table > pl->vddc)
		pi->min_vddc_in_table = pl->vddc;

	if (pi->max_vddc_in_table < pl->vddc)
		pi->max_vddc_in_table = pl->vddc;

	/* patch up boot state: use the firmware default clocks/voltages
	 * instead of whatever the table entry claims
	 */
	if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
		u16 vddc, vddci;
		radeon_atombios_get_default_voltages(rdev, &vddc, &vddci);
		pl->mclk = rdev->clock.default_mclk;
		pl->sclk = rdev->clock.default_sclk;
		pl->vddc = vddc;
		pl->vddci = vddci;
	}

	/* the highest "performance" UI state defines the max AC limits */
	if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
	    ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
		rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
	}
}
3838
3839static int ni_parse_power_table(struct radeon_device *rdev)
3840{
3841	struct radeon_mode_info *mode_info = &rdev->mode_info;
3842	struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
3843	union pplib_power_state *power_state;
3844	int i, j;
3845	union pplib_clock_info *clock_info;
3846	union power_info *power_info;
3847	int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
3848        u16 data_offset;
3849	u8 frev, crev;
3850	struct ni_ps *ps;
3851
3852	if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
3853				   &frev, &crev, &data_offset))
3854		return -EINVAL;
3855	power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
3856
3857	rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
3858				  power_info->pplib.ucNumStates, GFP_KERNEL);
3859	if (!rdev->pm.dpm.ps)
3860		return -ENOMEM;
3861	rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
3862	rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
3863	rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
3864
3865	for (i = 0; i < power_info->pplib.ucNumStates; i++) {
3866		power_state = (union pplib_power_state *)
3867			(mode_info->atom_context->bios + data_offset +
3868			 le16_to_cpu(power_info->pplib.usStateArrayOffset) +
3869			 i * power_info->pplib.ucStateEntrySize);
3870		non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
3871			(mode_info->atom_context->bios + data_offset +
3872			 le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
3873			 (power_state->v1.ucNonClockStateIndex *
3874			  power_info->pplib.ucNonClockSize));
3875		if (power_info->pplib.ucStateEntrySize - 1) {
3876			ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
3877			if (ps == NULL) {
3878				kfree(rdev->pm.dpm.ps);
3879				return -ENOMEM;
3880			}
3881			rdev->pm.dpm.ps[i].ps_priv = ps;
3882			ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
3883							 non_clock_info,
3884							 power_info->pplib.ucNonClockSize);
3885			for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
3886				clock_info = (union pplib_clock_info *)
3887					(mode_info->atom_context->bios + data_offset +
3888					 le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
3889					 (power_state->v1.ucClockStateIndices[j] *
3890					  power_info->pplib.ucClockInfoSize));
3891				ni_parse_pplib_clock_info(rdev,
3892							  &rdev->pm.dpm.ps[i], j,
3893							  clock_info);
3894			}
3895		}
3896	}
3897	rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
3898	return 0;
3899}
3900
/**
 * ni_dpm_init - one-time dpm software setup for Cayman (NI) asics
 * @rdev: radeon device
 *
 * Allocates the driver power-management state (ni_power_info, which
 * embeds the evergreen and rv7xx structs), parses the vbios power
 * tables, and fills in default dpm parameters, clock thresholds, and
 * the per-device CAC weight tables.  Returns 0 on success or a
 * negative errno on failure.
 */
int ni_dpm_init(struct radeon_device *rdev)
{
	struct rv7xx_power_info *pi;
	struct evergreen_power_info *eg_pi;
	struct ni_power_info *ni_pi;
	int index = GetIndexIntoMasterTable(DATA, ASIC_InternalSS_Info);
	u16 data_offset, size;
	u8 frev, crev;
	struct atom_clock_dividers dividers;
	int ret;

	ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
	if (ni_pi == NULL)
		return -ENOMEM;
	rdev->pm.dpm.priv = ni_pi;
	/* ni_power_info embeds evergreen_power_info, which embeds rv7xx */
	eg_pi = &ni_pi->eg;
	pi = &eg_pi->rv7xx;

	rv770_get_max_vddc(rdev);

	eg_pi->ulv.supported = false;
	pi->acpi_vddc = 0;
	eg_pi->acpi_vddci = 0;
	pi->min_vddc_in_table = 0;
	pi->max_vddc_in_table = 0;

	/* NOTE(review): on failure below ni_pi stays attached to
	 * rdev->pm.dpm.priv; assumes the caller invokes ni_dpm_fini()
	 * on error — confirm against the dpm init path.
	 */
	ret = ni_parse_power_table(rdev);
	if (ret)
		return ret;
	ret = r600_parse_extended_power_table(rdev);
	if (ret)
		return ret;

	ni_patch_dependency_tables_based_on_leakage(rdev);

	/* fall back to defaults when the vbios supplied no timings */
	if (rdev->pm.dpm.voltage_response_time == 0)
		rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
	if (rdev->pm.dpm.backbias_response_time == 0)
		rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;

	/* NOTE(review): the branch sense below looks inverted — dividers
	 * would normally only be valid when ret == 0; confirm against
	 * radeon_atom_get_clock_dividers() before changing.
	 */
	ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					     0, false, &dividers);
	if (ret)
		pi->ref_div = dividers.ref_div + 1;
	else
		pi->ref_div = R600_REFERENCEDIVIDER_DFLT;

	pi->rlp = RV770_RLP_DFLT;
	pi->rmp = RV770_RMP_DFLT;
	pi->lhp = RV770_LHP_DFLT;
	pi->lmp = RV770_LMP_DFLT;

	/* arbitration timing set 0: normal operation */
	eg_pi->ats[0].rlp = RV770_RLP_DFLT;
	eg_pi->ats[0].rmp = RV770_RMP_DFLT;
	eg_pi->ats[0].lhp = RV770_LHP_DFLT;
	eg_pi->ats[0].lmp = RV770_LMP_DFLT;

	/* arbitration timing set 1: UVD operation */
	eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
	eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
	eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
	eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;

	eg_pi->smu_uvd_hs = true;

	/* per-device memory clock mode thresholds (in 10 kHz units,
	 * matching sclk/mclk elsewhere in this driver)
	 */
	if (rdev->pdev->device == 0x6707) {
		pi->mclk_strobe_mode_threshold = 55000;
		pi->mclk_edc_enable_threshold = 55000;
		eg_pi->mclk_edc_wr_enable_threshold = 55000;
	} else {
		pi->mclk_strobe_mode_threshold = 40000;
		pi->mclk_edc_enable_threshold = 40000;
		eg_pi->mclk_edc_wr_enable_threshold = 40000;
	}
	ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;

	/* detect which voltage rails are GPIO-controlled */
	pi->voltage_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC);

	pi->mvdd_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC);

	eg_pi->vddci_control =
		radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI);

	/* spread spectrum is enabled only if the SS info table exists;
	 * NOTE(review): dynamic_ss is true in both branches — possibly
	 * intentional, confirm before "fixing"
	 */
	if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size,
                                   &frev, &crev, &data_offset)) {
		pi->sclk_ss = true;
		pi->mclk_ss = true;
		pi->dynamic_ss = true;
	} else {
		pi->sclk_ss = false;
		pi->mclk_ss = false;
		pi->dynamic_ss = true;
	}

	pi->asi = RV770_ASI_DFLT;
	pi->pasi = CYPRESS_HASI_DFLT;
	pi->vrc = CYPRESS_VRC_DFLT;

	pi->power_gating = false;

	pi->gfx_clock_gating = true;

	pi->mg_clock_gating = true;
	pi->mgcgtssm = true;
	eg_pi->ls_clock_gating = false;
	eg_pi->sclk_deep_sleep = false;

	pi->dynamic_pcie_gen2 = true;

	/* thermal protection requires a working thermal sensor */
	if (pi->gfx_clock_gating &&
	    (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
		pi->thermal_protection = true;
	else
		pi->thermal_protection = false;

	pi->display_gap = true;

	pi->dcodt = true;

	pi->ulps = true;

	eg_pi->dynamic_ac_timing = true;
	eg_pi->abm = true;
	eg_pi->mcls = true;
	eg_pi->light_sleep = true;
	eg_pi->memory_transition = true;
#if defined(CONFIG_ACPI)
	eg_pi->pcie_performance_request =
		radeon_acpi_is_pcie_performance_request_supported(rdev);
#else
	eg_pi->pcie_performance_request = false;
#endif

	eg_pi->dll_default_on = false;

	eg_pi->sclk_deep_sleep = false;

	pi->mclk_stutter_mode_threshold = 0;

	pi->sram_end = SMC_RAM_END;

	rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
	rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
	rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
	rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
	rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
	rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
	rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
	rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;

	ni_pi->cac_data.leakage_coefficients.at = 516;
	ni_pi->cac_data.leakage_coefficients.bt = 18;
	ni_pi->cac_data.leakage_coefficients.av = 51;
	ni_pi->cac_data.leakage_coefficients.bv = 2957;

	/* select the CAC weight table by PCI device id:
	 * Cayman XT, Cayman PRO (also the default), or Cayman LE
	 */
	switch (rdev->pdev->device) {
	case 0x6700:
	case 0x6701:
	case 0x6702:
	case 0x6703:
	case 0x6718:
		ni_pi->cac_weights = &cac_weights_cayman_xt;
		break;
	case 0x6705:
	case 0x6719:
	case 0x671D:
	case 0x671C:
	default:
		ni_pi->cac_weights = &cac_weights_cayman_pro;
		break;
	case 0x6704:
	case 0x6706:
	case 0x6707:
	case 0x6708:
	case 0x6709:
		ni_pi->cac_weights = &cac_weights_cayman_le;
		break;
	}

	/* power containment, CAC and SQ ramping are enabled together */
	if (ni_pi->cac_weights->enable_power_containment_by_default) {
		ni_pi->enable_power_containment = true;
		ni_pi->enable_cac = true;
		ni_pi->enable_sq_ramping = true;
	} else {
		ni_pi->enable_power_containment = false;
		ni_pi->enable_cac = false;
		ni_pi->enable_sq_ramping = false;
	}

	ni_pi->driver_calculate_cac_leakage = false;
	ni_pi->cac_configuration_required = true;

	if (ni_pi->cac_configuration_required) {
		ni_pi->support_cac_long_term_average = true;
		ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
		ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
	} else {
		ni_pi->support_cac_long_term_average = false;
		ni_pi->lta_window_size = 0;
		ni_pi->lts_truncate = 0;
	}

	ni_pi->use_power_boost_limit = true;

	return 0;
}
4108
4109void ni_dpm_fini(struct radeon_device *rdev)
4110{
4111	int i;
4112
4113	for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4114		kfree(rdev->pm.dpm.ps[i].ps_priv);
4115	}
4116	kfree(rdev->pm.dpm.ps);
4117	kfree(rdev->pm.dpm.priv);
4118	r600_free_extended_power_table(rdev);
4119}
4120
4121void ni_dpm_print_power_state(struct radeon_device *rdev,
4122			      struct radeon_ps *rps)
4123{
4124	struct ni_ps *ps = ni_get_ps(rps);
4125	struct rv7xx_pl *pl;
4126	int i;
4127
4128	r600_dpm_print_class_info(rps->class, rps->class2);
4129	r600_dpm_print_cap_info(rps->caps);
4130	printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4131	for (i = 0; i < ps->performance_level_count; i++) {
4132		pl = &ps->performance_levels[i];
4133		printk("\t\tpower level 0    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4134		       pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4135	}
4136	r600_dpm_print_ps_status(rdev, rps);
4137}
4138
4139u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4140{
4141	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4142	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4143
4144	if (low)
4145		return requested_state->performance_levels[0].sclk;
4146	else
4147		return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4148}
4149
4150u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4151{
4152	struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4153	struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4154
4155	if (low)
4156		return requested_state->performance_levels[0].mclk;
4157	else
4158		return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4159}
4160
4161