1 /*
2  * Copyright 2012 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23
24 #include "drmP.h"
25 #include "radeon.h"
26 #include "nid.h"
27 #include "r600_dpm.h"
28 #include "ni_dpm.h"
29 #include "atom.h"
30 #include <linux/math64.h>
31
32 #define MC_CG_ARB_FREQ_F0           0x0a
33 #define MC_CG_ARB_FREQ_F1           0x0b
34 #define MC_CG_ARB_FREQ_F2           0x0c
35 #define MC_CG_ARB_FREQ_F3           0x0d
36
37 #define SMC_RAM_END 0xC000
38
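/*
 * Per-SKU CAC (dynamic/leakage power estimation) weight tables for the
 * Cayman XT, Pro and LE variants.  The values are positional
 * initializers for struct ni_cac_weights (see ni_dpm.h) and are
 * consumed by the CAC/power-containment setup code later in this file.
 */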
39 static const struct ni_cac_weights cac_weights_cayman_xt =
40 {
41         0x15,
42         0x2,
43         0x19,
44         0x2,
45         0x8,
46         0x14,
47         0x2,
48         0x16,
49         0xE,
50         0x17,
51         0x13,
52         0x2B,
53         0x10,
54         0x7,
55         0x5,
56         0x5,
57         0x5,
58         0x2,
59         0x3,
60         0x9,
61         0x10,
62         0x10,
63         0x2B,
64         0xA,
65         0x9,
66         0x4,
67         0xD,
68         0xD,
69         0x3E,
70         0x18,
71         0x14,
72         0,
73         0x3,
74         0x3,
75         0x5,
76         0,
77         0x2,
78         0,
79         0,
80         0,
81         0,
82         0,
83         0,
84         0,
85         0,
86         0,
87         0x1CC,
88         0,
89         0x164,
90         1,
91         1,
92         1,
93         1,
94         12,
95         12,
96         12,
97         0x12,
98         0x1F,
99         132,
100         5,
101         7,
102         0,
103         { 0, 0, 0, 0, 0, 0, 0, 0 },
104         { 0, 0, 0, 0 },
105         true
106 };
107
108 static const struct ni_cac_weights cac_weights_cayman_pro =
109 {
110         0x16,
111         0x4,
112         0x10,
113         0x2,
114         0xA,
115         0x16,
116         0x2,
117         0x18,
118         0x10,
119         0x1A,
120         0x16,
121         0x2D,
122         0x12,
123         0xA,
124         0x6,
125         0x6,
126         0x6,
127         0x2,
128         0x4,
129         0xB,
130         0x11,
131         0x11,
132         0x2D,
133         0xC,
134         0xC,
135         0x7,
136         0x10,
137         0x10,
138         0x3F,
139         0x1A,
140         0x16,
141         0,
142         0x7,
143         0x4,
144         0x6,
145         1,
146         0x2,
147         0x1,
148         0,
149         0,
150         0,
151         0,
152         0,
153         0,
154         0x30,
155         0,
156         0x1CF,
157         0,
158         0x166,
159         1,
160         1,
161         1,
162         1,
163         12,
164         12,
165         12,
166         0x15,
167         0x1F,
168         132,
169         6,
170         6,
171         0,
172         { 0, 0, 0, 0, 0, 0, 0, 0 },
173         { 0, 0, 0, 0 },
174         true
175 };
176
177 static const struct ni_cac_weights cac_weights_cayman_le =
178 {
179         0x7,
180         0xE,
181         0x1,
182         0xA,
183         0x1,
184         0x3F,
185         0x2,
186         0x18,
187         0x10,
188         0x1A,
189         0x1,
190         0x3F,
191         0x1,
192         0xE,
193         0x6,
194         0x6,
195         0x6,
196         0x2,
197         0x4,
198         0x9,
199         0x1A,
200         0x1A,
201         0x2C,
202         0xA,
203         0x11,
204         0x8,
205         0x19,
206         0x19,
207         0x1,
208         0x1,
209         0x1A,
210         0,
211         0x8,
212         0x5,
213         0x8,
214         0x1,
215         0x3,
216         0x1,
217         0,
218         0,
219         0,
220         0,
221         0,
222         0,
223         0x38,
224         0x38,
225         0x239,
226         0x3,
227         0x18A,
228         1,
229         1,
230         1,
231         1,
232         12,
233         12,
234         12,
235         0x15,
236         0x22,
237         132,
238         6,
239         6,
240         0,
241         { 0, 0, 0, 0, 0, 0, 0, 0 },
242         { 0, 0, 0, 0 },
243         true
244 };
245
246 #define NISLANDS_MGCG_SEQUENCE  300
247
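/*
 * The clock gating sequences below are flat arrays of
 * { register, value, write mask } triplets, the same layout as the
 * commented sysls tables further down; they are replayed by
 * btc_program_mgcg_hw_sequence().
 */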
248 static const u32 cayman_cgcg_cgls_default[] =
249 {
250         0x000008f8, 0x00000010, 0xffffffff,
251         0x000008fc, 0x00000000, 0xffffffff,
252         0x000008f8, 0x00000011, 0xffffffff,
253         0x000008fc, 0x00000000, 0xffffffff,
254         0x000008f8, 0x00000012, 0xffffffff,
255         0x000008fc, 0x00000000, 0xffffffff,
256         0x000008f8, 0x00000013, 0xffffffff,
257         0x000008fc, 0x00000000, 0xffffffff,
258         0x000008f8, 0x00000014, 0xffffffff,
259         0x000008fc, 0x00000000, 0xffffffff,
260         0x000008f8, 0x00000015, 0xffffffff,
261         0x000008fc, 0x00000000, 0xffffffff,
262         0x000008f8, 0x00000016, 0xffffffff,
263         0x000008fc, 0x00000000, 0xffffffff,
264         0x000008f8, 0x00000017, 0xffffffff,
265         0x000008fc, 0x00000000, 0xffffffff,
266         0x000008f8, 0x00000018, 0xffffffff,
267         0x000008fc, 0x00000000, 0xffffffff,
268         0x000008f8, 0x00000019, 0xffffffff,
269         0x000008fc, 0x00000000, 0xffffffff,
270         0x000008f8, 0x0000001a, 0xffffffff,
271         0x000008fc, 0x00000000, 0xffffffff,
272         0x000008f8, 0x0000001b, 0xffffffff,
273         0x000008fc, 0x00000000, 0xffffffff,
274         0x000008f8, 0x00000020, 0xffffffff,
275         0x000008fc, 0x00000000, 0xffffffff,
276         0x000008f8, 0x00000021, 0xffffffff,
277         0x000008fc, 0x00000000, 0xffffffff,
278         0x000008f8, 0x00000022, 0xffffffff,
279         0x000008fc, 0x00000000, 0xffffffff,
280         0x000008f8, 0x00000023, 0xffffffff,
281         0x000008fc, 0x00000000, 0xffffffff,
282         0x000008f8, 0x00000024, 0xffffffff,
283         0x000008fc, 0x00000000, 0xffffffff,
284         0x000008f8, 0x00000025, 0xffffffff,
285         0x000008fc, 0x00000000, 0xffffffff,
286         0x000008f8, 0x00000026, 0xffffffff,
287         0x000008fc, 0x00000000, 0xffffffff,
288         0x000008f8, 0x00000027, 0xffffffff,
289         0x000008fc, 0x00000000, 0xffffffff,
290         0x000008f8, 0x00000028, 0xffffffff,
291         0x000008fc, 0x00000000, 0xffffffff,
292         0x000008f8, 0x00000029, 0xffffffff,
293         0x000008fc, 0x00000000, 0xffffffff,
294         0x000008f8, 0x0000002a, 0xffffffff,
295         0x000008fc, 0x00000000, 0xffffffff,
296         0x000008f8, 0x0000002b, 0xffffffff,
297         0x000008fc, 0x00000000, 0xffffffff
298 };
299 #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32))
300
301 static const u32 cayman_cgcg_cgls_disable[] =
302 {
303         0x000008f8, 0x00000010, 0xffffffff,
304         0x000008fc, 0xffffffff, 0xffffffff,
305         0x000008f8, 0x00000011, 0xffffffff,
306         0x000008fc, 0xffffffff, 0xffffffff,
307         0x000008f8, 0x00000012, 0xffffffff,
308         0x000008fc, 0xffffffff, 0xffffffff,
309         0x000008f8, 0x00000013, 0xffffffff,
310         0x000008fc, 0xffffffff, 0xffffffff,
311         0x000008f8, 0x00000014, 0xffffffff,
312         0x000008fc, 0xffffffff, 0xffffffff,
313         0x000008f8, 0x00000015, 0xffffffff,
314         0x000008fc, 0xffffffff, 0xffffffff,
315         0x000008f8, 0x00000016, 0xffffffff,
316         0x000008fc, 0xffffffff, 0xffffffff,
317         0x000008f8, 0x00000017, 0xffffffff,
318         0x000008fc, 0xffffffff, 0xffffffff,
319         0x000008f8, 0x00000018, 0xffffffff,
320         0x000008fc, 0xffffffff, 0xffffffff,
321         0x000008f8, 0x00000019, 0xffffffff,
322         0x000008fc, 0xffffffff, 0xffffffff,
323         0x000008f8, 0x0000001a, 0xffffffff,
324         0x000008fc, 0xffffffff, 0xffffffff,
325         0x000008f8, 0x0000001b, 0xffffffff,
326         0x000008fc, 0xffffffff, 0xffffffff,
327         0x000008f8, 0x00000020, 0xffffffff,
328         0x000008fc, 0x00000000, 0xffffffff,
329         0x000008f8, 0x00000021, 0xffffffff,
330         0x000008fc, 0x00000000, 0xffffffff,
331         0x000008f8, 0x00000022, 0xffffffff,
332         0x000008fc, 0x00000000, 0xffffffff,
333         0x000008f8, 0x00000023, 0xffffffff,
334         0x000008fc, 0x00000000, 0xffffffff,
335         0x000008f8, 0x00000024, 0xffffffff,
336         0x000008fc, 0x00000000, 0xffffffff,
337         0x000008f8, 0x00000025, 0xffffffff,
338         0x000008fc, 0x00000000, 0xffffffff,
339         0x000008f8, 0x00000026, 0xffffffff,
340         0x000008fc, 0x00000000, 0xffffffff,
341         0x000008f8, 0x00000027, 0xffffffff,
342         0x000008fc, 0x00000000, 0xffffffff,
343         0x000008f8, 0x00000028, 0xffffffff,
344         0x000008fc, 0x00000000, 0xffffffff,
345         0x000008f8, 0x00000029, 0xffffffff,
346         0x000008fc, 0x00000000, 0xffffffff,
347         0x000008f8, 0x0000002a, 0xffffffff,
348         0x000008fc, 0x00000000, 0xffffffff,
349         0x000008f8, 0x0000002b, 0xffffffff,
350         0x000008fc, 0x00000000, 0xffffffff,
351         0x00000644, 0x000f7902, 0x001f4180,
352         0x00000644, 0x000f3802, 0x001f4180
353 };
354 #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32))
355
356 static const u32 cayman_cgcg_cgls_enable[] =
357 {
358         0x00000644, 0x000f7882, 0x001f4080,
359         0x000008f8, 0x00000010, 0xffffffff,
360         0x000008fc, 0x00000000, 0xffffffff,
361         0x000008f8, 0x00000011, 0xffffffff,
362         0x000008fc, 0x00000000, 0xffffffff,
363         0x000008f8, 0x00000012, 0xffffffff,
364         0x000008fc, 0x00000000, 0xffffffff,
365         0x000008f8, 0x00000013, 0xffffffff,
366         0x000008fc, 0x00000000, 0xffffffff,
367         0x000008f8, 0x00000014, 0xffffffff,
368         0x000008fc, 0x00000000, 0xffffffff,
369         0x000008f8, 0x00000015, 0xffffffff,
370         0x000008fc, 0x00000000, 0xffffffff,
371         0x000008f8, 0x00000016, 0xffffffff,
372         0x000008fc, 0x00000000, 0xffffffff,
373         0x000008f8, 0x00000017, 0xffffffff,
374         0x000008fc, 0x00000000, 0xffffffff,
375         0x000008f8, 0x00000018, 0xffffffff,
376         0x000008fc, 0x00000000, 0xffffffff,
377         0x000008f8, 0x00000019, 0xffffffff,
378         0x000008fc, 0x00000000, 0xffffffff,
379         0x000008f8, 0x0000001a, 0xffffffff,
380         0x000008fc, 0x00000000, 0xffffffff,
381         0x000008f8, 0x0000001b, 0xffffffff,
382         0x000008fc, 0x00000000, 0xffffffff,
383         0x000008f8, 0x00000020, 0xffffffff,
384         0x000008fc, 0xffffffff, 0xffffffff,
385         0x000008f8, 0x00000021, 0xffffffff,
386         0x000008fc, 0xffffffff, 0xffffffff,
387         0x000008f8, 0x00000022, 0xffffffff,
388         0x000008fc, 0xffffffff, 0xffffffff,
389         0x000008f8, 0x00000023, 0xffffffff,
390         0x000008fc, 0xffffffff, 0xffffffff,
391         0x000008f8, 0x00000024, 0xffffffff,
392         0x000008fc, 0xffffffff, 0xffffffff,
393         0x000008f8, 0x00000025, 0xffffffff,
394         0x000008fc, 0xffffffff, 0xffffffff,
395         0x000008f8, 0x00000026, 0xffffffff,
396         0x000008fc, 0xffffffff, 0xffffffff,
397         0x000008f8, 0x00000027, 0xffffffff,
398         0x000008fc, 0xffffffff, 0xffffffff,
399         0x000008f8, 0x00000028, 0xffffffff,
400         0x000008fc, 0xffffffff, 0xffffffff,
401         0x000008f8, 0x00000029, 0xffffffff,
402         0x000008fc, 0xffffffff, 0xffffffff,
403         0x000008f8, 0x0000002a, 0xffffffff,
404         0x000008fc, 0xffffffff, 0xffffffff,
405         0x000008f8, 0x0000002b, 0xffffffff,
406         0x000008fc, 0xffffffff, 0xffffffff
407 };
408 #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32))
409
410 static const u32 cayman_mgcg_default[] =
411 {
412         0x0000802c, 0xc0000000, 0xffffffff,
413         0x00003fc4, 0xc0000000, 0xffffffff,
414         0x00005448, 0x00000100, 0xffffffff,
415         0x000055e4, 0x00000100, 0xffffffff,
416         0x0000160c, 0x00000100, 0xffffffff,
417         0x00008984, 0x06000100, 0xffffffff,
418         0x0000c164, 0x00000100, 0xffffffff,
419         0x00008a18, 0x00000100, 0xffffffff,
420         0x0000897c, 0x06000100, 0xffffffff,
421         0x00008b28, 0x00000100, 0xffffffff,
422         0x00009144, 0x00800200, 0xffffffff,
423         0x00009a60, 0x00000100, 0xffffffff,
424         0x00009868, 0x00000100, 0xffffffff,
425         0x00008d58, 0x00000100, 0xffffffff,
426         0x00009510, 0x00000100, 0xffffffff,
427         0x0000949c, 0x00000100, 0xffffffff,
428         0x00009654, 0x00000100, 0xffffffff,
429         0x00009030, 0x00000100, 0xffffffff,
430         0x00009034, 0x00000100, 0xffffffff,
431         0x00009038, 0x00000100, 0xffffffff,
432         0x0000903c, 0x00000100, 0xffffffff,
433         0x00009040, 0x00000100, 0xffffffff,
434         0x0000a200, 0x00000100, 0xffffffff,
435         0x0000a204, 0x00000100, 0xffffffff,
436         0x0000a208, 0x00000100, 0xffffffff,
437         0x0000a20c, 0x00000100, 0xffffffff,
438         0x00009744, 0x00000100, 0xffffffff,
439         0x00003f80, 0x00000100, 0xffffffff,
440         0x0000a210, 0x00000100, 0xffffffff,
441         0x0000a214, 0x00000100, 0xffffffff,
442         0x000004d8, 0x00000100, 0xffffffff,
443         0x00009664, 0x00000100, 0xffffffff,
444         0x00009698, 0x00000100, 0xffffffff,
445         0x000004d4, 0x00000200, 0xffffffff,
446         0x000004d0, 0x00000000, 0xffffffff,
447         0x000030cc, 0x00000104, 0xffffffff,
448         0x0000d0c0, 0x00000100, 0xffffffff,
449         0x0000d8c0, 0x00000100, 0xffffffff,
450         0x0000802c, 0x40000000, 0xffffffff,
451         0x00003fc4, 0x40000000, 0xffffffff,
452         0x0000915c, 0x00010000, 0xffffffff,
453         0x00009160, 0x00030002, 0xffffffff,
454         0x00009164, 0x00050004, 0xffffffff,
455         0x00009168, 0x00070006, 0xffffffff,
456         0x00009178, 0x00070000, 0xffffffff,
457         0x0000917c, 0x00030002, 0xffffffff,
458         0x00009180, 0x00050004, 0xffffffff,
459         0x0000918c, 0x00010006, 0xffffffff,
460         0x00009190, 0x00090008, 0xffffffff,
461         0x00009194, 0x00070000, 0xffffffff,
462         0x00009198, 0x00030002, 0xffffffff,
463         0x0000919c, 0x00050004, 0xffffffff,
464         0x000091a8, 0x00010006, 0xffffffff,
465         0x000091ac, 0x00090008, 0xffffffff,
466         0x000091b0, 0x00070000, 0xffffffff,
467         0x000091b4, 0x00030002, 0xffffffff,
468         0x000091b8, 0x00050004, 0xffffffff,
469         0x000091c4, 0x00010006, 0xffffffff,
470         0x000091c8, 0x00090008, 0xffffffff,
471         0x000091cc, 0x00070000, 0xffffffff,
472         0x000091d0, 0x00030002, 0xffffffff,
473         0x000091d4, 0x00050004, 0xffffffff,
474         0x000091e0, 0x00010006, 0xffffffff,
475         0x000091e4, 0x00090008, 0xffffffff,
476         0x000091e8, 0x00000000, 0xffffffff,
477         0x000091ec, 0x00070000, 0xffffffff,
478         0x000091f0, 0x00030002, 0xffffffff,
479         0x000091f4, 0x00050004, 0xffffffff,
480         0x00009200, 0x00010006, 0xffffffff,
481         0x00009204, 0x00090008, 0xffffffff,
482         0x00009208, 0x00070000, 0xffffffff,
483         0x0000920c, 0x00030002, 0xffffffff,
484         0x00009210, 0x00050004, 0xffffffff,
485         0x0000921c, 0x00010006, 0xffffffff,
486         0x00009220, 0x00090008, 0xffffffff,
487         0x00009224, 0x00070000, 0xffffffff,
488         0x00009228, 0x00030002, 0xffffffff,
489         0x0000922c, 0x00050004, 0xffffffff,
490         0x00009238, 0x00010006, 0xffffffff,
491         0x0000923c, 0x00090008, 0xffffffff,
492         0x00009240, 0x00070000, 0xffffffff,
493         0x00009244, 0x00030002, 0xffffffff,
494         0x00009248, 0x00050004, 0xffffffff,
495         0x00009254, 0x00010006, 0xffffffff,
496         0x00009258, 0x00090008, 0xffffffff,
497         0x0000925c, 0x00070000, 0xffffffff,
498         0x00009260, 0x00030002, 0xffffffff,
499         0x00009264, 0x00050004, 0xffffffff,
500         0x00009270, 0x00010006, 0xffffffff,
501         0x00009274, 0x00090008, 0xffffffff,
502         0x00009278, 0x00070000, 0xffffffff,
503         0x0000927c, 0x00030002, 0xffffffff,
504         0x00009280, 0x00050004, 0xffffffff,
505         0x0000928c, 0x00010006, 0xffffffff,
506         0x00009290, 0x00090008, 0xffffffff,
507         0x000092a8, 0x00070000, 0xffffffff,
508         0x000092ac, 0x00030002, 0xffffffff,
509         0x000092b0, 0x00050004, 0xffffffff,
510         0x000092bc, 0x00010006, 0xffffffff,
511         0x000092c0, 0x00090008, 0xffffffff,
512         0x000092c4, 0x00070000, 0xffffffff,
513         0x000092c8, 0x00030002, 0xffffffff,
514         0x000092cc, 0x00050004, 0xffffffff,
515         0x000092d8, 0x00010006, 0xffffffff,
516         0x000092dc, 0x00090008, 0xffffffff,
517         0x00009294, 0x00000000, 0xffffffff,
518         0x0000802c, 0x40010000, 0xffffffff,
519         0x00003fc4, 0x40010000, 0xffffffff,
520         0x0000915c, 0x00010000, 0xffffffff,
521         0x00009160, 0x00030002, 0xffffffff,
522         0x00009164, 0x00050004, 0xffffffff,
523         0x00009168, 0x00070006, 0xffffffff,
524         0x00009178, 0x00070000, 0xffffffff,
525         0x0000917c, 0x00030002, 0xffffffff,
526         0x00009180, 0x00050004, 0xffffffff,
527         0x0000918c, 0x00010006, 0xffffffff,
528         0x00009190, 0x00090008, 0xffffffff,
529         0x00009194, 0x00070000, 0xffffffff,
530         0x00009198, 0x00030002, 0xffffffff,
531         0x0000919c, 0x00050004, 0xffffffff,
532         0x000091a8, 0x00010006, 0xffffffff,
533         0x000091ac, 0x00090008, 0xffffffff,
534         0x000091b0, 0x00070000, 0xffffffff,
535         0x000091b4, 0x00030002, 0xffffffff,
536         0x000091b8, 0x00050004, 0xffffffff,
537         0x000091c4, 0x00010006, 0xffffffff,
538         0x000091c8, 0x00090008, 0xffffffff,
539         0x000091cc, 0x00070000, 0xffffffff,
540         0x000091d0, 0x00030002, 0xffffffff,
541         0x000091d4, 0x00050004, 0xffffffff,
542         0x000091e0, 0x00010006, 0xffffffff,
543         0x000091e4, 0x00090008, 0xffffffff,
544         0x000091e8, 0x00000000, 0xffffffff,
545         0x000091ec, 0x00070000, 0xffffffff,
546         0x000091f0, 0x00030002, 0xffffffff,
547         0x000091f4, 0x00050004, 0xffffffff,
548         0x00009200, 0x00010006, 0xffffffff,
549         0x00009204, 0x00090008, 0xffffffff,
550         0x00009208, 0x00070000, 0xffffffff,
551         0x0000920c, 0x00030002, 0xffffffff,
552         0x00009210, 0x00050004, 0xffffffff,
553         0x0000921c, 0x00010006, 0xffffffff,
554         0x00009220, 0x00090008, 0xffffffff,
555         0x00009224, 0x00070000, 0xffffffff,
556         0x00009228, 0x00030002, 0xffffffff,
557         0x0000922c, 0x00050004, 0xffffffff,
558         0x00009238, 0x00010006, 0xffffffff,
559         0x0000923c, 0x00090008, 0xffffffff,
560         0x00009240, 0x00070000, 0xffffffff,
561         0x00009244, 0x00030002, 0xffffffff,
562         0x00009248, 0x00050004, 0xffffffff,
563         0x00009254, 0x00010006, 0xffffffff,
564         0x00009258, 0x00090008, 0xffffffff,
565         0x0000925c, 0x00070000, 0xffffffff,
566         0x00009260, 0x00030002, 0xffffffff,
567         0x00009264, 0x00050004, 0xffffffff,
568         0x00009270, 0x00010006, 0xffffffff,
569         0x00009274, 0x00090008, 0xffffffff,
570         0x00009278, 0x00070000, 0xffffffff,
571         0x0000927c, 0x00030002, 0xffffffff,
572         0x00009280, 0x00050004, 0xffffffff,
573         0x0000928c, 0x00010006, 0xffffffff,
574         0x00009290, 0x00090008, 0xffffffff,
575         0x000092a8, 0x00070000, 0xffffffff,
576         0x000092ac, 0x00030002, 0xffffffff,
577         0x000092b0, 0x00050004, 0xffffffff,
578         0x000092bc, 0x00010006, 0xffffffff,
579         0x000092c0, 0x00090008, 0xffffffff,
580         0x000092c4, 0x00070000, 0xffffffff,
581         0x000092c8, 0x00030002, 0xffffffff,
582         0x000092cc, 0x00050004, 0xffffffff,
583         0x000092d8, 0x00010006, 0xffffffff,
584         0x000092dc, 0x00090008, 0xffffffff,
585         0x00009294, 0x00000000, 0xffffffff,
586         0x0000802c, 0xc0000000, 0xffffffff,
587         0x00003fc4, 0xc0000000, 0xffffffff,
588         0x000008f8, 0x00000010, 0xffffffff,
589         0x000008fc, 0x00000000, 0xffffffff,
590         0x000008f8, 0x00000011, 0xffffffff,
591         0x000008fc, 0x00000000, 0xffffffff,
592         0x000008f8, 0x00000012, 0xffffffff,
593         0x000008fc, 0x00000000, 0xffffffff,
594         0x000008f8, 0x00000013, 0xffffffff,
595         0x000008fc, 0x00000000, 0xffffffff,
596         0x000008f8, 0x00000014, 0xffffffff,
597         0x000008fc, 0x00000000, 0xffffffff,
598         0x000008f8, 0x00000015, 0xffffffff,
599         0x000008fc, 0x00000000, 0xffffffff,
600         0x000008f8, 0x00000016, 0xffffffff,
601         0x000008fc, 0x00000000, 0xffffffff,
602         0x000008f8, 0x00000017, 0xffffffff,
603         0x000008fc, 0x00000000, 0xffffffff,
604         0x000008f8, 0x00000018, 0xffffffff,
605         0x000008fc, 0x00000000, 0xffffffff,
606         0x000008f8, 0x00000019, 0xffffffff,
607         0x000008fc, 0x00000000, 0xffffffff,
608         0x000008f8, 0x0000001a, 0xffffffff,
609         0x000008fc, 0x00000000, 0xffffffff,
610         0x000008f8, 0x0000001b, 0xffffffff,
611         0x000008fc, 0x00000000, 0xffffffff
612 };
613 #define CAYMAN_MGCG_DEFAULT_LENGTH sizeof(cayman_mgcg_default) / (3 * sizeof(u32))
614
615 static const u32 cayman_mgcg_disable[] =
616 {
617         0x0000802c, 0xc0000000, 0xffffffff,
618         0x000008f8, 0x00000000, 0xffffffff,
619         0x000008fc, 0xffffffff, 0xffffffff,
620         0x000008f8, 0x00000001, 0xffffffff,
621         0x000008fc, 0xffffffff, 0xffffffff,
622         0x000008f8, 0x00000002, 0xffffffff,
623         0x000008fc, 0xffffffff, 0xffffffff,
624         0x000008f8, 0x00000003, 0xffffffff,
625         0x000008fc, 0xffffffff, 0xffffffff,
626         0x00009150, 0x00600000, 0xffffffff
627 };
628 #define CAYMAN_MGCG_DISABLE_LENGTH   sizeof(cayman_mgcg_disable) / (3 * sizeof(u32))
629
630 static const u32 cayman_mgcg_enable[] =
631 {
632         0x0000802c, 0xc0000000, 0xffffffff,
633         0x000008f8, 0x00000000, 0xffffffff,
634         0x000008fc, 0x00000000, 0xffffffff,
635         0x000008f8, 0x00000001, 0xffffffff,
636         0x000008fc, 0x00000000, 0xffffffff,
637         0x000008f8, 0x00000002, 0xffffffff,
638         0x000008fc, 0x00600000, 0xffffffff,
639         0x000008f8, 0x00000003, 0xffffffff,
640         0x000008fc, 0x00000000, 0xffffffff,
641         0x00009150, 0x96944200, 0xffffffff
642 };
643
644 #define CAYMAN_MGCG_ENABLE_LENGTH   sizeof(cayman_mgcg_enable) / (3 * sizeof(u32))
645
646 #define NISLANDS_SYSLS_SEQUENCE  100
647
648 static const u32 cayman_sysls_default[] =
649 {
650         /* Register,   Value,     Mask bits */
651         0x000055e8, 0x00000000, 0xffffffff,
652         0x0000d0bc, 0x00000000, 0xffffffff,
653         0x0000d8bc, 0x00000000, 0xffffffff,
654         0x000015c0, 0x000c1401, 0xffffffff,
655         0x0000264c, 0x000c0400, 0xffffffff,
656         0x00002648, 0x000c0400, 0xffffffff,
657         0x00002650, 0x000c0400, 0xffffffff,
658         0x000020b8, 0x000c0400, 0xffffffff,
659         0x000020bc, 0x000c0400, 0xffffffff,
660         0x000020c0, 0x000c0c80, 0xffffffff,
661         0x0000f4a0, 0x000000c0, 0xffffffff,
662         0x0000f4a4, 0x00680fff, 0xffffffff,
663         0x00002f50, 0x00000404, 0xffffffff,
664         0x000004c8, 0x00000001, 0xffffffff,
665         0x000064ec, 0x00000000, 0xffffffff,
666         0x00000c7c, 0x00000000, 0xffffffff,
667         0x00008dfc, 0x00000000, 0xffffffff
668 };
669 #define CAYMAN_SYSLS_DEFAULT_LENGTH sizeof(cayman_sysls_default) / (3 * sizeof(u32))
670
671 static const u32 cayman_sysls_disable[] =
672 {
673         /* Register,   Value,     Mask bits */
674         0x0000d0c0, 0x00000000, 0xffffffff,
675         0x0000d8c0, 0x00000000, 0xffffffff,
676         0x000055e8, 0x00000000, 0xffffffff,
677         0x0000d0bc, 0x00000000, 0xffffffff,
678         0x0000d8bc, 0x00000000, 0xffffffff,
679         0x000015c0, 0x00041401, 0xffffffff,
680         0x0000264c, 0x00040400, 0xffffffff,
681         0x00002648, 0x00040400, 0xffffffff,
682         0x00002650, 0x00040400, 0xffffffff,
683         0x000020b8, 0x00040400, 0xffffffff,
684         0x000020bc, 0x00040400, 0xffffffff,
685         0x000020c0, 0x00040c80, 0xffffffff,
686         0x0000f4a0, 0x000000c0, 0xffffffff,
687         0x0000f4a4, 0x00680000, 0xffffffff,
688         0x00002f50, 0x00000404, 0xffffffff,
689         0x000004c8, 0x00000001, 0xffffffff,
690         0x000064ec, 0x00007ffd, 0xffffffff,
691         0x00000c7c, 0x0000ff00, 0xffffffff,
692         0x00008dfc, 0x0000007f, 0xffffffff
693 };
694 #define CAYMAN_SYSLS_DISABLE_LENGTH sizeof(cayman_sysls_disable) / (3 * sizeof(u32))
695
696 static const u32 cayman_sysls_enable[] =
697 {
698         /* Register,   Value,     Mask bits */
699         0x000055e8, 0x00000001, 0xffffffff,
700         0x0000d0bc, 0x00000100, 0xffffffff,
701         0x0000d8bc, 0x00000100, 0xffffffff,
702         0x000015c0, 0x000c1401, 0xffffffff,
703         0x0000264c, 0x000c0400, 0xffffffff,
704         0x00002648, 0x000c0400, 0xffffffff,
705         0x00002650, 0x000c0400, 0xffffffff,
706         0x000020b8, 0x000c0400, 0xffffffff,
707         0x000020bc, 0x000c0400, 0xffffffff,
708         0x000020c0, 0x000c0c80, 0xffffffff,
709         0x0000f4a0, 0x000000c0, 0xffffffff,
710         0x0000f4a4, 0x00680fff, 0xffffffff,
711         0x00002f50, 0x00000903, 0xffffffff,
712         0x000004c8, 0x00000000, 0xffffffff,
713         0x000064ec, 0x00000000, 0xffffffff,
714         0x00000c7c, 0x00000000, 0xffffffff,
715         0x00008dfc, 0x00000000, 0xffffffff
716 };
717 #define CAYMAN_SYSLS_ENABLE_LENGTH sizeof(cayman_sysls_enable) / (3 * sizeof(u32))
718
719 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
720 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
721
722 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
723 {
724         struct ni_power_info *pi = rdev->pm.dpm.priv;
725
726         return pi;
727 }
728
729 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
730 {
731         struct ni_ps *ps = rps->ps_priv;
732
733         return ps;
734 }
735
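/*
 * Evaluate the leakage model in drm 32.32 fixed point:
 *
 *   P_leak = I_leak * k_t * k_v * V
 *
 * with k_t = (at/1000) * exp((bt/1000) * T) and
 *      k_v = (av/1000) * exp((bv/1000) * V).
 *
 * The inputs are divided by 1000 on the way in and the result is
 * multiplied by 1000 again before being converted back to an integer.
 */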
736 static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
737                                                      u16 v, s32 t,
738                                                      u32 ileakage,
739                                                      u32 *leakage)
740 {
741         s64 kt, kv, leakage_w, i_leakage, vddc, temperature;
742
743         i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
744         vddc = div64_s64(drm_int2fixp(v), 1000);
745         temperature = div64_s64(drm_int2fixp(t), 1000);
746
747         kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
748                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
749         kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
750                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));
751
752         leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
753
754         *leakage = drm_fixp2int(leakage_w * 1000);
755 }
756
757 static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
758                                              const struct ni_leakage_coeffients *coeff,
759                                              u16 v,
760                                              s32 t,
761                                              u32 i_leakage,
762                                              u32 *leakage)
763 {
764         ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
765 }
766
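/*
 * Massage the requested power state before programming it: clamp each
 * performance level to the AC or DC limits, avoid memory reclocking
 * (single forced mclk/vddci) when more than one CRTC is active, keep
 * sclk/mclk/vddc/vddci non-decreasing across levels, and apply the btc
 * blacklist, voltage dependency and voltage delta rules.
 */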
767 static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
768                                         struct radeon_ps *rps)
769 {
770         struct ni_ps *ps = ni_get_ps(rps);
771         struct radeon_clock_and_voltage_limits *max_limits;
772         bool disable_mclk_switching;
773         u32 mclk, sclk;
774         u16 vddc, vddci;
775         int i;
776
777         if (rdev->pm.dpm.new_active_crtc_count > 1)
778                 disable_mclk_switching = true;
779         else
780                 disable_mclk_switching = false;
781
782         if (rdev->pm.dpm.ac_power)
783                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
784         else
785                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
786
787         if (rdev->pm.dpm.ac_power == false) {
788                 for (i = 0; i < ps->performance_level_count; i++) {
789                         if (ps->performance_levels[i].mclk > max_limits->mclk)
790                                 ps->performance_levels[i].mclk = max_limits->mclk;
791                         if (ps->performance_levels[i].sclk > max_limits->sclk)
792                                 ps->performance_levels[i].sclk = max_limits->sclk;
793                         if (ps->performance_levels[i].vddc > max_limits->vddc)
794                                 ps->performance_levels[i].vddc = max_limits->vddc;
795                         if (ps->performance_levels[i].vddci > max_limits->vddci)
796                                 ps->performance_levels[i].vddci = max_limits->vddci;
797                 }
798         }
799
800         /* XXX validate the min clocks required for display */
801
802         if (disable_mclk_switching) {
803                 mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
804                 sclk = ps->performance_levels[0].sclk;
805                 vddc = ps->performance_levels[0].vddc;
806                 vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
807         } else {
808                 sclk = ps->performance_levels[0].sclk;
809                 mclk = ps->performance_levels[0].mclk;
810                 vddc = ps->performance_levels[0].vddc;
811                 vddci = ps->performance_levels[0].vddci;
812         }
813
814         /* adjusted low state */
815         ps->performance_levels[0].sclk = sclk;
816         ps->performance_levels[0].mclk = mclk;
817         ps->performance_levels[0].vddc = vddc;
818         ps->performance_levels[0].vddci = vddci;
819
820         btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
821                                   &ps->performance_levels[0].sclk,
822                                   &ps->performance_levels[0].mclk);
823
824         for (i = 1; i < ps->performance_level_count; i++) {
825                 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
826                         ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
827                 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
828                         ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
829         }
830
831         if (disable_mclk_switching) {
832                 mclk = ps->performance_levels[0].mclk;
833                 for (i = 1; i < ps->performance_level_count; i++) {
834                         if (mclk < ps->performance_levels[i].mclk)
835                                 mclk = ps->performance_levels[i].mclk;
836                 }
837                 for (i = 0; i < ps->performance_level_count; i++) {
838                         ps->performance_levels[i].mclk = mclk;
839                         ps->performance_levels[i].vddci = vddci;
840                 }
841         } else {
842                 for (i = 1; i < ps->performance_level_count; i++) {
843                         if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
844                                 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
845                         if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
846                                 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
847                 }
848         }
849
850         for (i = 1; i < ps->performance_level_count; i++)
851                 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
852                                           &ps->performance_levels[i].sclk,
853                                           &ps->performance_levels[i].mclk);
854
855         for (i = 0; i < ps->performance_level_count; i++)
856                 btc_adjust_clock_combinations(rdev, max_limits,
857                                               &ps->performance_levels[i]);
858
859         for (i = 0; i < ps->performance_level_count; i++) {
860                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
861                                                    ps->performance_levels[i].sclk,
862                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
863                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
864                                                    ps->performance_levels[i].mclk,
865                                                    max_limits->vddci, &ps->performance_levels[i].vddci);
866                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
867                                                    ps->performance_levels[i].mclk,
868                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
869                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
870                                                    rdev->clock.current_dispclk,
871                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
872         }
873
874         for (i = 0; i < ps->performance_level_count; i++) {
875                 btc_apply_voltage_delta_rules(rdev,
876                                               max_limits->vddc, max_limits->vddci,
877                                               &ps->performance_levels[i].vddc,
878                                               &ps->performance_levels[i].vddci);
879         }
880
881         ps->dc_compatible = true;
882         for (i = 0; i < ps->performance_level_count; i++) {
883                 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
884                         ps->dc_compatible = false;
885
886                 if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
887                         ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
888         }
889 }
890
891 static void ni_cg_clockgating_default(struct radeon_device *rdev)
892 {
893         u32 count;
894         const u32 *ps = NULL;
895
896         ps = (const u32 *)&cayman_cgcg_cgls_default;
897         count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
898
899         btc_program_mgcg_hw_sequence(rdev, ps, count);
900 }
901
902 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
903                                       bool enable)
904 {
905         u32 count;
906         const u32 *ps = NULL;
907
908         if (enable) {
909                 ps = (const u32 *)&cayman_cgcg_cgls_enable;
910                 count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
911         } else {
912                 ps = (const u32 *)&cayman_cgcg_cgls_disable;
913                 count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
914         }
915
916         btc_program_mgcg_hw_sequence(rdev, ps, count);
917 }
918
919 static void ni_mg_clockgating_default(struct radeon_device *rdev)
920 {
921         u32 count;
922         const u32 *ps = NULL;
923
924         ps = (const u32 *)&cayman_mgcg_default;
925         count = CAYMAN_MGCG_DEFAULT_LENGTH;
926
927         btc_program_mgcg_hw_sequence(rdev, ps, count);
928 }
929
930 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
931                                      bool enable)
932 {
933         u32 count;
934         const u32 *ps = NULL;
935
936         if (enable) {
937                 ps = (const u32 *)&cayman_mgcg_enable;
938                 count = CAYMAN_MGCG_ENABLE_LENGTH;
939         } else {
940                 ps = (const u32 *)&cayman_mgcg_disable;
941                 count = CAYMAN_MGCG_DISABLE_LENGTH;
942         }
943
944         btc_program_mgcg_hw_sequence(rdev, ps, count);
945 }
946
947 static void ni_ls_clockgating_default(struct radeon_device *rdev)
948 {
949         u32 count;
950         const u32 *ps = NULL;
951
952         ps = (const u32 *)&cayman_sysls_default;
953         count = CAYMAN_SYSLS_DEFAULT_LENGTH;
954
955         btc_program_mgcg_hw_sequence(rdev, ps, count);
956 }
957
958 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
959                                      bool enable)
960 {
961         u32 count;
962         const u32 *ps = NULL;
963
964         if (enable) {
965                 ps = (const u32 *)&cayman_sysls_enable;
966                 count = CAYMAN_SYSLS_ENABLE_LENGTH;
967         } else {
968                 ps = (const u32 *)&cayman_sysls_disable;
969                 count = CAYMAN_SYSLS_DISABLE_LENGTH;
970         }
971
972         btc_program_mgcg_hw_sequence(rdev, ps, count);
973
974 }
975
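/*
 * Replace the 0xff01 leakage placeholder voltage in a clock/voltage
 * dependency table with the real board maximum VDDC.
 */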
976 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
977                                                              struct radeon_clock_voltage_dependency_table *table)
978 {
979         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
980         u32 i;
981
982         if (table) {
983                 for (i = 0; i < table->count; i++) {
984                         if (0xff01 == table->entries[i].v) {
985                                 if (pi->max_vddc == 0)
986                                         return -EINVAL;
987                                 table->entries[i].v = pi->max_vddc;
988                         }
989                 }
990         }
991         return 0;
992 }
993
994 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
995 {
996         int ret = 0;
997
998         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
999                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1000
        if (ret)
                return ret;

1001         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1002                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1003         return ret;
1004 }
1005
1006 static void ni_stop_dpm(struct radeon_device *rdev)
1007 {
1008         WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
1009 }
1010
1011 #if 0
1012 static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1013                                         bool ac_power)
1014 {
1015         if (ac_power)
1016                 return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1017                         0 : -EINVAL;
1018
1019         return 0;
1020 }
1021 #endif
1022
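/* The message parameter is handed to the SMC via SMC_SCRATCH0. */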
1023 static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
1024                                                       PPSMC_Msg msg, u32 parameter)
1025 {
1026         WREG32(SMC_SCRATCH0, parameter);
1027         return rv770_send_msg_to_smc(rdev, msg);
1028 }
1029
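/*
 * Drop any forced level and restrict the SMC to a single enabled
 * performance level ahead of a state switch.
 */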
1030 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1031 {
1032         if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1033                 return -EINVAL;
1034
1035         return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1036                 0 : -EINVAL;
1037 }
1038
1039 #if 0
1040 static int ni_unrestrict_performance_levels_after_switch(struct radeon_device *rdev)
1041 {
1042         if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1043                 return -EINVAL;
1044
1045         return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) == PPSMC_Result_OK) ?
1046                 0 : -EINVAL;
1047 }
1048 #endif
1049
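/*
 * Wait (with a timeout) for LB_SYNC_RESET_SEL to move away from 1,
 * let things settle briefly, then halt the SMC via r7xx_stop_smc().
 */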
1050 static void ni_stop_smc(struct radeon_device *rdev)
1051 {
1052         u32 tmp;
1053         int i;
1054
1055         for (i = 0; i < rdev->usec_timeout; i++) {
1056                 tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1057                 if (tmp != 1)
1058                         break;
1059                 udelay(1);
1060         }
1061
1062         udelay(100);
1063
1064         r7xx_stop_smc(rdev);
1065 }
1066
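/*
 * Read the offsets of the SMC tables (state table, soft registers,
 * MC register table, fan table, arbiter table, CAC table, SPLL table)
 * from the firmware header in SMC SRAM and cache them for later use.
 */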
1067 static int ni_process_firmware_header(struct radeon_device *rdev)
1068 {
1069         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1070         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1071         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1072         u32 tmp;
1073         int ret;
1074
1075         ret = rv770_read_smc_sram_dword(rdev,
1076                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1077                                         NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1078                                         &tmp, pi->sram_end);
1079
1080         if (ret)
1081                 return ret;
1082
1083         pi->state_table_start = (u16)tmp;
1084
1085         ret = rv770_read_smc_sram_dword(rdev,
1086                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1087                                         NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1088                                         &tmp, pi->sram_end);
1089
1090         if (ret)
1091                 return ret;
1092
1093         pi->soft_regs_start = (u16)tmp;
1094
1095         ret = rv770_read_smc_sram_dword(rdev,
1096                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1097                                         NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1098                                         &tmp, pi->sram_end);
1099
1100         if (ret)
1101                 return ret;
1102
1103         eg_pi->mc_reg_table_start = (u16)tmp;
1104
1105         ret = rv770_read_smc_sram_dword(rdev,
1106                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1107                                         NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1108                                         &tmp, pi->sram_end);
1109
1110         if (ret)
1111                 return ret;
1112
1113         ni_pi->fan_table_start = (u16)tmp;
1114
1115         ret = rv770_read_smc_sram_dword(rdev,
1116                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1117                                         NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1118                                         &tmp, pi->sram_end);
1119
1120         if (ret)
1121                 return ret;
1122
1123         ni_pi->arb_table_start = (u16)tmp;
1124
1125         ret = rv770_read_smc_sram_dword(rdev,
1126                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1127                                         NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1128                                         &tmp, pi->sram_end);
1129
1130         if (ret)
1131                 return ret;
1132
1133         ni_pi->cac_table_start = (u16)tmp;
1134
1135         ret = rv770_read_smc_sram_dword(rdev,
1136                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1137                                         NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1138                                         &tmp, pi->sram_end);
1139
1140         if (ret)
1141                 return ret;
1142
1143         ni_pi->spll_table_start = (u16)tmp;
1144
1145
1146         return ret;
1147 }
1148
1149 static void ni_read_clock_registers(struct radeon_device *rdev)
1150 {
1151         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1152
1153         ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1154         ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1155         ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1156         ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1157         ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
1158         ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1159         ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1160         ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1161         ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1162         ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1163         ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1164         ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1165         ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1166         ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
1167 }
1168
1169 #if 0
1170 static int ni_enter_ulp_state(struct radeon_device *rdev)
1171 {
1172         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1173
1174         if (pi->gfx_clock_gating) {
1175                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1176                 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1177                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1178                 RREG32(GB_ADDR_CONFIG);
1179         }
1180
1181         WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1182                  ~HOST_SMC_MSG_MASK);
1183
1184         udelay(25000);
1185
1186         return 0;
1187 }
1188 #endif
1189
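/*
 * Scale the voltage and backbias response times (defaulting to 1000 if
 * unset) and the ACPI/VBI timeouts by the reference clock, and program
 * the resulting delays and the mclk switch limit into the SMC soft
 * registers.
 */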
1190 static void ni_program_response_times(struct radeon_device *rdev)
1191 {
1192         u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1193         u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1194         u32 reference_clock;
1195
1196         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1197
1198         voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1199         backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1200
1201         if (voltage_response_time == 0)
1202                 voltage_response_time = 1000;
1203
1204         if (backbias_response_time == 0)
1205                 backbias_response_time = 1000;
1206
1207         acpi_delay_time = 15000;
1208         vbi_time_out = 100000;
1209
1210         reference_clock = radeon_get_xclk(rdev);
1211
1212         vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1213         bb_dly   = (backbias_response_time * reference_clock) / 1600;
1214         acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1215         vbi_dly  = (vbi_time_out * reference_clock) / 1600;
1216
1217         mclk_switch_limit = (460 * reference_clock) / 100;
1218
1219         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
1220         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1221         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
1222         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1223         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1224         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1225 }
1226
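/*
 * Fill in the SMIO masks for one voltage table in the SMC state table;
 * ni_populate_smc_voltage_tables() below does this for VDDC and VDDCI
 * and records the index of the first entry at or above the maximum
 * VDDC from the power play table.
 */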
1227 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1228                                           struct atom_voltage_table *voltage_table,
1229                                           NISLANDS_SMC_STATETABLE *table)
1230 {
1231         unsigned int i;
1232
1233         for (i = 0; i < voltage_table->count; i++) {
1234                 table->highSMIO[i] = 0;
1235                 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1236         }
1237 }
1238
1239 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1240                                            NISLANDS_SMC_STATETABLE *table)
1241 {
1242         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1243         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1244         unsigned char i;
1245
1246         if (eg_pi->vddc_voltage_table.count) {
1247                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1248                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1249                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1250                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1251
1252                 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1253                         if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1254                                 table->maxVDDCIndexInPPTable = i;
1255                                 break;
1256                         }
1257                 }
1258         }
1259
1260         if (eg_pi->vddci_voltage_table.count) {
1261                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1262
1263                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1264                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1265                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1266         }
1267 }
1268
1269 static int ni_populate_voltage_value(struct radeon_device *rdev,
1270                                      struct atom_voltage_table *table,
1271                                      u16 value,
1272                                      NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1273 {
1274         unsigned int i;
1275
1276         for (i = 0; i < table->count; i++) {
1277                 if (value <= table->entries[i].value) {
1278                         voltage->index = (u8)i;
1279                         voltage->value = cpu_to_be16(table->entries[i].value);
1280                         break;
1281                 }
1282         }
1283
1284         if (i >= table->count)
1285                 return -EINVAL;
1286
1287         return 0;
1288 }
1289
1290 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1291                                    u32 mclk,
1292                                    NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1293 {
1294         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1295         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1296
1297         if (!pi->mvdd_control) {
1298                 voltage->index = eg_pi->mvdd_high_index;
1299                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1300                 return;
1301         }
1302
1303         if (mclk <= pi->mvdd_split_frequency) {
1304                 voltage->index = eg_pi->mvdd_low_index;
1305                 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1306         } else {
1307                 voltage->index = eg_pi->mvdd_high_index;
1308                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1309         }
1310 }
1311
1312 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1313                                     NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1314                                     u16 *std_voltage)
1315 {
1316         if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1317             ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1318                 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1319         else
1320                 *std_voltage = be16_to_cpu(voltage->value);
1321
1322         return 0;
1323 }
1324
1325 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1326                                           u16 value, u8 index,
1327                                           NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1328 {
1329         voltage->index = index;
1330         voltage->value = cpu_to_be16(value);
1331 }
1332
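/*
 * The SMC power scaling factor is derived from the CAC TID count
 * (CG_CAC_CTRL) and the xclk period; ni_scale_power_for_smc() uses it
 * to convert a limit in watts into the scaled fixed-point value the
 * SMC firmware expects.
 */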
1333 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1334 {
1335         u32 xclk_period;
1336         u32 xclk = radeon_get_xclk(rdev);
1337         u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1338
1339         xclk_period = (1000000000UL / xclk);
1340         xclk_period /= 10000UL;
1341
1342         return tmp * xclk_period;
1343 }
1344
1345 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1346 {
1347         return (power_in_watts * scaling_factor) << 2;
1348 }
1349
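/*
 * The boost limit is the near-TDP limit scaled by the squared ratio of
 * the second-highest to the highest standard VDDC, derated to 90%:
 *
 *   boost = near_tdp_limit * std_vddc_med^2 * 90 / (std_vddc_high^2 * 100)
 *
 * 0 (no boost) is returned if power containment is disabled, the state
 * has fewer than three levels, or any voltage lookup fails.
 */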
1350 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1351                                           struct radeon_ps *radeon_state,
1352                                           u32 near_tdp_limit)
1353 {
1354         struct ni_ps *state = ni_get_ps(radeon_state);
1355         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1356         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1357         u32 power_boost_limit = 0;
1358         int ret;
1359
1360         if (ni_pi->enable_power_containment &&
1361             ni_pi->use_power_boost_limit) {
1362                 NISLANDS_SMC_VOLTAGE_VALUE vddc;
1363                 u16 std_vddc_med;
1364                 u16 std_vddc_high;
1365                 u64 tmp, n, d;
1366
1367                 if (state->performance_level_count < 3)
1368                         return 0;
1369
1370                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1371                                                 state->performance_levels[state->performance_level_count - 2].vddc,
1372                                                 &vddc);
1373                 if (ret)
1374                         return 0;
1375
1376                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
1377                 if (ret)
1378                         return 0;
1379
1380                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1381                                                 state->performance_levels[state->performance_level_count - 1].vddc,
1382                                                 &vddc);
1383                 if (ret)
1384                         return 0;
1385
1386                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
1387                 if (ret)
1388                         return 0;
1389
1390                 n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1391                 d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1392                 tmp = div64_u64(n, d);
1393
1394                 if (tmp >> 32)
1395                         return 0;
1396                 power_boost_limit = (u32)tmp;
1397         }
1398
1399         return power_boost_limit;
1400 }
1401
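/*
 * Apply the user TDP over/underdrive percentage:
 *
 *   tdp_limit      = tdp_limit * (100 +/- tdp_adjustment) / 100
 *   near_tdp_limit = near_tdp_limit +/- (tdp_limit delta)
 *
 * Adjustments beyond tdp_od_limit are rejected.
 */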
1402 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1403                                             bool adjust_polarity,
1404                                             u32 tdp_adjustment,
1405                                             u32 *tdp_limit,
1406                                             u32 *near_tdp_limit)
1407 {
1408         if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1409                 return -EINVAL;
1410
1411         if (adjust_polarity) {
1412                 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1413                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1414         } else {
1415                 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1416                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1417         }
1418
1419         return 0;
1420 }
1421
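/*
 * Fill in and upload the DPM2 power limits when power containment is
 * enabled: the TDP, near-TDP, safe (a fixed percentage of near TDP) and
 * boost limits are converted with ni_scale_power_for_smc() (scaling factor
 * times 4) and the four consecutive big-endian dwords are copied into the
 * dpm2Params block in SMC RAM, starting at TDPLimit.
 */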
1422 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1423                                       struct radeon_ps *radeon_state)
1424 {
1425         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1426         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1427
1428         if (ni_pi->enable_power_containment) {
1429                 NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1430                 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1431                 u32 tdp_limit;
1432                 u32 near_tdp_limit;
1433                 u32 power_boost_limit;
1434                 int ret;
1435
1436                 if (scaling_factor == 0)
1437                         return -EINVAL;
1438
1439                 memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1440
1441                 ret = ni_calculate_adjusted_tdp_limits(rdev,
1442                                                        false, /* ??? */
1443                                                        rdev->pm.dpm.tdp_adjustment,
1444                                                        &tdp_limit,
1445                                                        &near_tdp_limit);
1446                 if (ret)
1447                         return ret;
1448
1449                 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
1450                                                                    near_tdp_limit);
1451
1452                 smc_table->dpm2Params.TDPLimit =
1453                         cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1454                 smc_table->dpm2Params.NearTDPLimit =
1455                         cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1456                 smc_table->dpm2Params.SafePowerLimit =
1457                         cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1458                                                            scaling_factor));
1459                 smc_table->dpm2Params.PowerBoostLimit =
1460                         cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
1461
1462                 ret = rv770_copy_bytes_to_smc(rdev,
1463                                               (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1464                                                     offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1465                                               (u8 *)(&smc_table->dpm2Params.TDPLimit),
1466                                               sizeof(u32) * 4, pi->sram_end);
1467                 if (ret)
1468                         return ret;
1469         }
1470
1471         return 0;
1472 }
1473
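/*
 * Copy the MC arbiter DRAM timing registers and burst time from one of the
 * four arbiter sets (F0-F3) into another, then enable the MC clock-gating
 * config bits and request a switch to the destination set via MC_ARB_CG.
 */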
1474 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1475                                 u32 arb_freq_src, u32 arb_freq_dest)
1476 {
1477         u32 mc_arb_dram_timing;
1478         u32 mc_arb_dram_timing2;
1479         u32 burst_time;
1480         u32 mc_cg_config;
1481
1482         switch (arb_freq_src) {
1483         case MC_CG_ARB_FREQ_F0:
1484                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1485                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1486                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1487                 break;
1488         case MC_CG_ARB_FREQ_F1:
1489                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1490                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1491                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1492                 break;
1493         case MC_CG_ARB_FREQ_F2:
1494                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1495                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1496                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1497                 break;
1498         case MC_CG_ARB_FREQ_F3:
1499                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1500                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1501                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1502                 break;
1503         default:
1504                 return -EINVAL;
1505         }
1506
1507         switch (arb_freq_dest) {
1508         case MC_CG_ARB_FREQ_F0:
1509                 WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1510                 WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1511                 WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1512                 break;
1513         case MC_CG_ARB_FREQ_F1:
1514                 WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1515                 WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1516                 WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1517                 break;
1518         case MC_CG_ARB_FREQ_F2:
1519                 WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1520                 WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1521                 WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1522                 break;
1523         case MC_CG_ARB_FREQ_F3:
1524                 WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1525                 WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1526                 WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1527                 break;
1528         default:
1529                 return -EINVAL;
1530         }
1531
1532         mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1533         WREG32(MC_CG_CONFIG, mc_cg_config);
1534         WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1535
1536         return 0;
1537 }
1538
1539 static int ni_init_arb_table_index(struct radeon_device *rdev)
1540 {
1541         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1542         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1543         u32 tmp;
1544         int ret;
1545
1546         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1547                                         &tmp, pi->sram_end);
1548         if (ret)
1549                 return ret;
1550
1551         tmp &= 0x00FFFFFF;
1552         tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1553
1554         return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1555                                           tmp, pi->sram_end);
1556 }
1557
1558 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1559 {
1560         return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
1561 }
1562
1563 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1564 {
1565         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1566         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1567         u32 tmp;
1568         int ret;
1569
1570         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1571                                         &tmp, pi->sram_end);
1572         if (ret)
1573                 return ret;
1574
1575         tmp = (tmp >> 24) & 0xff;
1576
1577         if (tmp == MC_CG_ARB_FREQ_F0)
1578                 return 0;
1579
1580         return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1581 }
1582
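/*
 * Build one SMC arbiter register set for a performance level: the refresh
 * rate is derived from the level's sclk, the engine/memory timings are
 * programmed through the ATOM tables for the requested sclk/mclk, and the
 * resulting MC_ARB_DRAM_TIMING/TIMING2 values are captured big-endian.
 */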
1583 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1584                                                 struct rv7xx_pl *pl,
1585                                                 SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1586 {
1587         u32 dram_timing;
1588         u32 dram_timing2;
1589
1590         arb_regs->mc_arb_rfsh_rate =
1591                 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1592
1593
1594         radeon_atom_set_engine_dram_timings(rdev,
1595                                             pl->sclk,
1596                                             pl->mclk);
1597
1598         dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1599         dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1600
1601         arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1602         arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1603
1604         return 0;
1605 }
1606
1607 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1608                                                   struct radeon_ps *radeon_state,
1609                                                   unsigned int first_arb_set)
1610 {
1611         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1612         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1613         struct ni_ps *state = ni_get_ps(radeon_state);
1614         SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1615         int i, ret = 0;
1616
1617         for (i = 0; i < state->performance_level_count; i++) {
1618                 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1619                 if (ret)
1620                         break;
1621
1622                 ret = rv770_copy_bytes_to_smc(rdev,
1623                                               (u16)(ni_pi->arb_table_start +
1624                                                     offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1625                                                     sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1626                                               (u8 *)&arb_regs,
1627                                               (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1628                                               pi->sram_end);
1629                 if (ret)
1630                         break;
1631         }
1632         return ret;
1633 }
1634
1635 static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1636                                                struct radeon_ps *radeon_new_state)
1637 {
1638         return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1639                                                       NISLANDS_DRIVER_STATE_ARB_INDEX);
1640 }
1641
1642 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1643                                            struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1644 {
1645         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1646
1647         voltage->index = eg_pi->mvdd_high_index;
1648         voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1649 }
1650
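/*
 * Populate level 0 of the initial (boot) state from the saved clock
 * register images and the boot performance level: MPLL/SPLL settings, boot
 * mclk/sclk, VDDC (plus the standardized VDDC), VDDCI, MVDD, arbiter index,
 * PCIe gen2 flag, GDDR5 strobe/EDC flags, and conservative DPM2/SQ throttle
 * defaults.
 */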
1651 static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1652                                          struct radeon_ps *radeon_initial_state,
1653                                          NISLANDS_SMC_STATETABLE *table)
1654 {
1655         struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1656         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1657         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1658         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1659         u32 reg;
1660         int ret;
1661
1662         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1663                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1664         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1665                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1666         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1667                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1668         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1669                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1670         table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1671                 cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1672         table->initialState.levels[0].mclk.vDLL_CNTL =
1673                 cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1674         table->initialState.levels[0].mclk.vMPLL_SS =
1675                 cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1676         table->initialState.levels[0].mclk.vMPLL_SS2 =
1677                 cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1678         table->initialState.levels[0].mclk.mclk_value =
1679                 cpu_to_be32(initial_state->performance_levels[0].mclk);
1680
1681         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1682                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1683         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1684                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1685         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1686                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1687         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1688                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1689         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1690                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1691         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1692                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1693         table->initialState.levels[0].sclk.sclk_value =
1694                 cpu_to_be32(initial_state->performance_levels[0].sclk);
1695         table->initialState.levels[0].arbRefreshState =
1696                 NISLANDS_INITIAL_STATE_ARB_INDEX;
1697
1698         table->initialState.levels[0].ACIndex = 0;
1699
1700         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1701                                         initial_state->performance_levels[0].vddc,
1702                                         &table->initialState.levels[0].vddc);
1703         if (!ret) {
1704                 u16 std_vddc;
1705
1706                 ret = ni_get_std_voltage_value(rdev,
1707                                                &table->initialState.levels[0].vddc,
1708                                                &std_vddc);
1709                 if (!ret)
1710                         ni_populate_std_voltage_value(rdev, std_vddc,
1711                                                       table->initialState.levels[0].vddc.index,
1712                                                       &table->initialState.levels[0].std_vddc);
1713         }
1714
1715         if (eg_pi->vddci_control)
1716                 ni_populate_voltage_value(rdev,
1717                                           &eg_pi->vddci_voltage_table,
1718                                           initial_state->performance_levels[0].vddci,
1719                                           &table->initialState.levels[0].vddci);
1720
1721         ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
1722
1723         reg = CG_R(0xffff) | CG_L(0);
1724         table->initialState.levels[0].aT = cpu_to_be32(reg);
1725
1726         table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1727
1728         if (pi->boot_in_gen2)
1729                 table->initialState.levels[0].gen2PCIE = 1;
1730         else
1731                 table->initialState.levels[0].gen2PCIE = 0;
1732
1733         if (pi->mem_gddr5) {
1734                 table->initialState.levels[0].strobeMode =
1735                         cypress_get_strobe_mode_settings(rdev,
1736                                                          initial_state->performance_levels[0].mclk);
1737
1738                 if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1739                         table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1740                 else
1741                         table->initialState.levels[0].mcFlags = 0;
1742         }
1743
1744         table->initialState.levelCount = 1;
1745
1746         table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1747
1748         table->initialState.levels[0].dpm2.MaxPS = 0;
1749         table->initialState.levels[0].dpm2.NearTDPDec = 0;
1750         table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1751         table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1752
1753         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1754         table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1755
1756         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1757         table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1758
1759         return 0;
1760 }
1761
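/*
 * Derive the ACPI (lowest power) state from the initial state: drop the DC
 * flag, select the ACPI VDDC (or the minimum VDDC from the table), force
 * PCIe gen1 unless ACPI gen2 is allowed, power down/reset/bypass the memory
 * PLL and DLLs, reprogram the SCLK mux (presumably to a bypass source), and
 * zero the sclk/mclk values.
 */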
1762 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1763                                       NISLANDS_SMC_STATETABLE *table)
1764 {
1765         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1766         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1767         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1768         u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
1769         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1770         u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
1771         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1772         u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
1773         u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
1774         u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
1775         u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
1776         u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1777         u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
1778         u32 reg;
1779         int ret;
1780
1781         table->ACPIState = table->initialState;
1782
1783         table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1784
1785         if (pi->acpi_vddc) {
1786                 ret = ni_populate_voltage_value(rdev,
1787                                                 &eg_pi->vddc_voltage_table,
1788                                                 pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1789                 if (!ret) {
1790                         u16 std_vddc;
1791
1792                         ret = ni_get_std_voltage_value(rdev,
1793                                                        &table->ACPIState.levels[0].vddc, &std_vddc);
1794                         if (!ret)
1795                                 ni_populate_std_voltage_value(rdev, std_vddc,
1796                                                               table->ACPIState.levels[0].vddc.index,
1797                                                               &table->ACPIState.levels[0].std_vddc);
1798                 }
1799
1800                 if (pi->pcie_gen2) {
1801                         if (pi->acpi_pcie_gen2)
1802                                 table->ACPIState.levels[0].gen2PCIE = 1;
1803                         else
1804                                 table->ACPIState.levels[0].gen2PCIE = 0;
1805                 } else {
1806                         table->ACPIState.levels[0].gen2PCIE = 0;
1807                 }
1808         } else {
1809                 ret = ni_populate_voltage_value(rdev,
1810                                                 &eg_pi->vddc_voltage_table,
1811                                                 pi->min_vddc_in_table,
1812                                                 &table->ACPIState.levels[0].vddc);
1813                 if (!ret) {
1814                         u16 std_vddc;
1815
1816                         ret = ni_get_std_voltage_value(rdev,
1817                                                        &table->ACPIState.levels[0].vddc,
1818                                                        &std_vddc);
1819                         if (!ret)
1820                                 ni_populate_std_voltage_value(rdev, std_vddc,
1821                                                               table->ACPIState.levels[0].vddc.index,
1822                                                               &table->ACPIState.levels[0].std_vddc);
1823                 }
1824                 table->ACPIState.levels[0].gen2PCIE = 0;
1825         }
1826
1827         if (eg_pi->acpi_vddci) {
1828                 if (eg_pi->vddci_control)
1829                         ni_populate_voltage_value(rdev,
1830                                                   &eg_pi->vddci_voltage_table,
1831                                                   eg_pi->acpi_vddci,
1832                                                   &table->ACPIState.levels[0].vddci);
1833         }
1834
1835
1836         mpll_ad_func_cntl &= ~PDNB;
1837
1838         mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1839
1840         if (pi->mem_gddr5)
1841                 mpll_dq_func_cntl &= ~PDNB;
1842         mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1843
1844
1845         mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1846                              MRDCKA1_RESET |
1847                              MRDCKB0_RESET |
1848                              MRDCKB1_RESET |
1849                              MRDCKC0_RESET |
1850                              MRDCKC1_RESET |
1851                              MRDCKD0_RESET |
1852                              MRDCKD1_RESET);
1853
1854         mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1855                               MRDCKA1_PDNB |
1856                               MRDCKB0_PDNB |
1857                               MRDCKB1_PDNB |
1858                               MRDCKC0_PDNB |
1859                               MRDCKC1_PDNB |
1860                               MRDCKD0_PDNB |
1861                               MRDCKD1_PDNB);
1862
1863         dll_cntl |= (MRDCKA0_BYPASS |
1864                      MRDCKA1_BYPASS |
1865                      MRDCKB0_BYPASS |
1866                      MRDCKB1_BYPASS |
1867                      MRDCKC0_BYPASS |
1868                      MRDCKC1_BYPASS |
1869                      MRDCKD0_BYPASS |
1870                      MRDCKD1_BYPASS);
1871
1872         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1873         spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1874
1875         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1876         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1877         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1878         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1879         table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1880         table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
1881
1882         table->ACPIState.levels[0].mclk.mclk_value = 0;
1883
1884         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1885         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1886         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1887         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1888
1889         table->ACPIState.levels[0].sclk.sclk_value = 0;
1890
1891         ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1892
1893         if (eg_pi->dynamic_ac_timing)
1894                 table->ACPIState.levels[0].ACIndex = 1;
1895
1896         table->ACPIState.levels[0].dpm2.MaxPS = 0;
1897         table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1898         table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1899         table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1900
1901         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1902         table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1903
1904         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1905         table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1906
1907         return 0;
1908 }
1909
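/*
 * Build the complete SMC state table: voltage tables, thermal protection
 * type, platform flags (GPIO DC, regulator hot, step VDDC, GDDR5), the
 * initial and ACPI states (the driver and ULV states start as copies of the
 * initial state), the boot-state arbiter timings, and finally upload the
 * whole table to SMC SRAM.
 */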
1910 static int ni_init_smc_table(struct radeon_device *rdev)
1911 {
1912         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1913         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1914         int ret;
1915         struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1916         NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1917
1918         memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1919
1920         ni_populate_smc_voltage_tables(rdev, table);
1921
1922         switch (rdev->pm.int_thermal_type) {
1923         case THERMAL_TYPE_NI:
1924         case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1925                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1926                 break;
1927         case THERMAL_TYPE_NONE:
1928                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1929                 break;
1930         default:
1931                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1932                 break;
1933         }
1934
1935         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1936                 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1937
1938         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1939                 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1940
1941         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1942                 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1943
1944         if (pi->mem_gddr5)
1945                 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1946
1947         ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1948         if (ret)
1949                 return ret;
1950
1951         ret = ni_populate_smc_acpi_state(rdev, table);
1952         if (ret)
1953                 return ret;
1954
1955         table->driverState = table->initialState;
1956
1957         table->ULVState = table->initialState;
1958
1959         ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1960                                                      NISLANDS_INITIAL_STATE_ARB_INDEX);
1961         if (ret)
1962                 return ret;
1963
1964         return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
1965                                        sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
1966 }
1967
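/*
 * Compute the SPLL register values for an engine clock: the dividers come
 * from the ATOM tables, the feedback divider is derived in fixed point from
 * engine_clock times the reference and post dividers scaled by the
 * reference clock, and, if engine spread spectrum is enabled, CLK_S/CLK_V
 * are programmed from the ATOM spread-spectrum info for the resulting VCO
 * frequency.
 */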
1968 static int ni_calculate_sclk_params(struct radeon_device *rdev,
1969                                     u32 engine_clock,
1970                                     NISLANDS_SMC_SCLK_VALUE *sclk)
1971 {
1972         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1973         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1974         struct atom_clock_dividers dividers;
1975         u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
1976         u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
1977         u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
1978         u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
1979         u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
1980         u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
1981         u64 tmp;
1982         u32 reference_clock = rdev->clock.spll.reference_freq;
1983         u32 reference_divider;
1984         u32 fbdiv;
1985         int ret;
1986
1987         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1988                                              engine_clock, false, &dividers);
1989         if (ret)
1990                 return ret;
1991
1992         reference_divider = 1 + dividers.ref_div;
1993
1994
1995         tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16834;
1996         do_div(tmp, reference_clock);
1997         fbdiv = (u32) tmp;
1998
1999         spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2000         spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2001         spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2002
2003         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2004         spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2005
2006         spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2007         spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2008         spll_func_cntl_3 |= SPLL_DITHEN;
2009
2010         if (pi->sclk_ss) {
2011                 struct radeon_atom_ss ss;
2012                 u32 vco_freq = engine_clock * dividers.post_div;
2013
2014                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2015                                                      ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2016                         u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2017                         u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2018
2019                         cg_spll_spread_spectrum &= ~CLK_S_MASK;
2020                         cg_spll_spread_spectrum |= CLK_S(clk_s);
2021                         cg_spll_spread_spectrum |= SSEN;
2022
2023                         cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2024                         cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2025                 }
2026         }
2027
2028         sclk->sclk_value = engine_clock;
2029         sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2030         sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2031         sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2032         sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2033         sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2034         sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2035
2036         return 0;
2037 }
2038
2039 static int ni_populate_sclk_value(struct radeon_device *rdev,
2040                                   u32 engine_clock,
2041                                   NISLANDS_SMC_SCLK_VALUE *sclk)
2042 {
2043         NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2044         int ret;
2045
2046         ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2047         if (!ret) {
2048                 sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2049                 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2050                 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2051                 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2052                 sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2053                 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2054                 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2055         }
2056
2057         return ret;
2058 }
2059
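/*
 * Build the 256-entry SPLL divider table the SMC uses for sclk switching.
 * Entries are generated in steps of 512 sclk units (presumably 10 kHz
 * units, i.e. 5.12 MHz per entry), with the post/feedback dividers packed
 * into freq[] and the spread-spectrum CLK_S/CLK_V packed into ss[], then
 * the table is uploaded to SMC SRAM.
 */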
2060 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2061 {
2062         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2063         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2064         SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2065         NISLANDS_SMC_SCLK_VALUE sclk_params;
2066         u32 fb_div;
2067         u32 p_div;
2068         u32 clk_s;
2069         u32 clk_v;
2070         u32 sclk = 0;
2071         int i, ret;
2072         u32 tmp;
2073
2074         if (ni_pi->spll_table_start == 0)
2075                 return -EINVAL;
2076
2077         spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2078         if (spll_table == NULL)
2079                 return -ENOMEM;
2080
2081         for (i = 0; i < 256; i++) {
2082                 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2083                 if (ret)
2084                         break;
2085
2086                 p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2087                 fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2088                 clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2089                 clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2090
2091                 fb_div &= ~0x00001FFF;
2092                 fb_div >>= 1;
2093                 clk_v >>= 6;
2094
2095                 if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2096                         ret = -EINVAL;
2097
2098                 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2099                         ret = -EINVAL;
2100
2104                 if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2105                         ret = -EINVAL;
2106
2107                 if (ret)
2108                         break;
2109
2110                 tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2111                         ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2112                 spll_table->freq[i] = cpu_to_be32(tmp);
2113
2114                 tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2115                         ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2116                 spll_table->ss[i] = cpu_to_be32(tmp);
2117
2118                 sclk += 512;
2119         }
2120
2121         if (!ret)
2122                 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2123                                               sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2124
2125         kfree(spll_table);
2126
2127         return ret;
2128 }
2129
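/*
 * Compute the MPLL register values for a memory clock: AD (and, for GDDR5,
 * DQ) divider and ibias programming from the ATOM dividers, VCO mode,
 * strobe/PDNB handling, optional memory spread spectrum, and the DLL speed
 * and power-down bits depending on dll_state_on.  Everything is stored
 * big-endian in the SMC mclk structure.
 */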
2130 static int ni_populate_mclk_value(struct radeon_device *rdev,
2131                                   u32 engine_clock,
2132                                   u32 memory_clock,
2133                                   NISLANDS_SMC_MCLK_VALUE *mclk,
2134                                   bool strobe_mode,
2135                                   bool dll_state_on)
2136 {
2137         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2138         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2139         u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2140         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2141         u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2142         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2143         u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2144         u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2145         u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2146         u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2147         struct atom_clock_dividers dividers;
2148         u32 ibias;
2149         u32 dll_speed;
2150         int ret;
2151         u32 mc_seq_misc7;
2152
2153         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2154                                              memory_clock, strobe_mode, &dividers);
2155         if (ret)
2156                 return ret;
2157
2158         if (!strobe_mode) {
2159                 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
2160
2161                 if (mc_seq_misc7 & 0x8000000)
2162                         dividers.post_div = 1;
2163         }
2164
2165         ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
2166
2167         mpll_ad_func_cntl &= ~(CLKR_MASK |
2168                                YCLK_POST_DIV_MASK |
2169                                CLKF_MASK |
2170                                CLKFRAC_MASK |
2171                                IBIAS_MASK);
2172         mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2173         mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2174         mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2175         mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2176         mpll_ad_func_cntl |= IBIAS(ibias);
2177
2178         if (dividers.vco_mode)
2179                 mpll_ad_func_cntl_2 |= VCO_MODE;
2180         else
2181                 mpll_ad_func_cntl_2 &= ~VCO_MODE;
2182
2183         if (pi->mem_gddr5) {
2184                 mpll_dq_func_cntl &= ~(CLKR_MASK |
2185                                        YCLK_POST_DIV_MASK |
2186                                        CLKF_MASK |
2187                                        CLKFRAC_MASK |
2188                                        IBIAS_MASK);
2189                 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2190                 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2191                 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2192                 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2193                 mpll_dq_func_cntl |= IBIAS(ibias);
2194
2195                 if (strobe_mode)
2196                         mpll_dq_func_cntl &= ~PDNB;
2197                 else
2198                         mpll_dq_func_cntl |= PDNB;
2199
2200                 if (dividers.vco_mode)
2201                         mpll_dq_func_cntl_2 |= VCO_MODE;
2202                 else
2203                         mpll_dq_func_cntl_2 &= ~VCO_MODE;
2204         }
2205
2206         if (pi->mclk_ss) {
2207                 struct radeon_atom_ss ss;
2208                 u32 vco_freq = memory_clock * dividers.post_div;
2209
2210                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2211                                                      ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2212                         u32 reference_clock = rdev->clock.mpll.reference_freq;
2213                         u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
2214                         u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2215                         u32 clk_v = ss.percentage *
2216                                 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2217
2218                         mpll_ss1 &= ~CLKV_MASK;
2219                         mpll_ss1 |= CLKV(clk_v);
2220
2221                         mpll_ss2 &= ~CLKS_MASK;
2222                         mpll_ss2 |= CLKS(clk_s);
2223                 }
2224         }
2225
2226         dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2227                                         memory_clock);
2228
2229         mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2230         mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
2231         if (dll_state_on)
2232                 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2233                                      MRDCKA1_PDNB |
2234                                      MRDCKB0_PDNB |
2235                                      MRDCKB1_PDNB |
2236                                      MRDCKC0_PDNB |
2237                                      MRDCKC1_PDNB |
2238                                      MRDCKD0_PDNB |
2239                                      MRDCKD1_PDNB);
2240         else
2241                 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
2242                                       MRDCKA1_PDNB |
2243                                       MRDCKB0_PDNB |
2244                                       MRDCKB1_PDNB |
2245                                       MRDCKC0_PDNB |
2246                                       MRDCKC1_PDNB |
2247                                       MRDCKD0_PDNB |
2248                                       MRDCKD1_PDNB);
2249
2250
2251         mclk->mclk_value = cpu_to_be32(memory_clock);
2252         mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2253         mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2254         mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2255         mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2256         mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2257         mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2258         mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2259         mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
2260
2261         return 0;
2262 }
2263
2264 static void ni_populate_smc_sp(struct radeon_device *rdev,
2265                                struct radeon_ps *radeon_state,
2266                                NISLANDS_SMC_SWSTATE *smc_state)
2267 {
2268         struct ni_ps *ps = ni_get_ps(radeon_state);
2269         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2270         int i;
2271
2272         for (i = 0; i < ps->performance_level_count - 1; i++)
2273                 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2274
2275         smc_state->levels[ps->performance_level_count - 1].bSP =
2276                 cpu_to_be32(pi->psp);
2277 }
2278
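/*
 * Convert one performance level into an SMC hardware level: PCIe gen2 flag,
 * SPLL registers for the sclk, memory self-refresh (stutter) eligibility,
 * GDDR5 EDC/RTT flags and strobe mode, MPLL registers for the mclk, and the
 * VDDC/standard VDDC/VDDCI/MVDD voltage entries.
 */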
2279 static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
2280                                          struct rv7xx_pl *pl,
2281                                          NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2282 {
2283         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2284         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2285         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2286         int ret;
2287         bool dll_state_on;
2288         u16 std_vddc;
2289         u32 tmp = RREG32(DC_STUTTER_CNTL);
2290
2291         level->gen2PCIE = pi->pcie_gen2 ?
2292                 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2293
2294         ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
2295         if (ret)
2296                 return ret;
2297
2298         level->mcFlags = 0;
2299         if (pi->mclk_stutter_mode_threshold &&
2300             (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2301             !eg_pi->uvd_enabled &&
2302             (tmp & DC_STUTTER_ENABLE_A) &&
2303             (tmp & DC_STUTTER_ENABLE_B))
2304                 level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2305
2306         if (pi->mem_gddr5) {
2307                 if (pl->mclk > pi->mclk_edc_enable_threshold)
2308                         level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2309                 if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2310                         level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2311
2312                 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
2313
2314                 if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2315                         if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2316                             ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2317                                 dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2318                         else
2319                                 dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2320                 } else {
2321                         dll_state_on = false;
2322                         if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2323                                 level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2324                 }
2325
2326                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2327                                              &level->mclk,
2328                                              (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
2329                                              dll_state_on);
2330         } else
2331                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
2332
2333         if (ret)
2334                 return ret;
2335
2336         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2337                                         pl->vddc, &level->vddc);
2338         if (ret)
2339                 return ret;
2340
2341         ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2342         if (ret)
2343                 return ret;
2344
2345         ni_populate_std_voltage_value(rdev, std_vddc,
2346                                       level->vddc.index, &level->std_vddc);
2347
2348         if (eg_pi->vddci_control) {
2349                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2350                                                 pl->vddci, &level->vddci);
2351                 if (ret)
2352                         return ret;
2353         }
2354
2355         ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
2356
2357         return ret;
2358 }
2359
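/*
 * Fill in the per-level aT words, which appear to hold the up/down activity
 * thresholds (CG_R/CG_L) used when switching between adjacent levels.  The
 * thresholds come from r600_calculate_at() (with different target activity
 * spacing when UVD is active) scaled by the bsp/pbsp values; if the
 * calculation fails, fixed offsets around (i + 1) * 1000 are used instead.
 */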
2360 static int ni_populate_smc_t(struct radeon_device *rdev,
2361                              struct radeon_ps *radeon_state,
2362                              NISLANDS_SMC_SWSTATE *smc_state)
2363 {
2364         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2365         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2366         struct ni_ps *state = ni_get_ps(radeon_state);
2367         u32 a_t;
2368         u32 t_l, t_h;
2369         u32 high_bsp;
2370         int i, ret;
2371
2372         if (state->performance_level_count >= 9)
2373                 return -EINVAL;
2374
2375         if (state->performance_level_count < 2) {
2376                 a_t = CG_R(0xffff) | CG_L(0);
2377                 smc_state->levels[0].aT = cpu_to_be32(a_t);
2378                 return 0;
2379         }
2380
2381         smc_state->levels[0].aT = cpu_to_be32(0);
2382
2383         for (i = 0; i <= state->performance_level_count - 2; i++) {
2384                 if (eg_pi->uvd_enabled)
2385                         ret = r600_calculate_at(
2386                                 1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2387                                 100 * R600_AH_DFLT,
2388                                 state->performance_levels[i + 1].sclk,
2389                                 state->performance_levels[i].sclk,
2390                                 &t_l,
2391                                 &t_h);
2392                 else
2393                         ret = r600_calculate_at(
2394                                 1000 * (i + 1),
2395                                 100 * R600_AH_DFLT,
2396                                 state->performance_levels[i + 1].sclk,
2397                                 state->performance_levels[i].sclk,
2398                                 &t_l,
2399                                 &t_h);
2400
2401                 if (ret) {
2402                         t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2403                         t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
2404                 }
2405
2406                 a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2407                 a_t |= CG_R(t_l * pi->bsp / 20000);
2408                 smc_state->levels[i].aT = cpu_to_be32(a_t);
2409
2410                 high_bsp = (i == state->performance_level_count - 2) ?
2411                         pi->pbsp : pi->bsp;
2412
2413                 a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2414                 smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
2415         }
2416
2417         return 0;
2418 }
2419
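/*
 * Program the per-level DPM2 power containment parameters.  MaxPS is the
 * allowed pulse-skip amount derived from how far the sclk may drop below
 * the level's sclk (a percentage of the previous level's sclk, clamped to
 * the lowest level), while the near-TDP/above-safe/below-safe increments
 * are fixed constants.  The boost flag is set on all but the highest level
 * when a boost limit exists, and the PowerBoostLimit dword in SMC RAM is
 * rewritten accordingly.
 */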
2420 static int ni_populate_power_containment_values(struct radeon_device *rdev,
2421                                                 struct radeon_ps *radeon_state,
2422                                                 NISLANDS_SMC_SWSTATE *smc_state)
2423 {
2424         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2425         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2426         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2427         struct ni_ps *state = ni_get_ps(radeon_state);
2428         u32 prev_sclk;
2429         u32 max_sclk;
2430         u32 min_sclk;
2431         int i, ret;
2432         u32 tdp_limit;
2433         u32 near_tdp_limit;
2434         u32 power_boost_limit;
2435         u8 max_ps_percent;
2436
2437         if (ni_pi->enable_power_containment == false)
2438                 return 0;
2439
2440         if (state->performance_level_count == 0)
2441                 return -EINVAL;
2442
2443         if (smc_state->levelCount != state->performance_level_count)
2444                 return -EINVAL;
2445
2446         ret = ni_calculate_adjusted_tdp_limits(rdev,
2447                                                false, /* ??? */
2448                                                rdev->pm.dpm.tdp_adjustment,
2449                                                &tdp_limit,
2450                                                &near_tdp_limit);
2451         if (ret)
2452                 return ret;
2453
2454         power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
2455
2456         ret = rv770_write_smc_sram_dword(rdev,
2457                                          pi->state_table_start +
2458                                          offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2459                                          offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2460                                          ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2461                                          pi->sram_end);
2462         if (ret)
2463                 power_boost_limit = 0;
2464
2465         smc_state->levels[0].dpm2.MaxPS = 0;
2466         smc_state->levels[0].dpm2.NearTDPDec = 0;
2467         smc_state->levels[0].dpm2.AboveSafeInc = 0;
2468         smc_state->levels[0].dpm2.BelowSafeInc = 0;
2469         smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2470
2471         for (i = 1; i < state->performance_level_count; i++) {
2472                 prev_sclk = state->performance_levels[i-1].sclk;
2473                 max_sclk  = state->performance_levels[i].sclk;
2474                 max_ps_percent = (i != (state->performance_level_count - 1)) ?
2475                         NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2476
2477                 if (max_sclk < prev_sclk)
2478                         return -EINVAL;
2479
2480                 if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2481                         min_sclk = max_sclk;
2482                 else if (1 == i)
2483                         min_sclk = prev_sclk;
2484                 else
2485                         min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2486
2487                 if (min_sclk < state->performance_levels[0].sclk)
2488                         min_sclk = state->performance_levels[0].sclk;
2489
2490                 if (min_sclk == 0)
2491                         return -EINVAL;
2492
2493                 smc_state->levels[i].dpm2.MaxPS =
2494                         (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2495                 smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2496                 smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2497                 smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
2498                 smc_state->levels[i].stateFlags |=
2499                         ((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2500                         PPSMC_STATEFLAG_POWERBOOST : 0;
2501         }
2502
2503         return 0;
2504 }
2505
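/*
 * Program the per-level SQ power ramping registers.  Ramping is only used
 * if each NISLANDS_DPM2_SQ_RAMP_* constant fits its register field (note
 * that the LTI_RATIO comparison direction differs from the others) and the
 * level's sclk is at or above the sq_ramping_threshold; otherwise the
 * MIN/MAX power and delta/STI/LTI fields are saturated so ramping is
 * effectively disabled for that level.
 */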
2506 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2507                                          struct radeon_ps *radeon_state,
2508                                          NISLANDS_SMC_SWSTATE *smc_state)
2509 {
2510         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2511         struct ni_ps *state = ni_get_ps(radeon_state);
2512         u32 sq_power_throttle;
2513         u32 sq_power_throttle2;
2514         bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2515         int i;
2516
2517         if (state->performance_level_count == 0)
2518                 return -EINVAL;
2519
2520         if (smc_state->levelCount != state->performance_level_count)
2521                 return -EINVAL;
2522
2523         if (rdev->pm.dpm.sq_ramping_threshold == 0)
2524                 return -EINVAL;
2525
2526         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2527                 enable_sq_ramping = false;
2528
2529         if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2530                 enable_sq_ramping = false;
2531
2532         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2533                 enable_sq_ramping = false;
2534
2535         if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2536                 enable_sq_ramping = false;
2537
2538         if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2539                 enable_sq_ramping = false;
2540
2541         for (i = 0; i < state->performance_level_count; i++) {
2542                 sq_power_throttle  = 0;
2543                 sq_power_throttle2 = 0;
2544
2545                 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2546                     enable_sq_ramping) {
2547                         sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2548                         sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2549                         sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2550                         sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2551                         sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2552                 } else {
2553                         sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2554                         sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2555                 }
2556
2557                 smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2558                 smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2559         }
2560
2561         return 0;
2562 }
2563
2564 static int ni_enable_power_containment(struct radeon_device *rdev,
2565                                        struct radeon_ps *radeon_new_state,
2566                                        bool enable)
2567 {
2568         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2569         PPSMC_Result smc_result;
2570         int ret = 0;
2571
2572         if (ni_pi->enable_power_containment) {
2573                 if (enable) {
2574                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2575                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2576                                 if (smc_result != PPSMC_Result_OK) {
2577                                         ret = -EINVAL;
2578                                         ni_pi->pc_enabled = false;
2579                                 } else {
2580                                         ni_pi->pc_enabled = true;
2581                                 }
2582                         }
2583                 } else {
2584                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2585                         if (smc_result != PPSMC_Result_OK)
2586                                 ret = -EINVAL;
2587                         ni_pi->pc_enabled = false;
2588                 }
2589         }
2590
2591         return ret;
2592 }
2593
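/*
 * Build the SMC software-state image for a driver power state: convert
 * each performance level, assign its arbitration refresh slot, pick the
 * display watermark (against the top level's sclk when power containment
 * is active, otherwise by level index) and the AC timing index, then fill
 * in the watermark soft register (threshold / 512), the SP value and the
 * power-containment, SQ-ramping and T tables.  The "* 100 / 100" on the
 * threshold is presumably a placeholder for a percentage scaling factor.
 */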
2594 static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2595                                          struct radeon_ps *radeon_state,
2596                                          NISLANDS_SMC_SWSTATE *smc_state)
2597 {
2598         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2599         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2600         struct ni_ps *state = ni_get_ps(radeon_state);
2601         int i, ret;
2602         u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2603
2604         if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2605                 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2606
2607         smc_state->levelCount = 0;
2608
2609         if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2610                 return -EINVAL;
2611
2612         for (i = 0; i < state->performance_level_count; i++) {
2613                 ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2614                                                     &smc_state->levels[i]);
2615                 smc_state->levels[i].arbRefreshState =
2616                         (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
2617
2618                 if (ret)
2619                         return ret;
2620
2621                 if (ni_pi->enable_power_containment)
2622                         smc_state->levels[i].displayWatermark =
2623                                 (state->performance_levels[i].sclk < threshold) ?
2624                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2625                 else
2626                         smc_state->levels[i].displayWatermark = (i < 2) ?
2627                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2628
2629                 if (eg_pi->dynamic_ac_timing)
2630                         smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2631                 else
2632                         smc_state->levels[i].ACIndex = 0;
2633
2634                 smc_state->levelCount++;
2635         }
2636
2637         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2638                                       cpu_to_be32(threshold / 512));
2639
2640         ni_populate_smc_sp(rdev, radeon_state, smc_state);
2641
2642         ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2643         if (ret)
2644                 ni_pi->enable_power_containment = false;
2645
2646         ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2647         if (ret)
2648                 ni_pi->enable_sq_ramping = false;
2649
2650         return ni_populate_smc_t(rdev, radeon_state, smc_state);
2651 }
2652
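/*
 * Convert the requested power state and copy the resulting SWSTATE image
 * into SMC RAM at the driverState offset of the state table.  The buffer
 * is sized for the worst-case number of performance levels.
 */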
2653 static int ni_upload_sw_state(struct radeon_device *rdev,
2654                               struct radeon_ps *radeon_new_state)
2655 {
2656         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2657         u16 address = pi->state_table_start +
2658                 offsetof(NISLANDS_SMC_STATETABLE, driverState);
2659         u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2660                 ((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2661         int ret;
2662         NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2663
2664         if (smc_state == NULL)
2665                 return -ENOMEM;
2666
2667         ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2668         if (ret)
2669                 goto done;
2670
2671         ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2672
2673 done:
2674         kfree(smc_state);
2675
2676         return ret;
2677 }
2678
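/*
 * Append derived columns to the MC register table: the MC_SEQ_MISC1 and
 * MC_SEQ_RESERVE_M columns are expanded into MC_PMG_CMD_EMRS/MRS/MRS1
 * shadow entries whose per-entry data combines the live register's upper
 * half with the VBIOS-provided values.  Non-GDDR5 boards additionally set
 * bit 8 in the MRS data.
 */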
2679 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2680                                        struct ni_mc_reg_table *table)
2681 {
2682         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2683         u8 i, j, k;
2684         u32 temp_reg;
2685
2686         for (i = 0, j = table->last; i < table->last; i++) {
2687                 switch (table->mc_reg_address[i].s1) {
2688                 case MC_SEQ_MISC1 >> 2:
2689                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2690                                 return -EINVAL;
2691                         temp_reg = RREG32(MC_PMG_CMD_EMRS);
2692                         table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2693                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2694                         for (k = 0; k < table->num_entries; k++)
2695                                 table->mc_reg_table_entry[k].mc_data[j] =
2696                                         ((temp_reg & 0xffff0000)) |
2697                                         ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2698                         j++;
2699                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2700                                 return -EINVAL;
2701
2702                         temp_reg = RREG32(MC_PMG_CMD_MRS);
2703                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2704                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2705                         for (k = 0; k < table->num_entries; k++) {
2706                                 table->mc_reg_table_entry[k].mc_data[j] =
2707                                         (temp_reg & 0xffff0000) |
2708                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2709                                 if (!pi->mem_gddr5)
2710                                         table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2711                         }
2712                         j++;
2713                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2714                                 return -EINVAL;
2715                         break;
2716                 case MC_SEQ_RESERVE_M >> 2:
                             /* guard the appended column like the MC_SEQ_MISC1 case does */
                             if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
                                     return -EINVAL;
2717                         temp_reg = RREG32(MC_PMG_CMD_MRS1);
2718                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2719                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2720                         for (k = 0; k < table->num_entries; k++)
2721                                 table->mc_reg_table_entry[k].mc_data[j] =
2722                                         (temp_reg & 0xffff0000) |
2723                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2724                         j++;
2725                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2726                                 return -EINVAL;
2727                         break;
2728                 default:
2729                         break;
2730                 }
2731         }
2732
2733         table->last = j;
2734
2735         return 0;
2736 }
2737
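/*
 * Map an MC register offset to its *_LP shadow register.  Returns false
 * when there is no shadow, in which case the caller keeps the original
 * offset in s0.
 */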
2738 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2739 {
2740         bool result = true;
2741
2742         switch (in_reg) {
2743         case  MC_SEQ_RAS_TIMING >> 2:
2744                 *out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2745                 break;
2746         case MC_SEQ_CAS_TIMING >> 2:
2747                 *out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2748                 break;
2749         case MC_SEQ_MISC_TIMING >> 2:
2750                 *out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2751                 break;
2752         case MC_SEQ_MISC_TIMING2 >> 2:
2753                 *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2754                 break;
2755         case MC_SEQ_RD_CTL_D0 >> 2:
2756                 *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2757                 break;
2758         case MC_SEQ_RD_CTL_D1 >> 2:
2759                 *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2760                 break;
2761         case MC_SEQ_WR_CTL_D0 >> 2:
2762                 *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2763                 break;
2764         case MC_SEQ_WR_CTL_D1 >> 2:
2765                 *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2766                 break;
2767         case MC_PMG_CMD_EMRS >> 2:
2768                 *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2769                 break;
2770         case MC_PMG_CMD_MRS >> 2:
2771                 *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2772                 break;
2773         case MC_PMG_CMD_MRS1 >> 2:
2774                 *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2775                 break;
2776         case MC_SEQ_PMG_TIMING >> 2:
2777                 *out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2778                 break;
2779         case MC_PMG_CMD_MRS2 >> 2:
2780                 *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2781                 break;
2782         default:
2783                 result = false;
2784                 break;
2785         }
2786
2787         return result;
2788 }
2789
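/*
 * Mark the table columns whose values actually differ between entries;
 * only those registers need to be reprogrammed on an MCLK switch.
 */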
2790 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2791 {
2792         u8 i, j;
2793
2794         for (i = 0; i < table->last; i++) {
2795                 for (j = 1; j < table->num_entries; j++) {
2796                         if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2797                                 table->valid_flag |= 1 << i;
2798                                 break;
2799                         }
2800                 }
2801         }
2802 }
2803
2804 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2805 {
2806         u32 i;
2807         u16 address;
2808
2809         for (i = 0; i < table->last; i++)
2810                 table->mc_reg_address[i].s0 =
2811                         ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2812                         address : table->mc_reg_address[i].s1;
2813 }
2814
2815 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2816                                       struct ni_mc_reg_table *ni_table)
2817 {
2818         u8 i, j;
2819
2820         if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2821                 return -EINVAL;
2822         if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2823                 return -EINVAL;
2824
2825         for (i = 0; i < table->last; i++)
2826                 ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2827         ni_table->last = table->last;
2828
2829         for (i = 0; i < table->num_entries; i++) {
2830                 ni_table->mc_reg_table_entry[i].mclk_max =
2831                         table->mc_reg_table_entry[i].mclk_max;
2832                 for (j = 0; j < table->last; j++)
2833                         ni_table->mc_reg_table_entry[i].mc_data[j] =
2834                                 table->mc_reg_table_entry[i].mc_data[j];
2835         }
2836         ni_table->num_entries = table->num_entries;
2837
2838         return 0;
2839 }
2840
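/*
 * Seed the *_LP shadow registers from the live MC timing registers, read
 * the VBIOS MC register table for the installed memory module, copy it
 * into the driver table, resolve the shadow offsets, append the derived
 * EMRS/MRS/MRS1 columns and flag the columns that vary across entries.
 */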
2841 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2842 {
2843         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2844         int ret;
2845         struct atom_mc_reg_table *table;
2846         struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2847         u8 module_index = rv770_get_memory_module_index(rdev);
2848
2849         table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2850         if (!table)
2851                 return -ENOMEM;
2852
2853         WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2854         WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2855         WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2856         WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2857         WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2858         WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2859         WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2860         WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2861         WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2862         WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2863         WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2864         WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2865         WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2866
2867         ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2868
2869         if (ret)
2870                 goto init_mc_done;
2871
2872         ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2873
2874         if (ret)
2875                 goto init_mc_done;
2876
2877         ni_set_s0_mc_reg_index(ni_table);
2878
2879         ret = ni_set_mc_special_registers(rdev, ni_table);
2880
2881         if (ret)
2882                 goto init_mc_done;
2883
2884         ni_set_valid_flag(ni_table);
2885
2886 init_mc_done:
2887         kfree(table);
2888
2889         return ret;
2890 }
2891
2892 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2893                                          SMC_NIslands_MCRegisters *mc_reg_table)
2894 {
2895         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2896         u32 i, j;
2897
2898         for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2899                 if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2900                         if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2901                                 break;
2902                         mc_reg_table->address[i].s0 =
2903                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2904                         mc_reg_table->address[i].s1 =
2905                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2906                         i++;
2907                 }
2908         }
2909         mc_reg_table->last = (u8)i;
2910 }
2911
2912
2913 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2914                                     SMC_NIslands_MCRegisterSet *data,
2915                                     u32 num_entries, u32 valid_flag)
2916 {
2917         u32 i, j;
2918
2919         for (i = 0, j = 0; j < num_entries; j++) {
2920                 if (valid_flag & (1 << j)) {
2921                         data->value[i] = cpu_to_be32(entry->mc_data[j]);
2922                         i++;
2923                 }
2924         }
2925 }
2926
2927 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2928                                                  struct rv7xx_pl *pl,
2929                                                  SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2930 {
2931         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2932         u32 i = 0;
2933
2934         for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2935                 if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2936                         break;
2937         }
2938
2939         if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2940                 --i;
2941
2942         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2943                                 mc_reg_table_data,
2944                                 ni_pi->mc_reg_table.last,
2945                                 ni_pi->mc_reg_table.valid_flag);
2946 }
2947
2948 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2949                                            struct radeon_ps *radeon_state,
2950                                            SMC_NIslands_MCRegisters *mc_reg_table)
2951 {
2952         struct ni_ps *state = ni_get_ps(radeon_state);
2953         int i;
2954
2955         for (i = 0; i < state->performance_level_count; i++) {
2956                 ni_convert_mc_reg_table_entry_to_smc(rdev,
2957                                                      &state->performance_levels[i],
2958                                                      &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2959         }
2960 }
2961
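/*
 * Build the complete SMC MC register image for the boot state: the
 * register address list, the boot-level data in slot 0, the lowest table
 * entry in slot 1 (presumably the ACPI/low slot) and one set per driver
 * performance level, then copy the whole structure into SMC RAM.
 */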
2962 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
2963                                     struct radeon_ps *radeon_boot_state)
2964 {
2965         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2966         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2967         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2968         struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
2969         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
2970
2971         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
2972
2973         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
2974
2975         ni_populate_mc_reg_addresses(rdev, mc_reg_table);
2976
2977         ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
2978                                              &mc_reg_table->data[0]);
2979
2980         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
2981                                 &mc_reg_table->data[1],
2982                                 ni_pi->mc_reg_table.last,
2983                                 ni_pi->mc_reg_table.valid_flag);
2984
2985         ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
2986
2987         return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
2988                                        (u8 *)mc_reg_table,
2989                                        sizeof(SMC_NIslands_MCRegisters),
2990                                        pi->sram_end);
2991 }
2992
2993 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
2994                                   struct radeon_ps *radeon_new_state)
2995 {
2996         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2997         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2998         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2999         struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3000         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3001         u16 address;
3002
3003         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3004
3005         ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3006
3007         address = eg_pi->mc_reg_table_start +
3008                 (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3009
3010         return rv770_copy_bytes_to_smc(rdev, address,
3011                                        (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3012                                        sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3013                                        pi->sram_end);
3014 }
3015
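/*
 * Fill the SMC leakage lookup table by evaluating the leakage model at
 * each (temperature, voltage) grid point.  Temperatures step in 8 degree
 * increments, scaled by 1000, and are raised to the configured minimum
 * when below it.  Voltage columns beyond the VDDC table are padded with
 * the largest leakage value seen.
 */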
3016 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3017                                                    PP_NIslands_CACTABLES *cac_tables)
3018 {
3019         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3020         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3021         u32 leakage = 0;
3022         unsigned int i, j, table_size;
3023         s32 t;
3024         u32 smc_leakage, max_leakage = 0;
3025         u32 scaling_factor;
3026
3027         table_size = eg_pi->vddc_voltage_table.count;
3028
3029         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3030                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3031
3032         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3033
3034         for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3035                 for (j = 0; j < table_size; j++) {
3036                         t = (1000 * ((i + 1) * 8));
3037
3038                         if (t < ni_pi->cac_data.leakage_minimum_temperature)
3039                                 t = ni_pi->cac_data.leakage_minimum_temperature;
3040
3041                         ni_calculate_leakage_for_v_and_t(rdev,
3042                                                          &ni_pi->cac_data.leakage_coefficients,
3043                                                          eg_pi->vddc_voltage_table.entries[j].value,
3044                                                          t,
3045                                                          ni_pi->cac_data.i_leakage,
3046                                                          &leakage);
3047
3048                         smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3049                         if (smc_leakage > max_leakage)
3050                                 max_leakage = smc_leakage;
3051
3052                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3053                 }
3054         }
3055
3056         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3057                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3058                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3059         }
3060         return 0;
3061 }
3062
3063 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3064                                             PP_NIslands_CACTABLES *cac_tables)
3065 {
3066         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3067         struct radeon_cac_leakage_table *leakage_table =
3068                 &rdev->pm.dpm.dyn_state.cac_leakage_table;
3069         u32 i, j, table_size;
3070         u32 smc_leakage, max_leakage = 0;
3071         u32 scaling_factor;
3072
3073         if (!leakage_table)
3074                 return -EINVAL;
3075
3076         table_size = leakage_table->count;
3077
3078         if (eg_pi->vddc_voltage_table.count != table_size)
3079                 table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3080                         eg_pi->vddc_voltage_table.count : leakage_table->count;
3081
3082         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3083                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3084
3085         if (table_size == 0)
3086                 return -EINVAL;
3087
3088         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3089
3090         for (j = 0; j < table_size; j++) {
3091                 smc_leakage = leakage_table->entries[j].leakage;
3092
3093                 if (smc_leakage > max_leakage)
3094                         max_leakage = smc_leakage;
3095
3096                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3097                         cac_tables->cac_lkge_lut[i][j] =
3098                                 cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3099         }
3100
3101         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3102                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3103                         cac_tables->cac_lkge_lut[i][j] =
3104                                 cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3105         }
3106         return 0;
3107 }
3108
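/*
 * Program the CAC setup: the TID fields in CG_CAC_CTRL, the DC CAC and
 * BIF lookup tables, the leakage LUT (driver calculated or taken from
 * the VBIOS leakage table) and the table header, then upload everything
 * to SMC RAM.  Failures are not fatal: CAC and power containment are
 * simply disabled and 0 is returned so DPM init can continue.
 */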
3109 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3110 {
3111         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3112         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3113         PP_NIslands_CACTABLES *cac_tables = NULL;
3114         int i, ret;
3115         u32 reg;
3116
3117         if (!ni_pi->enable_cac)
3118                 return 0;
3119
3120         cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3121         if (!cac_tables)
3122                 return -ENOMEM;
3123
3124         reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3125         reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3126                 TID_UNIT(ni_pi->cac_weights->tid_unit));
3127         WREG32(CG_CAC_CTRL, reg);
3128
3129         for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3130                 ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3131
3132         for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3133                 cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3134
3135         ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3136         ni_pi->cac_data.pwr_const = 0;
3137         ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3138         ni_pi->cac_data.bif_cac_value = 0;
3139         ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3140         ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3141         ni_pi->cac_data.allow_ovrflw = 0;
3142         ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3143         ni_pi->cac_data.num_win_tdp = 0;
3144         ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3145
3146         if (ni_pi->driver_calculate_cac_leakage)
3147                 ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3148         else
3149                 ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3150
3151         if (ret)
3152                 goto done_free;
3153
3154         cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
3155         cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3156         cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3157         cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
3158         cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
3159         cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
3160         cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
3161         cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
3162         cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3163
3164         ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3165                                       sizeof(PP_NIslands_CACTABLES), pi->sram_end);
3166
3167 done_free:
3168         if (ret) {
3169                 ni_pi->enable_cac = false;
3170                 ni_pi->enable_power_containment = false;
3171         }
3172
3173         kfree(cac_tables);
3174
3175         return 0;
3176 }
3177
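/*
 * Write the per-block CAC weights into the CG_CAC_REGION_* weight
 * registers, the SQ thresholds and the MC read/write weights.  Only done
 * when CAC is enabled and the weight table requires explicit programming.
 */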
3178 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3179 {
3180         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3181         u32 reg;
3182
3183         if (!ni_pi->enable_cac ||
3184             !ni_pi->cac_configuration_required)
3185                 return 0;
3186
3187         if (ni_pi->cac_weights == NULL)
3188                 return -EINVAL;
3189
3190         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3191                                                       WEIGHT_TCP_SIG1_MASK |
3192                                                       WEIGHT_TA_SIG_MASK);
3193         reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3194                 WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3195                 WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3196         WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3197
3198         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3199                                                       WEIGHT_TCC_EN1_MASK |
3200                                                       WEIGHT_TCC_EN2_MASK);
3201         reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3202                 WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3203                 WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3204         WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
3205
3206         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3207                                                       WEIGHT_CB_EN1_MASK |
3208                                                       WEIGHT_CB_EN2_MASK |
3209                                                       WEIGHT_CB_EN3_MASK);
3210         reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3211                 WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3212                 WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3213                 WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3214         WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3215
3216         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3217                                                       WEIGHT_DB_SIG1_MASK |
3218                                                       WEIGHT_DB_SIG2_MASK |
3219                                                       WEIGHT_DB_SIG3_MASK);
3220         reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3221                 WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3222                 WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3223                 WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3224         WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3225
3226         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3227                                                       WEIGHT_SXM_SIG1_MASK |
3228                                                       WEIGHT_SXM_SIG2_MASK |
3229                                                       WEIGHT_SXS_SIG0_MASK |
3230                                                       WEIGHT_SXS_SIG1_MASK);
3231         reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3232                 WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3233                 WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3234                 WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3235                 WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3236         WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
3237
3238         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3239                                                       WEIGHT_XBR_1_MASK |
3240                                                       WEIGHT_XBR_2_MASK |
3241                                                       WEIGHT_SPI_SIG0_MASK);
3242         reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3243                 WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3244                 WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3245                 WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3246         WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3247
3248         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3249                                                       WEIGHT_SPI_SIG2_MASK |
3250                                                       WEIGHT_SPI_SIG3_MASK |
3251                                                       WEIGHT_SPI_SIG4_MASK |
3252                                                       WEIGHT_SPI_SIG5_MASK);
3253         reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3254                 WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3255                 WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3256                 WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3257                 WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3258         WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
3259
3260         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3261                                                       WEIGHT_LDS_SIG1_MASK |
3262                                                       WEIGHT_SC_MASK);
3263         reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3264                 WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3265                 WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3266         WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3267
3268         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3269                                                       WEIGHT_CP_MASK |
3270                                                       WEIGHT_PA_SIG0_MASK |
3271                                                       WEIGHT_PA_SIG1_MASK |
3272                                                       WEIGHT_VGT_SIG0_MASK);
3273         reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3274                 WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3275                 WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3276                 WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3277                 WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3278         WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3279
3280         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3281                                                       WEIGHT_VGT_SIG2_MASK |
3282                                                       WEIGHT_DC_SIG0_MASK |
3283                                                       WEIGHT_DC_SIG1_MASK |
3284                                                       WEIGHT_DC_SIG2_MASK);
3285         reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3286                 WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3287                 WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3288                 WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3289                 WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3290         WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3291
3292         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3293                                                       WEIGHT_UVD_SIG0_MASK |
3294                                                       WEIGHT_UVD_SIG1_MASK |
3295                                                       WEIGHT_SPARE0_MASK |
3296                                                       WEIGHT_SPARE1_MASK);
3297         reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3298                 WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3299                 WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3300                 WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3301                 WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3302         WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
3303
3304         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3305                                                       WEIGHT_SQ_VSP0_MASK);
3306         reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3307                 WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3308         WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3309
3310         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3311         reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3312         WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
3313
3314         reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3315                                                         OVR_VAL_SPARE_0_MASK |
3316                                                         OVR_MODE_SPARE_1_MASK |
3317                                                         OVR_VAL_SPARE_1_MASK);
3318         reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3319                 OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3320                 OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3321                 OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3322         WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3323
3324         reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3325                                            VSP0_MASK |
3326                                            GPR_MASK);
3327         reg |= (VSP(ni_pi->cac_weights->vsp) |
3328                 VSP0(ni_pi->cac_weights->vsp0) |
3329                 GPR(ni_pi->cac_weights->gpr));
3330         WREG32(SQ_CAC_THRESHOLD, reg);
3331
3332         reg = (MCDW_WR_ENABLE |
3333                MCDX_WR_ENABLE |
3334                MCDY_WR_ENABLE |
3335                MCDZ_WR_ENABLE |
3336                INDEX(0x09D4));
3337         WREG32(MC_CG_CONFIG, reg);
3338
3339         reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3340                WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3341                ALLOW_OVERFLOW);
3342         WREG32(MC_CG_DATAPORT, reg);
3343
3344         return 0;
3345 }
3346
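/*
 * Start or stop CAC collection in the SMC (plus the long-term average
 * feature when supported).  As with power containment, enabling is
 * skipped for UVD states.
 */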
3347 static int ni_enable_smc_cac(struct radeon_device *rdev,
3348                              struct radeon_ps *radeon_new_state,
3349                              bool enable)
3350 {
3351         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3352         int ret = 0;
3353         PPSMC_Result smc_result;
3354
3355         if (ni_pi->enable_cac) {
3356                 if (enable) {
3357                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3358                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3359
3360                                 if (ni_pi->support_cac_long_term_average) {
3361                                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3362                                         if (PPSMC_Result_OK != smc_result)
3363                                                 ni_pi->support_cac_long_term_average = false;
3364                                 }
3365
3366                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3367                                 if (PPSMC_Result_OK != smc_result)
3368                                         ret = -EINVAL;
3369
3370                                 ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3371                         }
3372                 } else if (ni_pi->cac_enabled) {
3373                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3374
3375                         ni_pi->cac_enabled = false;
3376
3377                         if (ni_pi->support_cac_long_term_average) {
3378                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3379                                 if (PPSMC_Result_OK != smc_result)
3380                                         ni_pi->support_cac_long_term_average = false;
3381                         }
3382                 }
3383         }
3384
3385         return ret;
3386 }
3387
3388 static int ni_pcie_performance_request(struct radeon_device *rdev,
3389                                        u8 perf_req, bool advertise)
3390 {
3391         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3392
3393 #if defined(CONFIG_ACPI)
3394         if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3395             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3396                 if (!eg_pi->pcie_performance_request_registered)
3397                         radeon_acpi_pcie_notify_device_ready(rdev);
3398                 eg_pi->pcie_performance_request_registered = true;
3399                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3400         } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3401                    eg_pi->pcie_performance_request_registered) {
3402                 eg_pi->pcie_performance_request_registered = false;
3403                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3404         }
3405 #endif
3406         return 0;
3407 }
3408
3409 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3410 {
3411         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3412         u32 tmp;
3413
3414         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3415
3416         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3417             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3418                 pi->pcie_gen2 = true;
3419         else
3420                 pi->pcie_gen2 = false;
3421
3422         if (!pi->pcie_gen2)
3423                 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3424
3425         return 0;
3426 }
3427
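/*
 * If both ends of the link have advertised gen2, let the hardware switch
 * PCIe speed together with the voltage: program the BIF client request
 * and the LC speed control strap, pulsing the failed-speed-change
 * counter clear bit on enable.
 */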
3428 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3429                                             bool enable)
3430 {
3431         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3432         u32 tmp, bif;
3433
3434         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3435
3436         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3437             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3438                 if (enable) {
3439                         if (!pi->boot_in_gen2) {
3440                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3441                                 bif |= CG_CLIENT_REQ(0xd);
3442                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3443                         }
3444                         tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3445                         tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3446                         tmp |= LC_GEN2_EN_STRAP;
3447
3448                         tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3449                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3450                         udelay(10);
3451                         tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3452                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3453                 } else {
3454                         if (!pi->boot_in_gen2) {
3455                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3456                                 bif |= CG_CLIENT_REQ(0xd);
3457                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3458
3459                                 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3460                                 tmp &= ~LC_GEN2_EN_STRAP;
3461                         }
3462                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3463                 }
3464         }
3465 }
3466
3467 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3468                                         bool enable)
3469 {
3470         ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3471
3472         if (enable)
3473                 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3474         else
3475                 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3476 }
3477
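/*
 * Reprogram the UVD clocks either before or after the engine clock
 * switch, depending on whether the top-level sclk is going down or up,
 * apparently so that VCLK/DCLK are always changed while the higher of
 * the two engine clocks is active.  Nothing is done when vclk/dclk are
 * unchanged.
 */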
3478 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3479                                            struct radeon_ps *new_ps,
3480                                            struct radeon_ps *old_ps)
3481 {
3482         struct ni_ps *new_state = ni_get_ps(new_ps);
3483         struct ni_ps *current_state = ni_get_ps(old_ps);
3484
3485         if ((new_ps->vclk == old_ps->vclk) &&
3486             (new_ps->dclk == old_ps->dclk))
3487                 return;
3488
3489         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3490             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3491                 return;
3492
3493         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3494 }
3495
3496 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3497                                           struct radeon_ps *new_ps,
3498                                           struct radeon_ps *old_ps)
3499 {
3500         struct ni_ps *new_state = ni_get_ps(new_ps);
3501         struct ni_ps *current_state = ni_get_ps(old_ps);
3502
3503         if ((new_ps->vclk == old_ps->vclk) &&
3504             (new_ps->dclk == old_ps->dclk))
3505                 return;
3506
3507         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3508             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3509                 return;
3510
3511         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3512 }
3513
3514 void ni_dpm_setup_asic(struct radeon_device *rdev)
3515 {
3516         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3517
3518         ni_read_clock_registers(rdev);
3519         btc_read_arb_registers(rdev);
3520         rv770_get_memory_type(rdev);
3521         if (eg_pi->pcie_performance_request)
3522                 ni_advertise_gen2_capability(rdev);
3523         rv770_get_pcie_gen2_status(rdev);
3524         rv770_enable_acpi_pm(rdev);
3525 }
3526
3527 void ni_update_current_ps(struct radeon_device *rdev,
3528                           struct radeon_ps *rps)
3529 {
3530         struct ni_ps *new_ps = ni_get_ps(rps);
3531         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3532         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3533
3534         eg_pi->current_rps = *rps;
3535         ni_pi->current_ps = *new_ps;
3536         eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3537 }
3538
3539 void ni_update_requested_ps(struct radeon_device *rdev,
3540                             struct radeon_ps *rps)
3541 {
3542         struct ni_ps *new_ps = ni_get_ps(rps);
3543         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3544         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3545
3546         eg_pi->requested_rps = *rps;
3547         ni_pi->requested_ps = *new_ps;
3548         eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3549 }
3550
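/*
 * Full DPM bring-up: default clock gating, voltage tables, MC register
 * table, spread spectrum, thermal protection, arbitration and timing
 * programming, firmware upload, SMC/arb/MC/CAC table initialization, TDP
 * limits, SMC start, DPM start, clock gating enable and thermal
 * interrupt setup, finishing by recording the boot state as current.
 * Bails out early if DPM is already enabled.
 */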
3551 int ni_dpm_enable(struct radeon_device *rdev)
3552 {
3553         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3554         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3555         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3556         int ret;
3557
3558         if (pi->gfx_clock_gating)
3559                 ni_cg_clockgating_default(rdev);
3560         if (btc_dpm_enabled(rdev))
3561                 return -EINVAL;
3562         if (pi->mg_clock_gating)
3563                 ni_mg_clockgating_default(rdev);
3564         if (eg_pi->ls_clock_gating)
3565                 ni_ls_clockgating_default(rdev);
3566         if (pi->voltage_control) {
3567                 rv770_enable_voltage_control(rdev, true);
3568                 ret = cypress_construct_voltage_tables(rdev);
3569                 if (ret) {
3570                         DRM_ERROR("cypress_construct_voltage_tables failed\n");
3571                         return ret;
3572                 }
3573         }
3574         if (eg_pi->dynamic_ac_timing) {
3575                 ret = ni_initialize_mc_reg_table(rdev);
3576                 if (ret)
3577                         eg_pi->dynamic_ac_timing = false;
3578         }
3579         if (pi->dynamic_ss)
3580                 cypress_enable_spread_spectrum(rdev, true);
3581         if (pi->thermal_protection)
3582                 rv770_enable_thermal_protection(rdev, true);
3583         rv770_setup_bsp(rdev);
3584         rv770_program_git(rdev);
3585         rv770_program_tp(rdev);
3586         rv770_program_tpp(rdev);
3587         rv770_program_sstp(rdev);
3588         cypress_enable_display_gap(rdev);
3589         rv770_program_vc(rdev);
3590         if (pi->dynamic_pcie_gen2)
3591                 ni_enable_dynamic_pcie_gen2(rdev, true);
3592         ret = rv770_upload_firmware(rdev);
3593         if (ret) {
3594                 DRM_ERROR("rv770_upload_firmware failed\n");
3595                 return ret;
3596         }
3597         ret = ni_process_firmware_header(rdev);
3598         if (ret) {
3599                 DRM_ERROR("ni_process_firmware_header failed\n");
3600                 return ret;
3601         }
3602         ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3603         if (ret) {
3604                 DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3605                 return ret;
3606         }
3607         ret = ni_init_smc_table(rdev);
3608         if (ret) {
3609                 DRM_ERROR("ni_init_smc_table failed\n");
3610                 return ret;
3611         }
3612         ret = ni_init_smc_spll_table(rdev);
3613         if (ret) {
3614                 DRM_ERROR("ni_init_smc_spll_table failed\n");
3615                 return ret;
3616         }
3617         ret = ni_init_arb_table_index(rdev);
3618         if (ret) {
3619                 DRM_ERROR("ni_init_arb_table_index failed\n");
3620                 return ret;
3621         }
3622         if (eg_pi->dynamic_ac_timing) {
3623                 ret = ni_populate_mc_reg_table(rdev, boot_ps);
3624                 if (ret) {
3625                         DRM_ERROR("ni_populate_mc_reg_table failed\n");
3626                         return ret;
3627                 }
3628         }
3629         ret = ni_initialize_smc_cac_tables(rdev);
3630         if (ret) {
3631                 DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3632                 return ret;
3633         }
3634         ret = ni_initialize_hardware_cac_manager(rdev);
3635         if (ret) {
3636                 DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3637                 return ret;
3638         }
3639         ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3640         if (ret) {
3641                 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3642                 return ret;
3643         }
3644         ni_program_response_times(rdev);
3645         r7xx_start_smc(rdev);
3646         ret = cypress_notify_smc_display_change(rdev, false);
3647         if (ret) {
3648                 DRM_ERROR("cypress_notify_smc_display_change failed\n");
3649                 return ret;
3650         }
3651         cypress_enable_sclk_control(rdev, true);
3652         if (eg_pi->memory_transition)
3653                 cypress_enable_mclk_control(rdev, true);
3654         cypress_start_dpm(rdev);
3655         if (pi->gfx_clock_gating)
3656                 ni_gfx_clockgating_enable(rdev, true);
3657         if (pi->mg_clock_gating)
3658                 ni_mg_clockgating_enable(rdev, true);
3659         if (eg_pi->ls_clock_gating)
3660                 ni_ls_clockgating_enable(rdev, true);
3661
3662         if (rdev->irq.installed &&
3663             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3664                 PPSMC_Result result;
3665
3666                 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
3667                 if (ret)
3668                         return ret;
3669                 rdev->irq.dpm_thermal = true;
3670                 radeon_irq_set(rdev);
3671                 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3672
3673                 if (result != PPSMC_Result_OK)
3674                         DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
3675         }
3676
3677         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3678
3679         ni_update_current_ps(rdev, boot_ps);
3680
3681         return 0;
3682 }
3683
3684 void ni_dpm_disable(struct radeon_device *rdev)
3685 {
3686         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3687         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3688         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3689
3690         if (!btc_dpm_enabled(rdev))
3691                 return;
3692         rv770_clear_vc(rdev);
3693         if (pi->thermal_protection)
3694                 rv770_enable_thermal_protection(rdev, false);
3695         ni_enable_power_containment(rdev, boot_ps, false);
3696         ni_enable_smc_cac(rdev, boot_ps, false);
3697         cypress_enable_spread_spectrum(rdev, false);
3698         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3699         if (pi->dynamic_pcie_gen2)
3700                 ni_enable_dynamic_pcie_gen2(rdev, false);
3701
3702         if (rdev->irq.installed &&
3703             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3704                 rdev->irq.dpm_thermal = false;
3705                 radeon_irq_set(rdev);
3706         }
3707
3708         if (pi->gfx_clock_gating)
3709                 ni_gfx_clockgating_enable(rdev, false);
3710         if (pi->mg_clock_gating)
3711                 ni_mg_clockgating_enable(rdev, false);
3712         if (eg_pi->ls_clock_gating)
3713                 ni_ls_clockgating_enable(rdev, false);
3714         ni_stop_dpm(rdev);
3715         btc_reset_to_default(rdev);
3716         ni_stop_smc(rdev);
3717         ni_force_switch_to_arb_f0(rdev);
3718
3719         ni_update_current_ps(rdev, boot_ps);
3720 }
3721
3722 int ni_power_control_set_level(struct radeon_device *rdev)
3723 {
3724         struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3725         int ret;
3726
3727         ret = ni_restrict_performance_levels_before_switch(rdev);
3728         if (ret)
3729                 return ret;
3730         ret = rv770_halt_smc(rdev);
3731         if (ret)
3732                 return ret;
3733         ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3734         if (ret)
3735                 return ret;
3736         ret = rv770_resume_smc(rdev);
3737         if (ret)
3738                 return ret;
3739         rv770_set_sw_state(rdev);
3740
3741         return 0;
3742 }
3743
3744 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3745 {
3746         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3747         struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3748         struct radeon_ps *new_ps = &requested_ps;
3749
3750         ni_update_requested_ps(rdev, new_ps);
3751
3752         ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3753
3754         return 0;
3755 }
3756
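/*
 * Switch to the requested power state: restrict performance levels, set
 * the UVD clocks before the switch when needed, disable power
 * containment and CAC, halt the SMC, notify it of the UVD state when
 * supported, upload the new software state (and MC registers when
 * dynamic AC timing is used), reprogram memory timings and TDP limits,
 * resume the SMC, switch states and re-enable CAC and power containment.
 */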
3757 int ni_dpm_set_power_state(struct radeon_device *rdev)
3758 {
3759         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3760         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3761         struct radeon_ps *old_ps = &eg_pi->current_rps;
3762         int ret;
3763
3764         ret = ni_restrict_performance_levels_before_switch(rdev);
3765         if (ret) {
3766                 DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
3767                 return ret;
3768         }
3769         ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
3770         ret = ni_enable_power_containment(rdev, new_ps, false);
3771         if (ret) {
3772                 DRM_ERROR("ni_enable_power_containment failed\n");
3773                 return ret;
3774         }
3775         ret = ni_enable_smc_cac(rdev, new_ps, false);
3776         if (ret) {
3777                 DRM_ERROR("ni_enable_smc_cac failed\n");
3778                 return ret;
3779         }
3780         ret = rv770_halt_smc(rdev);
3781         if (ret) {
3782                 DRM_ERROR("rv770_halt_smc failed\n");
3783                 return ret;
3784         }
3785         if (eg_pi->smu_uvd_hs)
3786                 btc_notify_uvd_to_smc(rdev, new_ps);
3787         ret = ni_upload_sw_state(rdev, new_ps);
3788         if (ret) {
3789                 DRM_ERROR("ni_upload_sw_state failed\n");
3790                 return ret;
3791         }
3792         if (eg_pi->dynamic_ac_timing) {
3793                 ret = ni_upload_mc_reg_table(rdev, new_ps);
3794                 if (ret) {
3795                         DRM_ERROR("ni_upload_mc_reg_table failed\n");
3796                         return ret;
3797                 }
3798         }
3799         ret = ni_program_memory_timing_parameters(rdev, new_ps);
3800         if (ret) {
3801                 DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3802                 return ret;
3803         }
3804         ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3805         if (ret) {
3806                 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3807                 return ret;
3808         }
3809         ret = rv770_resume_smc(rdev);
3810         if (ret) {
3811                 DRM_ERROR("rv770_resume_smc failed\n");
3812                 return ret;
3813         }
3814         ret = rv770_set_sw_state(rdev);
3815         if (ret) {
3816                 DRM_ERROR("rv770_set_sw_state failed\n");
3817                 return ret;
3818         }
3819         ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
3820         ret = ni_enable_smc_cac(rdev, new_ps, true);
3821         if (ret) {
3822                 DRM_ERROR("ni_enable_smc_cac failed\n");
3823                 return ret;
3824         }
3825         ret = ni_enable_power_containment(rdev, new_ps, true);
3826         if (ret) {
3827                 DRM_ERROR("ni_enable_power_containment failed\n");
3828                 return ret;
3829         }
3830
3831 #if 0
3832         /* XXX */
3833         ret = ni_unrestrict_performance_levels_after_switch(rdev);
3834         if (ret) {
3835                 DRM_ERROR("ni_unrestrict_performance_levels_after_switch failed\n");
3836                 return ret;
3837         }
3838 #endif
3839
3840         return 0;
3841 }
3842
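/* The switch succeeded; record the requested state as the new current state. */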
3843 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3844 {
3845         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3846         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3847
3848         ni_update_current_ps(rdev, new_ps);
3849 }
3850
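/* Drop to a restricted performance level and re-enter the boot power state. */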
3851 void ni_dpm_reset_asic(struct radeon_device *rdev)
3852 {
3853         ni_restrict_performance_levels_before_switch(rdev);
3854         rv770_set_boot_state(rdev);
3855 }
3856
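/* Overlay unions covering the different ATOM PowerPlay table and entry revisions. */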
3857 union power_info {
3858         struct _ATOM_POWERPLAY_INFO info;
3859         struct _ATOM_POWERPLAY_INFO_V2 info_2;
3860         struct _ATOM_POWERPLAY_INFO_V3 info_3;
3861         struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3862         struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3863         struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
3864 };
3865
3866 union pplib_clock_info {
3867         struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3868         struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3869         struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3870         struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
3871 };
3872
3873 union pplib_power_state {
3874         struct _ATOM_PPLIB_STATE v1;
3875         struct _ATOM_PPLIB_STATE_V2 v2;
3876 };
3877
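/*
 * Fill in the generic (non-clock) fields of a radeon_ps from the ATOM
 * non-clock info entry: caps, classification and UVD vclk/dclk (older
 * table revisions fall back to the default UVD clocks), and remember
 * the boot and UVD states for later use.
 */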
3878 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3879                                           struct radeon_ps *rps,
3880                                           struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3881                                           u8 table_rev)
3882 {
3883         rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3884         rps->class = le16_to_cpu(non_clock_info->usClassification);
3885         rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3886
3887         if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3888                 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3889                 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3890         } else if (r600_is_uvd_state(rps->class, rps->class2)) {
3891                 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3892                 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3893         } else {
3894                 rps->vclk = 0;
3895                 rps->dclk = 0;
3896         }
3897
3898         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3899                 rdev->pm.dpm.boot_ps = rps;
3900         if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3901                 rdev->pm.dpm.uvd_ps = rps;
3902 }
3903
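/*
 * Decode one performance level (sclk, mclk, vddc, vddci, flags) from an
 * ATOM clock info entry.  VDDC values of 0xff01 are replaced with the
 * maximum VDDC reported by ATOM, boot states get the default clocks and
 * voltages, and ACPI, ULV and AC-limit bookkeeping is updated along the way.
 */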
3904 static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3905                                       struct radeon_ps *rps, int index,
3906                                       union pplib_clock_info *clock_info)
3907 {
3908         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3909         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3910         struct ni_ps *ps = ni_get_ps(rps);
3911         u16 vddc;
3912         struct rv7xx_pl *pl = &ps->performance_levels[index];
3913
3914         ps->performance_level_count = index + 1;
3915
3916         pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3917         pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3918         pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3919         pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3920
3921         pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3922         pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3923         pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3924
3925         /* patch up vddc if necessary */
3926         if (pl->vddc == 0xff01) {
3927                 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
3928                         pl->vddc = vddc;
3929         }
3930
3931         if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3932                 pi->acpi_vddc = pl->vddc;
3933                 eg_pi->acpi_vddci = pl->vddci;
3934                 if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3935                         pi->acpi_pcie_gen2 = true;
3936                 else
3937                         pi->acpi_pcie_gen2 = false;
3938         }
3939
3940         if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3941                 eg_pi->ulv.supported = true;
3942                 eg_pi->ulv.pl = pl;
3943         }
3944
3945         if (pi->min_vddc_in_table > pl->vddc)
3946                 pi->min_vddc_in_table = pl->vddc;
3947
3948         if (pi->max_vddc_in_table < pl->vddc)
3949                 pi->max_vddc_in_table = pl->vddc;
3950
3951         /* patch up boot state */
3952         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3953                 u16 vddc, vddci, mvdd;
3954                 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3955                 pl->mclk = rdev->clock.default_mclk;
3956                 pl->sclk = rdev->clock.default_sclk;
3957                 pl->vddc = vddc;
3958                 pl->vddci = vddci;
3959         }
3960
3961         if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
3962             ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
3963                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
3964                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
3965                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
3966                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
3967         }
3968 }
3969
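/*
 * Walk the ATOM PowerPlay state array and build rdev->pm.dpm.ps[],
 * allocating a driver-private ni_ps for each state and filling its
 * performance levels from the referenced clock info entries.
 */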
3970 static int ni_parse_power_table(struct radeon_device *rdev)
3971 {
3972         struct radeon_mode_info *mode_info = &rdev->mode_info;
3973         struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
3974         union pplib_power_state *power_state;
3975         int i, j;
3976         union pplib_clock_info *clock_info;
3977         union power_info *power_info;
3978         int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
3979         u16 data_offset;
3980         u8 frev, crev;
3981         struct ni_ps *ps;
3982
3983         if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
3984                                    &frev, &crev, &data_offset))
3985                 return -EINVAL;
3986         power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
3987
3988         rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
3989                                   power_info->pplib.ucNumStates, GFP_KERNEL);
3990         if (!rdev->pm.dpm.ps)
3991                 return -ENOMEM;
3992         rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
3993         rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
3994         rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
3995
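	/*
	 * ucStateEntrySize is the non-clock state index byte plus one byte
	 * per clock info index, so "ucStateEntrySize - 1" below is the
	 * number of performance levels in each state.
	 */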
3996         for (i = 0; i < power_info->pplib.ucNumStates; i++) {
3997                 power_state = (union pplib_power_state *)
3998                         (mode_info->atom_context->bios + data_offset +
3999                          le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4000                          i * power_info->pplib.ucStateEntrySize);
4001                 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4002                         (mode_info->atom_context->bios + data_offset +
4003                          le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4004                          (power_state->v1.ucNonClockStateIndex *
4005                           power_info->pplib.ucNonClockSize));
4006                 if (power_info->pplib.ucStateEntrySize - 1) {
4007                         ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4008                         if (ps == NULL) {
4009                                 kfree(rdev->pm.dpm.ps);
4010                                 return -ENOMEM;
4011                         }
4012                         rdev->pm.dpm.ps[i].ps_priv = ps;
4013                         ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4014                                                          non_clock_info,
4015                                                          power_info->pplib.ucNonClockSize);
4016                         for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4017                                 clock_info = (union pplib_clock_info *)
4018                                         (mode_info->atom_context->bios + data_offset +
4019                                          le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4020                                          (power_state->v1.ucClockStateIndices[j] *
4021                                           power_info->pplib.ucClockInfoSize));
4022                                 ni_parse_pplib_clock_info(rdev,
4023                                                           &rdev->pm.dpm.ps[i], j,
4024                                                           clock_info);
4025                         }
4026                 }
4027         }
4028         rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4029         return 0;
4030 }
4031
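/*
 * One-time DPM setup for Cayman: allocate the power info structures,
 * parse the BIOS power tables, and fill in the chip defaults (display
 * clock/voltage table, memory clock thresholds, clock gating and CAC
 * configuration) used by the rest of this file.  Note that ni_pi is
 * not freed on the early error returns below.
 */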
4032 int ni_dpm_init(struct radeon_device *rdev)
4033 {
4034         struct rv7xx_power_info *pi;
4035         struct evergreen_power_info *eg_pi;
4036         struct ni_power_info *ni_pi;
4037         int index = GetIndexIntoMasterTable(DATA, ASIC_InternalSS_Info);
4038         u16 data_offset, size;
4039         u8 frev, crev;
4040         struct atom_clock_dividers dividers;
4041         int ret;
4042
4043         ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4044         if (ni_pi == NULL)
4045                 return -ENOMEM;
4046         rdev->pm.dpm.priv = ni_pi;
4047         eg_pi = &ni_pi->eg;
4048         pi = &eg_pi->rv7xx;
4049
4050         rv770_get_max_vddc(rdev);
4051
4052         eg_pi->ulv.supported = false;
4053         pi->acpi_vddc = 0;
4054         eg_pi->acpi_vddci = 0;
4055         pi->min_vddc_in_table = 0;
4056         pi->max_vddc_in_table = 0;
4057
4058         ret = ni_parse_power_table(rdev);
4059         if (ret)
4060                 return ret;
4061         ret = r600_parse_extended_power_table(rdev);
4062         if (ret)
4063                 return ret;
4064
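	/*
	 * Hard-coded display clock -> minimum VDDC dependency table
	 * (clocks in 10 kHz units, voltages in mV).
	 */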
4065         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4066                 kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4067         if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4068                 r600_free_extended_power_table(rdev);
4069                 return -ENOMEM;
4070         }
4071         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4072         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4073         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4074         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4075         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4076         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4077         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4078         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4079         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4080
4081         ni_patch_dependency_tables_based_on_leakage(rdev);
4082
4083         if (rdev->pm.dpm.voltage_response_time == 0)
4084                 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4085         if (rdev->pm.dpm.backbias_response_time == 0)
4086                 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4087
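	/*
	 * Engine PLL reference divider.  As written, the ATOM-reported
	 * divider is only used when the query fails; on success the R600
	 * default is used instead.
	 */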
4088         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4089                                              0, false, &dividers);
4090         if (ret)
4091                 pi->ref_div = dividers.ref_div + 1;
4092         else
4093                 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4094
4095         pi->rlp = RV770_RLP_DFLT;
4096         pi->rmp = RV770_RMP_DFLT;
4097         pi->lhp = RV770_LHP_DFLT;
4098         pi->lmp = RV770_LMP_DFLT;
4099
4100         eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4101         eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4102         eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4103         eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4104
4105         eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4106         eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4107         eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4108         eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4109
4110         eg_pi->smu_uvd_hs = true;
4111
4112         if (rdev->pdev->device == 0x6707) {
4113                 pi->mclk_strobe_mode_threshold = 55000;
4114                 pi->mclk_edc_enable_threshold = 55000;
4115                 eg_pi->mclk_edc_wr_enable_threshold = 55000;
4116         } else {
4117                 pi->mclk_strobe_mode_threshold = 40000;
4118                 pi->mclk_edc_enable_threshold = 40000;
4119                 eg_pi->mclk_edc_wr_enable_threshold = 40000;
4120         }
4121         ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4122
4123         pi->voltage_control =
4124                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4125
4126         pi->mvdd_control =
4127                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4128
4129         eg_pi->vddci_control =
4130                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4131
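	/* Spread spectrum is enabled only when the BIOS exposes an ASIC_InternalSS_Info table. */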
4132         if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size,
4133                                    &frev, &crev, &data_offset)) {
4134                 pi->sclk_ss = true;
4135                 pi->mclk_ss = true;
4136                 pi->dynamic_ss = true;
4137         } else {
4138                 pi->sclk_ss = false;
4139                 pi->mclk_ss = false;
4140                 pi->dynamic_ss = true;
4141         }
4142
4143         pi->asi = RV770_ASI_DFLT;
4144         pi->pasi = CYPRESS_HASI_DFLT;
4145         pi->vrc = CYPRESS_VRC_DFLT;
4146
4147         pi->power_gating = false;
4148
4149         pi->gfx_clock_gating = true;
4150
4151         pi->mg_clock_gating = true;
4152         pi->mgcgtssm = true;
4153         eg_pi->ls_clock_gating = false;
4154         eg_pi->sclk_deep_sleep = false;
4155
4156         pi->dynamic_pcie_gen2 = true;
4157
4158         if (pi->gfx_clock_gating &&
4159             (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
4160                 pi->thermal_protection = true;
4161         else
4162                 pi->thermal_protection = false;
4163
4164         pi->display_gap = true;
4165
4166         pi->dcodt = true;
4167
4168         pi->ulps = true;
4169
4170         eg_pi->dynamic_ac_timing = true;
4171         eg_pi->abm = true;
4172         eg_pi->mcls = true;
4173         eg_pi->light_sleep = true;
4174         eg_pi->memory_transition = true;
4175 #if defined(CONFIG_ACPI)
4176         eg_pi->pcie_performance_request =
4177                 radeon_acpi_is_pcie_performance_request_supported(rdev);
4178 #else
4179         eg_pi->pcie_performance_request = false;
4180 #endif
4181
4182         eg_pi->dll_default_on = false;
4183
4184         eg_pi->sclk_deep_sleep = false;
4185
4186         pi->mclk_stutter_mode_threshold = 0;
4187
4188         pi->sram_end = SMC_RAM_END;
4189
4190         rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4191         rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4192         rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4193         rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4194         rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4195         rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4196         rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4197         rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4198
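	/* Coefficients for the Cayman CAC leakage estimate. */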
4199         ni_pi->cac_data.leakage_coefficients.at = 516;
4200         ni_pi->cac_data.leakage_coefficients.bt = 18;
4201         ni_pi->cac_data.leakage_coefficients.av = 51;
4202         ni_pi->cac_data.leakage_coefficients.bv = 2957;
4203
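	/* Select the CAC weight table by PCI device ID; unknown Cayman IDs default to the Pro weights. */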
4204         switch (rdev->pdev->device) {
4205         case 0x6700:
4206         case 0x6701:
4207         case 0x6702:
4208         case 0x6703:
4209         case 0x6718:
4210                 ni_pi->cac_weights = &cac_weights_cayman_xt;
4211                 break;
4212         case 0x6705:
4213         case 0x6719:
4214         case 0x671D:
4215         case 0x671C:
4216         default:
4217                 ni_pi->cac_weights = &cac_weights_cayman_pro;
4218                 break;
4219         case 0x6704:
4220         case 0x6706:
4221         case 0x6707:
4222         case 0x6708:
4223         case 0x6709:
4224                 ni_pi->cac_weights = &cac_weights_cayman_le;
4225                 break;
4226         }
4227
4228         if (ni_pi->cac_weights->enable_power_containment_by_default) {
4229                 ni_pi->enable_power_containment = true;
4230                 ni_pi->enable_cac = true;
4231                 ni_pi->enable_sq_ramping = true;
4232         } else {
4233                 ni_pi->enable_power_containment = false;
4234                 ni_pi->enable_cac = false;
4235                 ni_pi->enable_sq_ramping = false;
4236         }
4237
4238         ni_pi->driver_calculate_cac_leakage = false;
4239         ni_pi->cac_configuration_required = true;
4240
4241         if (ni_pi->cac_configuration_required) {
4242                 ni_pi->support_cac_long_term_average = true;
4243                 ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4244                 ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4245         } else {
4246                 ni_pi->support_cac_long_term_average = false;
4247                 ni_pi->lta_window_size = 0;
4248                 ni_pi->lts_truncate = 0;
4249         }
4250
4251         ni_pi->use_power_boost_limit = true;
4252
4253         return 0;
4254 }
4255
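/* Free everything allocated by ni_dpm_init() and ni_parse_power_table(). */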
4256 void ni_dpm_fini(struct radeon_device *rdev)
4257 {
4258         int i;
4259
4260         for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4261                 kfree(rdev->pm.dpm.ps[i].ps_priv);
4262         }
4263         kfree(rdev->pm.dpm.ps);
4264         kfree(rdev->pm.dpm.priv);
4265         kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4266         r600_free_extended_power_table(rdev);
4267 }
4268
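/* Dump a power state (class, caps, UVD clocks and per-level clocks/voltages) to the kernel log. */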
4269 void ni_dpm_print_power_state(struct radeon_device *rdev,
4270                               struct radeon_ps *rps)
4271 {
4272         struct ni_ps *ps = ni_get_ps(rps);
4273         struct rv7xx_pl *pl;
4274         int i;
4275
4276         r600_dpm_print_class_info(rps->class, rps->class2);
4277         r600_dpm_print_cap_info(rps->caps);
4278         printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4279         for (i = 0; i < ps->performance_level_count; i++) {
4280                 pl = &ps->performance_levels[i];
4281                 if (rdev->family >= CHIP_TAHITI)
4282                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4283                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4284                 else
4285                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4286                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4287         }
4288         r600_dpm_print_ps_status(rdev, rps);
4289 }
4290
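/*
 * ni_dpm_get_sclk()/ni_dpm_get_mclk() report the engine/memory clock of
 * the lowest (low = true) or highest performance level of the currently
 * requested state.
 */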
4291 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4292 {
4293         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4294         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4295
4296         if (low)
4297                 return requested_state->performance_levels[0].sclk;
4298         else
4299                 return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4300 }
4301
4302 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4303 {
4304         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4305         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4306
4307         if (low)
4308                 return requested_state->performance_levels[0].mclk;
4309         else
4310                 return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4311 }
4312