drivers/gpu/drm/radeon/ni_dpm.c
1 /*
2  * Copyright 2012 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23
24 #include "drmP.h"
25 #include "radeon.h"
26 #include "nid.h"
27 #include "r600_dpm.h"
28 #include "ni_dpm.h"
29 #include "atom.h"
30 #include <linux/math64.h>
31 #include <linux/seq_file.h>
32
33 #define MC_CG_ARB_FREQ_F0           0x0a
34 #define MC_CG_ARB_FREQ_F1           0x0b
35 #define MC_CG_ARB_FREQ_F2           0x0c
36 #define MC_CG_ARB_FREQ_F3           0x0d
37
38 #define SMC_RAM_END 0xC000
39
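/*
 * Per-SKU CAC weight tables for the Cayman XT, Pro and LE variants.
 * These values are written into the SMC CAC weight registers by the
 * CAC init code and feed the SMC's dynamic power estimation.
 */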
40 static const struct ni_cac_weights cac_weights_cayman_xt =
41 {
42         0x15,
43         0x2,
44         0x19,
45         0x2,
46         0x8,
47         0x14,
48         0x2,
49         0x16,
50         0xE,
51         0x17,
52         0x13,
53         0x2B,
54         0x10,
55         0x7,
56         0x5,
57         0x5,
58         0x5,
59         0x2,
60         0x3,
61         0x9,
62         0x10,
63         0x10,
64         0x2B,
65         0xA,
66         0x9,
67         0x4,
68         0xD,
69         0xD,
70         0x3E,
71         0x18,
72         0x14,
73         0,
74         0x3,
75         0x3,
76         0x5,
77         0,
78         0x2,
79         0,
80         0,
81         0,
82         0,
83         0,
84         0,
85         0,
86         0,
87         0,
88         0x1CC,
89         0,
90         0x164,
91         1,
92         1,
93         1,
94         1,
95         12,
96         12,
97         12,
98         0x12,
99         0x1F,
100         132,
101         5,
102         7,
103         0,
104         { 0, 0, 0, 0, 0, 0, 0, 0 },
105         { 0, 0, 0, 0 },
106         true
107 };
108
109 static const struct ni_cac_weights cac_weights_cayman_pro =
110 {
111         0x16,
112         0x4,
113         0x10,
114         0x2,
115         0xA,
116         0x16,
117         0x2,
118         0x18,
119         0x10,
120         0x1A,
121         0x16,
122         0x2D,
123         0x12,
124         0xA,
125         0x6,
126         0x6,
127         0x6,
128         0x2,
129         0x4,
130         0xB,
131         0x11,
132         0x11,
133         0x2D,
134         0xC,
135         0xC,
136         0x7,
137         0x10,
138         0x10,
139         0x3F,
140         0x1A,
141         0x16,
142         0,
143         0x7,
144         0x4,
145         0x6,
146         1,
147         0x2,
148         0x1,
149         0,
150         0,
151         0,
152         0,
153         0,
154         0,
155         0x30,
156         0,
157         0x1CF,
158         0,
159         0x166,
160         1,
161         1,
162         1,
163         1,
164         12,
165         12,
166         12,
167         0x15,
168         0x1F,
169         132,
170         6,
171         6,
172         0,
173         { 0, 0, 0, 0, 0, 0, 0, 0 },
174         { 0, 0, 0, 0 },
175         true
176 };
177
178 static const struct ni_cac_weights cac_weights_cayman_le =
179 {
180         0x7,
181         0xE,
182         0x1,
183         0xA,
184         0x1,
185         0x3F,
186         0x2,
187         0x18,
188         0x10,
189         0x1A,
190         0x1,
191         0x3F,
192         0x1,
193         0xE,
194         0x6,
195         0x6,
196         0x6,
197         0x2,
198         0x4,
199         0x9,
200         0x1A,
201         0x1A,
202         0x2C,
203         0xA,
204         0x11,
205         0x8,
206         0x19,
207         0x19,
208         0x1,
209         0x1,
210         0x1A,
211         0,
212         0x8,
213         0x5,
214         0x8,
215         0x1,
216         0x3,
217         0x1,
218         0,
219         0,
220         0,
221         0,
222         0,
223         0,
224         0x38,
225         0x38,
226         0x239,
227         0x3,
228         0x18A,
229         1,
230         1,
231         1,
232         1,
233         12,
234         12,
235         12,
236         0x15,
237         0x22,
238         132,
239         6,
240         6,
241         0,
242         { 0, 0, 0, 0, 0, 0, 0, 0 },
243         { 0, 0, 0, 0 },
244         true
245 };
246
247 #define NISLANDS_MGCG_SEQUENCE  300
248
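/*
 * The clock-gating / light-sleep tables below are sequences of
 * { register offset, value, write mask } triples applied by
 * btc_program_mgcg_hw_sequence(); the *_LENGTH macros count triples.
 */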
249 static const u32 cayman_cgcg_cgls_default[] =
250 {
251         0x000008f8, 0x00000010, 0xffffffff,
252         0x000008fc, 0x00000000, 0xffffffff,
253         0x000008f8, 0x00000011, 0xffffffff,
254         0x000008fc, 0x00000000, 0xffffffff,
255         0x000008f8, 0x00000012, 0xffffffff,
256         0x000008fc, 0x00000000, 0xffffffff,
257         0x000008f8, 0x00000013, 0xffffffff,
258         0x000008fc, 0x00000000, 0xffffffff,
259         0x000008f8, 0x00000014, 0xffffffff,
260         0x000008fc, 0x00000000, 0xffffffff,
261         0x000008f8, 0x00000015, 0xffffffff,
262         0x000008fc, 0x00000000, 0xffffffff,
263         0x000008f8, 0x00000016, 0xffffffff,
264         0x000008fc, 0x00000000, 0xffffffff,
265         0x000008f8, 0x00000017, 0xffffffff,
266         0x000008fc, 0x00000000, 0xffffffff,
267         0x000008f8, 0x00000018, 0xffffffff,
268         0x000008fc, 0x00000000, 0xffffffff,
269         0x000008f8, 0x00000019, 0xffffffff,
270         0x000008fc, 0x00000000, 0xffffffff,
271         0x000008f8, 0x0000001a, 0xffffffff,
272         0x000008fc, 0x00000000, 0xffffffff,
273         0x000008f8, 0x0000001b, 0xffffffff,
274         0x000008fc, 0x00000000, 0xffffffff,
275         0x000008f8, 0x00000020, 0xffffffff,
276         0x000008fc, 0x00000000, 0xffffffff,
277         0x000008f8, 0x00000021, 0xffffffff,
278         0x000008fc, 0x00000000, 0xffffffff,
279         0x000008f8, 0x00000022, 0xffffffff,
280         0x000008fc, 0x00000000, 0xffffffff,
281         0x000008f8, 0x00000023, 0xffffffff,
282         0x000008fc, 0x00000000, 0xffffffff,
283         0x000008f8, 0x00000024, 0xffffffff,
284         0x000008fc, 0x00000000, 0xffffffff,
285         0x000008f8, 0x00000025, 0xffffffff,
286         0x000008fc, 0x00000000, 0xffffffff,
287         0x000008f8, 0x00000026, 0xffffffff,
288         0x000008fc, 0x00000000, 0xffffffff,
289         0x000008f8, 0x00000027, 0xffffffff,
290         0x000008fc, 0x00000000, 0xffffffff,
291         0x000008f8, 0x00000028, 0xffffffff,
292         0x000008fc, 0x00000000, 0xffffffff,
293         0x000008f8, 0x00000029, 0xffffffff,
294         0x000008fc, 0x00000000, 0xffffffff,
295         0x000008f8, 0x0000002a, 0xffffffff,
296         0x000008fc, 0x00000000, 0xffffffff,
297         0x000008f8, 0x0000002b, 0xffffffff,
298         0x000008fc, 0x00000000, 0xffffffff
299 };
300 #define CAYMAN_CGCG_CGLS_DEFAULT_LENGTH (sizeof(cayman_cgcg_cgls_default) / (3 * sizeof(u32)))
301
302 static const u32 cayman_cgcg_cgls_disable[] =
303 {
304         0x000008f8, 0x00000010, 0xffffffff,
305         0x000008fc, 0xffffffff, 0xffffffff,
306         0x000008f8, 0x00000011, 0xffffffff,
307         0x000008fc, 0xffffffff, 0xffffffff,
308         0x000008f8, 0x00000012, 0xffffffff,
309         0x000008fc, 0xffffffff, 0xffffffff,
310         0x000008f8, 0x00000013, 0xffffffff,
311         0x000008fc, 0xffffffff, 0xffffffff,
312         0x000008f8, 0x00000014, 0xffffffff,
313         0x000008fc, 0xffffffff, 0xffffffff,
314         0x000008f8, 0x00000015, 0xffffffff,
315         0x000008fc, 0xffffffff, 0xffffffff,
316         0x000008f8, 0x00000016, 0xffffffff,
317         0x000008fc, 0xffffffff, 0xffffffff,
318         0x000008f8, 0x00000017, 0xffffffff,
319         0x000008fc, 0xffffffff, 0xffffffff,
320         0x000008f8, 0x00000018, 0xffffffff,
321         0x000008fc, 0xffffffff, 0xffffffff,
322         0x000008f8, 0x00000019, 0xffffffff,
323         0x000008fc, 0xffffffff, 0xffffffff,
324         0x000008f8, 0x0000001a, 0xffffffff,
325         0x000008fc, 0xffffffff, 0xffffffff,
326         0x000008f8, 0x0000001b, 0xffffffff,
327         0x000008fc, 0xffffffff, 0xffffffff,
328         0x000008f8, 0x00000020, 0xffffffff,
329         0x000008fc, 0x00000000, 0xffffffff,
330         0x000008f8, 0x00000021, 0xffffffff,
331         0x000008fc, 0x00000000, 0xffffffff,
332         0x000008f8, 0x00000022, 0xffffffff,
333         0x000008fc, 0x00000000, 0xffffffff,
334         0x000008f8, 0x00000023, 0xffffffff,
335         0x000008fc, 0x00000000, 0xffffffff,
336         0x000008f8, 0x00000024, 0xffffffff,
337         0x000008fc, 0x00000000, 0xffffffff,
338         0x000008f8, 0x00000025, 0xffffffff,
339         0x000008fc, 0x00000000, 0xffffffff,
340         0x000008f8, 0x00000026, 0xffffffff,
341         0x000008fc, 0x00000000, 0xffffffff,
342         0x000008f8, 0x00000027, 0xffffffff,
343         0x000008fc, 0x00000000, 0xffffffff,
344         0x000008f8, 0x00000028, 0xffffffff,
345         0x000008fc, 0x00000000, 0xffffffff,
346         0x000008f8, 0x00000029, 0xffffffff,
347         0x000008fc, 0x00000000, 0xffffffff,
348         0x000008f8, 0x0000002a, 0xffffffff,
349         0x000008fc, 0x00000000, 0xffffffff,
350         0x000008f8, 0x0000002b, 0xffffffff,
351         0x000008fc, 0x00000000, 0xffffffff,
352         0x00000644, 0x000f7902, 0x001f4180,
353         0x00000644, 0x000f3802, 0x001f4180
354 };
355 #define CAYMAN_CGCG_CGLS_DISABLE_LENGTH (sizeof(cayman_cgcg_cgls_disable) / (3 * sizeof(u32)))
356
357 static const u32 cayman_cgcg_cgls_enable[] =
358 {
359         0x00000644, 0x000f7882, 0x001f4080,
360         0x000008f8, 0x00000010, 0xffffffff,
361         0x000008fc, 0x00000000, 0xffffffff,
362         0x000008f8, 0x00000011, 0xffffffff,
363         0x000008fc, 0x00000000, 0xffffffff,
364         0x000008f8, 0x00000012, 0xffffffff,
365         0x000008fc, 0x00000000, 0xffffffff,
366         0x000008f8, 0x00000013, 0xffffffff,
367         0x000008fc, 0x00000000, 0xffffffff,
368         0x000008f8, 0x00000014, 0xffffffff,
369         0x000008fc, 0x00000000, 0xffffffff,
370         0x000008f8, 0x00000015, 0xffffffff,
371         0x000008fc, 0x00000000, 0xffffffff,
372         0x000008f8, 0x00000016, 0xffffffff,
373         0x000008fc, 0x00000000, 0xffffffff,
374         0x000008f8, 0x00000017, 0xffffffff,
375         0x000008fc, 0x00000000, 0xffffffff,
376         0x000008f8, 0x00000018, 0xffffffff,
377         0x000008fc, 0x00000000, 0xffffffff,
378         0x000008f8, 0x00000019, 0xffffffff,
379         0x000008fc, 0x00000000, 0xffffffff,
380         0x000008f8, 0x0000001a, 0xffffffff,
381         0x000008fc, 0x00000000, 0xffffffff,
382         0x000008f8, 0x0000001b, 0xffffffff,
383         0x000008fc, 0x00000000, 0xffffffff,
384         0x000008f8, 0x00000020, 0xffffffff,
385         0x000008fc, 0xffffffff, 0xffffffff,
386         0x000008f8, 0x00000021, 0xffffffff,
387         0x000008fc, 0xffffffff, 0xffffffff,
388         0x000008f8, 0x00000022, 0xffffffff,
389         0x000008fc, 0xffffffff, 0xffffffff,
390         0x000008f8, 0x00000023, 0xffffffff,
391         0x000008fc, 0xffffffff, 0xffffffff,
392         0x000008f8, 0x00000024, 0xffffffff,
393         0x000008fc, 0xffffffff, 0xffffffff,
394         0x000008f8, 0x00000025, 0xffffffff,
395         0x000008fc, 0xffffffff, 0xffffffff,
396         0x000008f8, 0x00000026, 0xffffffff,
397         0x000008fc, 0xffffffff, 0xffffffff,
398         0x000008f8, 0x00000027, 0xffffffff,
399         0x000008fc, 0xffffffff, 0xffffffff,
400         0x000008f8, 0x00000028, 0xffffffff,
401         0x000008fc, 0xffffffff, 0xffffffff,
402         0x000008f8, 0x00000029, 0xffffffff,
403         0x000008fc, 0xffffffff, 0xffffffff,
404         0x000008f8, 0x0000002a, 0xffffffff,
405         0x000008fc, 0xffffffff, 0xffffffff,
406         0x000008f8, 0x0000002b, 0xffffffff,
407         0x000008fc, 0xffffffff, 0xffffffff
408 };
409 #define CAYMAN_CGCG_CGLS_ENABLE_LENGTH  (sizeof(cayman_cgcg_cgls_enable) / (3 * sizeof(u32)))
410
411 static const u32 cayman_mgcg_default[] =
412 {
413         0x0000802c, 0xc0000000, 0xffffffff,
414         0x00003fc4, 0xc0000000, 0xffffffff,
415         0x00005448, 0x00000100, 0xffffffff,
416         0x000055e4, 0x00000100, 0xffffffff,
417         0x0000160c, 0x00000100, 0xffffffff,
418         0x00008984, 0x06000100, 0xffffffff,
419         0x0000c164, 0x00000100, 0xffffffff,
420         0x00008a18, 0x00000100, 0xffffffff,
421         0x0000897c, 0x06000100, 0xffffffff,
422         0x00008b28, 0x00000100, 0xffffffff,
423         0x00009144, 0x00800200, 0xffffffff,
424         0x00009a60, 0x00000100, 0xffffffff,
425         0x00009868, 0x00000100, 0xffffffff,
426         0x00008d58, 0x00000100, 0xffffffff,
427         0x00009510, 0x00000100, 0xffffffff,
428         0x0000949c, 0x00000100, 0xffffffff,
429         0x00009654, 0x00000100, 0xffffffff,
430         0x00009030, 0x00000100, 0xffffffff,
431         0x00009034, 0x00000100, 0xffffffff,
432         0x00009038, 0x00000100, 0xffffffff,
433         0x0000903c, 0x00000100, 0xffffffff,
434         0x00009040, 0x00000100, 0xffffffff,
435         0x0000a200, 0x00000100, 0xffffffff,
436         0x0000a204, 0x00000100, 0xffffffff,
437         0x0000a208, 0x00000100, 0xffffffff,
438         0x0000a20c, 0x00000100, 0xffffffff,
439         0x00009744, 0x00000100, 0xffffffff,
440         0x00003f80, 0x00000100, 0xffffffff,
441         0x0000a210, 0x00000100, 0xffffffff,
442         0x0000a214, 0x00000100, 0xffffffff,
443         0x000004d8, 0x00000100, 0xffffffff,
444         0x00009664, 0x00000100, 0xffffffff,
445         0x00009698, 0x00000100, 0xffffffff,
446         0x000004d4, 0x00000200, 0xffffffff,
447         0x000004d0, 0x00000000, 0xffffffff,
448         0x000030cc, 0x00000104, 0xffffffff,
449         0x0000d0c0, 0x00000100, 0xffffffff,
450         0x0000d8c0, 0x00000100, 0xffffffff,
451         0x0000802c, 0x40000000, 0xffffffff,
452         0x00003fc4, 0x40000000, 0xffffffff,
453         0x0000915c, 0x00010000, 0xffffffff,
454         0x00009160, 0x00030002, 0xffffffff,
455         0x00009164, 0x00050004, 0xffffffff,
456         0x00009168, 0x00070006, 0xffffffff,
457         0x00009178, 0x00070000, 0xffffffff,
458         0x0000917c, 0x00030002, 0xffffffff,
459         0x00009180, 0x00050004, 0xffffffff,
460         0x0000918c, 0x00010006, 0xffffffff,
461         0x00009190, 0x00090008, 0xffffffff,
462         0x00009194, 0x00070000, 0xffffffff,
463         0x00009198, 0x00030002, 0xffffffff,
464         0x0000919c, 0x00050004, 0xffffffff,
465         0x000091a8, 0x00010006, 0xffffffff,
466         0x000091ac, 0x00090008, 0xffffffff,
467         0x000091b0, 0x00070000, 0xffffffff,
468         0x000091b4, 0x00030002, 0xffffffff,
469         0x000091b8, 0x00050004, 0xffffffff,
470         0x000091c4, 0x00010006, 0xffffffff,
471         0x000091c8, 0x00090008, 0xffffffff,
472         0x000091cc, 0x00070000, 0xffffffff,
473         0x000091d0, 0x00030002, 0xffffffff,
474         0x000091d4, 0x00050004, 0xffffffff,
475         0x000091e0, 0x00010006, 0xffffffff,
476         0x000091e4, 0x00090008, 0xffffffff,
477         0x000091e8, 0x00000000, 0xffffffff,
478         0x000091ec, 0x00070000, 0xffffffff,
479         0x000091f0, 0x00030002, 0xffffffff,
480         0x000091f4, 0x00050004, 0xffffffff,
481         0x00009200, 0x00010006, 0xffffffff,
482         0x00009204, 0x00090008, 0xffffffff,
483         0x00009208, 0x00070000, 0xffffffff,
484         0x0000920c, 0x00030002, 0xffffffff,
485         0x00009210, 0x00050004, 0xffffffff,
486         0x0000921c, 0x00010006, 0xffffffff,
487         0x00009220, 0x00090008, 0xffffffff,
488         0x00009224, 0x00070000, 0xffffffff,
489         0x00009228, 0x00030002, 0xffffffff,
490         0x0000922c, 0x00050004, 0xffffffff,
491         0x00009238, 0x00010006, 0xffffffff,
492         0x0000923c, 0x00090008, 0xffffffff,
493         0x00009240, 0x00070000, 0xffffffff,
494         0x00009244, 0x00030002, 0xffffffff,
495         0x00009248, 0x00050004, 0xffffffff,
496         0x00009254, 0x00010006, 0xffffffff,
497         0x00009258, 0x00090008, 0xffffffff,
498         0x0000925c, 0x00070000, 0xffffffff,
499         0x00009260, 0x00030002, 0xffffffff,
500         0x00009264, 0x00050004, 0xffffffff,
501         0x00009270, 0x00010006, 0xffffffff,
502         0x00009274, 0x00090008, 0xffffffff,
503         0x00009278, 0x00070000, 0xffffffff,
504         0x0000927c, 0x00030002, 0xffffffff,
505         0x00009280, 0x00050004, 0xffffffff,
506         0x0000928c, 0x00010006, 0xffffffff,
507         0x00009290, 0x00090008, 0xffffffff,
508         0x000092a8, 0x00070000, 0xffffffff,
509         0x000092ac, 0x00030002, 0xffffffff,
510         0x000092b0, 0x00050004, 0xffffffff,
511         0x000092bc, 0x00010006, 0xffffffff,
512         0x000092c0, 0x00090008, 0xffffffff,
513         0x000092c4, 0x00070000, 0xffffffff,
514         0x000092c8, 0x00030002, 0xffffffff,
515         0x000092cc, 0x00050004, 0xffffffff,
516         0x000092d8, 0x00010006, 0xffffffff,
517         0x000092dc, 0x00090008, 0xffffffff,
518         0x00009294, 0x00000000, 0xffffffff,
519         0x0000802c, 0x40010000, 0xffffffff,
520         0x00003fc4, 0x40010000, 0xffffffff,
521         0x0000915c, 0x00010000, 0xffffffff,
522         0x00009160, 0x00030002, 0xffffffff,
523         0x00009164, 0x00050004, 0xffffffff,
524         0x00009168, 0x00070006, 0xffffffff,
525         0x00009178, 0x00070000, 0xffffffff,
526         0x0000917c, 0x00030002, 0xffffffff,
527         0x00009180, 0x00050004, 0xffffffff,
528         0x0000918c, 0x00010006, 0xffffffff,
529         0x00009190, 0x00090008, 0xffffffff,
530         0x00009194, 0x00070000, 0xffffffff,
531         0x00009198, 0x00030002, 0xffffffff,
532         0x0000919c, 0x00050004, 0xffffffff,
533         0x000091a8, 0x00010006, 0xffffffff,
534         0x000091ac, 0x00090008, 0xffffffff,
535         0x000091b0, 0x00070000, 0xffffffff,
536         0x000091b4, 0x00030002, 0xffffffff,
537         0x000091b8, 0x00050004, 0xffffffff,
538         0x000091c4, 0x00010006, 0xffffffff,
539         0x000091c8, 0x00090008, 0xffffffff,
540         0x000091cc, 0x00070000, 0xffffffff,
541         0x000091d0, 0x00030002, 0xffffffff,
542         0x000091d4, 0x00050004, 0xffffffff,
543         0x000091e0, 0x00010006, 0xffffffff,
544         0x000091e4, 0x00090008, 0xffffffff,
545         0x000091e8, 0x00000000, 0xffffffff,
546         0x000091ec, 0x00070000, 0xffffffff,
547         0x000091f0, 0x00030002, 0xffffffff,
548         0x000091f4, 0x00050004, 0xffffffff,
549         0x00009200, 0x00010006, 0xffffffff,
550         0x00009204, 0x00090008, 0xffffffff,
551         0x00009208, 0x00070000, 0xffffffff,
552         0x0000920c, 0x00030002, 0xffffffff,
553         0x00009210, 0x00050004, 0xffffffff,
554         0x0000921c, 0x00010006, 0xffffffff,
555         0x00009220, 0x00090008, 0xffffffff,
556         0x00009224, 0x00070000, 0xffffffff,
557         0x00009228, 0x00030002, 0xffffffff,
558         0x0000922c, 0x00050004, 0xffffffff,
559         0x00009238, 0x00010006, 0xffffffff,
560         0x0000923c, 0x00090008, 0xffffffff,
561         0x00009240, 0x00070000, 0xffffffff,
562         0x00009244, 0x00030002, 0xffffffff,
563         0x00009248, 0x00050004, 0xffffffff,
564         0x00009254, 0x00010006, 0xffffffff,
565         0x00009258, 0x00090008, 0xffffffff,
566         0x0000925c, 0x00070000, 0xffffffff,
567         0x00009260, 0x00030002, 0xffffffff,
568         0x00009264, 0x00050004, 0xffffffff,
569         0x00009270, 0x00010006, 0xffffffff,
570         0x00009274, 0x00090008, 0xffffffff,
571         0x00009278, 0x00070000, 0xffffffff,
572         0x0000927c, 0x00030002, 0xffffffff,
573         0x00009280, 0x00050004, 0xffffffff,
574         0x0000928c, 0x00010006, 0xffffffff,
575         0x00009290, 0x00090008, 0xffffffff,
576         0x000092a8, 0x00070000, 0xffffffff,
577         0x000092ac, 0x00030002, 0xffffffff,
578         0x000092b0, 0x00050004, 0xffffffff,
579         0x000092bc, 0x00010006, 0xffffffff,
580         0x000092c0, 0x00090008, 0xffffffff,
581         0x000092c4, 0x00070000, 0xffffffff,
582         0x000092c8, 0x00030002, 0xffffffff,
583         0x000092cc, 0x00050004, 0xffffffff,
584         0x000092d8, 0x00010006, 0xffffffff,
585         0x000092dc, 0x00090008, 0xffffffff,
586         0x00009294, 0x00000000, 0xffffffff,
587         0x0000802c, 0xc0000000, 0xffffffff,
588         0x00003fc4, 0xc0000000, 0xffffffff,
589         0x000008f8, 0x00000010, 0xffffffff,
590         0x000008fc, 0x00000000, 0xffffffff,
591         0x000008f8, 0x00000011, 0xffffffff,
592         0x000008fc, 0x00000000, 0xffffffff,
593         0x000008f8, 0x00000012, 0xffffffff,
594         0x000008fc, 0x00000000, 0xffffffff,
595         0x000008f8, 0x00000013, 0xffffffff,
596         0x000008fc, 0x00000000, 0xffffffff,
597         0x000008f8, 0x00000014, 0xffffffff,
598         0x000008fc, 0x00000000, 0xffffffff,
599         0x000008f8, 0x00000015, 0xffffffff,
600         0x000008fc, 0x00000000, 0xffffffff,
601         0x000008f8, 0x00000016, 0xffffffff,
602         0x000008fc, 0x00000000, 0xffffffff,
603         0x000008f8, 0x00000017, 0xffffffff,
604         0x000008fc, 0x00000000, 0xffffffff,
605         0x000008f8, 0x00000018, 0xffffffff,
606         0x000008fc, 0x00000000, 0xffffffff,
607         0x000008f8, 0x00000019, 0xffffffff,
608         0x000008fc, 0x00000000, 0xffffffff,
609         0x000008f8, 0x0000001a, 0xffffffff,
610         0x000008fc, 0x00000000, 0xffffffff,
611         0x000008f8, 0x0000001b, 0xffffffff,
612         0x000008fc, 0x00000000, 0xffffffff
613 };
614 #define CAYMAN_MGCG_DEFAULT_LENGTH (sizeof(cayman_mgcg_default) / (3 * sizeof(u32)))
615
616 static const u32 cayman_mgcg_disable[] =
617 {
618         0x0000802c, 0xc0000000, 0xffffffff,
619         0x000008f8, 0x00000000, 0xffffffff,
620         0x000008fc, 0xffffffff, 0xffffffff,
621         0x000008f8, 0x00000001, 0xffffffff,
622         0x000008fc, 0xffffffff, 0xffffffff,
623         0x000008f8, 0x00000002, 0xffffffff,
624         0x000008fc, 0xffffffff, 0xffffffff,
625         0x000008f8, 0x00000003, 0xffffffff,
626         0x000008fc, 0xffffffff, 0xffffffff,
627         0x00009150, 0x00600000, 0xffffffff
628 };
629 #define CAYMAN_MGCG_DISABLE_LENGTH   (sizeof(cayman_mgcg_disable) / (3 * sizeof(u32)))
630
631 static const u32 cayman_mgcg_enable[] =
632 {
633         0x0000802c, 0xc0000000, 0xffffffff,
634         0x000008f8, 0x00000000, 0xffffffff,
635         0x000008fc, 0x00000000, 0xffffffff,
636         0x000008f8, 0x00000001, 0xffffffff,
637         0x000008fc, 0x00000000, 0xffffffff,
638         0x000008f8, 0x00000002, 0xffffffff,
639         0x000008fc, 0x00600000, 0xffffffff,
640         0x000008f8, 0x00000003, 0xffffffff,
641         0x000008fc, 0x00000000, 0xffffffff,
642         0x00009150, 0x96944200, 0xffffffff
643 };
644
645 #define CAYMAN_MGCG_ENABLE_LENGTH   (sizeof(cayman_mgcg_enable) / (3 * sizeof(u32)))
646
647 #define NISLANDS_SYSLS_SEQUENCE  100
648
649 static const u32 cayman_sysls_default[] =
650 {
651         /* Register,   Value,     Mask bits */
652         0x000055e8, 0x00000000, 0xffffffff,
653         0x0000d0bc, 0x00000000, 0xffffffff,
654         0x0000d8bc, 0x00000000, 0xffffffff,
655         0x000015c0, 0x000c1401, 0xffffffff,
656         0x0000264c, 0x000c0400, 0xffffffff,
657         0x00002648, 0x000c0400, 0xffffffff,
658         0x00002650, 0x000c0400, 0xffffffff,
659         0x000020b8, 0x000c0400, 0xffffffff,
660         0x000020bc, 0x000c0400, 0xffffffff,
661         0x000020c0, 0x000c0c80, 0xffffffff,
662         0x0000f4a0, 0x000000c0, 0xffffffff,
663         0x0000f4a4, 0x00680fff, 0xffffffff,
664         0x00002f50, 0x00000404, 0xffffffff,
665         0x000004c8, 0x00000001, 0xffffffff,
666         0x000064ec, 0x00000000, 0xffffffff,
667         0x00000c7c, 0x00000000, 0xffffffff,
668         0x00008dfc, 0x00000000, 0xffffffff
669 };
670 #define CAYMAN_SYSLS_DEFAULT_LENGTH (sizeof(cayman_sysls_default) / (3 * sizeof(u32)))
671
672 static const u32 cayman_sysls_disable[] =
673 {
674         /* Register,   Value,     Mask bits */
675         0x0000d0c0, 0x00000000, 0xffffffff,
676         0x0000d8c0, 0x00000000, 0xffffffff,
677         0x000055e8, 0x00000000, 0xffffffff,
678         0x0000d0bc, 0x00000000, 0xffffffff,
679         0x0000d8bc, 0x00000000, 0xffffffff,
680         0x000015c0, 0x00041401, 0xffffffff,
681         0x0000264c, 0x00040400, 0xffffffff,
682         0x00002648, 0x00040400, 0xffffffff,
683         0x00002650, 0x00040400, 0xffffffff,
684         0x000020b8, 0x00040400, 0xffffffff,
685         0x000020bc, 0x00040400, 0xffffffff,
686         0x000020c0, 0x00040c80, 0xffffffff,
687         0x0000f4a0, 0x000000c0, 0xffffffff,
688         0x0000f4a4, 0x00680000, 0xffffffff,
689         0x00002f50, 0x00000404, 0xffffffff,
690         0x000004c8, 0x00000001, 0xffffffff,
691         0x000064ec, 0x00007ffd, 0xffffffff,
692         0x00000c7c, 0x0000ff00, 0xffffffff,
693         0x00008dfc, 0x0000007f, 0xffffffff
694 };
695 #define CAYMAN_SYSLS_DISABLE_LENGTH (sizeof(cayman_sysls_disable) / (3 * sizeof(u32)))
696
697 static const u32 cayman_sysls_enable[] =
698 {
699         /* Register,   Value,     Mask bits */
700         0x000055e8, 0x00000001, 0xffffffff,
701         0x0000d0bc, 0x00000100, 0xffffffff,
702         0x0000d8bc, 0x00000100, 0xffffffff,
703         0x000015c0, 0x000c1401, 0xffffffff,
704         0x0000264c, 0x000c0400, 0xffffffff,
705         0x00002648, 0x000c0400, 0xffffffff,
706         0x00002650, 0x000c0400, 0xffffffff,
707         0x000020b8, 0x000c0400, 0xffffffff,
708         0x000020bc, 0x000c0400, 0xffffffff,
709         0x000020c0, 0x000c0c80, 0xffffffff,
710         0x0000f4a0, 0x000000c0, 0xffffffff,
711         0x0000f4a4, 0x00680fff, 0xffffffff,
712         0x00002f50, 0x00000903, 0xffffffff,
713         0x000004c8, 0x00000000, 0xffffffff,
714         0x000064ec, 0x00000000, 0xffffffff,
715         0x00000c7c, 0x00000000, 0xffffffff,
716         0x00008dfc, 0x00000000, 0xffffffff
717 };
718 #define CAYMAN_SYSLS_ENABLE_LENGTH (sizeof(cayman_sysls_enable) / (3 * sizeof(u32)))
719
720 struct rv7xx_power_info *rv770_get_pi(struct radeon_device *rdev);
721 struct evergreen_power_info *evergreen_get_pi(struct radeon_device *rdev);
722
723 struct ni_power_info *ni_get_pi(struct radeon_device *rdev)
724 {
725         struct ni_power_info *pi = rdev->pm.dpm.priv;
726
727         return pi;
728 }
729
730 struct ni_ps *ni_get_ps(struct radeon_ps *rps)
731 {
732         struct ni_ps *ps = rps->ps_priv;
733
734         return ps;
735 }
736
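/*
 * Leakage model: I_leak * kt * kv * Vddc, with kt = at * exp(bt * T)
 * and kv = av * exp(bv * V), evaluated in drm 32.32 fixed point.
 * Coefficients and inputs are passed scaled by 1000.
 */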
737 static void ni_calculate_leakage_for_v_and_t_formula(const struct ni_leakage_coeffients *coeff,
738                                                      u16 v, s32 t,
739                                                      u32 ileakage,
740                                                      u32 *leakage)
741 {
742         s64 kt, kv, leakage_w, i_leakage, vddc, temperature;
743
744         i_leakage = div64_s64(drm_int2fixp(ileakage), 1000);
745         vddc = div64_s64(drm_int2fixp(v), 1000);
746         temperature = div64_s64(drm_int2fixp(t), 1000);
747
748         kt = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->at), 1000),
749                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bt), 1000), temperature)));
750         kv = drm_fixp_mul(div64_s64(drm_int2fixp(coeff->av), 1000),
751                           drm_fixp_exp(drm_fixp_mul(div64_s64(drm_int2fixp(coeff->bv), 1000), vddc)));
752
753         leakage_w = drm_fixp_mul(drm_fixp_mul(drm_fixp_mul(i_leakage, kt), kv), vddc);
754
755         *leakage = drm_fixp2int(leakage_w * 1000);
756 }
757
758 static void ni_calculate_leakage_for_v_and_t(struct radeon_device *rdev,
759                                              const struct ni_leakage_coeffients *coeff,
760                                              u16 v,
761                                              s32 t,
762                                              u32 i_leakage,
763                                              u32 *leakage)
764 {
765         ni_calculate_leakage_for_v_and_t_formula(coeff, v, t, i_leakage, leakage);
766 }
767
768 bool ni_dpm_vblank_too_short(struct radeon_device *rdev)
769 {
770         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
771         u32 vblank_time = r600_dpm_get_vblank_time(rdev);
772         u32 switch_limit = pi->mem_gddr5 ? 450 : 300;
773
774         if (vblank_time < switch_limit)
775                 return true;
776         else
777                 return false;
778
779 }
780
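/*
 * Clamp the requested state to the AC or DC limits, force a single
 * mclk when display timing makes mclk switching unsafe, and keep
 * clocks and voltages non-decreasing across performance levels.
 */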
781 static void ni_apply_state_adjust_rules(struct radeon_device *rdev,
782                                         struct radeon_ps *rps)
783 {
784         struct ni_ps *ps = ni_get_ps(rps);
785         struct radeon_clock_and_voltage_limits *max_limits;
786         bool disable_mclk_switching;
787         u32 mclk, sclk;
788         u16 vddc, vddci;
789         int i;
790
791         if ((rdev->pm.dpm.new_active_crtc_count > 1) ||
792             ni_dpm_vblank_too_short(rdev))
793                 disable_mclk_switching = true;
794         else
795                 disable_mclk_switching = false;
796
797         if (rdev->pm.dpm.ac_power)
798                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac;
799         else
800                 max_limits = &rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc;
801
802         if (!rdev->pm.dpm.ac_power) {
803                 for (i = 0; i < ps->performance_level_count; i++) {
804                         if (ps->performance_levels[i].mclk > max_limits->mclk)
805                                 ps->performance_levels[i].mclk = max_limits->mclk;
806                         if (ps->performance_levels[i].sclk > max_limits->sclk)
807                                 ps->performance_levels[i].sclk = max_limits->sclk;
808                         if (ps->performance_levels[i].vddc > max_limits->vddc)
809                                 ps->performance_levels[i].vddc = max_limits->vddc;
810                         if (ps->performance_levels[i].vddci > max_limits->vddci)
811                                 ps->performance_levels[i].vddci = max_limits->vddci;
812                 }
813         }
814
815         /* XXX validate the min clocks required for display */
816
817         if (disable_mclk_switching) {
818                 mclk  = ps->performance_levels[ps->performance_level_count - 1].mclk;
819                 sclk = ps->performance_levels[0].sclk;
820                 vddc = ps->performance_levels[0].vddc;
821                 vddci = ps->performance_levels[ps->performance_level_count - 1].vddci;
822         } else {
823                 sclk = ps->performance_levels[0].sclk;
824                 mclk = ps->performance_levels[0].mclk;
825                 vddc = ps->performance_levels[0].vddc;
826                 vddci = ps->performance_levels[0].vddci;
827         }
828
829         /* adjusted low state */
830         ps->performance_levels[0].sclk = sclk;
831         ps->performance_levels[0].mclk = mclk;
832         ps->performance_levels[0].vddc = vddc;
833         ps->performance_levels[0].vddci = vddci;
834
835         btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
836                                   &ps->performance_levels[0].sclk,
837                                   &ps->performance_levels[0].mclk);
838
839         for (i = 1; i < ps->performance_level_count; i++) {
840                 if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)
841                         ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;
842                 if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc)
843                         ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc;
844         }
845
846         if (disable_mclk_switching) {
847                 mclk = ps->performance_levels[0].mclk;
848                 for (i = 1; i < ps->performance_level_count; i++) {
849                         if (mclk < ps->performance_levels[i].mclk)
850                                 mclk = ps->performance_levels[i].mclk;
851                 }
852                 for (i = 0; i < ps->performance_level_count; i++) {
853                         ps->performance_levels[i].mclk = mclk;
854                         ps->performance_levels[i].vddci = vddci;
855                 }
856         } else {
857                 for (i = 1; i < ps->performance_level_count; i++) {
858                         if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk)
859                                 ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk;
860                         if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci)
861                                 ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci;
862                 }
863         }
864
865         for (i = 1; i < ps->performance_level_count; i++)
866                 btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,
867                                           &ps->performance_levels[i].sclk,
868                                           &ps->performance_levels[i].mclk);
869
870         for (i = 0; i < ps->performance_level_count; i++)
871                 btc_adjust_clock_combinations(rdev, max_limits,
872                                               &ps->performance_levels[i]);
873
874         for (i = 0; i < ps->performance_level_count; i++) {
875                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk,
876                                                    ps->performance_levels[i].sclk,
877                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
878                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddci_dependency_on_mclk,
879                                                    ps->performance_levels[i].mclk,
880                                                    max_limits->vddci, &ps->performance_levels[i].vddci);
881                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk,
882                                                    ps->performance_levels[i].mclk,
883                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
884                 btc_apply_voltage_dependency_rules(&rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk,
885                                                    rdev->clock.current_dispclk,
886                                                    max_limits->vddc,  &ps->performance_levels[i].vddc);
887         }
888
889         for (i = 0; i < ps->performance_level_count; i++) {
890                 btc_apply_voltage_delta_rules(rdev,
891                                               max_limits->vddc, max_limits->vddci,
892                                               &ps->performance_levels[i].vddc,
893                                               &ps->performance_levels[i].vddci);
894         }
895
896         ps->dc_compatible = true;
897         for (i = 0; i < ps->performance_level_count; i++) {
898                 if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)
899                         ps->dc_compatible = false;
900
901                 if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2)
902                         ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2;
903         }
904 }
905
906 static void ni_cg_clockgating_default(struct radeon_device *rdev)
907 {
908         u32 count;
909         const u32 *ps = NULL;
910
911         ps = (const u32 *)&cayman_cgcg_cgls_default;
912         count = CAYMAN_CGCG_CGLS_DEFAULT_LENGTH;
913
914         btc_program_mgcg_hw_sequence(rdev, ps, count);
915 }
916
917 static void ni_gfx_clockgating_enable(struct radeon_device *rdev,
918                                       bool enable)
919 {
920         u32 count;
921         const u32 *ps = NULL;
922
923         if (enable) {
924                 ps = (const u32 *)&cayman_cgcg_cgls_enable;
925                 count = CAYMAN_CGCG_CGLS_ENABLE_LENGTH;
926         } else {
927                 ps = (const u32 *)&cayman_cgcg_cgls_disable;
928                 count = CAYMAN_CGCG_CGLS_DISABLE_LENGTH;
929         }
930
931         btc_program_mgcg_hw_sequence(rdev, ps, count);
932 }
933
934 static void ni_mg_clockgating_default(struct radeon_device *rdev)
935 {
936         u32 count;
937         const u32 *ps = NULL;
938
939         ps = (const u32 *)&cayman_mgcg_default;
940         count = CAYMAN_MGCG_DEFAULT_LENGTH;
941
942         btc_program_mgcg_hw_sequence(rdev, ps, count);
943 }
944
945 static void ni_mg_clockgating_enable(struct radeon_device *rdev,
946                                      bool enable)
947 {
948         u32 count;
949         const u32 *ps = NULL;
950
951         if (enable) {
952                 ps = (const u32 *)&cayman_mgcg_enable;
953                 count = CAYMAN_MGCG_ENABLE_LENGTH;
954         } else {
955                 ps = (const u32 *)&cayman_mgcg_disable;
956                 count = CAYMAN_MGCG_DISABLE_LENGTH;
957         }
958
959         btc_program_mgcg_hw_sequence(rdev, ps, count);
960 }
961
962 static void ni_ls_clockgating_default(struct radeon_device *rdev)
963 {
964         u32 count;
965         const u32 *ps = NULL;
966
967         ps = (const u32 *)&cayman_sysls_default;
968         count = CAYMAN_SYSLS_DEFAULT_LENGTH;
969
970         btc_program_mgcg_hw_sequence(rdev, ps, count);
971 }
972
973 static void ni_ls_clockgating_enable(struct radeon_device *rdev,
974                                      bool enable)
975 {
976         u32 count;
977         const u32 *ps = NULL;
978
979         if (enable) {
980                 ps = (const u32 *)&cayman_sysls_enable;
981                 count = CAYMAN_SYSLS_ENABLE_LENGTH;
982         } else {
983                 ps = (const u32 *)&cayman_sysls_disable;
984                 count = CAYMAN_SYSLS_DISABLE_LENGTH;
985         }
986
987         btc_program_mgcg_hw_sequence(rdev, ps, count);
988
989 }
990
991 static int ni_patch_single_dependency_table_based_on_leakage(struct radeon_device *rdev,
992                                                              struct radeon_clock_voltage_dependency_table *table)
993 {
994         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
995         u32 i;
996
997         if (table) {
998                 for (i = 0; i < table->count; i++) {
999                         if (0xff01 == table->entries[i].v) {
1000                                 if (pi->max_vddc == 0)
1001                                         return -EINVAL;
1002                                 table->entries[i].v = pi->max_vddc;
1003                         }
1004                 }
1005         }
1006         return 0;
1007 }
1008
1009 static int ni_patch_dependency_tables_based_on_leakage(struct radeon_device *rdev)
1010 {
1011         int ret;
1012         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1013                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_sclk);
1014         if (ret)
1015                 return ret;
1016         ret = ni_patch_single_dependency_table_based_on_leakage(rdev,
1017                                                                 &rdev->pm.dpm.dyn_state.vddc_dependency_on_mclk);
1018         return ret;
1019 }
1020
1021 static void ni_stop_dpm(struct radeon_device *rdev)
1022 {
1023         WREG32_P(GENERAL_PWRMGT, 0, ~GLOBAL_PWRMGT_EN);
1024 }
1025
1026 #if 0
1027 static int ni_notify_hw_of_power_source(struct radeon_device *rdev,
1028                                         bool ac_power)
1029 {
1030         if (ac_power)
1031                 return (rv770_send_msg_to_smc(rdev, PPSMC_MSG_RunningOnAC) == PPSMC_Result_OK) ?
1032                         0 : -EINVAL;
1033
1034         return 0;
1035 }
1036 #endif
1037
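/* The message argument is passed to the SMC through its scratch register. */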
1038 static PPSMC_Result ni_send_msg_to_smc_with_parameter(struct radeon_device *rdev,
1039                                                       PPSMC_Msg msg, u32 parameter)
1040 {
1041         WREG32(SMC_SCRATCH0, parameter);
1042         return rv770_send_msg_to_smc(rdev, msg);
1043 }
1044
1045 static int ni_restrict_performance_levels_before_switch(struct radeon_device *rdev)
1046 {
1047         if (rv770_send_msg_to_smc(rdev, PPSMC_MSG_NoForcedLevel) != PPSMC_Result_OK)
1048                 return -EINVAL;
1049
1050         return (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 1) == PPSMC_Result_OK) ?
1051                 0 : -EINVAL;
1052 }
1053
1054 int ni_dpm_force_performance_level(struct radeon_device *rdev,
1055                                    enum radeon_dpm_forced_level level)
1056 {
1057         struct radeon_ps *rps = rdev->pm.dpm.current_ps;
1058         struct ni_ps *ps = ni_get_ps(rps);
1059         u32 levels;
1060
1061         if (level == RADEON_DPM_FORCED_LEVEL_HIGH) {
1062                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1063                         return -EINVAL;
1064
1065                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 1) != PPSMC_Result_OK)
1066                         return -EINVAL;
1067         } else if (level == RADEON_DPM_FORCED_LEVEL_LOW) {
1068                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1069                         return -EINVAL;
1070
1071                 levels = ps->performance_level_count - 1;
1072                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, levels) != PPSMC_Result_OK)
1073                         return -EINVAL;
1074         } else if (level == RADEON_DPM_FORCED_LEVEL_AUTO) {
1075                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetForcedLevels, 0) != PPSMC_Result_OK)
1076                         return -EINVAL;
1077
1078                 if (ni_send_msg_to_smc_with_parameter(rdev, PPSMC_MSG_SetEnabledLevels, 0) != PPSMC_Result_OK)
1079                         return -EINVAL;
1080         }
1081
1082         rdev->pm.dpm.forced_level = level;
1083
1084         return 0;
1085 }
1086
1087 static void ni_stop_smc(struct radeon_device *rdev)
1088 {
1089         u32 tmp;
1090         int i;
1091
1092         for (i = 0; i < rdev->usec_timeout; i++) {
1093                 tmp = RREG32(LB_SYNC_RESET_SEL) & LB_SYNC_RESET_SEL_MASK;
1094                 if (tmp != 1)
1095                         break;
1096                 udelay(1);
1097         }
1098
1099         udelay(100);
1100
1101         r7xx_stop_smc(rdev);
1102 }
1103
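/*
 * Read the firmware header out of SMC SRAM and cache the offsets of
 * the state, soft-register, MC register, fan, ARB, CAC and SPLL tables.
 */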
1104 static int ni_process_firmware_header(struct radeon_device *rdev)
1105 {
1106         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1107         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1108         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1109         u32 tmp;
1110         int ret;
1111
1112         ret = rv770_read_smc_sram_dword(rdev,
1113                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1114                                         NISLANDS_SMC_FIRMWARE_HEADER_stateTable,
1115                                         &tmp, pi->sram_end);
1116
1117         if (ret)
1118                 return ret;
1119
1120         pi->state_table_start = (u16)tmp;
1121
1122         ret = rv770_read_smc_sram_dword(rdev,
1123                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1124                                         NISLANDS_SMC_FIRMWARE_HEADER_softRegisters,
1125                                         &tmp, pi->sram_end);
1126
1127         if (ret)
1128                 return ret;
1129
1130         pi->soft_regs_start = (u16)tmp;
1131
1132         ret = rv770_read_smc_sram_dword(rdev,
1133                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1134                                         NISLANDS_SMC_FIRMWARE_HEADER_mcRegisterTable,
1135                                         &tmp, pi->sram_end);
1136
1137         if (ret)
1138                 return ret;
1139
1140         eg_pi->mc_reg_table_start = (u16)tmp;
1141
1142         ret = rv770_read_smc_sram_dword(rdev,
1143                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1144                                         NISLANDS_SMC_FIRMWARE_HEADER_fanTable,
1145                                         &tmp, pi->sram_end);
1146
1147         if (ret)
1148                 return ret;
1149
1150         ni_pi->fan_table_start = (u16)tmp;
1151
1152         ret = rv770_read_smc_sram_dword(rdev,
1153                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1154                                         NISLANDS_SMC_FIRMWARE_HEADER_mcArbDramAutoRefreshTable,
1155                                         &tmp, pi->sram_end);
1156
1157         if (ret)
1158                 return ret;
1159
1160         ni_pi->arb_table_start = (u16)tmp;
1161
1162         ret = rv770_read_smc_sram_dword(rdev,
1163                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1164                                         NISLANDS_SMC_FIRMWARE_HEADER_cacTable,
1165                                         &tmp, pi->sram_end);
1166
1167         if (ret)
1168                 return ret;
1169
1170         ni_pi->cac_table_start = (u16)tmp;
1171
1172         ret = rv770_read_smc_sram_dword(rdev,
1173                                         NISLANDS_SMC_FIRMWARE_HEADER_LOCATION +
1174                                         NISLANDS_SMC_FIRMWARE_HEADER_spllTable,
1175                                         &tmp, pi->sram_end);
1176
1177         if (ret)
1178                 return ret;
1179
1180         ni_pi->spll_table_start = (u16)tmp;
1181
1182
1183         return ret;
1184 }
1185
1186 static void ni_read_clock_registers(struct radeon_device *rdev)
1187 {
1188         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1189
1190         ni_pi->clock_registers.cg_spll_func_cntl = RREG32(CG_SPLL_FUNC_CNTL);
1191         ni_pi->clock_registers.cg_spll_func_cntl_2 = RREG32(CG_SPLL_FUNC_CNTL_2);
1192         ni_pi->clock_registers.cg_spll_func_cntl_3 = RREG32(CG_SPLL_FUNC_CNTL_3);
1193         ni_pi->clock_registers.cg_spll_func_cntl_4 = RREG32(CG_SPLL_FUNC_CNTL_4);
1194         ni_pi->clock_registers.cg_spll_spread_spectrum = RREG32(CG_SPLL_SPREAD_SPECTRUM);
1195         ni_pi->clock_registers.cg_spll_spread_spectrum_2 = RREG32(CG_SPLL_SPREAD_SPECTRUM_2);
1196         ni_pi->clock_registers.mpll_ad_func_cntl = RREG32(MPLL_AD_FUNC_CNTL);
1197         ni_pi->clock_registers.mpll_ad_func_cntl_2 = RREG32(MPLL_AD_FUNC_CNTL_2);
1198         ni_pi->clock_registers.mpll_dq_func_cntl = RREG32(MPLL_DQ_FUNC_CNTL);
1199         ni_pi->clock_registers.mpll_dq_func_cntl_2 = RREG32(MPLL_DQ_FUNC_CNTL_2);
1200         ni_pi->clock_registers.mclk_pwrmgt_cntl = RREG32(MCLK_PWRMGT_CNTL);
1201         ni_pi->clock_registers.dll_cntl = RREG32(DLL_CNTL);
1202         ni_pi->clock_registers.mpll_ss1 = RREG32(MPLL_SS1);
1203         ni_pi->clock_registers.mpll_ss2 = RREG32(MPLL_SS2);
1204 }
1205
1206 #if 0
1207 static int ni_enter_ulp_state(struct radeon_device *rdev)
1208 {
1209         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1210
1211         if (pi->gfx_clock_gating) {
1212                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~DYN_GFX_CLK_OFF_EN);
1213                 WREG32_P(SCLK_PWRMGT_CNTL, GFX_CLK_FORCE_ON, ~GFX_CLK_FORCE_ON);
1214                 WREG32_P(SCLK_PWRMGT_CNTL, 0, ~GFX_CLK_FORCE_ON);
1215                 RREG32(GB_ADDR_CONFIG);
1216         }
1217
1218         WREG32_P(SMC_MSG, HOST_SMC_MSG(PPSMC_MSG_SwitchToMinimumPower),
1219                  ~HOST_SMC_MSG_MASK);
1220
1221         udelay(25000);
1222
1223         return 0;
1224 }
1225 #endif
1226
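/*
 * Scale the voltage, backbias, ACPI and VBI response/timeout values by
 * the reference clock and program them into the SMC soft registers.
 */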
1227 static void ni_program_response_times(struct radeon_device *rdev)
1228 {
1229         u32 voltage_response_time, backbias_response_time, acpi_delay_time, vbi_time_out;
1230         u32 vddc_dly, bb_dly, acpi_dly, vbi_dly, mclk_switch_limit;
1231         u32 reference_clock;
1232
1233         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mvdd_chg_time, 1);
1234
1235         voltage_response_time = (u32)rdev->pm.dpm.voltage_response_time;
1236         backbias_response_time = (u32)rdev->pm.dpm.backbias_response_time;
1237
1238         if (voltage_response_time == 0)
1239                 voltage_response_time = 1000;
1240
1241         if (backbias_response_time == 0)
1242                 backbias_response_time = 1000;
1243
1244         acpi_delay_time = 15000;
1245         vbi_time_out = 100000;
1246
1247         reference_clock = radeon_get_xclk(rdev);
1248
1249         vddc_dly = (voltage_response_time  * reference_clock) / 1600;
1250         bb_dly   = (backbias_response_time * reference_clock) / 1600;
1251         acpi_dly = (acpi_delay_time * reference_clock) / 1600;
1252         vbi_dly  = (vbi_time_out * reference_clock) / 1600;
1253
1254         mclk_switch_limit = (460 * reference_clock) / 100;
1255
1256         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_vreg,  vddc_dly);
1257         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_bbias, bb_dly);
1258         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_delay_acpi,  acpi_dly);
1259         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_chg_timeout, vbi_dly);
1260         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mc_block_delay, 0xAA);
1261         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_mclk_switch_lim, mclk_switch_limit);
1262 }
1263
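/*
 * Copy the SMIO (GPIO) settings for each voltage table entry into the
 * SMC state table; only the low SMIO word is used here.
 */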
1264 static void ni_populate_smc_voltage_table(struct radeon_device *rdev,
1265                                           struct atom_voltage_table *voltage_table,
1266                                           NISLANDS_SMC_STATETABLE *table)
1267 {
1268         unsigned int i;
1269
1270         for (i = 0; i < voltage_table->count; i++) {
1271                 table->highSMIO[i] = 0;
1272                 table->lowSMIO[i] |= cpu_to_be32(voltage_table->entries[i].smio_low);
1273         }
1274 }
1275
1276 static void ni_populate_smc_voltage_tables(struct radeon_device *rdev,
1277                                            NISLANDS_SMC_STATETABLE *table)
1278 {
1279         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1280         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1281         unsigned char i;
1282
1283         if (eg_pi->vddc_voltage_table.count) {
1284                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddc_voltage_table, table);
1285                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] = 0;
1286                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDC] =
1287                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1288
1289                 for (i = 0; i < eg_pi->vddc_voltage_table.count; i++) {
1290                         if (pi->max_vddc_in_table <= eg_pi->vddc_voltage_table.entries[i].value) {
1291                                 table->maxVDDCIndexInPPTable = i;
1292                                 break;
1293                         }
1294                 }
1295         }
1296
1297         if (eg_pi->vddci_voltage_table.count) {
1298                 ni_populate_smc_voltage_table(rdev, &eg_pi->vddci_voltage_table, table);
1299
1300                 table->voltageMaskTable.highMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] = 0;
1301                 table->voltageMaskTable.lowMask[NISLANDS_SMC_VOLTAGEMASK_VDDCI] =
1302                         cpu_to_be32(eg_pi->vddc_voltage_table.mask_low);
1303         }
1304 }
1305
1306 static int ni_populate_voltage_value(struct radeon_device *rdev,
1307                                      struct atom_voltage_table *table,
1308                                      u16 value,
1309                                      NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1310 {
1311         unsigned int i;
1312
1313         for (i = 0; i < table->count; i++) {
1314                 if (value <= table->entries[i].value) {
1315                         voltage->index = (u8)i;
1316                         voltage->value = cpu_to_be16(table->entries[i].value);
1317                         break;
1318                 }
1319         }
1320
1321         if (i >= table->count)
1322                 return -EINVAL;
1323
1324         return 0;
1325 }
1326
1327 static void ni_populate_mvdd_value(struct radeon_device *rdev,
1328                                    u32 mclk,
1329                                    NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1330 {
1331         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1332         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1333
1334         if (!pi->mvdd_control) {
1335                 voltage->index = eg_pi->mvdd_high_index;
1336                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1337                 return;
1338         }
1339
1340         if (mclk <= pi->mvdd_split_frequency) {
1341                 voltage->index = eg_pi->mvdd_low_index;
1342                 voltage->value = cpu_to_be16(MVDD_LOW_VALUE);
1343         } else {
1344                 voltage->index = eg_pi->mvdd_high_index;
1345                 voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1346         }
1347 }
1348
1349 static int ni_get_std_voltage_value(struct radeon_device *rdev,
1350                                     NISLANDS_SMC_VOLTAGE_VALUE *voltage,
1351                                     u16 *std_voltage)
1352 {
1353         if (rdev->pm.dpm.dyn_state.cac_leakage_table.entries &&
1354             ((u32)voltage->index < rdev->pm.dpm.dyn_state.cac_leakage_table.count))
1355                 *std_voltage = rdev->pm.dpm.dyn_state.cac_leakage_table.entries[voltage->index].vddc;
1356         else
1357                 *std_voltage = be16_to_cpu(voltage->value);
1358
1359         return 0;
1360 }
1361
1362 static void ni_populate_std_voltage_value(struct radeon_device *rdev,
1363                                           u16 value, u8 index,
1364                                           NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1365 {
1366         voltage->index = index;
1367         voltage->value = cpu_to_be16(value);
1368 }
1369
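/*
 * Power values handed to the SMC are scaled by a factor derived from
 * the CAC TID count and the reference clock period; see
 * ni_scale_power_for_smc() below.
 */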
1370 static u32 ni_get_smc_power_scaling_factor(struct radeon_device *rdev)
1371 {
1372         u32 xclk_period;
1373         u32 xclk = radeon_get_xclk(rdev);
1374         u32 tmp = RREG32(CG_CAC_CTRL) & TID_CNT_MASK;
1375
1376         xclk_period = (1000000000UL / xclk);
1377         xclk_period /= 10000UL;
1378
1379         return tmp * xclk_period;
1380 }
1381
1382 static u32 ni_scale_power_for_smc(u32 power_in_watts, u32 scaling_factor)
1383 {
1384         return (power_in_watts * scaling_factor) << 2;
1385 }
1386
1387 static u32 ni_calculate_power_boost_limit(struct radeon_device *rdev,
1388                                           struct radeon_ps *radeon_state,
1389                                           u32 near_tdp_limit)
1390 {
1391         struct ni_ps *state = ni_get_ps(radeon_state);
1392         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1393         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1394         u32 power_boost_limit = 0;
1395         int ret;
1396
1397         if (ni_pi->enable_power_containment &&
1398             ni_pi->use_power_boost_limit) {
1399                 NISLANDS_SMC_VOLTAGE_VALUE vddc;
1400                 u16 std_vddc_med;
1401                 u16 std_vddc_high;
1402                 u64 tmp, n, d;
1403
1404                 if (state->performance_level_count < 3)
1405                         return 0;
1406
1407                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1408                                                 state->performance_levels[state->performance_level_count - 2].vddc,
1409                                                 &vddc);
1410                 if (ret)
1411                         return 0;
1412
1413                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_med);
1414                 if (ret)
1415                         return 0;
1416
1417                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1418                                                 state->performance_levels[state->performance_level_count - 1].vddc,
1419                                                 &vddc);
1420                 if (ret)
1421                         return 0;
1422
1423                 ret = ni_get_std_voltage_value(rdev, &vddc, &std_vddc_high);
1424                 if (ret)
1425                         return 0;
1426
1427                 n = ((u64)near_tdp_limit * ((u64)std_vddc_med * (u64)std_vddc_med) * 90);
1428                 d = ((u64)std_vddc_high * (u64)std_vddc_high * 100);
1429                 tmp = div64_u64(n, d);
1430
1431                 if (tmp >> 32)
1432                         return 0;
1433                 power_boost_limit = (u32)tmp;
1434         }
1435
1436         return power_boost_limit;
1437 }
1438
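/* Descriptive note (added): apply the user TDP adjustment (in percent,
 * bounded by tdp_od_limit) to the board TDP limit in the requested polarity
 * and shift the near-TDP limit by the same absolute delta.
 */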
1439 static int ni_calculate_adjusted_tdp_limits(struct radeon_device *rdev,
1440                                             bool adjust_polarity,
1441                                             u32 tdp_adjustment,
1442                                             u32 *tdp_limit,
1443                                             u32 *near_tdp_limit)
1444 {
1445         if (tdp_adjustment > (u32)rdev->pm.dpm.tdp_od_limit)
1446                 return -EINVAL;
1447
1448         if (adjust_polarity) {
1449                 *tdp_limit = ((100 + tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1450                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit + (*tdp_limit - rdev->pm.dpm.tdp_limit);
1451         } else {
1452                 *tdp_limit = ((100 - tdp_adjustment) * rdev->pm.dpm.tdp_limit) / 100;
1453                 *near_tdp_limit = rdev->pm.dpm.near_tdp_limit - (rdev->pm.dpm.tdp_limit - *tdp_limit);
1454         }
1455
1456         return 0;
1457 }
1458
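/* Descriptive note (added): when power containment is enabled, compute the
 * adjusted TDP, near-TDP, safe and boost power limits, scale them into SMC
 * units and write the four dpm2Params limit dwords into SMC RAM.
 */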
1459 static int ni_populate_smc_tdp_limits(struct radeon_device *rdev,
1460                                       struct radeon_ps *radeon_state)
1461 {
1462         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1463         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1464
1465         if (ni_pi->enable_power_containment) {
1466                 NISLANDS_SMC_STATETABLE *smc_table = &ni_pi->smc_statetable;
1467                 u32 scaling_factor = ni_get_smc_power_scaling_factor(rdev);
1468                 u32 tdp_limit;
1469                 u32 near_tdp_limit;
1470                 u32 power_boost_limit;
1471                 int ret;
1472
1473                 if (scaling_factor == 0)
1474                         return -EINVAL;
1475
1476                 memset(smc_table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1477
1478                 ret = ni_calculate_adjusted_tdp_limits(rdev,
1479                                                        false, /* ??? */
1480                                                        rdev->pm.dpm.tdp_adjustment,
1481                                                        &tdp_limit,
1482                                                        &near_tdp_limit);
1483                 if (ret)
1484                         return ret;
1485
1486                 power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state,
1487                                                                    near_tdp_limit);
1488
1489                 smc_table->dpm2Params.TDPLimit =
1490                         cpu_to_be32(ni_scale_power_for_smc(tdp_limit, scaling_factor));
1491                 smc_table->dpm2Params.NearTDPLimit =
1492                         cpu_to_be32(ni_scale_power_for_smc(near_tdp_limit, scaling_factor));
1493                 smc_table->dpm2Params.SafePowerLimit =
1494                         cpu_to_be32(ni_scale_power_for_smc((near_tdp_limit * NISLANDS_DPM2_TDP_SAFE_LIMIT_PERCENT) / 100,
1495                                                            scaling_factor));
1496                 smc_table->dpm2Params.PowerBoostLimit =
1497                         cpu_to_be32(ni_scale_power_for_smc(power_boost_limit, scaling_factor));
1498
1499                 ret = rv770_copy_bytes_to_smc(rdev,
1500                                               (u16)(pi->state_table_start + offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
1501                                                     offsetof(PP_NIslands_DPM2Parameters, TDPLimit)),
1502                                               (u8 *)(&smc_table->dpm2Params.TDPLimit),
1503                                               sizeof(u32) * 4, pi->sram_end);
1504                 if (ret)
1505                         return ret;
1506         }
1507
1508         return 0;
1509 }
1510
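/* Descriptive note (added): copy the MC arbiter DRAM timing and burst-time
 * settings from one arbiter register set (F0-F3) to another, then request
 * that the memory controller switch to the destination set.
 */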
1511 int ni_copy_and_switch_arb_sets(struct radeon_device *rdev,
1512                                 u32 arb_freq_src, u32 arb_freq_dest)
1513 {
1514         u32 mc_arb_dram_timing;
1515         u32 mc_arb_dram_timing2;
1516         u32 burst_time;
1517         u32 mc_cg_config;
1518
1519         switch (arb_freq_src) {
1520         case MC_CG_ARB_FREQ_F0:
1521                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING);
1522                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1523                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE0_MASK) >> STATE0_SHIFT;
1524                 break;
1525         case MC_CG_ARB_FREQ_F1:
1526                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_1);
1527                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_1);
1528                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE1_MASK) >> STATE1_SHIFT;
1529                 break;
1530         case MC_CG_ARB_FREQ_F2:
1531                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_2);
1532                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_2);
1533                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE2_MASK) >> STATE2_SHIFT;
1534                 break;
1535         case MC_CG_ARB_FREQ_F3:
1536                 mc_arb_dram_timing  = RREG32(MC_ARB_DRAM_TIMING_3);
1537                 mc_arb_dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2_3);
1538                 burst_time = (RREG32(MC_ARB_BURST_TIME) & STATE3_MASK) >> STATE3_SHIFT;
1539                 break;
1540         default:
1541                 return -EINVAL;
1542         }
1543
1544         switch (arb_freq_dest) {
1545         case MC_CG_ARB_FREQ_F0:
1546                 WREG32(MC_ARB_DRAM_TIMING, mc_arb_dram_timing);
1547                 WREG32(MC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
1548                 WREG32_P(MC_ARB_BURST_TIME, STATE0(burst_time), ~STATE0_MASK);
1549                 break;
1550         case MC_CG_ARB_FREQ_F1:
1551                 WREG32(MC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
1552                 WREG32(MC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
1553                 WREG32_P(MC_ARB_BURST_TIME, STATE1(burst_time), ~STATE1_MASK);
1554                 break;
1555         case MC_CG_ARB_FREQ_F2:
1556                 WREG32(MC_ARB_DRAM_TIMING_2, mc_arb_dram_timing);
1557                 WREG32(MC_ARB_DRAM_TIMING2_2, mc_arb_dram_timing2);
1558                 WREG32_P(MC_ARB_BURST_TIME, STATE2(burst_time), ~STATE2_MASK);
1559                 break;
1560         case MC_CG_ARB_FREQ_F3:
1561                 WREG32(MC_ARB_DRAM_TIMING_3, mc_arb_dram_timing);
1562                 WREG32(MC_ARB_DRAM_TIMING2_3, mc_arb_dram_timing2);
1563                 WREG32_P(MC_ARB_BURST_TIME, STATE3(burst_time), ~STATE3_MASK);
1564                 break;
1565         default:
1566                 return -EINVAL;
1567         }
1568
1569         mc_cg_config = RREG32(MC_CG_CONFIG) | 0x0000000F;
1570         WREG32(MC_CG_CONFIG, mc_cg_config);
1571         WREG32_P(MC_ARB_CG, CG_ARB_REQ(arb_freq_dest), ~CG_ARB_REQ_MASK);
1572
1573         return 0;
1574 }
1575
1576 static int ni_init_arb_table_index(struct radeon_device *rdev)
1577 {
1578         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1579         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1580         u32 tmp;
1581         int ret;
1582
1583         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1584                                         &tmp, pi->sram_end);
1585         if (ret)
1586                 return ret;
1587
1588         tmp &= 0x00FFFFFF;
1589         tmp |= ((u32)MC_CG_ARB_FREQ_F1) << 24;
1590
1591         return rv770_write_smc_sram_dword(rdev, ni_pi->arb_table_start,
1592                                           tmp, pi->sram_end);
1593 }
1594
1595 static int ni_initial_switch_from_arb_f0_to_f1(struct radeon_device *rdev)
1596 {
1597         return ni_copy_and_switch_arb_sets(rdev, MC_CG_ARB_FREQ_F0, MC_CG_ARB_FREQ_F1);
1598 }
1599
1600 static int ni_force_switch_to_arb_f0(struct radeon_device *rdev)
1601 {
1602         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1603         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1604         u32 tmp;
1605         int ret;
1606
1607         ret = rv770_read_smc_sram_dword(rdev, ni_pi->arb_table_start,
1608                                         &tmp, pi->sram_end);
1609         if (ret)
1610                 return ret;
1611
1612         tmp = (tmp >> 24) & 0xff;
1613
1614         if (tmp == MC_CG_ARB_FREQ_F0)
1615                 return 0;
1616
1617         return ni_copy_and_switch_arb_sets(rdev, tmp, MC_CG_ARB_FREQ_F0);
1618 }
1619
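/* Descriptive note (added): fill one SMC arbiter register set for a
 * performance level: the memory refresh rate derived from sclk, plus the
 * MC_ARB_DRAM_TIMING values read back after programming the ATOM
 * engine/memory timings.
 */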
1620 static int ni_populate_memory_timing_parameters(struct radeon_device *rdev,
1621                                                 struct rv7xx_pl *pl,
1622                                                 SMC_NIslands_MCArbDramTimingRegisterSet *arb_regs)
1623 {
1624         u32 dram_timing;
1625         u32 dram_timing2;
1626
1627         arb_regs->mc_arb_rfsh_rate =
1628                 (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);
1629
1630
1631         radeon_atom_set_engine_dram_timings(rdev,
1632                                             pl->sclk,
1633                                             pl->mclk);
1634
1635         dram_timing = RREG32(MC_ARB_DRAM_TIMING);
1636         dram_timing2 = RREG32(MC_ARB_DRAM_TIMING2);
1637
1638         arb_regs->mc_arb_dram_timing  = cpu_to_be32(dram_timing);
1639         arb_regs->mc_arb_dram_timing2 = cpu_to_be32(dram_timing2);
1640
1641         return 0;
1642 }
1643
1644 static int ni_do_program_memory_timing_parameters(struct radeon_device *rdev,
1645                                                   struct radeon_ps *radeon_state,
1646                                                   unsigned int first_arb_set)
1647 {
1648         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1649         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1650         struct ni_ps *state = ni_get_ps(radeon_state);
1651         SMC_NIslands_MCArbDramTimingRegisterSet arb_regs = { 0 };
1652         int i, ret = 0;
1653
1654         for (i = 0; i < state->performance_level_count; i++) {
1655                 ret = ni_populate_memory_timing_parameters(rdev, &state->performance_levels[i], &arb_regs);
1656                 if (ret)
1657                         break;
1658
1659                 ret = rv770_copy_bytes_to_smc(rdev,
1660                                               (u16)(ni_pi->arb_table_start +
1661                                                     offsetof(SMC_NIslands_MCArbDramTimingRegisters, data) +
1662                                                     sizeof(SMC_NIslands_MCArbDramTimingRegisterSet) * (first_arb_set + i)),
1663                                               (u8 *)&arb_regs,
1664                                               (u16)sizeof(SMC_NIslands_MCArbDramTimingRegisterSet),
1665                                               pi->sram_end);
1666                 if (ret)
1667                         break;
1668         }
1669         return ret;
1670 }
1671
1672 static int ni_program_memory_timing_parameters(struct radeon_device *rdev,
1673                                                struct radeon_ps *radeon_new_state)
1674 {
1675         return ni_do_program_memory_timing_parameters(rdev, radeon_new_state,
1676                                                       NISLANDS_DRIVER_STATE_ARB_INDEX);
1677 }
1678
1679 static void ni_populate_initial_mvdd_value(struct radeon_device *rdev,
1680                                            struct NISLANDS_SMC_VOLTAGE_VALUE *voltage)
1681 {
1682         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1683
1684         voltage->index = eg_pi->mvdd_high_index;
1685         voltage->value = cpu_to_be16(MVDD_HIGH_VALUE);
1686 }
1687
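/* Descriptive note (added): populate level 0 of the SMC initial state from
 * the boot performance level: the saved MPLL/SPLL register values, boot
 * clocks and voltages, the initial arbiter index, PCIe gen2 flag, GDDR5
 * strobe/EDC flags, and the default DPM2 and SQ power throttle fields.
 */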
1688 static int ni_populate_smc_initial_state(struct radeon_device *rdev,
1689                                          struct radeon_ps *radeon_initial_state,
1690                                          NISLANDS_SMC_STATETABLE *table)
1691 {
1692         struct ni_ps *initial_state = ni_get_ps(radeon_initial_state);
1693         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1694         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1695         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1696         u32 reg;
1697         int ret;
1698
1699         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL =
1700                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl);
1701         table->initialState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 =
1702                 cpu_to_be32(ni_pi->clock_registers.mpll_ad_func_cntl_2);
1703         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL =
1704                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl);
1705         table->initialState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 =
1706                 cpu_to_be32(ni_pi->clock_registers.mpll_dq_func_cntl_2);
1707         table->initialState.levels[0].mclk.vMCLK_PWRMGT_CNTL =
1708                 cpu_to_be32(ni_pi->clock_registers.mclk_pwrmgt_cntl);
1709         table->initialState.levels[0].mclk.vDLL_CNTL =
1710                 cpu_to_be32(ni_pi->clock_registers.dll_cntl);
1711         table->initialState.levels[0].mclk.vMPLL_SS =
1712                 cpu_to_be32(ni_pi->clock_registers.mpll_ss1);
1713         table->initialState.levels[0].mclk.vMPLL_SS2 =
1714                 cpu_to_be32(ni_pi->clock_registers.mpll_ss2);
1715         table->initialState.levels[0].mclk.mclk_value =
1716                 cpu_to_be32(initial_state->performance_levels[0].mclk);
1717
1718         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =
1719                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl);
1720         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =
1721                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_2);
1722         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =
1723                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_3);
1724         table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 =
1725                 cpu_to_be32(ni_pi->clock_registers.cg_spll_func_cntl_4);
1726         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =
1727                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum);
1728         table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM_2 =
1729                 cpu_to_be32(ni_pi->clock_registers.cg_spll_spread_spectrum_2);
1730         table->initialState.levels[0].sclk.sclk_value =
1731                 cpu_to_be32(initial_state->performance_levels[0].sclk);
1732         table->initialState.levels[0].arbRefreshState =
1733                 NISLANDS_INITIAL_STATE_ARB_INDEX;
1734
1735         table->initialState.levels[0].ACIndex = 0;
1736
1737         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
1738                                         initial_state->performance_levels[0].vddc,
1739                                         &table->initialState.levels[0].vddc);
1740         if (!ret) {
1741                 u16 std_vddc;
1742
1743                 ret = ni_get_std_voltage_value(rdev,
1744                                                &table->initialState.levels[0].vddc,
1745                                                &std_vddc);
1746                 if (!ret)
1747                         ni_populate_std_voltage_value(rdev, std_vddc,
1748                                                       table->initialState.levels[0].vddc.index,
1749                                                       &table->initialState.levels[0].std_vddc);
1750         }
1751
1752         if (eg_pi->vddci_control)
1753                 ni_populate_voltage_value(rdev,
1754                                           &eg_pi->vddci_voltage_table,
1755                                           initial_state->performance_levels[0].vddci,
1756                                           &table->initialState.levels[0].vddci);
1757
1758         ni_populate_initial_mvdd_value(rdev, &table->initialState.levels[0].mvdd);
1759
1760         reg = CG_R(0xffff) | CG_L(0);
1761         table->initialState.levels[0].aT = cpu_to_be32(reg);
1762
1763         table->initialState.levels[0].bSP = cpu_to_be32(pi->dsp);
1764
1765         if (pi->boot_in_gen2)
1766                 table->initialState.levels[0].gen2PCIE = 1;
1767         else
1768                 table->initialState.levels[0].gen2PCIE = 0;
1769
1770         if (pi->mem_gddr5) {
1771                 table->initialState.levels[0].strobeMode =
1772                         cypress_get_strobe_mode_settings(rdev,
1773                                                          initial_state->performance_levels[0].mclk);
1774
1775                 if (initial_state->performance_levels[0].mclk > pi->mclk_edc_enable_threshold)
1776                         table->initialState.levels[0].mcFlags = NISLANDS_SMC_MC_EDC_RD_FLAG | NISLANDS_SMC_MC_EDC_WR_FLAG;
1777                 else
1778                         table->initialState.levels[0].mcFlags =  0;
1779         }
1780
1781         table->initialState.levelCount = 1;
1782
1783         table->initialState.flags |= PPSMC_SWSTATE_FLAG_DC;
1784
1785         table->initialState.levels[0].dpm2.MaxPS = 0;
1786         table->initialState.levels[0].dpm2.NearTDPDec = 0;
1787         table->initialState.levels[0].dpm2.AboveSafeInc = 0;
1788         table->initialState.levels[0].dpm2.BelowSafeInc = 0;
1789
1790         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1791         table->initialState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1792
1793         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1794         table->initialState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1795
1796         return 0;
1797 }
1798
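/* Descriptive note (added): derive the ACPI state from the initial state:
 * ACPI (or minimum) VDDC, gen2 PCIe only if permitted for ACPI, MPLL
 * reset/bypass bits set, MRDCK PDNB bits cleared and bypass enabled, the
 * sclk mux forced, and zero mclk/sclk values.
 */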
1799 static int ni_populate_smc_acpi_state(struct radeon_device *rdev,
1800                                       NISLANDS_SMC_STATETABLE *table)
1801 {
1802         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1803         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
1804         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1805         u32 mpll_ad_func_cntl   = ni_pi->clock_registers.mpll_ad_func_cntl;
1806         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
1807         u32 mpll_dq_func_cntl   = ni_pi->clock_registers.mpll_dq_func_cntl;
1808         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
1809         u32 spll_func_cntl      = ni_pi->clock_registers.cg_spll_func_cntl;
1810         u32 spll_func_cntl_2    = ni_pi->clock_registers.cg_spll_func_cntl_2;
1811         u32 spll_func_cntl_3    = ni_pi->clock_registers.cg_spll_func_cntl_3;
1812         u32 spll_func_cntl_4    = ni_pi->clock_registers.cg_spll_func_cntl_4;
1813         u32 mclk_pwrmgt_cntl    = ni_pi->clock_registers.mclk_pwrmgt_cntl;
1814         u32 dll_cntl            = ni_pi->clock_registers.dll_cntl;
1815         u32 reg;
1816         int ret;
1817
1818         table->ACPIState = table->initialState;
1819
1820         table->ACPIState.flags &= ~PPSMC_SWSTATE_FLAG_DC;
1821
1822         if (pi->acpi_vddc) {
1823                 ret = ni_populate_voltage_value(rdev,
1824                                                 &eg_pi->vddc_voltage_table,
1825                                                 pi->acpi_vddc, &table->ACPIState.levels[0].vddc);
1826                 if (!ret) {
1827                         u16 std_vddc;
1828
1829                         ret = ni_get_std_voltage_value(rdev,
1830                                                        &table->ACPIState.levels[0].vddc, &std_vddc);
1831                         if (!ret)
1832                                 ni_populate_std_voltage_value(rdev, std_vddc,
1833                                                               table->ACPIState.levels[0].vddc.index,
1834                                                               &table->ACPIState.levels[0].std_vddc);
1835                 }
1836
1837                 if (pi->pcie_gen2) {
1838                         if (pi->acpi_pcie_gen2)
1839                                 table->ACPIState.levels[0].gen2PCIE = 1;
1840                         else
1841                                 table->ACPIState.levels[0].gen2PCIE = 0;
1842                 } else {
1843                         table->ACPIState.levels[0].gen2PCIE = 0;
1844                 }
1845         } else {
1846                 ret = ni_populate_voltage_value(rdev,
1847                                                 &eg_pi->vddc_voltage_table,
1848                                                 pi->min_vddc_in_table,
1849                                                 &table->ACPIState.levels[0].vddc);
1850                 if (!ret) {
1851                         u16 std_vddc;
1852
1853                         ret = ni_get_std_voltage_value(rdev,
1854                                                        &table->ACPIState.levels[0].vddc,
1855                                                        &std_vddc);
1856                         if (!ret)
1857                                 ni_populate_std_voltage_value(rdev, std_vddc,
1858                                                               table->ACPIState.levels[0].vddc.index,
1859                                                               &table->ACPIState.levels[0].std_vddc);
1860                 }
1861                 table->ACPIState.levels[0].gen2PCIE = 0;
1862         }
1863
1864         if (eg_pi->acpi_vddci) {
1865                 if (eg_pi->vddci_control)
1866                         ni_populate_voltage_value(rdev,
1867                                                   &eg_pi->vddci_voltage_table,
1868                                                   eg_pi->acpi_vddci,
1869                                                   &table->ACPIState.levels[0].vddci);
1870         }
1871
1872
1873         mpll_ad_func_cntl &= ~PDNB;
1874
1875         mpll_ad_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN;
1876
1877         if (pi->mem_gddr5)
1878                 mpll_dq_func_cntl &= ~PDNB;
1879         mpll_dq_func_cntl_2 |= BIAS_GEN_PDNB | RESET_EN | BYPASS;
1880
1881
1882         mclk_pwrmgt_cntl |= (MRDCKA0_RESET |
1883                              MRDCKA1_RESET |
1884                              MRDCKB0_RESET |
1885                              MRDCKB1_RESET |
1886                              MRDCKC0_RESET |
1887                              MRDCKC1_RESET |
1888                              MRDCKD0_RESET |
1889                              MRDCKD1_RESET);
1890
1891         mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
1892                               MRDCKA1_PDNB |
1893                               MRDCKB0_PDNB |
1894                               MRDCKB1_PDNB |
1895                               MRDCKC0_PDNB |
1896                               MRDCKC1_PDNB |
1897                               MRDCKD0_PDNB |
1898                               MRDCKD1_PDNB);
1899
1900         dll_cntl |= (MRDCKA0_BYPASS |
1901                      MRDCKA1_BYPASS |
1902                      MRDCKB0_BYPASS |
1903                      MRDCKB1_BYPASS |
1904                      MRDCKC0_BYPASS |
1905                      MRDCKC1_BYPASS |
1906                      MRDCKD0_BYPASS |
1907                      MRDCKD1_BYPASS);
1908
1909         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
1910         spll_func_cntl_2 |= SCLK_MUX_SEL(4);
1911
1912         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
1913         table->ACPIState.levels[0].mclk.vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
1914         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
1915         table->ACPIState.levels[0].mclk.vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
1916         table->ACPIState.levels[0].mclk.vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
1917         table->ACPIState.levels[0].mclk.vDLL_CNTL = cpu_to_be32(dll_cntl);
1918
1919         table->ACPIState.levels[0].mclk.mclk_value = 0;
1920
1921         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);
1922         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);
1923         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);
1924         table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(spll_func_cntl_4);
1925
1926         table->ACPIState.levels[0].sclk.sclk_value = 0;
1927
1928         ni_populate_mvdd_value(rdev, 0, &table->ACPIState.levels[0].mvdd);
1929
1930         if (eg_pi->dynamic_ac_timing)
1931                 table->ACPIState.levels[0].ACIndex = 1;
1932
1933         table->ACPIState.levels[0].dpm2.MaxPS = 0;
1934         table->ACPIState.levels[0].dpm2.NearTDPDec = 0;
1935         table->ACPIState.levels[0].dpm2.AboveSafeInc = 0;
1936         table->ACPIState.levels[0].dpm2.BelowSafeInc = 0;
1937
1938         reg = MIN_POWER_MASK | MAX_POWER_MASK;
1939         table->ACPIState.levels[0].SQPowerThrottle = cpu_to_be32(reg);
1940
1941         reg = MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
1942         table->ACPIState.levels[0].SQPowerThrottle_2 = cpu_to_be32(reg);
1943
1944         return 0;
1945 }
1946
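/* Descriptive note (added): build the complete SMC state table: voltage
 * tables, thermal protection type, platform flags, the initial, ACPI, driver
 * and ULV states, and the initial-state arbiter timings, then upload it to
 * SMC RAM.
 */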
1947 static int ni_init_smc_table(struct radeon_device *rdev)
1948 {
1949         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
1950         struct ni_power_info *ni_pi = ni_get_pi(rdev);
1951         int ret;
1952         struct radeon_ps *radeon_boot_state = rdev->pm.dpm.boot_ps;
1953         NISLANDS_SMC_STATETABLE *table = &ni_pi->smc_statetable;
1954
1955         memset(table, 0, sizeof(NISLANDS_SMC_STATETABLE));
1956
1957         ni_populate_smc_voltage_tables(rdev, table);
1958
1959         switch (rdev->pm.int_thermal_type) {
1960         case THERMAL_TYPE_NI:
1961         case THERMAL_TYPE_EMC2103_WITH_INTERNAL:
1962                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_INTERNAL;
1963                 break;
1964         case THERMAL_TYPE_NONE:
1965                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_NONE;
1966                 break;
1967         default:
1968                 table->thermalProtectType = PPSMC_THERMAL_PROTECT_TYPE_EXTERNAL;
1969                 break;
1970         }
1971
1972         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_HARDWAREDC)
1973                 table->systemFlags |= PPSMC_SYSTEMFLAG_GPIO_DC;
1974
1975         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_REGULATOR_HOT)
1976                 table->systemFlags |= PPSMC_SYSTEMFLAG_REGULATOR_HOT;
1977
1978         if (rdev->pm.dpm.platform_caps & ATOM_PP_PLATFORM_CAP_STEPVDDC)
1979                 table->systemFlags |= PPSMC_SYSTEMFLAG_STEPVDDC;
1980
1981         if (pi->mem_gddr5)
1982                 table->systemFlags |= PPSMC_SYSTEMFLAG_GDDR5;
1983
1984         ret = ni_populate_smc_initial_state(rdev, radeon_boot_state, table);
1985         if (ret)
1986                 return ret;
1987
1988         ret = ni_populate_smc_acpi_state(rdev, table);
1989         if (ret)
1990                 return ret;
1991
1992         table->driverState = table->initialState;
1993
1994         table->ULVState = table->initialState;
1995
1996         ret = ni_do_program_memory_timing_parameters(rdev, radeon_boot_state,
1997                                                      NISLANDS_INITIAL_STATE_ARB_INDEX);
1998         if (ret)
1999                 return ret;
2000
2001         return rv770_copy_bytes_to_smc(rdev, pi->state_table_start, (u8 *)table,
2002                                        sizeof(NISLANDS_SMC_STATETABLE), pi->sram_end);
2003 }
2004
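/* Descriptive note (added): work out the SPLL settings for a target engine
 * clock: reference and post dividers from ATOM, the fractional feedback
 * divider, and optional engine spread-spectrum (CLK_S/CLK_V) values.
 */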
2005 static int ni_calculate_sclk_params(struct radeon_device *rdev,
2006                                     u32 engine_clock,
2007                                     NISLANDS_SMC_SCLK_VALUE *sclk)
2008 {
2009         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2010         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2011         struct atom_clock_dividers dividers;
2012         u32 spll_func_cntl = ni_pi->clock_registers.cg_spll_func_cntl;
2013         u32 spll_func_cntl_2 = ni_pi->clock_registers.cg_spll_func_cntl_2;
2014         u32 spll_func_cntl_3 = ni_pi->clock_registers.cg_spll_func_cntl_3;
2015         u32 spll_func_cntl_4 = ni_pi->clock_registers.cg_spll_func_cntl_4;
2016         u32 cg_spll_spread_spectrum = ni_pi->clock_registers.cg_spll_spread_spectrum;
2017         u32 cg_spll_spread_spectrum_2 = ni_pi->clock_registers.cg_spll_spread_spectrum_2;
2018         u64 tmp;
2019         u32 reference_clock = rdev->clock.spll.reference_freq;
2020         u32 reference_divider;
2021         u32 fbdiv;
2022         int ret;
2023
2024         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
2025                                              engine_clock, false, &dividers);
2026         if (ret)
2027                 return ret;
2028
2029         reference_divider = 1 + dividers.ref_div;
2030
2031
2032         tmp = (u64) engine_clock * reference_divider * dividers.post_div * 16384;
2033         do_div(tmp, reference_clock);
2034         fbdiv = (u32) tmp;
2035
2036         spll_func_cntl &= ~(SPLL_PDIV_A_MASK | SPLL_REF_DIV_MASK);
2037         spll_func_cntl |= SPLL_REF_DIV(dividers.ref_div);
2038         spll_func_cntl |= SPLL_PDIV_A(dividers.post_div);
2039
2040         spll_func_cntl_2 &= ~SCLK_MUX_SEL_MASK;
2041         spll_func_cntl_2 |= SCLK_MUX_SEL(2);
2042
2043         spll_func_cntl_3 &= ~SPLL_FB_DIV_MASK;
2044         spll_func_cntl_3 |= SPLL_FB_DIV(fbdiv);
2045         spll_func_cntl_3 |= SPLL_DITHEN;
2046
2047         if (pi->sclk_ss) {
2048                 struct radeon_atom_ss ss;
2049                 u32 vco_freq = engine_clock * dividers.post_div;
2050
2051                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2052                                                      ASIC_INTERNAL_ENGINE_SS, vco_freq)) {
2053                         u32 clk_s = reference_clock * 5 / (reference_divider * ss.rate);
2054                         u32 clk_v = 4 * ss.percentage * fbdiv / (clk_s * 10000);
2055
2056                         cg_spll_spread_spectrum &= ~CLK_S_MASK;
2057                         cg_spll_spread_spectrum |= CLK_S(clk_s);
2058                         cg_spll_spread_spectrum |= SSEN;
2059
2060                         cg_spll_spread_spectrum_2 &= ~CLK_V_MASK;
2061                         cg_spll_spread_spectrum_2 |= CLK_V(clk_v);
2062                 }
2063         }
2064
2065         sclk->sclk_value = engine_clock;
2066         sclk->vCG_SPLL_FUNC_CNTL = spll_func_cntl;
2067         sclk->vCG_SPLL_FUNC_CNTL_2 = spll_func_cntl_2;
2068         sclk->vCG_SPLL_FUNC_CNTL_3 = spll_func_cntl_3;
2069         sclk->vCG_SPLL_FUNC_CNTL_4 = spll_func_cntl_4;
2070         sclk->vCG_SPLL_SPREAD_SPECTRUM = cg_spll_spread_spectrum;
2071         sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cg_spll_spread_spectrum_2;
2072
2073         return 0;
2074 }
2075
2076 static int ni_populate_sclk_value(struct radeon_device *rdev,
2077                                   u32 engine_clock,
2078                                   NISLANDS_SMC_SCLK_VALUE *sclk)
2079 {
2080         NISLANDS_SMC_SCLK_VALUE sclk_tmp;
2081         int ret;
2082
2083         ret = ni_calculate_sclk_params(rdev, engine_clock, &sclk_tmp);
2084         if (!ret) {
2085                 sclk->sclk_value = cpu_to_be32(sclk_tmp.sclk_value);
2086                 sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL);
2087                 sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_2);
2088                 sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_3);
2089                 sclk->vCG_SPLL_FUNC_CNTL_4 = cpu_to_be32(sclk_tmp.vCG_SPLL_FUNC_CNTL_4);
2090                 sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM);
2091                 sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(sclk_tmp.vCG_SPLL_SPREAD_SPECTRUM_2);
2092         }
2093
2094         return ret;
2095 }
2096
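/* Descriptive note (added): build the 256-entry SPLL divider table used by
 * the SMC, stepping the engine clock by 512 units per entry, and copy it
 * into SMC RAM.
 */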
2097 static int ni_init_smc_spll_table(struct radeon_device *rdev)
2098 {
2099         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2100         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2101         SMC_NISLANDS_SPLL_DIV_TABLE *spll_table;
2102         NISLANDS_SMC_SCLK_VALUE sclk_params;
2103         u32 fb_div;
2104         u32 p_div;
2105         u32 clk_s;
2106         u32 clk_v;
2107         u32 sclk = 0;
2108         int i, ret;
2109         u32 tmp;
2110
2111         if (ni_pi->spll_table_start == 0)
2112                 return -EINVAL;
2113
2114         spll_table = kzalloc(sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), GFP_KERNEL);
2115         if (spll_table == NULL)
2116                 return -ENOMEM;
2117
2118         for (i = 0; i < 256; i++) {
2119                 ret = ni_calculate_sclk_params(rdev, sclk, &sclk_params);
2120                 if (ret)
2121                         break;
2122
2123                 p_div = (sclk_params.vCG_SPLL_FUNC_CNTL & SPLL_PDIV_A_MASK) >> SPLL_PDIV_A_SHIFT;
2124                 fb_div = (sclk_params.vCG_SPLL_FUNC_CNTL_3 & SPLL_FB_DIV_MASK) >> SPLL_FB_DIV_SHIFT;
2125                 clk_s = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM & CLK_S_MASK) >> CLK_S_SHIFT;
2126                 clk_v = (sclk_params.vCG_SPLL_SPREAD_SPECTRUM_2 & CLK_V_MASK) >> CLK_V_SHIFT;
2127
2128                 fb_div &= ~0x00001FFF;
2129                 fb_div >>= 1;
2130                 clk_v >>= 6;
2131
2132                 if (p_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT))
2133                         ret = -EINVAL;
2134
2135                 if (clk_s & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT))
2136                         ret = -EINVAL;
2137
2138                 if (fb_div & ~(SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT))
2139                         ret = -EINVAL;
2140
2141                 if (clk_v & ~(SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK >> SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT))
2142                         ret = -EINVAL;
2143
2144                 if (ret)
2145                         break;
2146
2147                 tmp = ((fb_div << SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_FBDIV_MASK) |
2148                         ((p_div << SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_PDIV_MASK);
2149                 spll_table->freq[i] = cpu_to_be32(tmp);
2150
2151                 tmp = ((clk_v << SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKV_MASK) |
2152                         ((clk_s << SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_SHIFT) & SMC_NISLANDS_SPLL_DIV_TABLE_CLKS_MASK);
2153                 spll_table->ss[i] = cpu_to_be32(tmp);
2154
2155                 sclk += 512;
2156         }
2157
2158         if (!ret)
2159                 ret = rv770_copy_bytes_to_smc(rdev, ni_pi->spll_table_start, (u8 *)spll_table,
2160                                               sizeof(SMC_NISLANDS_SPLL_DIV_TABLE), pi->sram_end);
2161
2162         kfree(spll_table);
2163
2164         return ret;
2165 }
2166
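/* Descriptive note (added): compute the MPLL AD/DQ divider settings, ibias,
 * optional memory spread spectrum, DLL speed and MRDCK PDNB bits for a
 * target memory clock and store them in the SMC mclk value.
 */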
2167 static int ni_populate_mclk_value(struct radeon_device *rdev,
2168                                   u32 engine_clock,
2169                                   u32 memory_clock,
2170                                   NISLANDS_SMC_MCLK_VALUE *mclk,
2171                                   bool strobe_mode,
2172                                   bool dll_state_on)
2173 {
2174         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2175         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2176         u32 mpll_ad_func_cntl = ni_pi->clock_registers.mpll_ad_func_cntl;
2177         u32 mpll_ad_func_cntl_2 = ni_pi->clock_registers.mpll_ad_func_cntl_2;
2178         u32 mpll_dq_func_cntl = ni_pi->clock_registers.mpll_dq_func_cntl;
2179         u32 mpll_dq_func_cntl_2 = ni_pi->clock_registers.mpll_dq_func_cntl_2;
2180         u32 mclk_pwrmgt_cntl = ni_pi->clock_registers.mclk_pwrmgt_cntl;
2181         u32 dll_cntl = ni_pi->clock_registers.dll_cntl;
2182         u32 mpll_ss1 = ni_pi->clock_registers.mpll_ss1;
2183         u32 mpll_ss2 = ni_pi->clock_registers.mpll_ss2;
2184         struct atom_clock_dividers dividers;
2185         u32 ibias;
2186         u32 dll_speed;
2187         int ret;
2188         u32 mc_seq_misc7;
2189
2190         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_MEMORY_PLL_PARAM,
2191                                              memory_clock, strobe_mode, &dividers);
2192         if (ret)
2193                 return ret;
2194
2195         if (!strobe_mode) {
2196                 mc_seq_misc7 = RREG32(MC_SEQ_MISC7);
2197
2198                 if (mc_seq_misc7 & 0x8000000)
2199                         dividers.post_div = 1;
2200         }
2201
2202         ibias = cypress_map_clkf_to_ibias(rdev, dividers.whole_fb_div);
2203
2204         mpll_ad_func_cntl &= ~(CLKR_MASK |
2205                                YCLK_POST_DIV_MASK |
2206                                CLKF_MASK |
2207                                CLKFRAC_MASK |
2208                                IBIAS_MASK);
2209         mpll_ad_func_cntl |= CLKR(dividers.ref_div);
2210         mpll_ad_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2211         mpll_ad_func_cntl |= CLKF(dividers.whole_fb_div);
2212         mpll_ad_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2213         mpll_ad_func_cntl |= IBIAS(ibias);
2214
2215         if (dividers.vco_mode)
2216                 mpll_ad_func_cntl_2 |= VCO_MODE;
2217         else
2218                 mpll_ad_func_cntl_2 &= ~VCO_MODE;
2219
2220         if (pi->mem_gddr5) {
2221                 mpll_dq_func_cntl &= ~(CLKR_MASK |
2222                                        YCLK_POST_DIV_MASK |
2223                                        CLKF_MASK |
2224                                        CLKFRAC_MASK |
2225                                        IBIAS_MASK);
2226                 mpll_dq_func_cntl |= CLKR(dividers.ref_div);
2227                 mpll_dq_func_cntl |= YCLK_POST_DIV(dividers.post_div);
2228                 mpll_dq_func_cntl |= CLKF(dividers.whole_fb_div);
2229                 mpll_dq_func_cntl |= CLKFRAC(dividers.frac_fb_div);
2230                 mpll_dq_func_cntl |= IBIAS(ibias);
2231
2232                 if (strobe_mode)
2233                         mpll_dq_func_cntl &= ~PDNB;
2234                 else
2235                         mpll_dq_func_cntl |= PDNB;
2236
2237                 if (dividers.vco_mode)
2238                         mpll_dq_func_cntl_2 |= VCO_MODE;
2239                 else
2240                         mpll_dq_func_cntl_2 &= ~VCO_MODE;
2241         }
2242
2243         if (pi->mclk_ss) {
2244                 struct radeon_atom_ss ss;
2245                 u32 vco_freq = memory_clock * dividers.post_div;
2246
2247                 if (radeon_atombios_get_asic_ss_info(rdev, &ss,
2248                                                      ASIC_INTERNAL_MEMORY_SS, vco_freq)) {
2249                         u32 reference_clock = rdev->clock.mpll.reference_freq;
2250                         u32 decoded_ref = rv740_get_decoded_reference_divider(dividers.ref_div);
2251                         u32 clk_s = reference_clock * 5 / (decoded_ref * ss.rate);
2252                         u32 clk_v = ss.percentage *
2253                                 (0x4000 * dividers.whole_fb_div + 0x800 * dividers.frac_fb_div) / (clk_s * 625);
2254
2255                         mpll_ss1 &= ~CLKV_MASK;
2256                         mpll_ss1 |= CLKV(clk_v);
2257
2258                         mpll_ss2 &= ~CLKS_MASK;
2259                         mpll_ss2 |= CLKS(clk_s);
2260                 }
2261         }
2262
2263         dll_speed = rv740_get_dll_speed(pi->mem_gddr5,
2264                                         memory_clock);
2265
2266         mclk_pwrmgt_cntl &= ~DLL_SPEED_MASK;
2267         mclk_pwrmgt_cntl |= DLL_SPEED(dll_speed);
2268         if (dll_state_on)
2269                 mclk_pwrmgt_cntl |= (MRDCKA0_PDNB |
2270                                      MRDCKA1_PDNB |
2271                                      MRDCKB0_PDNB |
2272                                      MRDCKB1_PDNB |
2273                                      MRDCKC0_PDNB |
2274                                      MRDCKC1_PDNB |
2275                                      MRDCKD0_PDNB |
2276                                      MRDCKD1_PDNB);
2277         else
2278                 mclk_pwrmgt_cntl &= ~(MRDCKA0_PDNB |
2279                                       MRDCKA1_PDNB |
2280                                       MRDCKB0_PDNB |
2281                                       MRDCKB1_PDNB |
2282                                       MRDCKC0_PDNB |
2283                                       MRDCKC1_PDNB |
2284                                       MRDCKD0_PDNB |
2285                                       MRDCKD1_PDNB);
2286
2287
2288         mclk->mclk_value = cpu_to_be32(memory_clock);
2289         mclk->vMPLL_AD_FUNC_CNTL = cpu_to_be32(mpll_ad_func_cntl);
2290         mclk->vMPLL_AD_FUNC_CNTL_2 = cpu_to_be32(mpll_ad_func_cntl_2);
2291         mclk->vMPLL_DQ_FUNC_CNTL = cpu_to_be32(mpll_dq_func_cntl);
2292         mclk->vMPLL_DQ_FUNC_CNTL_2 = cpu_to_be32(mpll_dq_func_cntl_2);
2293         mclk->vMCLK_PWRMGT_CNTL = cpu_to_be32(mclk_pwrmgt_cntl);
2294         mclk->vDLL_CNTL = cpu_to_be32(dll_cntl);
2295         mclk->vMPLL_SS = cpu_to_be32(mpll_ss1);
2296         mclk->vMPLL_SS2 = cpu_to_be32(mpll_ss2);
2297
2298         return 0;
2299 }
2300
2301 static void ni_populate_smc_sp(struct radeon_device *rdev,
2302                                struct radeon_ps *radeon_state,
2303                                NISLANDS_SMC_SWSTATE *smc_state)
2304 {
2305         struct ni_ps *ps = ni_get_ps(radeon_state);
2306         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2307         int i;
2308
2309         for (i = 0; i < ps->performance_level_count - 1; i++)
2310                 smc_state->levels[i].bSP = cpu_to_be32(pi->dsp);
2311
2312         smc_state->levels[ps->performance_level_count - 1].bSP =
2313                 cpu_to_be32(pi->psp);
2314 }
2315
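/* Descriptive note (added): convert one driver performance level into its
 * SMC representation: PCIe gen2 flag, sclk/mclk PLL settings, stutter, EDC
 * and RTT memory flags, strobe mode and DLL state for GDDR5, and the
 * VDDC/VDDCI/MVDD voltage entries.
 */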
2316 static int ni_convert_power_level_to_smc(struct radeon_device *rdev,
2317                                          struct rv7xx_pl *pl,
2318                                          NISLANDS_SMC_HW_PERFORMANCE_LEVEL *level)
2319 {
2320         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2321         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2322         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2323         int ret;
2324         bool dll_state_on;
2325         u16 std_vddc;
2326         u32 tmp = RREG32(DC_STUTTER_CNTL);
2327
2328         level->gen2PCIE = pi->pcie_gen2 ?
2329                 ((pl->flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ? 1 : 0) : 0;
2330
2331         ret = ni_populate_sclk_value(rdev, pl->sclk, &level->sclk);
2332         if (ret)
2333                 return ret;
2334
2335         level->mcFlags =  0;
2336         if (pi->mclk_stutter_mode_threshold &&
2337             (pl->mclk <= pi->mclk_stutter_mode_threshold) &&
2338             !eg_pi->uvd_enabled &&
2339             (tmp & DC_STUTTER_ENABLE_A) &&
2340             (tmp & DC_STUTTER_ENABLE_B))
2341                 level->mcFlags |= NISLANDS_SMC_MC_STUTTER_EN;
2342
2343         if (pi->mem_gddr5) {
2344                 if (pl->mclk > pi->mclk_edc_enable_threshold)
2345                         level->mcFlags |= NISLANDS_SMC_MC_EDC_RD_FLAG;
2346                 if (pl->mclk > eg_pi->mclk_edc_wr_enable_threshold)
2347                         level->mcFlags |= NISLANDS_SMC_MC_EDC_WR_FLAG;
2348
2349                 level->strobeMode = cypress_get_strobe_mode_settings(rdev, pl->mclk);
2350
2351                 if (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) {
2352                         if (cypress_get_mclk_frequency_ratio(rdev, pl->mclk, true) >=
2353                             ((RREG32(MC_SEQ_MISC7) >> 16) & 0xf))
2354                                 dll_state_on = ((RREG32(MC_SEQ_MISC5) >> 1) & 0x1) ? true : false;
2355                         else
2356                                 dll_state_on = ((RREG32(MC_SEQ_MISC6) >> 1) & 0x1) ? true : false;
2357                 } else {
2358                         dll_state_on = false;
2359                         if (pl->mclk > ni_pi->mclk_rtt_mode_threshold)
2360                                 level->mcFlags |= NISLANDS_SMC_MC_RTT_ENABLE;
2361                 }
2362
2363                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk,
2364                                              &level->mclk,
2365                                              (level->strobeMode & NISLANDS_SMC_STROBE_ENABLE) != 0,
2366                                              dll_state_on);
2367         } else
2368                 ret = ni_populate_mclk_value(rdev, pl->sclk, pl->mclk, &level->mclk, 1, 1);
2369
2370         if (ret)
2371                 return ret;
2372
2373         ret = ni_populate_voltage_value(rdev, &eg_pi->vddc_voltage_table,
2374                                         pl->vddc, &level->vddc);
2375         if (ret)
2376                 return ret;
2377
2378         ret = ni_get_std_voltage_value(rdev, &level->vddc, &std_vddc);
2379         if (ret)
2380                 return ret;
2381
2382         ni_populate_std_voltage_value(rdev, std_vddc,
2383                                       level->vddc.index, &level->std_vddc);
2384
2385         if (eg_pi->vddci_control) {
2386                 ret = ni_populate_voltage_value(rdev, &eg_pi->vddci_voltage_table,
2387                                                 pl->vddci, &level->vddci);
2388                 if (ret)
2389                         return ret;
2390         }
2391
2392         ni_populate_mvdd_value(rdev, pl->mclk, &level->mvdd);
2393
2394         return ret;
2395 }
2396
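/* Descriptive note (added): fill in the aT (CG_R/CG_L) switch thresholds
 * between adjacent performance levels from their sclk ratios, falling back
 * to fixed offsets if r600_calculate_at() fails.
 */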
2397 static int ni_populate_smc_t(struct radeon_device *rdev,
2398                              struct radeon_ps *radeon_state,
2399                              NISLANDS_SMC_SWSTATE *smc_state)
2400 {
2401         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2402         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2403         struct ni_ps *state = ni_get_ps(radeon_state);
2404         u32 a_t;
2405         u32 t_l, t_h;
2406         u32 high_bsp;
2407         int i, ret;
2408
2409         if (state->performance_level_count >= 9)
2410                 return -EINVAL;
2411
2412         if (state->performance_level_count < 2) {
2413                 a_t = CG_R(0xffff) | CG_L(0);
2414                 smc_state->levels[0].aT = cpu_to_be32(a_t);
2415                 return 0;
2416         }
2417
2418         smc_state->levels[0].aT = cpu_to_be32(0);
2419
2420         for (i = 0; i <= state->performance_level_count - 2; i++) {
2421                 if (eg_pi->uvd_enabled)
2422                         ret = r600_calculate_at(
2423                                 1000 * (i * (eg_pi->smu_uvd_hs ? 2 : 8) + 2),
2424                                 100 * R600_AH_DFLT,
2425                                 state->performance_levels[i + 1].sclk,
2426                                 state->performance_levels[i].sclk,
2427                                 &t_l,
2428                                 &t_h);
2429                 else
2430                         ret = r600_calculate_at(
2431                                 1000 * (i + 1),
2432                                 100 * R600_AH_DFLT,
2433                                 state->performance_levels[i + 1].sclk,
2434                                 state->performance_levels[i].sclk,
2435                                 &t_l,
2436                                 &t_h);
2437
2438                 if (ret) {
2439                         t_h = (i + 1) * 1000 - 50 * R600_AH_DFLT;
2440                         t_l = (i + 1) * 1000 + 50 * R600_AH_DFLT;
2441                 }
2442
2443                 a_t = be32_to_cpu(smc_state->levels[i].aT) & ~CG_R_MASK;
2444                 a_t |= CG_R(t_l * pi->bsp / 20000);
2445                 smc_state->levels[i].aT = cpu_to_be32(a_t);
2446
2447                 high_bsp = (i == state->performance_level_count - 2) ?
2448                         pi->pbsp : pi->bsp;
2449
2450                 a_t = CG_R(0xffff) | CG_L(t_h * high_bsp / 20000);
2451                 smc_state->levels[i + 1].aT = cpu_to_be32(a_t);
2452         }
2453
2454         return 0;
2455 }
2456
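/* Descriptive note (added): program the DPM2 power containment parameters:
 * write the boost limit into SMC RAM, then for each level derive the MaxPS
 * pulse-skip value from its sclk span, apply the fixed
 * NearTDPDec/AboveSafeInc/BelowSafeInc constants, and set the POWERBOOST
 * state flag where applicable.
 */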
2457 static int ni_populate_power_containment_values(struct radeon_device *rdev,
2458                                                 struct radeon_ps *radeon_state,
2459                                                 NISLANDS_SMC_SWSTATE *smc_state)
2460 {
2461         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2462         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2463         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2464         struct ni_ps *state = ni_get_ps(radeon_state);
2465         u32 prev_sclk;
2466         u32 max_sclk;
2467         u32 min_sclk;
2468         int i, ret;
2469         u32 tdp_limit;
2470         u32 near_tdp_limit;
2471         u32 power_boost_limit;
2472         u8 max_ps_percent;
2473
2474         if (ni_pi->enable_power_containment == false)
2475                 return 0;
2476
2477         if (state->performance_level_count == 0)
2478                 return -EINVAL;
2479
2480         if (smc_state->levelCount != state->performance_level_count)
2481                 return -EINVAL;
2482
2483         ret = ni_calculate_adjusted_tdp_limits(rdev,
2484                                                false, /* ??? */
2485                                                rdev->pm.dpm.tdp_adjustment,
2486                                                &tdp_limit,
2487                                                &near_tdp_limit);
2488         if (ret)
2489                 return ret;
2490
2491         power_boost_limit = ni_calculate_power_boost_limit(rdev, radeon_state, near_tdp_limit);
2492
2493         ret = rv770_write_smc_sram_dword(rdev,
2494                                          pi->state_table_start +
2495                                          offsetof(NISLANDS_SMC_STATETABLE, dpm2Params) +
2496                                          offsetof(PP_NIslands_DPM2Parameters, PowerBoostLimit),
2497                                          ni_scale_power_for_smc(power_boost_limit, ni_get_smc_power_scaling_factor(rdev)),
2498                                          pi->sram_end);
2499         if (ret)
2500                 power_boost_limit = 0;
2501
2502         smc_state->levels[0].dpm2.MaxPS = 0;
2503         smc_state->levels[0].dpm2.NearTDPDec = 0;
2504         smc_state->levels[0].dpm2.AboveSafeInc = 0;
2505         smc_state->levels[0].dpm2.BelowSafeInc = 0;
2506         smc_state->levels[0].stateFlags |= power_boost_limit ? PPSMC_STATEFLAG_POWERBOOST : 0;
2507
2508         for (i = 1; i < state->performance_level_count; i++) {
2509                 prev_sclk = state->performance_levels[i-1].sclk;
2510                 max_sclk  = state->performance_levels[i].sclk;
2511                 max_ps_percent = (i != (state->performance_level_count - 1)) ?
2512                         NISLANDS_DPM2_MAXPS_PERCENT_M : NISLANDS_DPM2_MAXPS_PERCENT_H;
2513
2514                 if (max_sclk < prev_sclk)
2515                         return -EINVAL;
2516
2517                 if ((max_ps_percent == 0) || (prev_sclk == max_sclk) || eg_pi->uvd_enabled)
2518                         min_sclk = max_sclk;
2519                 else if (1 == i)
2520                         min_sclk = prev_sclk;
2521                 else
2522                         min_sclk = (prev_sclk * (u32)max_ps_percent) / 100;
2523
2524                 if (min_sclk < state->performance_levels[0].sclk)
2525                         min_sclk = state->performance_levels[0].sclk;
2526
2527                 if (min_sclk == 0)
2528                         return -EINVAL;
2529
2530                 smc_state->levels[i].dpm2.MaxPS =
2531                         (u8)((NISLANDS_DPM2_MAX_PULSE_SKIP * (max_sclk - min_sclk)) / max_sclk);
2532                 smc_state->levels[i].dpm2.NearTDPDec = NISLANDS_DPM2_NEAR_TDP_DEC;
2533                 smc_state->levels[i].dpm2.AboveSafeInc = NISLANDS_DPM2_ABOVE_SAFE_INC;
2534                 smc_state->levels[i].dpm2.BelowSafeInc = NISLANDS_DPM2_BELOW_SAFE_INC;
2535                 smc_state->levels[i].stateFlags |=
2536                         ((i != (state->performance_level_count - 1)) && power_boost_limit) ?
2537                         PPSMC_STATEFLAG_POWERBOOST : 0;
2538         }
2539
2540         return 0;
2541 }
2542
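/* Descriptive note (added): set the per-level SQ power throttle registers:
 * clamp to the full masks when SQ ramping is disabled or the level's sclk is
 * below the ramping threshold, otherwise program the DPM2 ramp min/max
 * power, power delta, STI size and LTI ratio values.
 */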
2543 static int ni_populate_sq_ramping_values(struct radeon_device *rdev,
2544                                          struct radeon_ps *radeon_state,
2545                                          NISLANDS_SMC_SWSTATE *smc_state)
2546 {
2547         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2548         struct ni_ps *state = ni_get_ps(radeon_state);
2549         u32 sq_power_throttle;
2550         u32 sq_power_throttle2;
2551         bool enable_sq_ramping = ni_pi->enable_sq_ramping;
2552         int i;
2553
2554         if (state->performance_level_count == 0)
2555                 return -EINVAL;
2556
2557         if (smc_state->levelCount != state->performance_level_count)
2558                 return -EINVAL;
2559
2560         if (rdev->pm.dpm.sq_ramping_threshold == 0)
2561                 return -EINVAL;
2562
2563         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER > (MAX_POWER_MASK >> MAX_POWER_SHIFT))
2564                 enable_sq_ramping = false;
2565
2566         if (NISLANDS_DPM2_SQ_RAMP_MIN_POWER > (MIN_POWER_MASK >> MIN_POWER_SHIFT))
2567                 enable_sq_ramping = false;
2568
2569         if (NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA > (MAX_POWER_DELTA_MASK >> MAX_POWER_DELTA_SHIFT))
2570                 enable_sq_ramping = false;
2571
2572         if (NISLANDS_DPM2_SQ_RAMP_STI_SIZE > (STI_SIZE_MASK >> STI_SIZE_SHIFT))
2573                 enable_sq_ramping = false;
2574
2575         if (NISLANDS_DPM2_SQ_RAMP_LTI_RATIO <= (LTI_RATIO_MASK >> LTI_RATIO_SHIFT))
2576                 enable_sq_ramping = false;
2577
2578         for (i = 0; i < state->performance_level_count; i++) {
2579                 sq_power_throttle  = 0;
2580                 sq_power_throttle2 = 0;
2581
2582                 if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&
2583                     enable_sq_ramping) {
2584                         sq_power_throttle |= MAX_POWER(NISLANDS_DPM2_SQ_RAMP_MAX_POWER);
2585                         sq_power_throttle |= MIN_POWER(NISLANDS_DPM2_SQ_RAMP_MIN_POWER);
2586                         sq_power_throttle2 |= MAX_POWER_DELTA(NISLANDS_DPM2_SQ_RAMP_MAX_POWER_DELTA);
2587                         sq_power_throttle2 |= STI_SIZE(NISLANDS_DPM2_SQ_RAMP_STI_SIZE);
2588                         sq_power_throttle2 |= LTI_RATIO(NISLANDS_DPM2_SQ_RAMP_LTI_RATIO);
2589                 } else {
2590                         sq_power_throttle |= MAX_POWER_MASK | MIN_POWER_MASK;
2591                         sq_power_throttle2 |= MAX_POWER_DELTA_MASK | STI_SIZE_MASK | LTI_RATIO_MASK;
2592                 }
2593
2594                 smc_state->levels[i].SQPowerThrottle   = cpu_to_be32(sq_power_throttle);
2595                 smc_state->levels[i].SQPowerThrottle_2 = cpu_to_be32(sq_power_throttle2);
2596         }
2597
2598         return 0;
2599 }
2600
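/*
 * Toggle SMC TDP clamping.  Clamping is never activated for UVD states;
 * ni_pi->pc_enabled tracks whether the SMC accepted the request.
 */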
2601 static int ni_enable_power_containment(struct radeon_device *rdev,
2602                                        struct radeon_ps *radeon_new_state,
2603                                        bool enable)
2604 {
2605         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2606         PPSMC_Result smc_result;
2607         int ret = 0;
2608
2609         if (ni_pi->enable_power_containment) {
2610                 if (enable) {
2611                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
2612                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingActive);
2613                                 if (smc_result != PPSMC_Result_OK) {
2614                                         ret = -EINVAL;
2615                                         ni_pi->pc_enabled = false;
2616                                 } else {
2617                                         ni_pi->pc_enabled = true;
2618                                 }
2619                         }
2620                 } else {
2621                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_TDPClampingInactive);
2622                         if (smc_result != PPSMC_Result_OK)
2623                                 ret = -EINVAL;
2624                         ni_pi->pc_enabled = false;
2625                 }
2626         }
2627
2628         return ret;
2629 }
2630
2631 static int ni_convert_power_state_to_smc(struct radeon_device *rdev,
2632                                          struct radeon_ps *radeon_state,
2633                                          NISLANDS_SMC_SWSTATE *smc_state)
2634 {
2635         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
2636         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2637         struct ni_ps *state = ni_get_ps(radeon_state);
2638         int i, ret;
2639         u32 threshold = state->performance_levels[state->performance_level_count - 1].sclk * 100 / 100;
2640
2641         if (!(radeon_state->caps & ATOM_PPLIB_DISALLOW_ON_DC))
2642                 smc_state->flags |= PPSMC_SWSTATE_FLAG_DC;
2643
2644         smc_state->levelCount = 0;
2645
2646         if (state->performance_level_count > NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE)
2647                 return -EINVAL;
2648
2649         for (i = 0; i < state->performance_level_count; i++) {
2650                 ret = ni_convert_power_level_to_smc(rdev, &state->performance_levels[i],
2651                                                     &smc_state->levels[i]);
2652                 smc_state->levels[i].arbRefreshState =
2653                         (u8)(NISLANDS_DRIVER_STATE_ARB_INDEX + i);
2654
2655                 if (ret)
2656                         return ret;
2657
2658                 if (ni_pi->enable_power_containment)
2659                         smc_state->levels[i].displayWatermark =
2660                                 (state->performance_levels[i].sclk < threshold) ?
2661                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2662                 else
2663                         smc_state->levels[i].displayWatermark = (i < 2) ?
2664                                 PPSMC_DISPLAY_WATERMARK_LOW : PPSMC_DISPLAY_WATERMARK_HIGH;
2665
2666                 if (eg_pi->dynamic_ac_timing)
2667                         smc_state->levels[i].ACIndex = NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i;
2668                 else
2669                         smc_state->levels[i].ACIndex = 0;
2670
2671                 smc_state->levelCount++;
2672         }
2673
2674         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_watermark_threshold,
2675                                       cpu_to_be32(threshold / 512));
2676
2677         ni_populate_smc_sp(rdev, radeon_state, smc_state);
2678
2679         ret = ni_populate_power_containment_values(rdev, radeon_state, smc_state);
2680         if (ret)
2681                 ni_pi->enable_power_containment = false;
2682
2683         ret = ni_populate_sq_ramping_values(rdev, radeon_state, smc_state);
2684         if (ret)
2685                 ni_pi->enable_sq_ramping = false;
2686
2687         return ni_populate_smc_t(rdev, radeon_state, smc_state);
2688 }
2689
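/*
 * Convert the requested power state into SMC swstate format and copy it
 * into the driver state slot of the SMC state table in SMC RAM.
 */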
2690 static int ni_upload_sw_state(struct radeon_device *rdev,
2691                               struct radeon_ps *radeon_new_state)
2692 {
2693         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2694         u16 address = pi->state_table_start +
2695                 offsetof(NISLANDS_SMC_STATETABLE, driverState);
2696         u16 state_size = sizeof(NISLANDS_SMC_SWSTATE) +
2697                 ((NISLANDS_MAX_SMC_PERFORMANCE_LEVELS_PER_SWSTATE - 1) * sizeof(NISLANDS_SMC_HW_PERFORMANCE_LEVEL));
2698         int ret;
2699         NISLANDS_SMC_SWSTATE *smc_state = kzalloc(state_size, GFP_KERNEL);
2700
2701         if (smc_state == NULL)
2702                 return -ENOMEM;
2703
2704         ret = ni_convert_power_state_to_smc(rdev, radeon_new_state, smc_state);
2705         if (ret)
2706                 goto done;
2707
2708         ret = rv770_copy_bytes_to_smc(rdev, address, (u8 *)smc_state, state_size, pi->sram_end);
2709
2710 done:
2711         kfree(smc_state);
2712
2713         return ret;
2714 }
2715
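/*
 * Append derived EMRS/MRS/MRS1 entries to the MC register table, built by
 * combining the current MC_PMG_CMD_* register contents with the per-entry
 * MC_SEQ_MISC1 / MC_SEQ_RESERVE_M data from the vbios table.
 */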
2716 static int ni_set_mc_special_registers(struct radeon_device *rdev,
2717                                        struct ni_mc_reg_table *table)
2718 {
2719         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
2720         u8 i, j, k;
2721         u32 temp_reg;
2722
2723         for (i = 0, j = table->last; i < table->last; i++) {
2724                 switch (table->mc_reg_address[i].s1) {
2725                 case MC_SEQ_MISC1 >> 2:
2726                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2727                                 return -EINVAL;
2728                         temp_reg = RREG32(MC_PMG_CMD_EMRS);
2729                         table->mc_reg_address[j].s1 = MC_PMG_CMD_EMRS >> 2;
2730                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2731                         for (k = 0; k < table->num_entries; k++)
2732                                 table->mc_reg_table_entry[k].mc_data[j] =
2733                                         ((temp_reg & 0xffff0000)) |
2734                                         ((table->mc_reg_table_entry[k].mc_data[i] & 0xffff0000) >> 16);
2735                         j++;
2736                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2737                                 return -EINVAL;
2738
2739                         temp_reg = RREG32(MC_PMG_CMD_MRS);
2740                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS >> 2;
2741                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2742                         for(k = 0; k < table->num_entries; k++) {
2743                                 table->mc_reg_table_entry[k].mc_data[j] =
2744                                         (temp_reg & 0xffff0000) |
2745                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2746                                 if (!pi->mem_gddr5)
2747                                         table->mc_reg_table_entry[k].mc_data[j] |= 0x100;
2748                         }
2749                         j++;
2750                         if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2751                                 return -EINVAL;
2752                         break;
2753                 case MC_SEQ_RESERVE_M >> 2:
                         if (j >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
                                 return -EINVAL;
2754                         temp_reg = RREG32(MC_PMG_CMD_MRS1);
2755                         table->mc_reg_address[j].s1 = MC_PMG_CMD_MRS1 >> 2;
2756                         table->mc_reg_address[j].s0 = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2757                         for (k = 0; k < table->num_entries; k++)
2758                                 table->mc_reg_table_entry[k].mc_data[j] =
2759                                         (temp_reg & 0xffff0000) |
2760                                         (table->mc_reg_table_entry[k].mc_data[i] & 0x0000ffff);
2761                         j++;
2762                         if (j > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2763                                 return -EINVAL;
2764                         break;
2765                 default:
2766                         break;
2767                 }
2768         }
2769
2770         table->last = j;
2771
2772         return 0;
2773 }
2774
2775 static bool ni_check_s0_mc_reg_index(u16 in_reg, u16 *out_reg)
2776 {
2777         bool result = true;
2778
2779         switch (in_reg) {
2780         case  MC_SEQ_RAS_TIMING >> 2:
2781                 *out_reg = MC_SEQ_RAS_TIMING_LP >> 2;
2782                 break;
2783         case MC_SEQ_CAS_TIMING >> 2:
2784                 *out_reg = MC_SEQ_CAS_TIMING_LP >> 2;
2785                 break;
2786         case MC_SEQ_MISC_TIMING >> 2:
2787                 *out_reg = MC_SEQ_MISC_TIMING_LP >> 2;
2788                 break;
2789         case MC_SEQ_MISC_TIMING2 >> 2:
2790                 *out_reg = MC_SEQ_MISC_TIMING2_LP >> 2;
2791                 break;
2792         case MC_SEQ_RD_CTL_D0 >> 2:
2793                 *out_reg = MC_SEQ_RD_CTL_D0_LP >> 2;
2794                 break;
2795         case MC_SEQ_RD_CTL_D1 >> 2:
2796                 *out_reg = MC_SEQ_RD_CTL_D1_LP >> 2;
2797                 break;
2798         case MC_SEQ_WR_CTL_D0 >> 2:
2799                 *out_reg = MC_SEQ_WR_CTL_D0_LP >> 2;
2800                 break;
2801         case MC_SEQ_WR_CTL_D1 >> 2:
2802                 *out_reg = MC_SEQ_WR_CTL_D1_LP >> 2;
2803                 break;
2804         case MC_PMG_CMD_EMRS >> 2:
2805                 *out_reg = MC_SEQ_PMG_CMD_EMRS_LP >> 2;
2806                 break;
2807         case MC_PMG_CMD_MRS >> 2:
2808                 *out_reg = MC_SEQ_PMG_CMD_MRS_LP >> 2;
2809                 break;
2810         case MC_PMG_CMD_MRS1 >> 2:
2811                 *out_reg = MC_SEQ_PMG_CMD_MRS1_LP >> 2;
2812                 break;
2813         case MC_SEQ_PMG_TIMING >> 2:
2814                 *out_reg = MC_SEQ_PMG_TIMING_LP >> 2;
2815                 break;
2816         case MC_PMG_CMD_MRS2 >> 2:
2817                 *out_reg = MC_SEQ_PMG_CMD_MRS2_LP >> 2;
2818                 break;
2819         default:
2820                 result = false;
2821                 break;
2822         }
2823
2824         return result;
2825 }
2826
2827 static void ni_set_valid_flag(struct ni_mc_reg_table *table)
2828 {
2829         u8 i, j;
2830
2831         for (i = 0; i < table->last; i++) {
2832                 for (j = 1; j < table->num_entries; j++) {
2833                         if (table->mc_reg_table_entry[j-1].mc_data[i] != table->mc_reg_table_entry[j].mc_data[i]) {
2834                                 table->valid_flag |= 1 << i;
2835                                 break;
2836                         }
2837                 }
2838         }
2839 }
2840
2841 static void ni_set_s0_mc_reg_index(struct ni_mc_reg_table *table)
2842 {
2843         u32 i;
2844         u16 address;
2845
2846         for (i = 0; i < table->last; i++)
2847                 table->mc_reg_address[i].s0 =
2848                         ni_check_s0_mc_reg_index(table->mc_reg_address[i].s1, &address) ?
2849                         address : table->mc_reg_address[i].s1;
2850 }
2851
2852 static int ni_copy_vbios_mc_reg_table(struct atom_mc_reg_table *table,
2853                                       struct ni_mc_reg_table *ni_table)
2854 {
2855         u8 i, j;
2856
2857         if (table->last > SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2858                 return -EINVAL;
2859         if (table->num_entries > MAX_AC_TIMING_ENTRIES)
2860                 return -EINVAL;
2861
2862         for (i = 0; i < table->last; i++)
2863                 ni_table->mc_reg_address[i].s1 = table->mc_reg_address[i].s1;
2864         ni_table->last = table->last;
2865
2866         for (i = 0; i < table->num_entries; i++) {
2867                 ni_table->mc_reg_table_entry[i].mclk_max =
2868                         table->mc_reg_table_entry[i].mclk_max;
2869                 for (j = 0; j < table->last; j++)
2870                         ni_table->mc_reg_table_entry[i].mc_data[j] =
2871                                 table->mc_reg_table_entry[i].mc_data[j];
2872         }
2873         ni_table->num_entries = table->num_entries;
2874
2875         return 0;
2876 }
2877
2878 static int ni_initialize_mc_reg_table(struct radeon_device *rdev)
2879 {
2880         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2881         int ret;
2882         struct atom_mc_reg_table *table;
2883         struct ni_mc_reg_table *ni_table = &ni_pi->mc_reg_table;
2884         u8 module_index = rv770_get_memory_module_index(rdev);
2885
2886         table = kzalloc(sizeof(struct atom_mc_reg_table), GFP_KERNEL);
2887         if (!table)
2888                 return -ENOMEM;
2889
2890         WREG32(MC_SEQ_RAS_TIMING_LP, RREG32(MC_SEQ_RAS_TIMING));
2891         WREG32(MC_SEQ_CAS_TIMING_LP, RREG32(MC_SEQ_CAS_TIMING));
2892         WREG32(MC_SEQ_MISC_TIMING_LP, RREG32(MC_SEQ_MISC_TIMING));
2893         WREG32(MC_SEQ_MISC_TIMING2_LP, RREG32(MC_SEQ_MISC_TIMING2));
2894         WREG32(MC_SEQ_PMG_CMD_EMRS_LP, RREG32(MC_PMG_CMD_EMRS));
2895         WREG32(MC_SEQ_PMG_CMD_MRS_LP, RREG32(MC_PMG_CMD_MRS));
2896         WREG32(MC_SEQ_PMG_CMD_MRS1_LP, RREG32(MC_PMG_CMD_MRS1));
2897         WREG32(MC_SEQ_WR_CTL_D0_LP, RREG32(MC_SEQ_WR_CTL_D0));
2898         WREG32(MC_SEQ_WR_CTL_D1_LP, RREG32(MC_SEQ_WR_CTL_D1));
2899         WREG32(MC_SEQ_RD_CTL_D0_LP, RREG32(MC_SEQ_RD_CTL_D0));
2900         WREG32(MC_SEQ_RD_CTL_D1_LP, RREG32(MC_SEQ_RD_CTL_D1));
2901         WREG32(MC_SEQ_PMG_TIMING_LP, RREG32(MC_SEQ_PMG_TIMING));
2902         WREG32(MC_SEQ_PMG_CMD_MRS2_LP, RREG32(MC_PMG_CMD_MRS2));
2903
2904         ret = radeon_atom_init_mc_reg_table(rdev, module_index, table);
2905
2906         if (ret)
2907                 goto init_mc_done;
2908
2909         ret = ni_copy_vbios_mc_reg_table(table, ni_table);
2910
2911         if (ret)
2912                 goto init_mc_done;
2913
2914         ni_set_s0_mc_reg_index(ni_table);
2915
2916         ret = ni_set_mc_special_registers(rdev, ni_table);
2917
2918         if (ret)
2919                 goto init_mc_done;
2920
2921         ni_set_valid_flag(ni_table);
2922
2923 init_mc_done:
2924         kfree(table);
2925
2926         return ret;
2927 }
2928
2929 static void ni_populate_mc_reg_addresses(struct radeon_device *rdev,
2930                                          SMC_NIslands_MCRegisters *mc_reg_table)
2931 {
2932         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2933         u32 i, j;
2934
2935         for (i = 0, j = 0; j < ni_pi->mc_reg_table.last; j++) {
2936                 if (ni_pi->mc_reg_table.valid_flag & (1 << j)) {
2937                         if (i >= SMC_NISLANDS_MC_REGISTER_ARRAY_SIZE)
2938                                 break;
2939                         mc_reg_table->address[i].s0 =
2940                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s0);
2941                         mc_reg_table->address[i].s1 =
2942                                 cpu_to_be16(ni_pi->mc_reg_table.mc_reg_address[j].s1);
2943                         i++;
2944                 }
2945         }
2946         mc_reg_table->last = (u8)i;
2947 }
2948
2949
2950 static void ni_convert_mc_registers(struct ni_mc_reg_entry *entry,
2951                                     SMC_NIslands_MCRegisterSet *data,
2952                                     u32 num_entries, u32 valid_flag)
2953 {
2954         u32 i, j;
2955
2956         for (i = 0, j = 0; j < num_entries; j++) {
2957                 if (valid_flag & (1 << j)) {
2958                         data->value[i] = cpu_to_be32(entry->mc_data[j]);
2959                         i++;
2960                 }
2961         }
2962 }
2963
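/*
 * Pick the MC register table entry whose mclk_max covers pl->mclk (falling
 * back to the highest entry) and emit its valid registers in SMC byte order.
 */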
2964 static void ni_convert_mc_reg_table_entry_to_smc(struct radeon_device *rdev,
2965                                                  struct rv7xx_pl *pl,
2966                                                  SMC_NIslands_MCRegisterSet *mc_reg_table_data)
2967 {
2968         struct ni_power_info *ni_pi = ni_get_pi(rdev);
2969         u32 i = 0;
2970
2971         for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
2972                 if (pl->mclk <= ni_pi->mc_reg_table.mc_reg_table_entry[i].mclk_max)
2973                         break;
2974         }
2975
2976         if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
2977                 --i;
2978
2979         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[i],
2980                                 mc_reg_table_data,
2981                                 ni_pi->mc_reg_table.last,
2982                                 ni_pi->mc_reg_table.valid_flag);
2983 }
2984
2985 static void ni_convert_mc_reg_table_to_smc(struct radeon_device *rdev,
2986                                            struct radeon_ps *radeon_state,
2987                                            SMC_NIslands_MCRegisters *mc_reg_table)
2988 {
2989         struct ni_ps *state = ni_get_ps(radeon_state);
2990         int i;
2991
2992         for (i = 0; i < state->performance_level_count; i++) {
2993                 ni_convert_mc_reg_table_entry_to_smc(rdev,
2994                                                      &state->performance_levels[i],
2995                                                      &mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT + i]);
2996         }
2997 }
2998
2999 static int ni_populate_mc_reg_table(struct radeon_device *rdev,
3000                                     struct radeon_ps *radeon_boot_state)
3001 {
3002         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3003         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3004         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3005         struct ni_ps *boot_state = ni_get_ps(radeon_boot_state);
3006         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3007
3008         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3009
3010         rv770_write_smc_soft_register(rdev, NI_SMC_SOFT_REGISTER_seq_index, 1);
3011
3012         ni_populate_mc_reg_addresses(rdev, mc_reg_table);
3013
3014         ni_convert_mc_reg_table_entry_to_smc(rdev, &boot_state->performance_levels[0],
3015                                              &mc_reg_table->data[0]);
3016
3017         ni_convert_mc_registers(&ni_pi->mc_reg_table.mc_reg_table_entry[0],
3018                                 &mc_reg_table->data[1],
3019                                 ni_pi->mc_reg_table.last,
3020                                 ni_pi->mc_reg_table.valid_flag);
3021
3022         ni_convert_mc_reg_table_to_smc(rdev, radeon_boot_state, mc_reg_table);
3023
3024         return rv770_copy_bytes_to_smc(rdev, eg_pi->mc_reg_table_start,
3025                                        (u8 *)mc_reg_table,
3026                                        sizeof(SMC_NIslands_MCRegisters),
3027                                        pi->sram_end);
3028 }
3029
3030 static int ni_upload_mc_reg_table(struct radeon_device *rdev,
3031                                   struct radeon_ps *radeon_new_state)
3032 {
3033         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3034         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3035         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3036         struct ni_ps *ni_new_state = ni_get_ps(radeon_new_state);
3037         SMC_NIslands_MCRegisters *mc_reg_table = &ni_pi->smc_mc_reg_table;
3038         u16 address;
3039
3040         memset(mc_reg_table, 0, sizeof(SMC_NIslands_MCRegisters));
3041
3042         ni_convert_mc_reg_table_to_smc(rdev, radeon_new_state, mc_reg_table);
3043
3044         address = eg_pi->mc_reg_table_start +
3045                 (u16)offsetof(SMC_NIslands_MCRegisters, data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT]);
3046
3047         return rv770_copy_bytes_to_smc(rdev, address,
3048                                        (u8 *)&mc_reg_table->data[NISLANDS_MCREGISTERTABLE_FIRST_DRIVERSTATE_SLOT],
3049                                        sizeof(SMC_NIslands_MCRegisterSet) * ni_new_state->performance_level_count,
3050                                        pi->sram_end);
3051 }
3052
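/*
 * Build the CAC leakage LUT by evaluating the leakage model for each
 * supported VDDC level over the temperature grid (8 degree C steps, clamped
 * to the configured minimum temperature).  Voltage columns beyond the VDDC
 * table are padded with the largest leakage value seen.
 */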
3053 static int ni_init_driver_calculated_leakage_table(struct radeon_device *rdev,
3054                                                    PP_NIslands_CACTABLES *cac_tables)
3055 {
3056         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3057         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3058         u32 leakage = 0;
3059         unsigned int i, j, table_size;
3060         s32 t;
3061         u32 smc_leakage, max_leakage = 0;
3062         u32 scaling_factor;
3063
3064         table_size = eg_pi->vddc_voltage_table.count;
3065
3066         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3067                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3068
3069         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3070
3071         for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++) {
3072                 for (j = 0; j < table_size; j++) {
3073                         t = (1000 * ((i + 1) * 8));
3074
3075                         if (t < ni_pi->cac_data.leakage_minimum_temperature)
3076                                 t = ni_pi->cac_data.leakage_minimum_temperature;
3077
3078                         ni_calculate_leakage_for_v_and_t(rdev,
3079                                                          &ni_pi->cac_data.leakage_coefficients,
3080                                                          eg_pi->vddc_voltage_table.entries[j].value,
3081                                                          t,
3082                                                          ni_pi->cac_data.i_leakage,
3083                                                          &leakage);
3084
3085                         smc_leakage = ni_scale_power_for_smc(leakage, scaling_factor) / 1000;
3086                         if (smc_leakage > max_leakage)
3087                                 max_leakage = smc_leakage;
3088
3089                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(smc_leakage);
3090                 }
3091         }
3092
3093         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3094                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3095                         cac_tables->cac_lkge_lut[i][j] = cpu_to_be32(max_leakage);
3096         }
3097         return 0;
3098 }
3099
3100 static int ni_init_simplified_leakage_table(struct radeon_device *rdev,
3101                                             PP_NIslands_CACTABLES *cac_tables)
3102 {
3103         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3104         struct radeon_cac_leakage_table *leakage_table =
3105                 &rdev->pm.dpm.dyn_state.cac_leakage_table;
3106         u32 i, j, table_size;
3107         u32 smc_leakage, max_leakage = 0;
3108         u32 scaling_factor;
3109
3110         if (!leakage_table)
3111                 return -EINVAL;
3112
3113         table_size = leakage_table->count;
3114
3115         if (eg_pi->vddc_voltage_table.count != table_size)
3116                 table_size = (eg_pi->vddc_voltage_table.count < leakage_table->count) ?
3117                         eg_pi->vddc_voltage_table.count : leakage_table->count;
3118
3119         if (SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES < table_size)
3120                 table_size = SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES;
3121
3122         if (table_size == 0)
3123                 return -EINVAL;
3124
3125         scaling_factor = ni_get_smc_power_scaling_factor(rdev);
3126
3127         for (j = 0; j < table_size; j++) {
3128                 smc_leakage = leakage_table->entries[j].leakage;
3129
3130                 if (smc_leakage > max_leakage)
3131                         max_leakage = smc_leakage;
3132
3133                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3134                         cac_tables->cac_lkge_lut[i][j] =
3135                                 cpu_to_be32(ni_scale_power_for_smc(smc_leakage, scaling_factor));
3136         }
3137
3138         for (j = table_size; j < SMC_NISLANDS_LKGE_LUT_NUM_OF_VOLT_ENTRIES; j++) {
3139                 for (i = 0; i < SMC_NISLANDS_LKGE_LUT_NUM_OF_TEMP_ENTRIES; i++)
3140                         cac_tables->cac_lkge_lut[i][j] =
3141                                 cpu_to_be32(ni_scale_power_for_smc(max_leakage, scaling_factor));
3142         }
3143         return 0;
3144 }
3145
3146 static int ni_initialize_smc_cac_tables(struct radeon_device *rdev)
3147 {
3148         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3149         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3150         PP_NIslands_CACTABLES *cac_tables = NULL;
3151         int i, ret;
3152         u32 reg;
3153
3154         if (ni_pi->enable_cac == false)
3155                 return 0;
3156
3157         cac_tables = kzalloc(sizeof(PP_NIslands_CACTABLES), GFP_KERNEL);
3158         if (!cac_tables)
3159                 return -ENOMEM;
3160
3161         reg = RREG32(CG_CAC_CTRL) & ~(TID_CNT_MASK | TID_UNIT_MASK);
3162         reg |= (TID_CNT(ni_pi->cac_weights->tid_cnt) |
3163                 TID_UNIT(ni_pi->cac_weights->tid_unit));
3164         WREG32(CG_CAC_CTRL, reg);
3165
3166         for (i = 0; i < NISLANDS_DCCAC_MAX_LEVELS; i++)
3167                 ni_pi->dc_cac_table[i] = ni_pi->cac_weights->dc_cac[i];
3168
3169         for (i = 0; i < SMC_NISLANDS_BIF_LUT_NUM_OF_ENTRIES; i++)
3170                 cac_tables->cac_bif_lut[i] = ni_pi->cac_weights->pcie_cac[i];
3171
3172         ni_pi->cac_data.i_leakage = rdev->pm.dpm.cac_leakage;
3173         ni_pi->cac_data.pwr_const = 0;
3174         ni_pi->cac_data.dc_cac_value = ni_pi->dc_cac_table[NISLANDS_DCCAC_LEVEL_0];
3175         ni_pi->cac_data.bif_cac_value = 0;
3176         ni_pi->cac_data.mc_wr_weight = ni_pi->cac_weights->mc_write_weight;
3177         ni_pi->cac_data.mc_rd_weight = ni_pi->cac_weights->mc_read_weight;
3178         ni_pi->cac_data.allow_ovrflw = 0;
3179         ni_pi->cac_data.l2num_win_tdp = ni_pi->lta_window_size;
3180         ni_pi->cac_data.num_win_tdp = 0;
3181         ni_pi->cac_data.lts_truncate_n = ni_pi->lts_truncate;
3182
3183         if (ni_pi->driver_calculate_cac_leakage)
3184                 ret = ni_init_driver_calculated_leakage_table(rdev, cac_tables);
3185         else
3186                 ret = ni_init_simplified_leakage_table(rdev, cac_tables);
3187
3188         if (ret)
3189                 goto done_free;
3190
3191         cac_tables->pwr_const      = cpu_to_be32(ni_pi->cac_data.pwr_const);
3192         cac_tables->dc_cacValue    = cpu_to_be32(ni_pi->cac_data.dc_cac_value);
3193         cac_tables->bif_cacValue   = cpu_to_be32(ni_pi->cac_data.bif_cac_value);
3194         cac_tables->AllowOvrflw    = ni_pi->cac_data.allow_ovrflw;
3195         cac_tables->MCWrWeight     = ni_pi->cac_data.mc_wr_weight;
3196         cac_tables->MCRdWeight     = ni_pi->cac_data.mc_rd_weight;
3197         cac_tables->numWin_TDP     = ni_pi->cac_data.num_win_tdp;
3198         cac_tables->l2numWin_TDP   = ni_pi->cac_data.l2num_win_tdp;
3199         cac_tables->lts_truncate_n = ni_pi->cac_data.lts_truncate_n;
3200
3201         ret = rv770_copy_bytes_to_smc(rdev, ni_pi->cac_table_start, (u8 *)cac_tables,
3202                                       sizeof(PP_NIslands_CACTABLES), pi->sram_end);
3203
3204 done_free:
3205         if (ret) {
3206                 ni_pi->enable_cac = false;
3207                 ni_pi->enable_power_containment = false;
3208         }
3209
3210         kfree(cac_tables);
3211
3212         return 0;
3213 }
3214
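/*
 * Program the per-block CAC signal weights, the SQ CAC thresholds and the
 * MC read/write weights from the ASIC-specific weight table.
 */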
3215 static int ni_initialize_hardware_cac_manager(struct radeon_device *rdev)
3216 {
3217         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3218         u32 reg;
3219
3220         if (!ni_pi->enable_cac ||
3221             !ni_pi->cac_configuration_required)
3222                 return 0;
3223
3224         if (ni_pi->cac_weights == NULL)
3225                 return -EINVAL;
3226
3227         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_0) & ~(WEIGHT_TCP_SIG0_MASK |
3228                                                       WEIGHT_TCP_SIG1_MASK |
3229                                                       WEIGHT_TA_SIG_MASK);
3230         reg |= (WEIGHT_TCP_SIG0(ni_pi->cac_weights->weight_tcp_sig0) |
3231                 WEIGHT_TCP_SIG1(ni_pi->cac_weights->weight_tcp_sig1) |
3232                 WEIGHT_TA_SIG(ni_pi->cac_weights->weight_ta_sig));
3233         WREG32_CG(CG_CAC_REGION_1_WEIGHT_0, reg);
3234
3235         reg = RREG32_CG(CG_CAC_REGION_1_WEIGHT_1) & ~(WEIGHT_TCC_EN0_MASK |
3236                                                       WEIGHT_TCC_EN1_MASK |
3237                                                       WEIGHT_TCC_EN2_MASK);
3238         reg |= (WEIGHT_TCC_EN0(ni_pi->cac_weights->weight_tcc_en0) |
3239                 WEIGHT_TCC_EN1(ni_pi->cac_weights->weight_tcc_en1) |
3240                 WEIGHT_TCC_EN2(ni_pi->cac_weights->weight_tcc_en2));
3241         WREG32_CG(CG_CAC_REGION_1_WEIGHT_1, reg);
3242
3243         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_0) & ~(WEIGHT_CB_EN0_MASK |
3244                                                       WEIGHT_CB_EN1_MASK |
3245                                                       WEIGHT_CB_EN2_MASK |
3246                                                       WEIGHT_CB_EN3_MASK);
3247         reg |= (WEIGHT_CB_EN0(ni_pi->cac_weights->weight_cb_en0) |
3248                 WEIGHT_CB_EN1(ni_pi->cac_weights->weight_cb_en1) |
3249                 WEIGHT_CB_EN2(ni_pi->cac_weights->weight_cb_en2) |
3250                 WEIGHT_CB_EN3(ni_pi->cac_weights->weight_cb_en3));
3251         WREG32_CG(CG_CAC_REGION_2_WEIGHT_0, reg);
3252
3253         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_1) & ~(WEIGHT_DB_SIG0_MASK |
3254                                                       WEIGHT_DB_SIG1_MASK |
3255                                                       WEIGHT_DB_SIG2_MASK |
3256                                                       WEIGHT_DB_SIG3_MASK);
3257         reg |= (WEIGHT_DB_SIG0(ni_pi->cac_weights->weight_db_sig0) |
3258                 WEIGHT_DB_SIG1(ni_pi->cac_weights->weight_db_sig1) |
3259                 WEIGHT_DB_SIG2(ni_pi->cac_weights->weight_db_sig2) |
3260                 WEIGHT_DB_SIG3(ni_pi->cac_weights->weight_db_sig3));
3261         WREG32_CG(CG_CAC_REGION_2_WEIGHT_1, reg);
3262
3263         reg = RREG32_CG(CG_CAC_REGION_2_WEIGHT_2) & ~(WEIGHT_SXM_SIG0_MASK |
3264                                                       WEIGHT_SXM_SIG1_MASK |
3265                                                       WEIGHT_SXM_SIG2_MASK |
3266                                                       WEIGHT_SXS_SIG0_MASK |
3267                                                       WEIGHT_SXS_SIG1_MASK);
3268         reg |= (WEIGHT_SXM_SIG0(ni_pi->cac_weights->weight_sxm_sig0) |
3269                 WEIGHT_SXM_SIG1(ni_pi->cac_weights->weight_sxm_sig1) |
3270                 WEIGHT_SXM_SIG2(ni_pi->cac_weights->weight_sxm_sig2) |
3271                 WEIGHT_SXS_SIG0(ni_pi->cac_weights->weight_sxs_sig0) |
3272                 WEIGHT_SXS_SIG1(ni_pi->cac_weights->weight_sxs_sig1));
3273         WREG32_CG(CG_CAC_REGION_2_WEIGHT_2, reg);
3274
3275         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_0) & ~(WEIGHT_XBR_0_MASK |
3276                                                       WEIGHT_XBR_1_MASK |
3277                                                       WEIGHT_XBR_2_MASK |
3278                                                       WEIGHT_SPI_SIG0_MASK);
3279         reg |= (WEIGHT_XBR_0(ni_pi->cac_weights->weight_xbr_0) |
3280                 WEIGHT_XBR_1(ni_pi->cac_weights->weight_xbr_1) |
3281                 WEIGHT_XBR_2(ni_pi->cac_weights->weight_xbr_2) |
3282                 WEIGHT_SPI_SIG0(ni_pi->cac_weights->weight_spi_sig0));
3283         WREG32_CG(CG_CAC_REGION_3_WEIGHT_0, reg);
3284
3285         reg = RREG32_CG(CG_CAC_REGION_3_WEIGHT_1) & ~(WEIGHT_SPI_SIG1_MASK |
3286                                                       WEIGHT_SPI_SIG2_MASK |
3287                                                       WEIGHT_SPI_SIG3_MASK |
3288                                                       WEIGHT_SPI_SIG4_MASK |
3289                                                       WEIGHT_SPI_SIG5_MASK);
3290         reg |= (WEIGHT_SPI_SIG1(ni_pi->cac_weights->weight_spi_sig1) |
3291                 WEIGHT_SPI_SIG2(ni_pi->cac_weights->weight_spi_sig2) |
3292                 WEIGHT_SPI_SIG3(ni_pi->cac_weights->weight_spi_sig3) |
3293                 WEIGHT_SPI_SIG4(ni_pi->cac_weights->weight_spi_sig4) |
3294                 WEIGHT_SPI_SIG5(ni_pi->cac_weights->weight_spi_sig5));
3295         WREG32_CG(CG_CAC_REGION_3_WEIGHT_1, reg);
3296
3297         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_0) & ~(WEIGHT_LDS_SIG0_MASK |
3298                                                       WEIGHT_LDS_SIG1_MASK |
3299                                                       WEIGHT_SC_MASK);
3300         reg |= (WEIGHT_LDS_SIG0(ni_pi->cac_weights->weight_lds_sig0) |
3301                 WEIGHT_LDS_SIG1(ni_pi->cac_weights->weight_lds_sig1) |
3302                 WEIGHT_SC(ni_pi->cac_weights->weight_sc));
3303         WREG32_CG(CG_CAC_REGION_4_WEIGHT_0, reg);
3304
3305         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_1) & ~(WEIGHT_BIF_MASK |
3306                                                       WEIGHT_CP_MASK |
3307                                                       WEIGHT_PA_SIG0_MASK |
3308                                                       WEIGHT_PA_SIG1_MASK |
3309                                                       WEIGHT_VGT_SIG0_MASK);
3310         reg |= (WEIGHT_BIF(ni_pi->cac_weights->weight_bif) |
3311                 WEIGHT_CP(ni_pi->cac_weights->weight_cp) |
3312                 WEIGHT_PA_SIG0(ni_pi->cac_weights->weight_pa_sig0) |
3313                 WEIGHT_PA_SIG1(ni_pi->cac_weights->weight_pa_sig1) |
3314                 WEIGHT_VGT_SIG0(ni_pi->cac_weights->weight_vgt_sig0));
3315         WREG32_CG(CG_CAC_REGION_4_WEIGHT_1, reg);
3316
3317         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_2) & ~(WEIGHT_VGT_SIG1_MASK |
3318                                                       WEIGHT_VGT_SIG2_MASK |
3319                                                       WEIGHT_DC_SIG0_MASK |
3320                                                       WEIGHT_DC_SIG1_MASK |
3321                                                       WEIGHT_DC_SIG2_MASK);
3322         reg |= (WEIGHT_VGT_SIG1(ni_pi->cac_weights->weight_vgt_sig1) |
3323                 WEIGHT_VGT_SIG2(ni_pi->cac_weights->weight_vgt_sig2) |
3324                 WEIGHT_DC_SIG0(ni_pi->cac_weights->weight_dc_sig0) |
3325                 WEIGHT_DC_SIG1(ni_pi->cac_weights->weight_dc_sig1) |
3326                 WEIGHT_DC_SIG2(ni_pi->cac_weights->weight_dc_sig2));
3327         WREG32_CG(CG_CAC_REGION_4_WEIGHT_2, reg);
3328
3329         reg = RREG32_CG(CG_CAC_REGION_4_WEIGHT_3) & ~(WEIGHT_DC_SIG3_MASK |
3330                                                       WEIGHT_UVD_SIG0_MASK |
3331                                                       WEIGHT_UVD_SIG1_MASK |
3332                                                       WEIGHT_SPARE0_MASK |
3333                                                       WEIGHT_SPARE1_MASK);
3334         reg |= (WEIGHT_DC_SIG3(ni_pi->cac_weights->weight_dc_sig3) |
3335                 WEIGHT_UVD_SIG0(ni_pi->cac_weights->weight_uvd_sig0) |
3336                 WEIGHT_UVD_SIG1(ni_pi->cac_weights->weight_uvd_sig1) |
3337                 WEIGHT_SPARE0(ni_pi->cac_weights->weight_spare0) |
3338                 WEIGHT_SPARE1(ni_pi->cac_weights->weight_spare1));
3339         WREG32_CG(CG_CAC_REGION_4_WEIGHT_3, reg);
3340
3341         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_0) & ~(WEIGHT_SQ_VSP_MASK |
3342                                                       WEIGHT_SQ_VSP0_MASK);
3343         reg |= (WEIGHT_SQ_VSP(ni_pi->cac_weights->weight_sq_vsp) |
3344                 WEIGHT_SQ_VSP0(ni_pi->cac_weights->weight_sq_vsp0));
3345         WREG32_CG(CG_CAC_REGION_5_WEIGHT_0, reg);
3346
3347         reg = RREG32_CG(CG_CAC_REGION_5_WEIGHT_1) & ~(WEIGHT_SQ_GPR_MASK);
3348         reg |= WEIGHT_SQ_GPR(ni_pi->cac_weights->weight_sq_gpr);
3349         WREG32_CG(CG_CAC_REGION_5_WEIGHT_1, reg);
3350
3351         reg = RREG32_CG(CG_CAC_REGION_4_OVERRIDE_4) & ~(OVR_MODE_SPARE_0_MASK |
3352                                                         OVR_VAL_SPARE_0_MASK |
3353                                                         OVR_MODE_SPARE_1_MASK |
3354                                                         OVR_VAL_SPARE_1_MASK);
3355         reg |= (OVR_MODE_SPARE_0(ni_pi->cac_weights->ovr_mode_spare_0) |
3356                 OVR_VAL_SPARE_0(ni_pi->cac_weights->ovr_val_spare_0) |
3357                 OVR_MODE_SPARE_1(ni_pi->cac_weights->ovr_mode_spare_1) |
3358                 OVR_VAL_SPARE_1(ni_pi->cac_weights->ovr_val_spare_1));
3359         WREG32_CG(CG_CAC_REGION_4_OVERRIDE_4, reg);
3360
3361         reg = RREG32(SQ_CAC_THRESHOLD) & ~(VSP_MASK |
3362                                            VSP0_MASK |
3363                                            GPR_MASK);
3364         reg |= (VSP(ni_pi->cac_weights->vsp) |
3365                 VSP0(ni_pi->cac_weights->vsp0) |
3366                 GPR(ni_pi->cac_weights->gpr));
3367         WREG32(SQ_CAC_THRESHOLD, reg);
3368
3369         reg = (MCDW_WR_ENABLE |
3370                MCDX_WR_ENABLE |
3371                MCDY_WR_ENABLE |
3372                MCDZ_WR_ENABLE |
3373                INDEX(0x09D4));
3374         WREG32(MC_CG_CONFIG, reg);
3375
3376         reg = (READ_WEIGHT(ni_pi->cac_weights->mc_read_weight) |
3377                WRITE_WEIGHT(ni_pi->cac_weights->mc_write_weight) |
3378                ALLOW_OVERFLOW);
3379         WREG32(MC_CG_DATAPORT, reg);
3380
3381         return 0;
3382 }
3383
3384 static int ni_enable_smc_cac(struct radeon_device *rdev,
3385                              struct radeon_ps *radeon_new_state,
3386                              bool enable)
3387 {
3388         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3389         int ret = 0;
3390         PPSMC_Result smc_result;
3391
3392         if (ni_pi->enable_cac) {
3393                 if (enable) {
3394                         if (!r600_is_uvd_state(radeon_new_state->class, radeon_new_state->class2)) {
3395                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_CollectCAC_PowerCorreln);
3396
3397                                 if (ni_pi->support_cac_long_term_average) {
3398                                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgEnable);
3399                                         if (PPSMC_Result_OK != smc_result)
3400                                                 ni_pi->support_cac_long_term_average = false;
3401                                 }
3402
3403                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableCac);
3404                                 if (PPSMC_Result_OK != smc_result)
3405                                         ret = -EINVAL;
3406
3407                                 ni_pi->cac_enabled = (PPSMC_Result_OK == smc_result) ? true : false;
3408                         }
3409                 } else if (ni_pi->cac_enabled) {
3410                         smc_result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_DisableCac);
3411
3412                         ni_pi->cac_enabled = false;
3413
3414                         if (ni_pi->support_cac_long_term_average) {
3415                                 smc_result = rv770_send_msg_to_smc(rdev, PPSMC_CACLongTermAvgDisable);
3416                                 if (PPSMC_Result_OK != smc_result)
3417                                         ni_pi->support_cac_long_term_average = false;
3418                         }
3419                 }
3420         }
3421
3422         return ret;
3423 }
3424
3425 static int ni_pcie_performance_request(struct radeon_device *rdev,
3426                                        u8 perf_req, bool advertise)
3427 {
3428         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3429
3430 #if defined(CONFIG_ACPI)
3431         if ((perf_req == PCIE_PERF_REQ_PECI_GEN1) ||
3432             (perf_req == PCIE_PERF_REQ_PECI_GEN2)) {
3433                 if (eg_pi->pcie_performance_request_registered == false)
3434                         radeon_acpi_pcie_notify_device_ready(rdev);
3435                 eg_pi->pcie_performance_request_registered = true;
3436                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3437         } else if ((perf_req == PCIE_PERF_REQ_REMOVE_REGISTRY) &&
3438                    eg_pi->pcie_performance_request_registered) {
3439                 eg_pi->pcie_performance_request_registered = false;
3440                 return radeon_acpi_pcie_performance_request(rdev, perf_req, advertise);
3441         }
3442 #endif
3443         return 0;
3444 }
3445
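/*
 * Cache whether the link partner has signalled gen2 support and, if it has
 * not, request gen2 operation through the ACPI PCIe performance interface.
 */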
3446 static int ni_advertise_gen2_capability(struct radeon_device *rdev)
3447 {
3448         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3449         u32 tmp;
3450
3451         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3452
3453         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3454             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2))
3455                 pi->pcie_gen2 = true;
3456         else
3457                 pi->pcie_gen2 = false;
3458
3459         if (!pi->pcie_gen2)
3460                 ni_pcie_performance_request(rdev, PCIE_PERF_REQ_PECI_GEN2, true);
3461
3462         return 0;
3463 }
3464
3465 static void ni_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev,
3466                                             bool enable)
3467 {
3468         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3469         u32 tmp, bif;
3470
3471         tmp = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
3472
3473         if ((tmp & LC_OTHER_SIDE_EVER_SENT_GEN2) &&
3474             (tmp & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3475                 if (enable) {
3476                         if (!pi->boot_in_gen2) {
3477                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3478                                 bif |= CG_CLIENT_REQ(0xd);
3479                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3480                         }
3481                         tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3482                         tmp |= LC_HW_VOLTAGE_IF_CONTROL(1);
3483                         tmp |= LC_GEN2_EN_STRAP;
3484
3485                         tmp |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3486                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3487                         udelay(10);
3488                         tmp &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3489                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3490                 } else {
3491                         if (!pi->boot_in_gen2) {
3492                                 bif = RREG32(CG_BIF_REQ_AND_RSP) & ~CG_CLIENT_REQ_MASK;
3493                                 bif |= CG_CLIENT_REQ(0xd);
3494                                 WREG32(CG_BIF_REQ_AND_RSP, bif);
3495
3496                                 tmp &= ~LC_HW_VOLTAGE_IF_CONTROL_MASK;
3497                                 tmp &= ~LC_GEN2_EN_STRAP;
3498                         }
3499                         WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, tmp);
3500                 }
3501         }
3502 }
3503
3504 static void ni_enable_dynamic_pcie_gen2(struct radeon_device *rdev,
3505                                         bool enable)
3506 {
3507         ni_enable_bif_dynamic_pcie_gen2(rdev, enable);
3508
3509         if (enable)
3510                 WREG32_P(GENERAL_PWRMGT, ENABLE_GEN2PCIE, ~ENABLE_GEN2PCIE);
3511         else
3512                 WREG32_P(GENERAL_PWRMGT, 0, ~ENABLE_GEN2PCIE);
3513 }
3514
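/*
 * Reprogram the UVD clocks before the engine clock switch only when the new
 * state's highest sclk is lower than the current one; the _after variant
 * below handles the opposite direction.
 */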
3515 void ni_set_uvd_clock_before_set_eng_clock(struct radeon_device *rdev,
3516                                            struct radeon_ps *new_ps,
3517                                            struct radeon_ps *old_ps)
3518 {
3519         struct ni_ps *new_state = ni_get_ps(new_ps);
3520         struct ni_ps *current_state = ni_get_ps(old_ps);
3521
3522         if ((new_ps->vclk == old_ps->vclk) &&
3523             (new_ps->dclk == old_ps->dclk))
3524                 return;
3525
3526         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=
3527             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3528                 return;
3529
3530         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3531 }
3532
3533 void ni_set_uvd_clock_after_set_eng_clock(struct radeon_device *rdev,
3534                                           struct radeon_ps *new_ps,
3535                                           struct radeon_ps *old_ps)
3536 {
3537         struct ni_ps *new_state = ni_get_ps(new_ps);
3538         struct ni_ps *current_state = ni_get_ps(old_ps);
3539
3540         if ((new_ps->vclk == old_ps->vclk) &&
3541             (new_ps->dclk == old_ps->dclk))
3542                 return;
3543
3544         if (new_state->performance_levels[new_state->performance_level_count - 1].sclk <
3545             current_state->performance_levels[current_state->performance_level_count - 1].sclk)
3546                 return;
3547
3548         radeon_set_uvd_clocks(rdev, new_ps->vclk, new_ps->dclk);
3549 }
3550
3551 void ni_dpm_setup_asic(struct radeon_device *rdev)
3552 {
3553         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3554
3555         ni_read_clock_registers(rdev);
3556         btc_read_arb_registers(rdev);
3557         rv770_get_memory_type(rdev);
3558         if (eg_pi->pcie_performance_request)
3559                 ni_advertise_gen2_capability(rdev);
3560         rv770_get_pcie_gen2_status(rdev);
3561         rv770_enable_acpi_pm(rdev);
3562 }
3563
3564 void ni_update_current_ps(struct radeon_device *rdev,
3565                           struct radeon_ps *rps)
3566 {
3567         struct ni_ps *new_ps = ni_get_ps(rps);
3568         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3569         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3570
3571         eg_pi->current_rps = *rps;
3572         ni_pi->current_ps = *new_ps;
3573         eg_pi->current_rps.ps_priv = &ni_pi->current_ps;
3574 }
3575
3576 void ni_update_requested_ps(struct radeon_device *rdev,
3577                             struct radeon_ps *rps)
3578 {
3579         struct ni_ps *new_ps = ni_get_ps(rps);
3580         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3581         struct ni_power_info *ni_pi = ni_get_pi(rdev);
3582
3583         eg_pi->requested_rps = *rps;
3584         ni_pi->requested_ps = *new_ps;
3585         eg_pi->requested_rps.ps_priv = &ni_pi->requested_ps;
3586 }
3587
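/*
 * Full DPM bring-up: construct the voltage and MC register tables, upload
 * the SMC firmware and state tables, start the SMC, then enable clock
 * gating and the thermal interrupt.
 */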
3588 int ni_dpm_enable(struct radeon_device *rdev)
3589 {
3590         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3591         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3592         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3593         int ret;
3594
3595         if (pi->gfx_clock_gating)
3596                 ni_cg_clockgating_default(rdev);
3597         if (btc_dpm_enabled(rdev))
3598                 return -EINVAL;
3599         if (pi->mg_clock_gating)
3600                 ni_mg_clockgating_default(rdev);
3601         if (eg_pi->ls_clock_gating)
3602                 ni_ls_clockgating_default(rdev);
3603         if (pi->voltage_control) {
3604                 rv770_enable_voltage_control(rdev, true);
3605                 ret = cypress_construct_voltage_tables(rdev);
3606                 if (ret) {
3607                         DRM_ERROR("cypress_construct_voltage_tables failed\n");
3608                         return ret;
3609                 }
3610         }
3611         if (eg_pi->dynamic_ac_timing) {
3612                 ret = ni_initialize_mc_reg_table(rdev);
3613                 if (ret)
3614                         eg_pi->dynamic_ac_timing = false;
3615         }
3616         if (pi->dynamic_ss)
3617                 cypress_enable_spread_spectrum(rdev, true);
3618         if (pi->thermal_protection)
3619                 rv770_enable_thermal_protection(rdev, true);
3620         rv770_setup_bsp(rdev);
3621         rv770_program_git(rdev);
3622         rv770_program_tp(rdev);
3623         rv770_program_tpp(rdev);
3624         rv770_program_sstp(rdev);
3625         cypress_enable_display_gap(rdev);
3626         rv770_program_vc(rdev);
3627         if (pi->dynamic_pcie_gen2)
3628                 ni_enable_dynamic_pcie_gen2(rdev, true);
3629         ret = rv770_upload_firmware(rdev);
3630         if (ret) {
3631                 DRM_ERROR("rv770_upload_firmware failed\n");
3632                 return ret;
3633         }
3634         ret = ni_process_firmware_header(rdev);
3635         if (ret) {
3636                 DRM_ERROR("ni_process_firmware_header failed\n");
3637                 return ret;
3638         }
3639         ret = ni_initial_switch_from_arb_f0_to_f1(rdev);
3640         if (ret) {
3641                 DRM_ERROR("ni_initial_switch_from_arb_f0_to_f1 failed\n");
3642                 return ret;
3643         }
3644         ret = ni_init_smc_table(rdev);
3645         if (ret) {
3646                 DRM_ERROR("ni_init_smc_table failed\n");
3647                 return ret;
3648         }
3649         ret = ni_init_smc_spll_table(rdev);
3650         if (ret) {
3651                 DRM_ERROR("ni_init_smc_spll_table failed\n");
3652                 return ret;
3653         }
3654         ret = ni_init_arb_table_index(rdev);
3655         if (ret) {
3656                 DRM_ERROR("ni_init_arb_table_index failed\n");
3657                 return ret;
3658         }
3659         if (eg_pi->dynamic_ac_timing) {
3660                 ret = ni_populate_mc_reg_table(rdev, boot_ps);
3661                 if (ret) {
3662                         DRM_ERROR("ni_populate_mc_reg_table failed\n");
3663                         return ret;
3664                 }
3665         }
3666         ret = ni_initialize_smc_cac_tables(rdev);
3667         if (ret) {
3668                 DRM_ERROR("ni_initialize_smc_cac_tables failed\n");
3669                 return ret;
3670         }
3671         ret = ni_initialize_hardware_cac_manager(rdev);
3672         if (ret) {
3673                 DRM_ERROR("ni_initialize_hardware_cac_manager failed\n");
3674                 return ret;
3675         }
3676         ret = ni_populate_smc_tdp_limits(rdev, boot_ps);
3677         if (ret) {
3678                 DRM_ERROR("ni_populate_smc_tdp_limits failed\n");
3679                 return ret;
3680         }
3681         ni_program_response_times(rdev);
3682         r7xx_start_smc(rdev);
3683         ret = cypress_notify_smc_display_change(rdev, false);
3684         if (ret) {
3685                 DRM_ERROR("cypress_notify_smc_display_change failed\n");
3686                 return ret;
3687         }
3688         cypress_enable_sclk_control(rdev, true);
3689         if (eg_pi->memory_transition)
3690                 cypress_enable_mclk_control(rdev, true);
3691         cypress_start_dpm(rdev);
3692         if (pi->gfx_clock_gating)
3693                 ni_gfx_clockgating_enable(rdev, true);
3694         if (pi->mg_clock_gating)
3695                 ni_mg_clockgating_enable(rdev, true);
3696         if (eg_pi->ls_clock_gating)
3697                 ni_ls_clockgating_enable(rdev, true);
3698
3699         if (rdev->irq.installed &&
3700             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3701                 PPSMC_Result result;
3702
3703                 ret = rv770_set_thermal_temperature_range(rdev, R600_TEMP_RANGE_MIN, 0xff * 1000);
3704                 if (ret)
3705                         return ret;
3706                 rdev->irq.dpm_thermal = true;
3707                 radeon_irq_set(rdev);
3708                 result = rv770_send_msg_to_smc(rdev, PPSMC_MSG_EnableThermalInterrupt);
3709
3710                 if (result != PPSMC_Result_OK)
3711                         DRM_DEBUG_KMS("Could not enable thermal interrupts.\n");
3712         }
3713
3714         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, true);
3715
3716         ni_update_current_ps(rdev, boot_ps);
3717
3718         return 0;
3719 }
3720
3721 void ni_dpm_disable(struct radeon_device *rdev)
3722 {
3723         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3724         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3725         struct radeon_ps *boot_ps = rdev->pm.dpm.boot_ps;
3726
3727         if (!btc_dpm_enabled(rdev))
3728                 return;
3729         rv770_clear_vc(rdev);
3730         if (pi->thermal_protection)
3731                 rv770_enable_thermal_protection(rdev, false);
3732         ni_enable_power_containment(rdev, boot_ps, false);
3733         ni_enable_smc_cac(rdev, boot_ps, false);
3734         cypress_enable_spread_spectrum(rdev, false);
3735         rv770_enable_auto_throttle_source(rdev, RADEON_DPM_AUTO_THROTTLE_SRC_THERMAL, false);
3736         if (pi->dynamic_pcie_gen2)
3737                 ni_enable_dynamic_pcie_gen2(rdev, false);
3738
3739         if (rdev->irq.installed &&
3740             r600_is_internal_thermal_sensor(rdev->pm.int_thermal_type)) {
3741                 rdev->irq.dpm_thermal = false;
3742                 radeon_irq_set(rdev);
3743         }
3744
3745         if (pi->gfx_clock_gating)
3746                 ni_gfx_clockgating_enable(rdev, false);
3747         if (pi->mg_clock_gating)
3748                 ni_mg_clockgating_enable(rdev, false);
3749         if (eg_pi->ls_clock_gating)
3750                 ni_ls_clockgating_enable(rdev, false);
3751         ni_stop_dpm(rdev);
3752         btc_reset_to_default(rdev);
3753         ni_stop_smc(rdev);
3754         ni_force_switch_to_arb_f0(rdev);
3755
3756         ni_update_current_ps(rdev, boot_ps);
3757 }
3758
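/*
 * Restrict the performance levels, halt the SMC, repopulate the TDP limits
 * for the requested state, then resume and re-apply the software state.
 */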
3759 static int ni_power_control_set_level(struct radeon_device *rdev)
3760 {
3761         struct radeon_ps *new_ps = rdev->pm.dpm.requested_ps;
3762         int ret;
3763
3764         ret = ni_restrict_performance_levels_before_switch(rdev);
3765         if (ret)
3766                 return ret;
3767         ret = rv770_halt_smc(rdev);
3768         if (ret)
3769                 return ret;
3770         ret = ni_populate_smc_tdp_limits(rdev, new_ps);
3771         if (ret)
3772                 return ret;
3773         ret = rv770_resume_smc(rdev);
3774         if (ret)
3775                 return ret;
3776         ret = rv770_set_sw_state(rdev);
3777         if (ret)
3778                 return ret;
3779
3780         return 0;
3781 }
3782
3783 int ni_dpm_pre_set_power_state(struct radeon_device *rdev)
3784 {
3785         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3786         struct radeon_ps requested_ps = *rdev->pm.dpm.requested_ps;
3787         struct radeon_ps *new_ps = &requested_ps;
3788
3789         ni_update_requested_ps(rdev, new_ps);
3790
3791         ni_apply_state_adjust_rules(rdev, &eg_pi->requested_rps);
3792
3793         return 0;
3794 }
3795
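/*
 * Standard state-switch sequence: drop CAC and power containment, halt the
 * SMC, upload the new software state and MC registers, resume, then
 * re-enable CAC, power containment and the TDP limits.
 */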
3796 int ni_dpm_set_power_state(struct radeon_device *rdev)
3797 {
3798         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3799         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3800         struct radeon_ps *old_ps = &eg_pi->current_rps;
3801         int ret;
3802
3803         ret = ni_restrict_performance_levels_before_switch(rdev);
3804         if (ret) {
3805                 DRM_ERROR("ni_restrict_performance_levels_before_switch failed\n");
3806                 return ret;
3807         }
3808         ni_set_uvd_clock_before_set_eng_clock(rdev, new_ps, old_ps);
3809         ret = ni_enable_power_containment(rdev, new_ps, false);
3810         if (ret) {
3811                 DRM_ERROR("ni_enable_power_containment failed\n");
3812                 return ret;
3813         }
3814         ret = ni_enable_smc_cac(rdev, new_ps, false);
3815         if (ret) {
3816                 DRM_ERROR("ni_enable_smc_cac failed\n");
3817                 return ret;
3818         }
3819         ret = rv770_halt_smc(rdev);
3820         if (ret) {
3821                 DRM_ERROR("rv770_halt_smc failed\n");
3822                 return ret;
3823         }
3824         if (eg_pi->smu_uvd_hs)
3825                 btc_notify_uvd_to_smc(rdev, new_ps);
3826         ret = ni_upload_sw_state(rdev, new_ps);
3827         if (ret) {
3828                 DRM_ERROR("ni_upload_sw_state failed\n");
3829                 return ret;
3830         }
3831         if (eg_pi->dynamic_ac_timing) {
3832                 ret = ni_upload_mc_reg_table(rdev, new_ps);
3833                 if (ret) {
3834                         DRM_ERROR("ni_upload_mc_reg_table failed\n");
3835                         return ret;
3836                 }
3837         }
3838         ret = ni_program_memory_timing_parameters(rdev, new_ps);
3839         if (ret) {
3840                 DRM_ERROR("ni_program_memory_timing_parameters failed\n");
3841                 return ret;
3842         }
3843         ret = rv770_resume_smc(rdev);
3844         if (ret) {
3845                 DRM_ERROR("rv770_resume_smc failed\n");
3846                 return ret;
3847         }
3848         ret = rv770_set_sw_state(rdev);
3849         if (ret) {
3850                 DRM_ERROR("rv770_set_sw_state failed\n");
3851                 return ret;
3852         }
3853         ni_set_uvd_clock_after_set_eng_clock(rdev, new_ps, old_ps);
3854         ret = ni_enable_smc_cac(rdev, new_ps, true);
3855         if (ret) {
3856                 DRM_ERROR("ni_enable_smc_cac failed\n");
3857                 return ret;
3858         }
3859         ret = ni_enable_power_containment(rdev, new_ps, true);
3860         if (ret) {
3861                 DRM_ERROR("ni_enable_power_containment failed\n");
3862                 return ret;
3863         }
3864
3865         /* update tdp */
3866         ret = ni_power_control_set_level(rdev);
3867         if (ret) {
3868                 DRM_ERROR("ni_power_control_set_level failed\n");
3869                 return ret;
3870         }
3871
3872         ret = ni_dpm_force_performance_level(rdev, RADEON_DPM_FORCED_LEVEL_AUTO);
3873         if (ret) {
3874                 DRM_ERROR("ni_dpm_force_performance_level failed\n");
3875                 return ret;
3876         }
3877
3878         return 0;
3879 }
3880
3881 void ni_dpm_post_set_power_state(struct radeon_device *rdev)
3882 {
3883         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3884         struct radeon_ps *new_ps = &eg_pi->requested_rps;
3885
3886         ni_update_current_ps(rdev, new_ps);
3887 }
3888
3889 void ni_dpm_reset_asic(struct radeon_device *rdev)
3890 {
3891         ni_restrict_performance_levels_before_switch(rdev);
3892         rv770_set_boot_state(rdev);
3893 }
3894
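/*
 * The ATOM PowerPlay tables come in several revisions; these unions overlay
 * the possible layouts so the parser can pick the right fields based on the
 * table revision without extra casting.
 */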
3895 union power_info {
3896         struct _ATOM_POWERPLAY_INFO info;
3897         struct _ATOM_POWERPLAY_INFO_V2 info_2;
3898         struct _ATOM_POWERPLAY_INFO_V3 info_3;
3899         struct _ATOM_PPLIB_POWERPLAYTABLE pplib;
3900         struct _ATOM_PPLIB_POWERPLAYTABLE2 pplib2;
3901         struct _ATOM_PPLIB_POWERPLAYTABLE3 pplib3;
3902 };
3903
3904 union pplib_clock_info {
3905         struct _ATOM_PPLIB_R600_CLOCK_INFO r600;
3906         struct _ATOM_PPLIB_RS780_CLOCK_INFO rs780;
3907         struct _ATOM_PPLIB_EVERGREEN_CLOCK_INFO evergreen;
3908         struct _ATOM_PPLIB_SUMO_CLOCK_INFO sumo;
3909 };
3910
3911 union pplib_power_state {
3912         struct _ATOM_PPLIB_STATE v1;
3913         struct _ATOM_PPLIB_STATE_V2 v2;
3914 };
3915
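/*
 * Non-clock info carries the state classification flags and, on newer table
 * revisions, explicit UVD VCLK/DCLK values; older tables fall back to the
 * RV770 defaults for UVD states.  Boot and UVD states are remembered in
 * rdev->pm.dpm for later use.
 */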
3916 static void ni_parse_pplib_non_clock_info(struct radeon_device *rdev,
3917                                           struct radeon_ps *rps,
3918                                           struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info,
3919                                           u8 table_rev)
3920 {
3921         rps->caps = le32_to_cpu(non_clock_info->ulCapsAndSettings);
3922         rps->class = le16_to_cpu(non_clock_info->usClassification);
3923         rps->class2 = le16_to_cpu(non_clock_info->usClassification2);
3924
3925         if (ATOM_PPLIB_NONCLOCKINFO_VER1 < table_rev) {
3926                 rps->vclk = le32_to_cpu(non_clock_info->ulVCLK);
3927                 rps->dclk = le32_to_cpu(non_clock_info->ulDCLK);
3928         } else if (r600_is_uvd_state(rps->class, rps->class2)) {
3929                 rps->vclk = RV770_DEFAULT_VCLK_FREQ;
3930                 rps->dclk = RV770_DEFAULT_DCLK_FREQ;
3931         } else {
3932                 rps->vclk = 0;
3933                 rps->dclk = 0;
3934         }
3935
3936         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT)
3937                 rdev->pm.dpm.boot_ps = rps;
3938         if (rps->class & ATOM_PPLIB_CLASSIFICATION_UVDSTATE)
3939                 rdev->pm.dpm.uvd_ps = rps;
3940 }
3941
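/*
 * Each performance level packs its engine/memory clock as a 16-bit low word
 * plus an 8-bit high byte.  Illustrative arithmetic with made-up values
 * (not from a real BIOS), clocks being in 10 kHz units:
 *
 *     usEngineClockLow = 0x86A0, ucEngineClockHigh = 0x01
 *     sclk = 0x86A0 | (0x01 << 16) = 0x186A0 = 100000  ->  1000 MHz
 *
 * The function also patches the 0xff01 "virtual" VDDC, records the ACPI and
 * ULV levels, tracks the min/max VDDC seen, and overrides the boot state
 * with the default clocks/voltages reported by the BIOS.
 */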
3942 static void ni_parse_pplib_clock_info(struct radeon_device *rdev,
3943                                       struct radeon_ps *rps, int index,
3944                                       union pplib_clock_info *clock_info)
3945 {
3946         struct rv7xx_power_info *pi = rv770_get_pi(rdev);
3947         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
3948         struct ni_ps *ps = ni_get_ps(rps);
3949         u16 vddc;
3950         struct rv7xx_pl *pl = &ps->performance_levels[index];
3951
3952         ps->performance_level_count = index + 1;
3953
3954         pl->sclk = le16_to_cpu(clock_info->evergreen.usEngineClockLow);
3955         pl->sclk |= clock_info->evergreen.ucEngineClockHigh << 16;
3956         pl->mclk = le16_to_cpu(clock_info->evergreen.usMemoryClockLow);
3957         pl->mclk |= clock_info->evergreen.ucMemoryClockHigh << 16;
3958
3959         pl->vddc = le16_to_cpu(clock_info->evergreen.usVDDC);
3960         pl->vddci = le16_to_cpu(clock_info->evergreen.usVDDCI);
3961         pl->flags = le32_to_cpu(clock_info->evergreen.ulFlags);
3962
3963         /* patch up vddc if necessary */
3964         if (pl->vddc == 0xff01) { /* 0xff01: look up max VDDC from ATOM */
3965                 if (radeon_atom_get_max_vddc(rdev, 0, 0, &vddc) == 0)
3966                         pl->vddc = vddc;
3967         }
3968
3969         if (rps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) {
3970                 pi->acpi_vddc = pl->vddc;
3971                 eg_pi->acpi_vddci = pl->vddci;
3972                 if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2)
3973                         pi->acpi_pcie_gen2 = true;
3974                 else
3975                         pi->acpi_pcie_gen2 = false;
3976         }
3977
3978         if (rps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) {
3979                 eg_pi->ulv.supported = true;
3980                 eg_pi->ulv.pl = pl;
3981         }
3982
3983         if (pi->min_vddc_in_table > pl->vddc)
3984                 pi->min_vddc_in_table = pl->vddc;
3985
3986         if (pi->max_vddc_in_table < pl->vddc)
3987                 pi->max_vddc_in_table = pl->vddc;
3988
3989         /* patch up boot state */
3990         if (rps->class & ATOM_PPLIB_CLASSIFICATION_BOOT) {
3991                 u16 vddc, vddci, mvdd;
3992                 radeon_atombios_get_default_voltages(rdev, &vddc, &vddci, &mvdd);
3993                 pl->mclk = rdev->clock.default_mclk;
3994                 pl->sclk = rdev->clock.default_sclk;
3995                 pl->vddc = vddc;
3996                 pl->vddci = vddci;
3997         }
3998
3999         if ((rps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK) ==
4000             ATOM_PPLIB_CLASSIFICATION_UI_PERFORMANCE) {
4001                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.sclk = pl->sclk;
4002                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.mclk = pl->mclk;
4003                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddc = pl->vddc;
4004                 rdev->pm.dpm.dyn_state.max_clock_voltage_on_ac.vddci = pl->vddci;
4005         }
4006 }
4007
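/*
 * Walk the PPLib power-play table.  States and their clock/non-clock info
 * live in separate arrays addressed as offset + index * entry size; a
 * minimal sketch of the indexing done below (names as in the loop):
 *
 *     state      = bios + data_offset + usStateArrayOffset
 *                  + i * ucStateEntrySize;
 *     clock_info = bios + data_offset + usClockInfoArrayOffset
 *                  + ucClockStateIndices[j] * ucClockInfoSize;
 *
 * Each state owns (ucStateEntrySize - 1) clock-info indices, one per
 * performance level.
 */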
4008 static int ni_parse_power_table(struct radeon_device *rdev)
4009 {
4010         struct radeon_mode_info *mode_info = &rdev->mode_info;
4011         struct _ATOM_PPLIB_NONCLOCK_INFO *non_clock_info;
4012         union pplib_power_state *power_state;
4013         int i, j;
4014         union pplib_clock_info *clock_info;
4015         union power_info *power_info;
4016         int index = GetIndexIntoMasterTable(DATA, PowerPlayInfo);
4017         u16 data_offset;
4018         u8 frev, crev;
4019         struct ni_ps *ps;
4020
4021         if (!atom_parse_data_header(mode_info->atom_context, index, NULL,
4022                                    &frev, &crev, &data_offset))
4023                 return -EINVAL;
4024         power_info = (union power_info *)(mode_info->atom_context->bios + data_offset);
4025
4026         rdev->pm.dpm.ps = kzalloc(sizeof(struct radeon_ps) *
4027                                   power_info->pplib.ucNumStates, GFP_KERNEL);
4028         if (!rdev->pm.dpm.ps)
4029                 return -ENOMEM;
4030         rdev->pm.dpm.platform_caps = le32_to_cpu(power_info->pplib.ulPlatformCaps);
4031         rdev->pm.dpm.backbias_response_time = le16_to_cpu(power_info->pplib.usBackbiasTime);
4032         rdev->pm.dpm.voltage_response_time = le16_to_cpu(power_info->pplib.usVoltageTime);
4033
4034         for (i = 0; i < power_info->pplib.ucNumStates; i++) {
4035                 power_state = (union pplib_power_state *)
4036                         (mode_info->atom_context->bios + data_offset +
4037                          le16_to_cpu(power_info->pplib.usStateArrayOffset) +
4038                          i * power_info->pplib.ucStateEntrySize);
4039                 non_clock_info = (struct _ATOM_PPLIB_NONCLOCK_INFO *)
4040                         (mode_info->atom_context->bios + data_offset +
4041                          le16_to_cpu(power_info->pplib.usNonClockInfoArrayOffset) +
4042                          (power_state->v1.ucNonClockStateIndex *
4043                           power_info->pplib.ucNonClockSize));
4044                 if (power_info->pplib.ucStateEntrySize - 1) { /* state has clock-info entries */
4045                         ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL);
4046                         if (ps == NULL) {
4047                                 kfree(rdev->pm.dpm.ps);
4048                                 return -ENOMEM;
4049                         }
4050                         rdev->pm.dpm.ps[i].ps_priv = ps;
4051                         ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i],
4052                                                          non_clock_info,
4053                                                          power_info->pplib.ucNonClockSize);
4054                         for (j = 0; j < (power_info->pplib.ucStateEntrySize - 1); j++) {
4055                                 clock_info = (union pplib_clock_info *)
4056                                         (mode_info->atom_context->bios + data_offset +
4057                                          le16_to_cpu(power_info->pplib.usClockInfoArrayOffset) +
4058                                          (power_state->v1.ucClockStateIndices[j] *
4059                                           power_info->pplib.ucClockInfoSize));
4060                                 ni_parse_pplib_clock_info(rdev,
4061                                                           &rdev->pm.dpm.ps[i], j,
4062                                                           clock_info);
4063                         }
4064                 }
4065         }
4066         rdev->pm.dpm.num_ps = power_info->pplib.ucNumStates;
4067         return 0;
4068 }
4069
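/*
 * One-time DPM setup for Cayman: allocate the ni/evergreen/rv7xx power-info
 * stack, parse the BIOS power tables, build the display-clock/VDDC
 * dependency table, choose defaults for activity thresholds, voltage
 * control, clock gating and spread spectrum, and pick the per-SKU CAC
 * weights and power-containment settings.
 */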
4070 int ni_dpm_init(struct radeon_device *rdev)
4071 {
4072         struct rv7xx_power_info *pi;
4073         struct evergreen_power_info *eg_pi;
4074         struct ni_power_info *ni_pi;
4075         int index = GetIndexIntoMasterTable(DATA, ASIC_InternalSS_Info);
4076         u16 data_offset, size;
4077         u8 frev, crev;
4078         struct atom_clock_dividers dividers;
4079         int ret;
4080
4081         ni_pi = kzalloc(sizeof(struct ni_power_info), GFP_KERNEL);
4082         if (ni_pi == NULL)
4083                 return -ENOMEM;
4084         rdev->pm.dpm.priv = ni_pi;
4085         eg_pi = &ni_pi->eg;
4086         pi = &eg_pi->rv7xx;
4087
4088         rv770_get_max_vddc(rdev);
4089
4090         eg_pi->ulv.supported = false;
4091         pi->acpi_vddc = 0;
4092         eg_pi->acpi_vddci = 0;
4093         pi->min_vddc_in_table = 0;
4094         pi->max_vddc_in_table = 0;
4095
4096         ret = ni_parse_power_table(rdev);
4097         if (ret)
4098                 return ret;
4099         ret = r600_parse_extended_power_table(rdev);
4100         if (ret)
4101                 return ret;
4102
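        /*
         * Hardcoded display-clock/VDDC dependency staircase, in the units
         * the rest of the dpm code uses (clocks in 10 kHz, voltages in mV):
         * 0 -> 0 mV, 360 MHz -> 720 mV, 540 MHz -> 810 mV, 720 MHz -> 900 mV.
         */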
4103         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries =
4104                 kzalloc(4 * sizeof(struct radeon_clock_voltage_dependency_entry), GFP_KERNEL);
4105         if (!rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries) {
4106                 r600_free_extended_power_table(rdev);
4107                 return -ENOMEM;
4108         }
4109         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.count = 4;
4110         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].clk = 0;
4111         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[0].v = 0;
4112         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].clk = 36000;
4113         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[1].v = 720;
4114         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].clk = 54000;
4115         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[2].v = 810;
4116         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].clk = 72000;
4117         rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries[3].v = 900;
4118
4119         ni_patch_dependency_tables_based_on_leakage(rdev);
4120
4121         if (rdev->pm.dpm.voltage_response_time == 0)
4122                 rdev->pm.dpm.voltage_response_time = R600_VOLTAGERESPONSETIME_DFLT;
4123         if (rdev->pm.dpm.backbias_response_time == 0)
4124                 rdev->pm.dpm.backbias_response_time = R600_BACKBIASRESPONSETIME_DFLT;
4125
4126         ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
4127                                              0, false, &dividers);
4128         if (ret)
4129                 pi->ref_div = dividers.ref_div + 1;
4130         else
4131                 pi->ref_div = R600_REFERENCEDIVIDER_DFLT;
4132
4133         pi->rlp = RV770_RLP_DFLT;
4134         pi->rmp = RV770_RMP_DFLT;
4135         pi->lhp = RV770_LHP_DFLT;
4136         pi->lmp = RV770_LMP_DFLT;
4137
4138         eg_pi->ats[0].rlp = RV770_RLP_DFLT;
4139         eg_pi->ats[0].rmp = RV770_RMP_DFLT;
4140         eg_pi->ats[0].lhp = RV770_LHP_DFLT;
4141         eg_pi->ats[0].lmp = RV770_LMP_DFLT;
4142
4143         eg_pi->ats[1].rlp = BTC_RLP_UVD_DFLT;
4144         eg_pi->ats[1].rmp = BTC_RMP_UVD_DFLT;
4145         eg_pi->ats[1].lhp = BTC_LHP_UVD_DFLT;
4146         eg_pi->ats[1].lmp = BTC_LMP_UVD_DFLT;
4147
4148         eg_pi->smu_uvd_hs = true;
4149
4150         if (rdev->pdev->device == 0x6707) {
4151                 pi->mclk_strobe_mode_threshold = 55000;
4152                 pi->mclk_edc_enable_threshold = 55000;
4153                 eg_pi->mclk_edc_wr_enable_threshold = 55000;
4154         } else {
4155                 pi->mclk_strobe_mode_threshold = 40000;
4156                 pi->mclk_edc_enable_threshold = 40000;
4157                 eg_pi->mclk_edc_wr_enable_threshold = 40000;
4158         }
4159         ni_pi->mclk_rtt_mode_threshold = eg_pi->mclk_edc_wr_enable_threshold;
4160
4161         pi->voltage_control =
4162                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDC, 0);
4163
4164         pi->mvdd_control =
4165                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_MVDDC, 0);
4166
4167         eg_pi->vddci_control =
4168                 radeon_atom_is_voltage_gpio(rdev, SET_VOLTAGE_TYPE_ASIC_VDDCI, 0);
4169
4170         if (atom_parse_data_header(rdev->mode_info.atom_context, index, &size,
4171                                    &frev, &crev, &data_offset)) {
4172                 pi->sclk_ss = true;
4173                 pi->mclk_ss = true;
4174                 pi->dynamic_ss = true;
4175         } else {
4176                 pi->sclk_ss = false;
4177                 pi->mclk_ss = false;
4178                 pi->dynamic_ss = true;
4179         }
4180
4181         pi->asi = RV770_ASI_DFLT;
4182         pi->pasi = CYPRESS_HASI_DFLT;
4183         pi->vrc = CYPRESS_VRC_DFLT;
4184
4185         pi->power_gating = false;
4186
4187         pi->gfx_clock_gating = true;
4188
4189         pi->mg_clock_gating = true;
4190         pi->mgcgtssm = true;
4191         eg_pi->ls_clock_gating = false;
4192         eg_pi->sclk_deep_sleep = false;
4193
4194         pi->dynamic_pcie_gen2 = true;
4195
4196         if (pi->gfx_clock_gating &&
4197             (rdev->pm.int_thermal_type != THERMAL_TYPE_NONE))
4198                 pi->thermal_protection = true;
4199         else
4200                 pi->thermal_protection = false;
4201
4202         pi->display_gap = true;
4203
4204         pi->dcodt = true;
4205
4206         pi->ulps = true;
4207
4208         eg_pi->dynamic_ac_timing = true;
4209         eg_pi->abm = true;
4210         eg_pi->mcls = true;
4211         eg_pi->light_sleep = true;
4212         eg_pi->memory_transition = true;
4213 #if defined(CONFIG_ACPI)
4214         eg_pi->pcie_performance_request =
4215                 radeon_acpi_is_pcie_performance_request_supported(rdev);
4216 #else
4217         eg_pi->pcie_performance_request = false;
4218 #endif
4219
4220         eg_pi->dll_default_on = false;
4221
4222         eg_pi->sclk_deep_sleep = false;
4223
4224         pi->mclk_stutter_mode_threshold = 0;
4225
4226         pi->sram_end = SMC_RAM_END;
4227
4228         rdev->pm.dpm.dyn_state.mclk_sclk_ratio = 3;
4229         rdev->pm.dpm.dyn_state.vddc_vddci_delta = 200;
4230         rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2 = 900;
4231         rdev->pm.dpm.dyn_state.valid_sclk_values.count = ARRAY_SIZE(btc_valid_sclk);
4232         rdev->pm.dpm.dyn_state.valid_sclk_values.values = btc_valid_sclk;
4233         rdev->pm.dpm.dyn_state.valid_mclk_values.count = 0;
4234         rdev->pm.dpm.dyn_state.valid_mclk_values.values = NULL;
4235         rdev->pm.dpm.dyn_state.sclk_mclk_delta = 12500;
4236
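        /*
         * Coefficients for the on-die leakage estimate, scaled by 1000
         * (0.516, 0.018, 0.051, 2.957).  They feed an exponential model,
         * roughly leakage ~ I_leak * at*e^(bt*T) * av*e^(bv*V) * V, used by
         * the CAC/leakage helpers earlier in this file.
         */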
4237         ni_pi->cac_data.leakage_coefficients.at = 516;
4238         ni_pi->cac_data.leakage_coefficients.bt = 18;
4239         ni_pi->cac_data.leakage_coefficients.av = 51;
4240         ni_pi->cac_data.leakage_coefficients.bv = 2957;
4241
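        /*
         * CAC weight tables are per SKU: the PCI device ID distinguishes
         * Cayman XT, Pro and LE boards, which weight block-level activity
         * differently when the SMC computes dynamic power.
         */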
4242         switch (rdev->pdev->device) {
4243         case 0x6700:
4244         case 0x6701:
4245         case 0x6702:
4246         case 0x6703:
4247         case 0x6718:
4248                 ni_pi->cac_weights = &cac_weights_cayman_xt;
4249                 break;
4250         case 0x6705:
4251         case 0x6719:
4252         case 0x671D:
4253         case 0x671C:
4254         default:
4255                 ni_pi->cac_weights = &cac_weights_cayman_pro;
4256                 break;
4257         case 0x6704:
4258         case 0x6706:
4259         case 0x6707:
4260         case 0x6708:
4261         case 0x6709:
4262                 ni_pi->cac_weights = &cac_weights_cayman_le;
4263                 break;
4264         }
4265
4266         if (ni_pi->cac_weights->enable_power_containment_by_default) {
4267                 ni_pi->enable_power_containment = true;
4268                 ni_pi->enable_cac = true;
4269                 ni_pi->enable_sq_ramping = true;
4270         } else {
4271                 ni_pi->enable_power_containment = false;
4272                 ni_pi->enable_cac = false;
4273                 ni_pi->enable_sq_ramping = false;
4274         }
4275
4276         ni_pi->driver_calculate_cac_leakage = false;
4277         ni_pi->cac_configuration_required = true;
4278
4279         if (ni_pi->cac_configuration_required) {
4280                 ni_pi->support_cac_long_term_average = true;
4281                 ni_pi->lta_window_size = ni_pi->cac_weights->l2_lta_window_size;
4282                 ni_pi->lts_truncate = ni_pi->cac_weights->lts_truncate;
4283         } else {
4284                 ni_pi->support_cac_long_term_average = false;
4285                 ni_pi->lta_window_size = 0;
4286                 ni_pi->lts_truncate = 0;
4287         }
4288
4289         ni_pi->use_power_boost_limit = true;
4290
4291         return 0;
4292 }
4293
4294 void ni_dpm_fini(struct radeon_device *rdev)
4295 {
4296         int i;
4297
4298         for (i = 0; i < rdev->pm.dpm.num_ps; i++) {
4299                 kfree(rdev->pm.dpm.ps[i].ps_priv);
4300         }
4301         kfree(rdev->pm.dpm.ps);
4302         kfree(rdev->pm.dpm.priv);
4303         kfree(rdev->pm.dpm.dyn_state.vddc_dependency_on_dispclk.entries);
4304         r600_free_extended_power_table(rdev);
4305 }
4306
4307 void ni_dpm_print_power_state(struct radeon_device *rdev,
4308                               struct radeon_ps *rps)
4309 {
4310         struct ni_ps *ps = ni_get_ps(rps);
4311         struct rv7xx_pl *pl;
4312         int i;
4313
4314         r600_dpm_print_class_info(rps->class, rps->class2);
4315         r600_dpm_print_cap_info(rps->caps);
4316         printk("\tuvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4317         for (i = 0; i < ps->performance_level_count; i++) {
4318                 pl = &ps->performance_levels[i];
4319                 if (rdev->family >= CHIP_TAHITI)
4320                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u pcie gen: %u\n",
4321                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci, pl->pcie_gen + 1);
4322                 else
4323                         printk("\t\tpower level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4324                                i, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4325         }
4326         r600_dpm_print_ps_status(rdev, rps);
4327 }
4328
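/*
 * debugfs: report the performance level the hardware is actually running.
 * The index is read back from TARGET_AND_CURRENT_PROFILE_INDEX rather than
 * taken from software state, so it reflects the SMC's current selection.
 */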
4329 void ni_dpm_debugfs_print_current_performance_level(struct radeon_device *rdev,
4330                                                     struct seq_file *m)
4331 {
4332         struct radeon_ps *rps = rdev->pm.dpm.current_ps;
4333         struct ni_ps *ps = ni_get_ps(rps);
4334         struct rv7xx_pl *pl;
4335         u32 current_index =
4336                 (RREG32(TARGET_AND_CURRENT_PROFILE_INDEX) & CURRENT_STATE_INDEX_MASK) >>
4337                 CURRENT_STATE_INDEX_SHIFT;
4338
4339         if (current_index >= ps->performance_level_count) {
4340                 seq_printf(m, "invalid dpm profile %d\n", current_index);
4341         } else {
4342                 pl = &ps->performance_levels[current_index];
4343                 seq_printf(m, "uvd    vclk: %d dclk: %d\n", rps->vclk, rps->dclk);
4344                 seq_printf(m, "power level %d    sclk: %u mclk: %u vddc: %u vddci: %u\n",
4345                            current_index, pl->sclk, pl->mclk, pl->vddc, pl->vddci);
4346         }
4347 }
4348
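/*
 * Clock queries used by the core dpm code: "low" returns the first (lowest)
 * performance level of the requested state, otherwise the last (highest)
 * one.  ni_dpm_get_mclk() below follows the same pattern for the memory
 * clock.
 */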
4349 u32 ni_dpm_get_sclk(struct radeon_device *rdev, bool low)
4350 {
4351         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4352         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4353
4354         if (low)
4355                 return requested_state->performance_levels[0].sclk;
4356         else
4357                 return requested_state->performance_levels[requested_state->performance_level_count - 1].sclk;
4358 }
4359
4360 u32 ni_dpm_get_mclk(struct radeon_device *rdev, bool low)
4361 {
4362         struct evergreen_power_info *eg_pi = evergreen_get_pi(rdev);
4363         struct ni_ps *requested_state = ni_get_ps(&eg_pi->requested_rps);
4364
4365         if (low)
4366                 return requested_state->performance_levels[0].mclk;
4367         else
4368                 return requested_state->performance_levels[requested_state->performance_level_count - 1].mclk;
4369 }
4370