drivers/gpu/drm/radeon/evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
38 static const u32 crtc_offsets[6] =
39 {
40         EVERGREEN_CRTC0_REGISTER_OFFSET,
41         EVERGREEN_CRTC1_REGISTER_OFFSET,
42         EVERGREEN_CRTC2_REGISTER_OFFSET,
43         EVERGREEN_CRTC3_REGISTER_OFFSET,
44         EVERGREEN_CRTC4_REGISTER_OFFSET,
45         EVERGREEN_CRTC5_REGISTER_OFFSET
46 };
47
48 #include "clearstate_evergreen.h"
49
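/*
 * Register offsets whose contents the RLC saves and restores around
 * graphics power gating; presumably consumed further down in this file
 * (beyond this excerpt) when the RLC register list is set up.
 */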
50 static const u32 sumo_rlc_save_restore_register_list[] =
51 {
52         0x98fc,
53         0x9830,
54         0x9834,
55         0x9838,
56         0x9870,
57         0x9874,
58         0x8a14,
59         0x8b24,
60         0x8bcc,
61         0x8b10,
62         0x8d00,
63         0x8d04,
64         0x8c00,
65         0x8c04,
66         0x8c08,
67         0x8c0c,
68         0x8d8c,
69         0x8c20,
70         0x8c24,
71         0x8c28,
72         0x8c18,
73         0x8c1c,
74         0x8cf0,
75         0x8e2c,
76         0x8e38,
77         0x8c30,
78         0x9508,
79         0x9688,
80         0x9608,
81         0x960c,
82         0x9610,
83         0x9614,
84         0x88c4,
85         0x88d4,
86         0xa008,
87         0x900c,
88         0x9100,
89         0x913c,
90         0x98f8,
91         0x98f4,
92         0x9b7c,
93         0x3f8c,
94         0x8950,
95         0x8954,
96         0x8a18,
97         0x8b28,
98         0x9144,
99         0x9148,
100         0x914c,
101         0x3f90,
102         0x3f94,
103         0x915c,
104         0x9160,
105         0x9178,
106         0x917c,
107         0x9180,
108         0x918c,
109         0x9190,
110         0x9194,
111         0x9198,
112         0x919c,
113         0x91a8,
114         0x91ac,
115         0x91b0,
116         0x91b4,
117         0x91b8,
118         0x91c4,
119         0x91c8,
120         0x91cc,
121         0x91d0,
122         0x91d4,
123         0x91e0,
124         0x91e4,
125         0x91ec,
126         0x91f0,
127         0x91f4,
128         0x9200,
129         0x9204,
130         0x929c,
131         0x9150,
132         0x802c,
133 };
134
135 static void evergreen_gpu_init(struct radeon_device *rdev);
136 void evergreen_fini(struct radeon_device *rdev);
137 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
138 void evergreen_program_aspm(struct radeon_device *rdev);
139 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140                                      int ring, u32 cp_int_cntl);
141 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142                                    u32 status, u32 addr);
143 void cik_init_cp_pg_table(struct radeon_device *rdev);
144
145 extern u32 si_get_csb_size(struct radeon_device *rdev);
146 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
147 extern u32 cik_get_csb_size(struct radeon_device *rdev);
148 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
149
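/*
 * The "golden register" and MGCG init tables below are flat arrays of
 * {register offset, AND mask, OR value} triplets.  As a rough sketch of
 * how radeon_program_register_sequence() applies them (see
 * radeon_device.c for the authoritative implementation):
 *
 *     for (i = 0; i < array_size; i += 3) {
 *             reg      = table[i + 0];
 *             and_mask = table[i + 1];
 *             or_mask  = table[i + 2];
 *             tmp = (and_mask == 0xffffffff) ? or_mask :
 *                   (RREG32(reg) & ~and_mask) | or_mask;
 *             WREG32(reg, tmp);
 *     }
 */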
150 static const u32 evergreen_golden_registers[] =
151 {
152         0x3f90, 0xffff0000, 0xff000000,
153         0x9148, 0xffff0000, 0xff000000,
154         0x3f94, 0xffff0000, 0xff000000,
155         0x914c, 0xffff0000, 0xff000000,
156         0x9b7c, 0xffffffff, 0x00000000,
157         0x8a14, 0xffffffff, 0x00000007,
158         0x8b10, 0xffffffff, 0x00000000,
159         0x960c, 0xffffffff, 0x54763210,
160         0x88c4, 0xffffffff, 0x000000c2,
161         0x88d4, 0xffffffff, 0x00000010,
162         0x8974, 0xffffffff, 0x00000000,
163         0xc78, 0x00000080, 0x00000080,
164         0x5eb4, 0xffffffff, 0x00000002,
165         0x5e78, 0xffffffff, 0x001000f0,
166         0x6104, 0x01000300, 0x00000000,
167         0x5bc0, 0x00300000, 0x00000000,
168         0x7030, 0xffffffff, 0x00000011,
169         0x7c30, 0xffffffff, 0x00000011,
170         0x10830, 0xffffffff, 0x00000011,
171         0x11430, 0xffffffff, 0x00000011,
172         0x12030, 0xffffffff, 0x00000011,
173         0x12c30, 0xffffffff, 0x00000011,
174         0xd02c, 0xffffffff, 0x08421000,
175         0x240c, 0xffffffff, 0x00000380,
176         0x8b24, 0xffffffff, 0x00ff0fff,
177         0x28a4c, 0x06000000, 0x06000000,
178         0x10c, 0x00000001, 0x00000001,
179         0x8d00, 0xffffffff, 0x100e4848,
180         0x8d04, 0xffffffff, 0x00164745,
181         0x8c00, 0xffffffff, 0xe4000003,
182         0x8c04, 0xffffffff, 0x40600060,
183         0x8c08, 0xffffffff, 0x001c001c,
184         0x8cf0, 0xffffffff, 0x08e00620,
185         0x8c20, 0xffffffff, 0x00800080,
186         0x8c24, 0xffffffff, 0x00800080,
187         0x8c18, 0xffffffff, 0x20202078,
188         0x8c1c, 0xffffffff, 0x00001010,
189         0x28350, 0xffffffff, 0x00000000,
190         0xa008, 0xffffffff, 0x00010000,
191         0x5cc, 0xffffffff, 0x00000001,
192         0x9508, 0xffffffff, 0x00000002,
193         0x913c, 0x0000000f, 0x0000000a
194 };
195
196 static const u32 evergreen_golden_registers2[] =
197 {
198         0x2f4c, 0xffffffff, 0x00000000,
199         0x54f4, 0xffffffff, 0x00000000,
200         0x54f0, 0xffffffff, 0x00000000,
201         0x5498, 0xffffffff, 0x00000000,
202         0x549c, 0xffffffff, 0x00000000,
203         0x5494, 0xffffffff, 0x00000000,
204         0x53cc, 0xffffffff, 0x00000000,
205         0x53c8, 0xffffffff, 0x00000000,
206         0x53c4, 0xffffffff, 0x00000000,
207         0x53c0, 0xffffffff, 0x00000000,
208         0x53bc, 0xffffffff, 0x00000000,
209         0x53b8, 0xffffffff, 0x00000000,
210         0x53b4, 0xffffffff, 0x00000000,
211         0x53b0, 0xffffffff, 0x00000000
212 };
213
214 static const u32 cypress_mgcg_init[] =
215 {
216         0x802c, 0xffffffff, 0xc0000000,
217         0x5448, 0xffffffff, 0x00000100,
218         0x55e4, 0xffffffff, 0x00000100,
219         0x160c, 0xffffffff, 0x00000100,
220         0x5644, 0xffffffff, 0x00000100,
221         0xc164, 0xffffffff, 0x00000100,
222         0x8a18, 0xffffffff, 0x00000100,
223         0x897c, 0xffffffff, 0x06000100,
224         0x8b28, 0xffffffff, 0x00000100,
225         0x9144, 0xffffffff, 0x00000100,
226         0x9a60, 0xffffffff, 0x00000100,
227         0x9868, 0xffffffff, 0x00000100,
228         0x8d58, 0xffffffff, 0x00000100,
229         0x9510, 0xffffffff, 0x00000100,
230         0x949c, 0xffffffff, 0x00000100,
231         0x9654, 0xffffffff, 0x00000100,
232         0x9030, 0xffffffff, 0x00000100,
233         0x9034, 0xffffffff, 0x00000100,
234         0x9038, 0xffffffff, 0x00000100,
235         0x903c, 0xffffffff, 0x00000100,
236         0x9040, 0xffffffff, 0x00000100,
237         0xa200, 0xffffffff, 0x00000100,
238         0xa204, 0xffffffff, 0x00000100,
239         0xa208, 0xffffffff, 0x00000100,
240         0xa20c, 0xffffffff, 0x00000100,
241         0x971c, 0xffffffff, 0x00000100,
242         0x977c, 0xffffffff, 0x00000100,
243         0x3f80, 0xffffffff, 0x00000100,
244         0xa210, 0xffffffff, 0x00000100,
245         0xa214, 0xffffffff, 0x00000100,
246         0x4d8, 0xffffffff, 0x00000100,
247         0x9784, 0xffffffff, 0x00000100,
248         0x9698, 0xffffffff, 0x00000100,
249         0x4d4, 0xffffffff, 0x00000200,
250         0x30cc, 0xffffffff, 0x00000100,
251         0xd0c0, 0xffffffff, 0xff000100,
252         0x802c, 0xffffffff, 0x40000000,
253         0x915c, 0xffffffff, 0x00010000,
254         0x9160, 0xffffffff, 0x00030002,
255         0x9178, 0xffffffff, 0x00070000,
256         0x917c, 0xffffffff, 0x00030002,
257         0x9180, 0xffffffff, 0x00050004,
258         0x918c, 0xffffffff, 0x00010006,
259         0x9190, 0xffffffff, 0x00090008,
260         0x9194, 0xffffffff, 0x00070000,
261         0x9198, 0xffffffff, 0x00030002,
262         0x919c, 0xffffffff, 0x00050004,
263         0x91a8, 0xffffffff, 0x00010006,
264         0x91ac, 0xffffffff, 0x00090008,
265         0x91b0, 0xffffffff, 0x00070000,
266         0x91b4, 0xffffffff, 0x00030002,
267         0x91b8, 0xffffffff, 0x00050004,
268         0x91c4, 0xffffffff, 0x00010006,
269         0x91c8, 0xffffffff, 0x00090008,
270         0x91cc, 0xffffffff, 0x00070000,
271         0x91d0, 0xffffffff, 0x00030002,
272         0x91d4, 0xffffffff, 0x00050004,
273         0x91e0, 0xffffffff, 0x00010006,
274         0x91e4, 0xffffffff, 0x00090008,
275         0x91e8, 0xffffffff, 0x00000000,
276         0x91ec, 0xffffffff, 0x00070000,
277         0x91f0, 0xffffffff, 0x00030002,
278         0x91f4, 0xffffffff, 0x00050004,
279         0x9200, 0xffffffff, 0x00010006,
280         0x9204, 0xffffffff, 0x00090008,
281         0x9208, 0xffffffff, 0x00070000,
282         0x920c, 0xffffffff, 0x00030002,
283         0x9210, 0xffffffff, 0x00050004,
284         0x921c, 0xffffffff, 0x00010006,
285         0x9220, 0xffffffff, 0x00090008,
286         0x9224, 0xffffffff, 0x00070000,
287         0x9228, 0xffffffff, 0x00030002,
288         0x922c, 0xffffffff, 0x00050004,
289         0x9238, 0xffffffff, 0x00010006,
290         0x923c, 0xffffffff, 0x00090008,
291         0x9240, 0xffffffff, 0x00070000,
292         0x9244, 0xffffffff, 0x00030002,
293         0x9248, 0xffffffff, 0x00050004,
294         0x9254, 0xffffffff, 0x00010006,
295         0x9258, 0xffffffff, 0x00090008,
296         0x925c, 0xffffffff, 0x00070000,
297         0x9260, 0xffffffff, 0x00030002,
298         0x9264, 0xffffffff, 0x00050004,
299         0x9270, 0xffffffff, 0x00010006,
300         0x9274, 0xffffffff, 0x00090008,
301         0x9278, 0xffffffff, 0x00070000,
302         0x927c, 0xffffffff, 0x00030002,
303         0x9280, 0xffffffff, 0x00050004,
304         0x928c, 0xffffffff, 0x00010006,
305         0x9290, 0xffffffff, 0x00090008,
306         0x9294, 0xffffffff, 0x00000000,
307         0x929c, 0xffffffff, 0x00000001,
308         0x802c, 0xffffffff, 0x40010000,
309         0x915c, 0xffffffff, 0x00010000,
310         0x9160, 0xffffffff, 0x00030002,
311         0x9178, 0xffffffff, 0x00070000,
312         0x917c, 0xffffffff, 0x00030002,
313         0x9180, 0xffffffff, 0x00050004,
314         0x918c, 0xffffffff, 0x00010006,
315         0x9190, 0xffffffff, 0x00090008,
316         0x9194, 0xffffffff, 0x00070000,
317         0x9198, 0xffffffff, 0x00030002,
318         0x919c, 0xffffffff, 0x00050004,
319         0x91a8, 0xffffffff, 0x00010006,
320         0x91ac, 0xffffffff, 0x00090008,
321         0x91b0, 0xffffffff, 0x00070000,
322         0x91b4, 0xffffffff, 0x00030002,
323         0x91b8, 0xffffffff, 0x00050004,
324         0x91c4, 0xffffffff, 0x00010006,
325         0x91c8, 0xffffffff, 0x00090008,
326         0x91cc, 0xffffffff, 0x00070000,
327         0x91d0, 0xffffffff, 0x00030002,
328         0x91d4, 0xffffffff, 0x00050004,
329         0x91e0, 0xffffffff, 0x00010006,
330         0x91e4, 0xffffffff, 0x00090008,
331         0x91e8, 0xffffffff, 0x00000000,
332         0x91ec, 0xffffffff, 0x00070000,
333         0x91f0, 0xffffffff, 0x00030002,
334         0x91f4, 0xffffffff, 0x00050004,
335         0x9200, 0xffffffff, 0x00010006,
336         0x9204, 0xffffffff, 0x00090008,
337         0x9208, 0xffffffff, 0x00070000,
338         0x920c, 0xffffffff, 0x00030002,
339         0x9210, 0xffffffff, 0x00050004,
340         0x921c, 0xffffffff, 0x00010006,
341         0x9220, 0xffffffff, 0x00090008,
342         0x9224, 0xffffffff, 0x00070000,
343         0x9228, 0xffffffff, 0x00030002,
344         0x922c, 0xffffffff, 0x00050004,
345         0x9238, 0xffffffff, 0x00010006,
346         0x923c, 0xffffffff, 0x00090008,
347         0x9240, 0xffffffff, 0x00070000,
348         0x9244, 0xffffffff, 0x00030002,
349         0x9248, 0xffffffff, 0x00050004,
350         0x9254, 0xffffffff, 0x00010006,
351         0x9258, 0xffffffff, 0x00090008,
352         0x925c, 0xffffffff, 0x00070000,
353         0x9260, 0xffffffff, 0x00030002,
354         0x9264, 0xffffffff, 0x00050004,
355         0x9270, 0xffffffff, 0x00010006,
356         0x9274, 0xffffffff, 0x00090008,
357         0x9278, 0xffffffff, 0x00070000,
358         0x927c, 0xffffffff, 0x00030002,
359         0x9280, 0xffffffff, 0x00050004,
360         0x928c, 0xffffffff, 0x00010006,
361         0x9290, 0xffffffff, 0x00090008,
362         0x9294, 0xffffffff, 0x00000000,
363         0x929c, 0xffffffff, 0x00000001,
364         0x802c, 0xffffffff, 0xc0000000
365 };
366
367 static const u32 redwood_mgcg_init[] =
368 {
369         0x802c, 0xffffffff, 0xc0000000,
370         0x5448, 0xffffffff, 0x00000100,
371         0x55e4, 0xffffffff, 0x00000100,
372         0x160c, 0xffffffff, 0x00000100,
373         0x5644, 0xffffffff, 0x00000100,
374         0xc164, 0xffffffff, 0x00000100,
375         0x8a18, 0xffffffff, 0x00000100,
376         0x897c, 0xffffffff, 0x06000100,
377         0x8b28, 0xffffffff, 0x00000100,
378         0x9144, 0xffffffff, 0x00000100,
379         0x9a60, 0xffffffff, 0x00000100,
380         0x9868, 0xffffffff, 0x00000100,
381         0x8d58, 0xffffffff, 0x00000100,
382         0x9510, 0xffffffff, 0x00000100,
383         0x949c, 0xffffffff, 0x00000100,
384         0x9654, 0xffffffff, 0x00000100,
385         0x9030, 0xffffffff, 0x00000100,
386         0x9034, 0xffffffff, 0x00000100,
387         0x9038, 0xffffffff, 0x00000100,
388         0x903c, 0xffffffff, 0x00000100,
389         0x9040, 0xffffffff, 0x00000100,
390         0xa200, 0xffffffff, 0x00000100,
391         0xa204, 0xffffffff, 0x00000100,
392         0xa208, 0xffffffff, 0x00000100,
393         0xa20c, 0xffffffff, 0x00000100,
394         0x971c, 0xffffffff, 0x00000100,
395         0x977c, 0xffffffff, 0x00000100,
396         0x3f80, 0xffffffff, 0x00000100,
397         0xa210, 0xffffffff, 0x00000100,
398         0xa214, 0xffffffff, 0x00000100,
399         0x4d8, 0xffffffff, 0x00000100,
400         0x9784, 0xffffffff, 0x00000100,
401         0x9698, 0xffffffff, 0x00000100,
402         0x4d4, 0xffffffff, 0x00000200,
403         0x30cc, 0xffffffff, 0x00000100,
404         0xd0c0, 0xffffffff, 0xff000100,
405         0x802c, 0xffffffff, 0x40000000,
406         0x915c, 0xffffffff, 0x00010000,
407         0x9160, 0xffffffff, 0x00030002,
408         0x9178, 0xffffffff, 0x00070000,
409         0x917c, 0xffffffff, 0x00030002,
410         0x9180, 0xffffffff, 0x00050004,
411         0x918c, 0xffffffff, 0x00010006,
412         0x9190, 0xffffffff, 0x00090008,
413         0x9194, 0xffffffff, 0x00070000,
414         0x9198, 0xffffffff, 0x00030002,
415         0x919c, 0xffffffff, 0x00050004,
416         0x91a8, 0xffffffff, 0x00010006,
417         0x91ac, 0xffffffff, 0x00090008,
418         0x91b0, 0xffffffff, 0x00070000,
419         0x91b4, 0xffffffff, 0x00030002,
420         0x91b8, 0xffffffff, 0x00050004,
421         0x91c4, 0xffffffff, 0x00010006,
422         0x91c8, 0xffffffff, 0x00090008,
423         0x91cc, 0xffffffff, 0x00070000,
424         0x91d0, 0xffffffff, 0x00030002,
425         0x91d4, 0xffffffff, 0x00050004,
426         0x91e0, 0xffffffff, 0x00010006,
427         0x91e4, 0xffffffff, 0x00090008,
428         0x91e8, 0xffffffff, 0x00000000,
429         0x91ec, 0xffffffff, 0x00070000,
430         0x91f0, 0xffffffff, 0x00030002,
431         0x91f4, 0xffffffff, 0x00050004,
432         0x9200, 0xffffffff, 0x00010006,
433         0x9204, 0xffffffff, 0x00090008,
434         0x9294, 0xffffffff, 0x00000000,
435         0x929c, 0xffffffff, 0x00000001,
436         0x802c, 0xffffffff, 0xc0000000
437 };
438
439 static const u32 cedar_golden_registers[] =
440 {
441         0x3f90, 0xffff0000, 0xff000000,
442         0x9148, 0xffff0000, 0xff000000,
443         0x3f94, 0xffff0000, 0xff000000,
444         0x914c, 0xffff0000, 0xff000000,
445         0x9b7c, 0xffffffff, 0x00000000,
446         0x8a14, 0xffffffff, 0x00000007,
447         0x8b10, 0xffffffff, 0x00000000,
448         0x960c, 0xffffffff, 0x54763210,
449         0x88c4, 0xffffffff, 0x000000c2,
450         0x88d4, 0xffffffff, 0x00000000,
451         0x8974, 0xffffffff, 0x00000000,
452         0xc78, 0x00000080, 0x00000080,
453         0x5eb4, 0xffffffff, 0x00000002,
454         0x5e78, 0xffffffff, 0x001000f0,
455         0x6104, 0x01000300, 0x00000000,
456         0x5bc0, 0x00300000, 0x00000000,
457         0x7030, 0xffffffff, 0x00000011,
458         0x7c30, 0xffffffff, 0x00000011,
459         0x10830, 0xffffffff, 0x00000011,
460         0x11430, 0xffffffff, 0x00000011,
461         0xd02c, 0xffffffff, 0x08421000,
462         0x240c, 0xffffffff, 0x00000380,
463         0x8b24, 0xffffffff, 0x00ff0fff,
464         0x28a4c, 0x06000000, 0x06000000,
465         0x10c, 0x00000001, 0x00000001,
466         0x8d00, 0xffffffff, 0x100e4848,
467         0x8d04, 0xffffffff, 0x00164745,
468         0x8c00, 0xffffffff, 0xe4000003,
469         0x8c04, 0xffffffff, 0x40600060,
470         0x8c08, 0xffffffff, 0x001c001c,
471         0x8cf0, 0xffffffff, 0x08e00410,
472         0x8c20, 0xffffffff, 0x00800080,
473         0x8c24, 0xffffffff, 0x00800080,
474         0x8c18, 0xffffffff, 0x20202078,
475         0x8c1c, 0xffffffff, 0x00001010,
476         0x28350, 0xffffffff, 0x00000000,
477         0xa008, 0xffffffff, 0x00010000,
478         0x5cc, 0xffffffff, 0x00000001,
479         0x9508, 0xffffffff, 0x00000002
480 };
481
482 static const u32 cedar_mgcg_init[] =
483 {
484         0x802c, 0xffffffff, 0xc0000000,
485         0x5448, 0xffffffff, 0x00000100,
486         0x55e4, 0xffffffff, 0x00000100,
487         0x160c, 0xffffffff, 0x00000100,
488         0x5644, 0xffffffff, 0x00000100,
489         0xc164, 0xffffffff, 0x00000100,
490         0x8a18, 0xffffffff, 0x00000100,
491         0x897c, 0xffffffff, 0x06000100,
492         0x8b28, 0xffffffff, 0x00000100,
493         0x9144, 0xffffffff, 0x00000100,
494         0x9a60, 0xffffffff, 0x00000100,
495         0x9868, 0xffffffff, 0x00000100,
496         0x8d58, 0xffffffff, 0x00000100,
497         0x9510, 0xffffffff, 0x00000100,
498         0x949c, 0xffffffff, 0x00000100,
499         0x9654, 0xffffffff, 0x00000100,
500         0x9030, 0xffffffff, 0x00000100,
501         0x9034, 0xffffffff, 0x00000100,
502         0x9038, 0xffffffff, 0x00000100,
503         0x903c, 0xffffffff, 0x00000100,
504         0x9040, 0xffffffff, 0x00000100,
505         0xa200, 0xffffffff, 0x00000100,
506         0xa204, 0xffffffff, 0x00000100,
507         0xa208, 0xffffffff, 0x00000100,
508         0xa20c, 0xffffffff, 0x00000100,
509         0x971c, 0xffffffff, 0x00000100,
510         0x977c, 0xffffffff, 0x00000100,
511         0x3f80, 0xffffffff, 0x00000100,
512         0xa210, 0xffffffff, 0x00000100,
513         0xa214, 0xffffffff, 0x00000100,
514         0x4d8, 0xffffffff, 0x00000100,
515         0x9784, 0xffffffff, 0x00000100,
516         0x9698, 0xffffffff, 0x00000100,
517         0x4d4, 0xffffffff, 0x00000200,
518         0x30cc, 0xffffffff, 0x00000100,
519         0xd0c0, 0xffffffff, 0xff000100,
520         0x802c, 0xffffffff, 0x40000000,
521         0x915c, 0xffffffff, 0x00010000,
522         0x9178, 0xffffffff, 0x00050000,
523         0x917c, 0xffffffff, 0x00030002,
524         0x918c, 0xffffffff, 0x00010004,
525         0x9190, 0xffffffff, 0x00070006,
526         0x9194, 0xffffffff, 0x00050000,
527         0x9198, 0xffffffff, 0x00030002,
528         0x91a8, 0xffffffff, 0x00010004,
529         0x91ac, 0xffffffff, 0x00070006,
530         0x91e8, 0xffffffff, 0x00000000,
531         0x9294, 0xffffffff, 0x00000000,
532         0x929c, 0xffffffff, 0x00000001,
533         0x802c, 0xffffffff, 0xc0000000
534 };
535
536 static const u32 juniper_mgcg_init[] =
537 {
538         0x802c, 0xffffffff, 0xc0000000,
539         0x5448, 0xffffffff, 0x00000100,
540         0x55e4, 0xffffffff, 0x00000100,
541         0x160c, 0xffffffff, 0x00000100,
542         0x5644, 0xffffffff, 0x00000100,
543         0xc164, 0xffffffff, 0x00000100,
544         0x8a18, 0xffffffff, 0x00000100,
545         0x897c, 0xffffffff, 0x06000100,
546         0x8b28, 0xffffffff, 0x00000100,
547         0x9144, 0xffffffff, 0x00000100,
548         0x9a60, 0xffffffff, 0x00000100,
549         0x9868, 0xffffffff, 0x00000100,
550         0x8d58, 0xffffffff, 0x00000100,
551         0x9510, 0xffffffff, 0x00000100,
552         0x949c, 0xffffffff, 0x00000100,
553         0x9654, 0xffffffff, 0x00000100,
554         0x9030, 0xffffffff, 0x00000100,
555         0x9034, 0xffffffff, 0x00000100,
556         0x9038, 0xffffffff, 0x00000100,
557         0x903c, 0xffffffff, 0x00000100,
558         0x9040, 0xffffffff, 0x00000100,
559         0xa200, 0xffffffff, 0x00000100,
560         0xa204, 0xffffffff, 0x00000100,
561         0xa208, 0xffffffff, 0x00000100,
562         0xa20c, 0xffffffff, 0x00000100,
563         0x971c, 0xffffffff, 0x00000100,
564         0xd0c0, 0xffffffff, 0xff000100,
565         0x802c, 0xffffffff, 0x40000000,
566         0x915c, 0xffffffff, 0x00010000,
567         0x9160, 0xffffffff, 0x00030002,
568         0x9178, 0xffffffff, 0x00070000,
569         0x917c, 0xffffffff, 0x00030002,
570         0x9180, 0xffffffff, 0x00050004,
571         0x918c, 0xffffffff, 0x00010006,
572         0x9190, 0xffffffff, 0x00090008,
573         0x9194, 0xffffffff, 0x00070000,
574         0x9198, 0xffffffff, 0x00030002,
575         0x919c, 0xffffffff, 0x00050004,
576         0x91a8, 0xffffffff, 0x00010006,
577         0x91ac, 0xffffffff, 0x00090008,
578         0x91b0, 0xffffffff, 0x00070000,
579         0x91b4, 0xffffffff, 0x00030002,
580         0x91b8, 0xffffffff, 0x00050004,
581         0x91c4, 0xffffffff, 0x00010006,
582         0x91c8, 0xffffffff, 0x00090008,
583         0x91cc, 0xffffffff, 0x00070000,
584         0x91d0, 0xffffffff, 0x00030002,
585         0x91d4, 0xffffffff, 0x00050004,
586         0x91e0, 0xffffffff, 0x00010006,
587         0x91e4, 0xffffffff, 0x00090008,
588         0x91e8, 0xffffffff, 0x00000000,
589         0x91ec, 0xffffffff, 0x00070000,
590         0x91f0, 0xffffffff, 0x00030002,
591         0x91f4, 0xffffffff, 0x00050004,
592         0x9200, 0xffffffff, 0x00010006,
593         0x9204, 0xffffffff, 0x00090008,
594         0x9208, 0xffffffff, 0x00070000,
595         0x920c, 0xffffffff, 0x00030002,
596         0x9210, 0xffffffff, 0x00050004,
597         0x921c, 0xffffffff, 0x00010006,
598         0x9220, 0xffffffff, 0x00090008,
599         0x9224, 0xffffffff, 0x00070000,
600         0x9228, 0xffffffff, 0x00030002,
601         0x922c, 0xffffffff, 0x00050004,
602         0x9238, 0xffffffff, 0x00010006,
603         0x923c, 0xffffffff, 0x00090008,
604         0x9240, 0xffffffff, 0x00070000,
605         0x9244, 0xffffffff, 0x00030002,
606         0x9248, 0xffffffff, 0x00050004,
607         0x9254, 0xffffffff, 0x00010006,
608         0x9258, 0xffffffff, 0x00090008,
609         0x925c, 0xffffffff, 0x00070000,
610         0x9260, 0xffffffff, 0x00030002,
611         0x9264, 0xffffffff, 0x00050004,
612         0x9270, 0xffffffff, 0x00010006,
613         0x9274, 0xffffffff, 0x00090008,
614         0x9278, 0xffffffff, 0x00070000,
615         0x927c, 0xffffffff, 0x00030002,
616         0x9280, 0xffffffff, 0x00050004,
617         0x928c, 0xffffffff, 0x00010006,
618         0x9290, 0xffffffff, 0x00090008,
619         0x9294, 0xffffffff, 0x00000000,
620         0x929c, 0xffffffff, 0x00000001,
621         0x802c, 0xffffffff, 0xc0000000,
622         0x977c, 0xffffffff, 0x00000100,
623         0x3f80, 0xffffffff, 0x00000100,
624         0xa210, 0xffffffff, 0x00000100,
625         0xa214, 0xffffffff, 0x00000100,
626         0x4d8, 0xffffffff, 0x00000100,
627         0x9784, 0xffffffff, 0x00000100,
628         0x9698, 0xffffffff, 0x00000100,
629         0x4d4, 0xffffffff, 0x00000200,
630         0x30cc, 0xffffffff, 0x00000100,
631         0x802c, 0xffffffff, 0xc0000000
632 };
633
634 static const u32 supersumo_golden_registers[] =
635 {
636         0x5eb4, 0xffffffff, 0x00000002,
637         0x5cc, 0xffffffff, 0x00000001,
638         0x7030, 0xffffffff, 0x00000011,
639         0x7c30, 0xffffffff, 0x00000011,
640         0x6104, 0x01000300, 0x00000000,
641         0x5bc0, 0x00300000, 0x00000000,
642         0x8c04, 0xffffffff, 0x40600060,
643         0x8c08, 0xffffffff, 0x001c001c,
644         0x8c20, 0xffffffff, 0x00800080,
645         0x8c24, 0xffffffff, 0x00800080,
646         0x8c18, 0xffffffff, 0x20202078,
647         0x8c1c, 0xffffffff, 0x00001010,
648         0x918c, 0xffffffff, 0x00010006,
649         0x91a8, 0xffffffff, 0x00010006,
650         0x91c4, 0xffffffff, 0x00010006,
651         0x91e0, 0xffffffff, 0x00010006,
652         0x9200, 0xffffffff, 0x00010006,
653         0x9150, 0xffffffff, 0x6e944040,
654         0x917c, 0xffffffff, 0x00030002,
655         0x9180, 0xffffffff, 0x00050004,
656         0x9198, 0xffffffff, 0x00030002,
657         0x919c, 0xffffffff, 0x00050004,
658         0x91b4, 0xffffffff, 0x00030002,
659         0x91b8, 0xffffffff, 0x00050004,
660         0x91d0, 0xffffffff, 0x00030002,
661         0x91d4, 0xffffffff, 0x00050004,
662         0x91f0, 0xffffffff, 0x00030002,
663         0x91f4, 0xffffffff, 0x00050004,
664         0x915c, 0xffffffff, 0x00010000,
665         0x9160, 0xffffffff, 0x00030002,
666         0x3f90, 0xffff0000, 0xff000000,
667         0x9178, 0xffffffff, 0x00070000,
668         0x9194, 0xffffffff, 0x00070000,
669         0x91b0, 0xffffffff, 0x00070000,
670         0x91cc, 0xffffffff, 0x00070000,
671         0x91ec, 0xffffffff, 0x00070000,
672         0x9148, 0xffff0000, 0xff000000,
673         0x9190, 0xffffffff, 0x00090008,
674         0x91ac, 0xffffffff, 0x00090008,
675         0x91c8, 0xffffffff, 0x00090008,
676         0x91e4, 0xffffffff, 0x00090008,
677         0x9204, 0xffffffff, 0x00090008,
678         0x3f94, 0xffff0000, 0xff000000,
679         0x914c, 0xffff0000, 0xff000000,
680         0x929c, 0xffffffff, 0x00000001,
681         0x8a18, 0xffffffff, 0x00000100,
682         0x8b28, 0xffffffff, 0x00000100,
683         0x9144, 0xffffffff, 0x00000100,
684         0x5644, 0xffffffff, 0x00000100,
685         0x9b7c, 0xffffffff, 0x00000000,
686         0x8030, 0xffffffff, 0x0000100a,
687         0x8a14, 0xffffffff, 0x00000007,
688         0x8b24, 0xffffffff, 0x00ff0fff,
689         0x8b10, 0xffffffff, 0x00000000,
690         0x28a4c, 0x06000000, 0x06000000,
691         0x4d8, 0xffffffff, 0x00000100,
692         0x913c, 0xffff000f, 0x0100000a,
693         0x960c, 0xffffffff, 0x54763210,
694         0x88c4, 0xffffffff, 0x000000c2,
695         0x88d4, 0xffffffff, 0x00000010,
696         0x8974, 0xffffffff, 0x00000000,
697         0xc78, 0x00000080, 0x00000080,
698         0x5e78, 0xffffffff, 0x001000f0,
699         0xd02c, 0xffffffff, 0x08421000,
700         0xa008, 0xffffffff, 0x00010000,
701         0x8d00, 0xffffffff, 0x100e4848,
702         0x8d04, 0xffffffff, 0x00164745,
703         0x8c00, 0xffffffff, 0xe4000003,
704         0x8cf0, 0x1fffffff, 0x08e00620,
705         0x28350, 0xffffffff, 0x00000000,
706         0x9508, 0xffffffff, 0x00000002
707 };
708
709 static const u32 sumo_golden_registers[] =
710 {
711         0x900c, 0x00ffffff, 0x0017071f,
712         0x8c18, 0xffffffff, 0x10101060,
713         0x8c1c, 0xffffffff, 0x00001010,
714         0x8c30, 0x0000000f, 0x00000005,
715         0x9688, 0x0000000f, 0x00000007
716 };
717
718 static const u32 wrestler_golden_registers[] =
719 {
720         0x5eb4, 0xffffffff, 0x00000002,
721         0x5cc, 0xffffffff, 0x00000001,
722         0x7030, 0xffffffff, 0x00000011,
723         0x7c30, 0xffffffff, 0x00000011,
724         0x6104, 0x01000300, 0x00000000,
725         0x5bc0, 0x00300000, 0x00000000,
726         0x918c, 0xffffffff, 0x00010006,
727         0x91a8, 0xffffffff, 0x00010006,
728         0x9150, 0xffffffff, 0x6e944040,
729         0x917c, 0xffffffff, 0x00030002,
730         0x9198, 0xffffffff, 0x00030002,
731         0x915c, 0xffffffff, 0x00010000,
732         0x3f90, 0xffff0000, 0xff000000,
733         0x9178, 0xffffffff, 0x00070000,
734         0x9194, 0xffffffff, 0x00070000,
735         0x9148, 0xffff0000, 0xff000000,
736         0x9190, 0xffffffff, 0x00090008,
737         0x91ac, 0xffffffff, 0x00090008,
738         0x3f94, 0xffff0000, 0xff000000,
739         0x914c, 0xffff0000, 0xff000000,
740         0x929c, 0xffffffff, 0x00000001,
741         0x8a18, 0xffffffff, 0x00000100,
742         0x8b28, 0xffffffff, 0x00000100,
743         0x9144, 0xffffffff, 0x00000100,
744         0x9b7c, 0xffffffff, 0x00000000,
745         0x8030, 0xffffffff, 0x0000100a,
746         0x8a14, 0xffffffff, 0x00000001,
747         0x8b24, 0xffffffff, 0x00ff0fff,
748         0x8b10, 0xffffffff, 0x00000000,
749         0x28a4c, 0x06000000, 0x06000000,
750         0x4d8, 0xffffffff, 0x00000100,
751         0x913c, 0xffff000f, 0x0100000a,
752         0x960c, 0xffffffff, 0x54763210,
753         0x88c4, 0xffffffff, 0x000000c2,
754         0x88d4, 0xffffffff, 0x00000010,
755         0x8974, 0xffffffff, 0x00000000,
756         0xc78, 0x00000080, 0x00000080,
757         0x5e78, 0xffffffff, 0x001000f0,
758         0xd02c, 0xffffffff, 0x08421000,
759         0xa008, 0xffffffff, 0x00010000,
760         0x8d00, 0xffffffff, 0x100e4848,
761         0x8d04, 0xffffffff, 0x00164745,
762         0x8c00, 0xffffffff, 0xe4000003,
763         0x8cf0, 0x1fffffff, 0x08e00410,
764         0x28350, 0xffffffff, 0x00000000,
765         0x9508, 0xffffffff, 0x00000002,
766         0x900c, 0xffffffff, 0x0017071f,
767         0x8c18, 0xffffffff, 0x10101060,
768         0x8c1c, 0xffffffff, 0x00001010
769 };
770
771 static const u32 barts_golden_registers[] =
772 {
773         0x5eb4, 0xffffffff, 0x00000002,
774         0x5e78, 0x8f311ff1, 0x001000f0,
775         0x3f90, 0xffff0000, 0xff000000,
776         0x9148, 0xffff0000, 0xff000000,
777         0x3f94, 0xffff0000, 0xff000000,
778         0x914c, 0xffff0000, 0xff000000,
779         0xc78, 0x00000080, 0x00000080,
780         0xbd4, 0x70073777, 0x00010001,
781         0xd02c, 0xbfffff1f, 0x08421000,
782         0xd0b8, 0x03773777, 0x02011003,
783         0x5bc0, 0x00200000, 0x50100000,
784         0x98f8, 0x33773777, 0x02011003,
785         0x98fc, 0xffffffff, 0x76543210,
786         0x7030, 0x31000311, 0x00000011,
787         0x2f48, 0x00000007, 0x02011003,
788         0x6b28, 0x00000010, 0x00000012,
789         0x7728, 0x00000010, 0x00000012,
790         0x10328, 0x00000010, 0x00000012,
791         0x10f28, 0x00000010, 0x00000012,
792         0x11b28, 0x00000010, 0x00000012,
793         0x12728, 0x00000010, 0x00000012,
794         0x240c, 0x000007ff, 0x00000380,
795         0x8a14, 0xf000001f, 0x00000007,
796         0x8b24, 0x3fff3fff, 0x00ff0fff,
797         0x8b10, 0x0000ff0f, 0x00000000,
798         0x28a4c, 0x07ffffff, 0x06000000,
799         0x10c, 0x00000001, 0x00010003,
800         0xa02c, 0xffffffff, 0x0000009b,
801         0x913c, 0x0000000f, 0x0100000a,
802         0x8d00, 0xffff7f7f, 0x100e4848,
803         0x8d04, 0x00ffffff, 0x00164745,
804         0x8c00, 0xfffc0003, 0xe4000003,
805         0x8c04, 0xf8ff00ff, 0x40600060,
806         0x8c08, 0x00ff00ff, 0x001c001c,
807         0x8cf0, 0x1fff1fff, 0x08e00620,
808         0x8c20, 0x0fff0fff, 0x00800080,
809         0x8c24, 0x0fff0fff, 0x00800080,
810         0x8c18, 0xffffffff, 0x20202078,
811         0x8c1c, 0x0000ffff, 0x00001010,
812         0x28350, 0x00000f01, 0x00000000,
813         0x9508, 0x3700001f, 0x00000002,
814         0x960c, 0xffffffff, 0x54763210,
815         0x88c4, 0x001f3ae3, 0x000000c2,
816         0x88d4, 0x0000001f, 0x00000010,
817         0x8974, 0xffffffff, 0x00000000
818 };
819
820 static const u32 turks_golden_registers[] =
821 {
822         0x5eb4, 0xffffffff, 0x00000002,
823         0x5e78, 0x8f311ff1, 0x001000f0,
824         0x8c8, 0x00003000, 0x00001070,
825         0x8cc, 0x000fffff, 0x00040035,
826         0x3f90, 0xffff0000, 0xfff00000,
827         0x9148, 0xffff0000, 0xfff00000,
828         0x3f94, 0xffff0000, 0xfff00000,
829         0x914c, 0xffff0000, 0xfff00000,
830         0xc78, 0x00000080, 0x00000080,
831         0xbd4, 0x00073007, 0x00010002,
832         0xd02c, 0xbfffff1f, 0x08421000,
833         0xd0b8, 0x03773777, 0x02010002,
834         0x5bc0, 0x00200000, 0x50100000,
835         0x98f8, 0x33773777, 0x00010002,
836         0x98fc, 0xffffffff, 0x33221100,
837         0x7030, 0x31000311, 0x00000011,
838         0x2f48, 0x33773777, 0x00010002,
839         0x6b28, 0x00000010, 0x00000012,
840         0x7728, 0x00000010, 0x00000012,
841         0x10328, 0x00000010, 0x00000012,
842         0x10f28, 0x00000010, 0x00000012,
843         0x11b28, 0x00000010, 0x00000012,
844         0x12728, 0x00000010, 0x00000012,
845         0x240c, 0x000007ff, 0x00000380,
846         0x8a14, 0xf000001f, 0x00000007,
847         0x8b24, 0x3fff3fff, 0x00ff0fff,
848         0x8b10, 0x0000ff0f, 0x00000000,
849         0x28a4c, 0x07ffffff, 0x06000000,
850         0x10c, 0x00000001, 0x00010003,
851         0xa02c, 0xffffffff, 0x0000009b,
852         0x913c, 0x0000000f, 0x0100000a,
853         0x8d00, 0xffff7f7f, 0x100e4848,
854         0x8d04, 0x00ffffff, 0x00164745,
855         0x8c00, 0xfffc0003, 0xe4000003,
856         0x8c04, 0xf8ff00ff, 0x40600060,
857         0x8c08, 0x00ff00ff, 0x001c001c,
858         0x8cf0, 0x1fff1fff, 0x08e00410,
859         0x8c20, 0x0fff0fff, 0x00800080,
860         0x8c24, 0x0fff0fff, 0x00800080,
861         0x8c18, 0xffffffff, 0x20202078,
862         0x8c1c, 0x0000ffff, 0x00001010,
863         0x28350, 0x00000f01, 0x00000000,
864         0x9508, 0x3700001f, 0x00000002,
865         0x960c, 0xffffffff, 0x54763210,
866         0x88c4, 0x001f3ae3, 0x000000c2,
867         0x88d4, 0x0000001f, 0x00000010,
868         0x8974, 0xffffffff, 0x00000000
869 };
870
871 static const u32 caicos_golden_registers[] =
872 {
873         0x5eb4, 0xffffffff, 0x00000002,
874         0x5e78, 0x8f311ff1, 0x001000f0,
875         0x8c8, 0x00003420, 0x00001450,
876         0x8cc, 0x000fffff, 0x00040035,
877         0x3f90, 0xffff0000, 0xfffc0000,
878         0x9148, 0xffff0000, 0xfffc0000,
879         0x3f94, 0xffff0000, 0xfffc0000,
880         0x914c, 0xffff0000, 0xfffc0000,
881         0xc78, 0x00000080, 0x00000080,
882         0xbd4, 0x00073007, 0x00010001,
883         0xd02c, 0xbfffff1f, 0x08421000,
884         0xd0b8, 0x03773777, 0x02010001,
885         0x5bc0, 0x00200000, 0x50100000,
886         0x98f8, 0x33773777, 0x02010001,
887         0x98fc, 0xffffffff, 0x33221100,
888         0x7030, 0x31000311, 0x00000011,
889         0x2f48, 0x33773777, 0x02010001,
890         0x6b28, 0x00000010, 0x00000012,
891         0x7728, 0x00000010, 0x00000012,
892         0x10328, 0x00000010, 0x00000012,
893         0x10f28, 0x00000010, 0x00000012,
894         0x11b28, 0x00000010, 0x00000012,
895         0x12728, 0x00000010, 0x00000012,
896         0x240c, 0x000007ff, 0x00000380,
897         0x8a14, 0xf000001f, 0x00000001,
898         0x8b24, 0x3fff3fff, 0x00ff0fff,
899         0x8b10, 0x0000ff0f, 0x00000000,
900         0x28a4c, 0x07ffffff, 0x06000000,
901         0x10c, 0x00000001, 0x00010003,
902         0xa02c, 0xffffffff, 0x0000009b,
903         0x913c, 0x0000000f, 0x0100000a,
904         0x8d00, 0xffff7f7f, 0x100e4848,
905         0x8d04, 0x00ffffff, 0x00164745,
906         0x8c00, 0xfffc0003, 0xe4000003,
907         0x8c04, 0xf8ff00ff, 0x40600060,
908         0x8c08, 0x00ff00ff, 0x001c001c,
909         0x8cf0, 0x1fff1fff, 0x08e00410,
910         0x8c20, 0x0fff0fff, 0x00800080,
911         0x8c24, 0x0fff0fff, 0x00800080,
912         0x8c18, 0xffffffff, 0x20202078,
913         0x8c1c, 0x0000ffff, 0x00001010,
914         0x28350, 0x00000f01, 0x00000000,
915         0x9508, 0x3700001f, 0x00000002,
916         0x960c, 0xffffffff, 0x54763210,
917         0x88c4, 0x001f3ae3, 0x000000c2,
918         0x88d4, 0x0000001f, 0x00000010,
919         0x8974, 0xffffffff, 0x00000000
920 };
921
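/*
 * Apply the per-family golden register defaults (and, where defined,
 * the matching MGCG init sequences) from the tables above.
 */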
922 static void evergreen_init_golden_registers(struct radeon_device *rdev)
923 {
924         switch (rdev->family) {
925         case CHIP_CYPRESS:
926         case CHIP_HEMLOCK:
927                 radeon_program_register_sequence(rdev,
928                                                  evergreen_golden_registers,
929                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
930                 radeon_program_register_sequence(rdev,
931                                                  evergreen_golden_registers2,
932                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
933                 radeon_program_register_sequence(rdev,
934                                                  cypress_mgcg_init,
935                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
936                 break;
937         case CHIP_JUNIPER:
938                 radeon_program_register_sequence(rdev,
939                                                  evergreen_golden_registers,
940                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
941                 radeon_program_register_sequence(rdev,
942                                                  evergreen_golden_registers2,
943                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
944                 radeon_program_register_sequence(rdev,
945                                                  juniper_mgcg_init,
946                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
947                 break;
948         case CHIP_REDWOOD:
949                 radeon_program_register_sequence(rdev,
950                                                  evergreen_golden_registers,
951                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
952                 radeon_program_register_sequence(rdev,
953                                                  evergreen_golden_registers2,
954                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
955                 radeon_program_register_sequence(rdev,
956                                                  redwood_mgcg_init,
957                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
958                 break;
959         case CHIP_CEDAR:
960                 radeon_program_register_sequence(rdev,
961                                                  cedar_golden_registers,
962                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
963                 radeon_program_register_sequence(rdev,
964                                                  evergreen_golden_registers2,
965                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
966                 radeon_program_register_sequence(rdev,
967                                                  cedar_mgcg_init,
968                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
969                 break;
970         case CHIP_PALM:
971                 radeon_program_register_sequence(rdev,
972                                                  wrestler_golden_registers,
973                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
974                 break;
975         case CHIP_SUMO:
976                 radeon_program_register_sequence(rdev,
977                                                  supersumo_golden_registers,
978                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
979                 break;
980         case CHIP_SUMO2:
981                 radeon_program_register_sequence(rdev,
982                                                  supersumo_golden_registers,
983                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
984                 radeon_program_register_sequence(rdev,
985                                                  sumo_golden_registers,
986                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
987                 break;
988         case CHIP_BARTS:
989                 radeon_program_register_sequence(rdev,
990                                                  barts_golden_registers,
991                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
992                 break;
993         case CHIP_TURKS:
994                 radeon_program_register_sequence(rdev,
995                                                  turks_golden_registers,
996                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
997                 break;
998         case CHIP_CAICOS:
999                 radeon_program_register_sequence(rdev,
1000                                                  caicos_golden_registers,
1001                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1002                 break;
1003         default:
1004                 break;
1005         }
1006 }
1007
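/*
 * Unpack the bank width/height, macro tile aspect and tile split fields
 * from a KMS tiling_flags word and convert them into the corresponding
 * EVERGREEN_ADDR_SURF_* register encodings.
 */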
1008 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1009                              unsigned *bankh, unsigned *mtaspect,
1010                              unsigned *tile_split)
1011 {
1012         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1013         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1014         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1015         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1016         switch (*bankw) {
1017         default:
1018         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1019         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1020         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1021         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1022         }
1023         switch (*bankh) {
1024         default:
1025         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1026         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1027         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1028         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1029         }
1030         switch (*mtaspect) {
1031         default:
1032         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1033         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1034         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1035         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1036         }
1037 }
1038
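/*
 * Program a single UVD clock: ask the ATOM tables for engine PLL
 * dividers for the requested frequency, write the post divider into
 * cntl_reg, and poll status_reg (up to ~1s) until the divider change
 * has taken effect.
 */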
1039 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1040                               u32 cntl_reg, u32 status_reg)
1041 {
1042         int r, i;
1043         struct atom_clock_dividers dividers;
1044
1045         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1046                                            clock, false, &dividers);
1047         if (r)
1048                 return r;
1049
1050         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1051
1052         for (i = 0; i < 100; i++) {
1053                 if (RREG32(status_reg) & DCLK_STATUS)
1054                         break;
1055                 mdelay(10);
1056         }
1057         if (i == 100)
1058                 return -ETIMEDOUT;
1059
1060         return 0;
1061 }
1062
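/*
 * Set both VCLK and DCLK, mirroring the programmed frequencies (in MHz,
 * per the comments below) into the low and high halves of CG_SCRATCH1.
 */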
1063 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064 {
1065         int r = 0;
1066         u32 cg_scratch = RREG32(CG_SCRATCH1);
1067
1068         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1069         if (r)
1070                 goto done;
1071         cg_scratch &= 0xffff0000;
1072         cg_scratch |= vclk / 100; /* MHz */
1073
1074         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1075         if (r)
1076                 goto done;
1077         cg_scratch &= 0x0000ffff;
1078         cg_scratch |= (dclk / 100) << 16; /* MHz */
1079
1080 done:
1081         WREG32(CG_SCRATCH1, cg_scratch);
1082
1083         return r;
1084 }
1085
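/*
 * UPLL programming sequence: route VCLK/DCLK to the bypass clock, put
 * the UPLL into bypass (and to sleep, if the clocks are being turned
 * off), compute feedback/post dividers for the requested VCLK/DCLK,
 * program them while the PLL is held in reset, then let the PLL settle
 * and switch back to normal (non-bypass) operation.
 */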
1086 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1087 {
1088         /* start off with something large */
1089         unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1090         int r;
1091
1092         /* bypass vclk and dclk with bclk */
1093         WREG32_P(CG_UPLL_FUNC_CNTL_2,
1094                 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1095                 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1096
1097         /* put PLL in bypass mode */
1098         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
1099
1100         if (!vclk || !dclk) {
1101                 /* keep the Bypass mode, put PLL to sleep */
1102                 WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1103                 return 0;
1104         }
1105
1106         r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1107                                           16384, 0x03FFFFFF, 0, 128, 5,
1108                                           &fb_div, &vclk_div, &dclk_div);
1109         if (r)
1110                 return r;
1111
1112         /* set VCO_MODE to 1 */
1113         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1114
1115         /* toggle UPLL_SLEEP to 1 then back to 0 */
1116         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1117         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1118
1119         /* deassert UPLL_RESET */
1120         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1121
1122         mdelay(1);
1123
1124         r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1125         if (r)
1126                 return r;
1127
1128         /* assert UPLL_RESET again */
1129         WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1130
1131         /* disable spread spectrum. */
1132         WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1133
1134         /* set feedback divider */
1135         WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1136
1137         /* set ref divider to 0 */
1138         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
1139
1140         if (fb_div < 307200)
1141                 WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1142         else
1143                 WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1144
1145         /* set PDIV_A and PDIV_B */
1146         WREG32_P(CG_UPLL_FUNC_CNTL_2,
1147                 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1148                 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1149
1150         /* give the PLL some time to settle */
1151         mdelay(15);
1152
1153         /* deassert PLL_RESET */
1154         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1155
1156         mdelay(15);
1157
1158         /* switch from bypass mode to normal mode */
1159         WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1160
1161         r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1162         if (r)
1163                 return r;
1164
1165         /* switch VCLK and DCLK selection */
1166         WREG32_P(CG_UPLL_FUNC_CNTL_2,
1167                 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1168                 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1169
1170         mdelay(100);
1171
1172         return 0;
1173 }
1174
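/*
 * PCI_EXP_DEVCTL_READRQ occupies bits 14:12 of the PCIe Device Control
 * register; an encoded value v selects a maximum read request size of
 * 128 << v bytes (0 = 128B ... 5 = 4096B), with 6 and 7 reserved.  The
 * fixup below rewrites 0 and the reserved encodings to 2 (512 bytes).
 */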
1175 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1176 {
1177         u16 ctl, v;
1178         int err;
1179
1180         err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
1181         if (err)
1182                 return;
1183
1184         v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1185
1186         /* If the BIOS or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1187          * to avoid hangs or performance issues.
1188          */
1189         if ((v == 0) || (v == 6) || (v == 7)) {
1190                 ctl &= ~PCI_EXP_DEVCTL_READRQ;
1191                 ctl |= (2 << 12);
1192                 pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
1193         }
1194 }
1195
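/*
 * Program the FMT block for the CRTC feeding this encoder: when the
 * sink reports 6 or 8 bpc, either dither or truncate the output down to
 * that depth; LVDS/eDP and analog encoders are left alone.
 */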
1196 void dce4_program_fmt(struct drm_encoder *encoder)
1197 {
1198         struct drm_device *dev = encoder->dev;
1199         struct radeon_device *rdev = dev->dev_private;
1200         struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1201         struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1202         struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1203         int bpc = 0;
1204         u32 tmp = 0;
1205         enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1206
1207         if (connector) {
1208                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1209                 bpc = radeon_get_monitor_bpc(connector);
1210                 dither = radeon_connector->dither;
1211         }
1212
1213         /* LVDS/eDP FMT is set up by atom */
1214         if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1215                 return;
1216
1217         /* not needed for analog */
1218         if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1219             (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1220                 return;
1221
1222         if (bpc == 0)
1223                 return;
1224
1225         switch (bpc) {
1226         case 6:
1227                 if (dither == RADEON_FMT_DITHER_ENABLE)
1228                         /* XXX sort out optimal dither settings */
1229                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1230                                 FMT_SPATIAL_DITHER_EN);
1231                 else
1232                         tmp |= FMT_TRUNCATE_EN;
1233                 break;
1234         case 8:
1235                 if (dither == RADEON_FMT_DITHER_ENABLE)
1236                         /* XXX sort out optimal dither settings */
1237                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1238                                 FMT_RGB_RANDOM_ENABLE |
1239                                 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1240                 else
1241                         tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1242                 break;
1243         case 10:
1244         default:
1245                 /* not needed */
1246                 break;
1247         }
1248
1249         WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1250 }
1251
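/*
 * Small helpers for dce4_wait_for_vblank(): report whether the CRTC is
 * currently inside vblank, and whether its position counter is still
 * advancing (i.e. the CRTC has not been shut off underneath us).
 */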
1252 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1253 {
1254         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1255                 return true;
1256         else
1257                 return false;
1258 }
1259
1260 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1261 {
1262         u32 pos1, pos2;
1263
1264         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1265         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1266
1267         if (pos1 != pos2)
1268                 return true;
1269         else
1270                 return false;
1271 }
1272
1273 /**
1274  * dce4_wait_for_vblank - vblank wait asic callback.
1275  *
1276  * @rdev: radeon_device pointer
1277  * @crtc: crtc to wait for vblank on
1278  *
1279  * Wait for vblank on the requested crtc (evergreen+).
1280  */
1281 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1282 {
1283         unsigned i = 0;
1284
1285         if (crtc >= rdev->num_crtc)
1286                 return;
1287
1288         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1289                 return;
1290
1291         /* depending on when we hit vblank, we may be close to active; if so,
1292          * wait for another frame.
1293          */
1294         while (dce4_is_in_vblank(rdev, crtc)) {
1295                 if (i++ % 100 == 0) {
1296                         if (!dce4_is_counter_moving(rdev, crtc))
1297                                 break;
1298                 }
1299         }
1300
1301         while (!dce4_is_in_vblank(rdev, crtc)) {
1302                 if (i++ % 100 == 0) {
1303                         if (!dce4_is_counter_moving(rdev, crtc))
1304                                 break;
1305                 }
1306         }
1307 }
1308
1309 /**
1310  * evergreen_pre_page_flip - pre-pageflip callback.
1311  *
1312  * @rdev: radeon_device pointer
1313  * @crtc: crtc to prepare for pageflip on
1314  *
1315  * Pre-pageflip callback (evergreen+).
1316  * Enables the pageflip irq (vblank irq).
1317  */
1318 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1319 {
1320         /* enable the pflip int */
1321         radeon_irq_kms_pflip_irq_get(rdev, crtc);
1322 }
1323
1324 /**
1325  * evergreen_post_page_flip - post-pageflip callback.
1326  *
1327  * @rdev: radeon_device pointer
1328  * @crtc: crtc to cleanup pageflip on
1329  *
1330  * Post-pageflip callback (evergreen+).
1331  * Disables the pageflip irq (vblank irq).
1332  */
1333 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1334 {
1335         /* disable the pflip int */
1336         radeon_irq_kms_pflip_irq_put(rdev, crtc);
1337 }
1338
1339 /**
1340  * evergreen_page_flip - pageflip callback.
1341  *
1342  * @rdev: radeon_device pointer
1343  * @crtc_id: crtc to flip on
1344  * @crtc_base: new address of the crtc (GPU MC address)
1345  *
1346  * Does the actual pageflip (evergreen+).
1347  * During vblank we take the crtc lock and wait for the update_pending
1348  * bit to go high, when it does, we release the lock, and allow the
1349  * double buffered update to take place.
1350  * Returns the current update pending status.
1351  */
1352 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1353 {
1354         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1355         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1356         int i;
1357
1358         /* Lock the graphics update lock */
1359         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1360         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1361
1362         /* update the scanout addresses */
1363         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1364                upper_32_bits(crtc_base));
1365         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1366                (u32)crtc_base);
1367
1368         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1369                upper_32_bits(crtc_base));
1370         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1371                (u32)crtc_base);
1372
1373         /* Wait for update_pending to go high. */
1374         for (i = 0; i < rdev->usec_timeout; i++) {
1375                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1376                         break;
1377                 udelay(1);
1378         }
1379         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1380
1381         /* Unlock the lock, so double-buffering can take place inside vblank */
1382         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1383         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1384
1385         /* Return current update_pending status: */
1386         return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1387 }
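
/*
 * Illustrative usage sketch, not part of the driver: the caller names below
 * (radeon_bo_gpu_offset(), the radeon_page_flip() asic wrapper) are
 * assumptions about the surrounding KMS code, shown only for context:
 *
 *      u64 base = radeon_bo_gpu_offset(new_rbo);
 *      update_pending = radeon_page_flip(rdev, radeon_crtc->crtc_id, base);
 *
 * The returned value tells the caller whether the double buffered surface
 * update is still pending in hardware.
 */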
1388
1389 /* get temperature in millidegrees */
1390 int evergreen_get_temp(struct radeon_device *rdev)
1391 {
1392         u32 temp, toffset;
1393         int actual_temp = 0;
1394
1395         if (rdev->family == CHIP_JUNIPER) {
1396                 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1397                         TOFFSET_SHIFT;
1398                 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1399                         TS0_ADC_DOUT_SHIFT;
1400
1401                 if (toffset & 0x100)
1402                         actual_temp = temp / 2 - (0x200 - toffset);
1403                 else
1404                         actual_temp = temp / 2 + toffset;
1405
1406                 actual_temp = actual_temp * 1000;
1407
1408         } else {
1409                 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1410                         ASIC_T_SHIFT;
1411
1412                 if (temp & 0x400)
1413                         actual_temp = -256;
1414                 else if (temp & 0x200)
1415                         actual_temp = 255;
1416                 else if (temp & 0x100) {
1417                         actual_temp = temp & 0x1ff;
1418                         actual_temp |= ~0x1ff;
1419                 } else
1420                         actual_temp = temp & 0xff;
1421
1422                 actual_temp = (actual_temp * 1000) / 2;
1423         }
1424
1425         return actual_temp;
1426 }
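
/*
 * Worked example for the Juniper path above (illustrative values only):
 * TOFFSET is treated as a signed 9-bit calibration offset, so with
 * toffset = 0x1f8 (i.e. -8) and a raw TS0 reading of 0x140 (320) the result
 * is 320 / 2 - (0x200 - 0x1f8) = 160 - 8 = 152 degrees C, returned as
 * 152000 millidegrees.  The non-Juniper path reads ASIC_T, a signed value in
 * half-degree units, hence the final (actual_temp * 1000) / 2.
 */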
1427
1428 int sumo_get_temp(struct radeon_device *rdev)
1429 {
1430         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1431         int actual_temp = temp - 49;
1432
1433         return actual_temp * 1000;
1434 }
1435
1436 /**
1437  * sumo_pm_init_profile - Initialize power profiles callback.
1438  *
1439  * @rdev: radeon_device pointer
1440  *
1441  * Initialize the power states used in profile mode
1442  * (sumo, trinity, SI).
1443  * Used for profile mode only.
1444  */
1445 void sumo_pm_init_profile(struct radeon_device *rdev)
1446 {
1447         int idx;
1448
1449         /* default */
1450         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1451         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1452         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1453         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1454
1455         /* low,mid sh/mh */
1456         if (rdev->flags & RADEON_IS_MOBILITY)
1457                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1458         else
1459                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1460
1461         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1462         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1463         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1464         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1465
1466         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1467         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1468         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1469         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1470
1471         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1472         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1473         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1474         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1475
1476         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1477         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1478         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1479         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1480
1481         /* high sh/mh */
1482         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1483         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1484         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1485         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1486         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1487                 rdev->pm.power_state[idx].num_clock_modes - 1;
1488
1489         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1490         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1491         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1492         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1493                 rdev->pm.power_state[idx].num_clock_modes - 1;
1494 }
1495
1496 /**
1497  * btc_pm_init_profile - Initialize power profiles callback.
1498  *
1499  * @rdev: radeon_device pointer
1500  *
1501  * Initialize the power states used in profile mode
1502  * (BTC, cayman).
1503  * Used for profile mode only.
1504  */
1505 void btc_pm_init_profile(struct radeon_device *rdev)
1506 {
1507         int idx;
1508
1509         /* default */
1510         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1511         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1512         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1513         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1514         /* starting with BTC, there is one state that is used for both
1515          * MH and SH.  Difference is that we always use the high clock index for
1516          * mclk.
1517          */
1518         if (rdev->flags & RADEON_IS_MOBILITY)
1519                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1520         else
1521                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1522         /* low sh */
1523         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1524         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1525         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1526         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1527         /* mid sh */
1528         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1529         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1530         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1531         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1532         /* high sh */
1533         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1534         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1535         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1536         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1537         /* low mh */
1538         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1539         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1540         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1541         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1542         /* mid mh */
1543         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1544         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1545         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1546         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1547         /* high mh */
1548         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1549         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1550         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1551         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1552 }
1553
1554 /**
1555  * evergreen_pm_misc - set additional pm hw parameters callback.
1556  *
1557  * @rdev: radeon_device pointer
1558  *
1559  * Set non-clock parameters associated with a power state
1560  * (voltage, etc.) (evergreen+).
1561  */
1562 void evergreen_pm_misc(struct radeon_device *rdev)
1563 {
1564         int req_ps_idx = rdev->pm.requested_power_state_index;
1565         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1566         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1567         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1568
1569         if (voltage->type == VOLTAGE_SW) {
1570                 /* 0xff0x are flags rather than an actual voltage */
1571                 if ((voltage->voltage & 0xff00) == 0xff00)
1572                         return;
1573                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1574                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1575                         rdev->pm.current_vddc = voltage->voltage;
1576                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1577                 }
1578
1579                 /* starting with BTC, there is one state that is used for both
1580                  * MH and SH.  Difference is that we always use the high clock index for
1581                  * mclk and vddci.
1582                  */
1583                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1584                     (rdev->family >= CHIP_BARTS) &&
1585                     rdev->pm.active_crtc_count &&
1586                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1587                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1588                         voltage = &rdev->pm.power_state[req_ps_idx].
1589                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1590
1591                 /* 0xff0x are flags rather than an actual voltage */
1592                 if ((voltage->vddci & 0xff00) == 0xff00)
1593                         return;
1594                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1595                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1596                         rdev->pm.current_vddci = voltage->vddci;
1597                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1598                 }
1599         }
1600 }
1601
1602 /**
1603  * evergreen_pm_prepare - pre-power state change callback.
1604  *
1605  * @rdev: radeon_device pointer
1606  *
1607  * Prepare for a power state change (evergreen+).
1608  */
1609 void evergreen_pm_prepare(struct radeon_device *rdev)
1610 {
1611         struct drm_device *ddev = rdev->ddev;
1612         struct drm_crtc *crtc;
1613         struct radeon_crtc *radeon_crtc;
1614         u32 tmp;
1615
1616         /* disable any active CRTCs */
1617         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1618                 radeon_crtc = to_radeon_crtc(crtc);
1619                 if (radeon_crtc->enabled) {
1620                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1621                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1622                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1623                 }
1624         }
1625 }
1626
1627 /**
1628  * evergreen_pm_finish - post-power state change callback.
1629  *
1630  * @rdev: radeon_device pointer
1631  *
1632  * Clean up after a power state change (evergreen+).
1633  */
1634 void evergreen_pm_finish(struct radeon_device *rdev)
1635 {
1636         struct drm_device *ddev = rdev->ddev;
1637         struct drm_crtc *crtc;
1638         struct radeon_crtc *radeon_crtc;
1639         u32 tmp;
1640
1641         /* enable any active CRTCs */
1642         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1643                 radeon_crtc = to_radeon_crtc(crtc);
1644                 if (radeon_crtc->enabled) {
1645                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1646                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1647                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1648                 }
1649         }
1650 }
1651
1652 /**
1653  * evergreen_hpd_sense - hpd sense callback.
1654  *
1655  * @rdev: radeon_device pointer
1656  * @hpd: hpd (hotplug detect) pin
1657  *
1658  * Checks if a digital monitor is connected (evergreen+).
1659  * Returns true if connected, false if not connected.
1660  */
1661 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1662 {
1663         bool connected = false;
1664
1665         switch (hpd) {
1666         case RADEON_HPD_1:
1667                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1668                         connected = true;
1669                 break;
1670         case RADEON_HPD_2:
1671                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1672                         connected = true;
1673                 break;
1674         case RADEON_HPD_3:
1675                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1676                         connected = true;
1677                 break;
1678         case RADEON_HPD_4:
1679                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1680                         connected = true;
1681                 break;
1682         case RADEON_HPD_5:
1683                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1684                         connected = true;
1685                 break;
1686         case RADEON_HPD_6:
1687                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1688                         connected = true;
1689                 break;
1690         default:
1691                 break;
1692         }
1693
1694         return connected;
1695 }
1696
1697 /**
1698  * evergreen_hpd_set_polarity - hpd set polarity callback.
1699  *
1700  * @rdev: radeon_device pointer
1701  * @hpd: hpd (hotplug detect) pin
1702  *
1703  * Set the polarity of the hpd pin (evergreen+).
1704  */
1705 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1706                                 enum radeon_hpd_id hpd)
1707 {
1708         u32 tmp;
1709         bool connected = evergreen_hpd_sense(rdev, hpd);
1710
1711         switch (hpd) {
1712         case RADEON_HPD_1:
1713                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1714                 if (connected)
1715                         tmp &= ~DC_HPDx_INT_POLARITY;
1716                 else
1717                         tmp |= DC_HPDx_INT_POLARITY;
1718                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1719                 break;
1720         case RADEON_HPD_2:
1721                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1722                 if (connected)
1723                         tmp &= ~DC_HPDx_INT_POLARITY;
1724                 else
1725                         tmp |= DC_HPDx_INT_POLARITY;
1726                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1727                 break;
1728         case RADEON_HPD_3:
1729                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1730                 if (connected)
1731                         tmp &= ~DC_HPDx_INT_POLARITY;
1732                 else
1733                         tmp |= DC_HPDx_INT_POLARITY;
1734                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1735                 break;
1736         case RADEON_HPD_4:
1737                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1738                 if (connected)
1739                         tmp &= ~DC_HPDx_INT_POLARITY;
1740                 else
1741                         tmp |= DC_HPDx_INT_POLARITY;
1742                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1743                 break;
1744         case RADEON_HPD_5:
1745                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1746                 if (connected)
1747                         tmp &= ~DC_HPDx_INT_POLARITY;
1748                 else
1749                         tmp |= DC_HPDx_INT_POLARITY;
1750                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1751                 break;
1752         case RADEON_HPD_6:
1753                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1754                 if (connected)
1755                         tmp &= ~DC_HPDx_INT_POLARITY;
1756                 else
1757                         tmp |= DC_HPDx_INT_POLARITY;
1758                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1759                 break;
1760         default:
1761                 break;
1762         }
1763 }
1764
1765 /**
1766  * evergreen_hpd_init - hpd setup callback.
1767  *
1768  * @rdev: radeon_device pointer
1769  *
1770  * Setup the hpd pins used by the card (evergreen+).
1771  * Enable the pin, set the polarity, and enable the hpd interrupts.
1772  */
1773 void evergreen_hpd_init(struct radeon_device *rdev)
1774 {
1775         struct drm_device *dev = rdev->ddev;
1776         struct drm_connector *connector;
1777         unsigned enabled = 0;
1778         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1779                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1780
1781         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1782                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1783
1784                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1785                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1786                         /* don't try to enable hpd on eDP or LVDS; this avoids breaking
1787                          * the aux dp channel on imac and helps (but does not completely fix)
1788                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1789                          * it also avoids interrupt storms during dpms.
1790                          */
1791                         continue;
1792                 }
1793                 switch (radeon_connector->hpd.hpd) {
1794                 case RADEON_HPD_1:
1795                         WREG32(DC_HPD1_CONTROL, tmp);
1796                         break;
1797                 case RADEON_HPD_2:
1798                         WREG32(DC_HPD2_CONTROL, tmp);
1799                         break;
1800                 case RADEON_HPD_3:
1801                         WREG32(DC_HPD3_CONTROL, tmp);
1802                         break;
1803                 case RADEON_HPD_4:
1804                         WREG32(DC_HPD4_CONTROL, tmp);
1805                         break;
1806                 case RADEON_HPD_5:
1807                         WREG32(DC_HPD5_CONTROL, tmp);
1808                         break;
1809                 case RADEON_HPD_6:
1810                         WREG32(DC_HPD6_CONTROL, tmp);
1811                         break;
1812                 default:
1813                         break;
1814                 }
1815                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1816                 enabled |= 1 << radeon_connector->hpd.hpd;
1817         }
1818         radeon_irq_kms_enable_hpd(rdev, enabled);
1819 }
1820
1821 /**
1822  * evergreen_hpd_fini - hpd tear down callback.
1823  *
1824  * @rdev: radeon_device pointer
1825  *
1826  * Tear down the hpd pins used by the card (evergreen+).
1827  * Disable the hpd interrupts.
1828  */
1829 void evergreen_hpd_fini(struct radeon_device *rdev)
1830 {
1831         struct drm_device *dev = rdev->ddev;
1832         struct drm_connector *connector;
1833         unsigned disabled = 0;
1834
1835         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1836                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1837                 switch (radeon_connector->hpd.hpd) {
1838                 case RADEON_HPD_1:
1839                         WREG32(DC_HPD1_CONTROL, 0);
1840                         break;
1841                 case RADEON_HPD_2:
1842                         WREG32(DC_HPD2_CONTROL, 0);
1843                         break;
1844                 case RADEON_HPD_3:
1845                         WREG32(DC_HPD3_CONTROL, 0);
1846                         break;
1847                 case RADEON_HPD_4:
1848                         WREG32(DC_HPD4_CONTROL, 0);
1849                         break;
1850                 case RADEON_HPD_5:
1851                         WREG32(DC_HPD5_CONTROL, 0);
1852                         break;
1853                 case RADEON_HPD_6:
1854                         WREG32(DC_HPD6_CONTROL, 0);
1855                         break;
1856                 default:
1857                         break;
1858                 }
1859                 disabled |= 1 << radeon_connector->hpd.hpd;
1860         }
1861         radeon_irq_kms_disable_hpd(rdev, disabled);
1862 }
1863
1864 /* watermark setup */
1865
1866 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1867                                         struct radeon_crtc *radeon_crtc,
1868                                         struct drm_display_mode *mode,
1869                                         struct drm_display_mode *other_mode)
1870 {
1871         u32 tmp, buffer_alloc, i;
1872         u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1873         /*
1874          * Line Buffer Setup
1875          * There are 3 line buffers, each one shared by 2 display controllers.
1876          * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1877          * the display controllers.  The partitioning is done via one of four
1878          * preset allocations specified in bits 2:0:
1879          * first display controller
1880          *  0 - first half of lb (3840 * 2)
1881          *  1 - first 3/4 of lb (5760 * 2)
1882          *  2 - whole lb (7680 * 2), other crtc must be disabled
1883          *  3 - first 1/4 of lb (1920 * 2)
1884          * second display controller
1885          *  4 - second half of lb (3840 * 2)
1886          *  5 - second 3/4 of lb (5760 * 2)
1887          *  6 - whole lb (7680 * 2), other crtc must be disabled
1888          *  7 - last 1/4 of lb (1920 * 2)
1889          */
1890         /* this can get tricky if we have two large displays on a paired group
1891          * of crtcs.  Ideally for multiple large displays we'd assign them to
1892          * non-linked crtcs for maximum line buffer allocation.
1893          */
1894         if (radeon_crtc->base.enabled && mode) {
1895                 if (other_mode) {
1896                         tmp = 0; /* 1/2 */
1897                         buffer_alloc = 1;
1898                 } else {
1899                         tmp = 2; /* whole */
1900                         buffer_alloc = 2;
1901                 }
1902         } else {
1903                 tmp = 0;
1904                 buffer_alloc = 0;
1905         }
1906
1907         /* second controller of the pair uses second half of the lb */
1908         if (radeon_crtc->crtc_id % 2)
1909                 tmp += 4;
1910         WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1911
1912         if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1913                 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1914                        DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1915                 for (i = 0; i < rdev->usec_timeout; i++) {
1916                         if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1917                             DMIF_BUFFERS_ALLOCATED_COMPLETED)
1918                                 break;
1919                         udelay(1);
1920                 }
1921         }
1922
1923         if (radeon_crtc->base.enabled && mode) {
1924                 switch (tmp) {
1925                 case 0:
1926                 case 4:
1927                 default:
1928                         if (ASIC_IS_DCE5(rdev))
1929                                 return 4096 * 2;
1930                         else
1931                                 return 3840 * 2;
1932                 case 1:
1933                 case 5:
1934                         if (ASIC_IS_DCE5(rdev))
1935                                 return 6144 * 2;
1936                         else
1937                                 return 5760 * 2;
1938                 case 2:
1939                 case 6:
1940                         if (ASIC_IS_DCE5(rdev))
1941                                 return 8192 * 2;
1942                         else
1943                                 return 7680 * 2;
1944                 case 3:
1945                 case 7:
1946                         if (ASIC_IS_DCE5(rdev))
1947                                 return 2048 * 2;
1948                         else
1949                                 return 1920 * 2;
1950                 }
1951         }
1952
1953         /* controller not enabled, so no lb used */
1954         return 0;
1955 }
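
/*
 * For reference, the value returned above is the line buffer allocation for
 * this crtc in entries (roughly pixels): a single active display on the pair
 * gets the whole buffer (8192 * 2 on DCE5, 7680 * 2 on earlier DCE4 parts),
 * while two active displays on the pair get half each (4096 * 2 / 3840 * 2).
 */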
1956
1957 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1958 {
1959         u32 tmp = RREG32(MC_SHARED_CHMAP);
1960
1961         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1962         case 0:
1963         default:
1964                 return 1;
1965         case 1:
1966                 return 2;
1967         case 2:
1968                 return 4;
1969         case 3:
1970                 return 8;
1971         }
1972 }
1973
1974 struct evergreen_wm_params {
1975         u32 dram_channels; /* number of dram channels */
1976         u32 yclk;          /* bandwidth per dram data pin in kHz */
1977         u32 sclk;          /* engine clock in kHz */
1978         u32 disp_clk;      /* display clock in kHz */
1979         u32 src_width;     /* viewport width */
1980         u32 active_time;   /* active display time in ns */
1981         u32 blank_time;    /* blank time in ns */
1982         bool interlaced;    /* mode is interlaced */
1983         fixed20_12 vsc;    /* vertical scale ratio */
1984         u32 num_heads;     /* number of active crtcs */
1985         u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1986         u32 lb_size;       /* line buffer allocated to pipe */
1987         u32 vtaps;         /* vertical scaler taps */
1988 };
1989
1990 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1991 {
1992         /* Calculate DRAM Bandwidth and the part allocated to display. */
1993         fixed20_12 dram_efficiency; /* 0.7 */
1994         fixed20_12 yclk, dram_channels, bandwidth;
1995         fixed20_12 a;
1996
1997         a.full = dfixed_const(1000);
1998         yclk.full = dfixed_const(wm->yclk);
1999         yclk.full = dfixed_div(yclk, a);
2000         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2001         a.full = dfixed_const(10);
2002         dram_efficiency.full = dfixed_const(7);
2003         dram_efficiency.full = dfixed_div(dram_efficiency, a);
2004         bandwidth.full = dfixed_mul(dram_channels, yclk);
2005         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2006
2007         return dfixed_trunc(bandwidth);
2008 }
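
/*
 * In plain terms, the fixed point math above evaluates
 *
 *      dram_bandwidth = (yclk / 1000) * (dram_channels * 4) * 0.7
 *
 * e.g. yclk = 1000000 with 4 channels gives 1000 * 16 * 0.7 = 11200.  The
 * 0.7 factor is the assumed DRAM efficiency noted above.
 */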
2009
2010 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2011 {
2012         /* Calculate DRAM Bandwidth and the part allocated to display. */
2013         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2014         fixed20_12 yclk, dram_channels, bandwidth;
2015         fixed20_12 a;
2016
2017         a.full = dfixed_const(1000);
2018         yclk.full = dfixed_const(wm->yclk);
2019         yclk.full = dfixed_div(yclk, a);
2020         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2021         a.full = dfixed_const(10);
2022         disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2023         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2024         bandwidth.full = dfixed_mul(dram_channels, yclk);
2025         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2026
2027         return dfixed_trunc(bandwidth);
2028 }
2029
2030 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2031 {
2032         /* Calculate the display Data return Bandwidth */
2033         fixed20_12 return_efficiency; /* 0.8 */
2034         fixed20_12 sclk, bandwidth;
2035         fixed20_12 a;
2036
2037         a.full = dfixed_const(1000);
2038         sclk.full = dfixed_const(wm->sclk);
2039         sclk.full = dfixed_div(sclk, a);
2040         a.full = dfixed_const(10);
2041         return_efficiency.full = dfixed_const(8);
2042         return_efficiency.full = dfixed_div(return_efficiency, a);
2043         a.full = dfixed_const(32);
2044         bandwidth.full = dfixed_mul(a, sclk);
2045         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2046
2047         return dfixed_trunc(bandwidth);
2048 }
2049
2050 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2051 {
2052         /* Calculate the DMIF Request Bandwidth */
2053         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2054         fixed20_12 disp_clk, bandwidth;
2055         fixed20_12 a;
2056
2057         a.full = dfixed_const(1000);
2058         disp_clk.full = dfixed_const(wm->disp_clk);
2059         disp_clk.full = dfixed_div(disp_clk, a);
2060         a.full = dfixed_const(10);
2061         disp_clk_request_efficiency.full = dfixed_const(8);
2062         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2063         a.full = dfixed_const(32);
2064         bandwidth.full = dfixed_mul(a, disp_clk);
2065         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2066
2067         return dfixed_trunc(bandwidth);
2068 }
2069
2070 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2071 {
2072         /* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
2073         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2074         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2075         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2076
2077         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2078 }
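
/*
 * Summary of the three limits combined above: the bandwidth the display can
 * momentarily draw on is the minimum of
 *   - derated DRAM bandwidth:  0.7 * (dram_channels * 4) * yclk / 1000
 *   - data return bandwidth:   0.8 * 32 * sclk / 1000
 *   - DMIF request bandwidth:  0.8 * 32 * disp_clk / 1000
 * all in the same fixed point units used by the helpers above.
 */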
2079
2080 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2081 {
2082         /* Calculate the display mode Average Bandwidth
2083          * DisplayMode should contain the source and destination dimensions,
2084          * timing, etc.
2085          */
2086         fixed20_12 bpp;
2087         fixed20_12 line_time;
2088         fixed20_12 src_width;
2089         fixed20_12 bandwidth;
2090         fixed20_12 a;
2091
2092         a.full = dfixed_const(1000);
2093         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2094         line_time.full = dfixed_div(line_time, a);
2095         bpp.full = dfixed_const(wm->bytes_per_pixel);
2096         src_width.full = dfixed_const(wm->src_width);
2097         bandwidth.full = dfixed_mul(src_width, bpp);
2098         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2099         bandwidth.full = dfixed_div(bandwidth, line_time);
2100
2101         return dfixed_trunc(bandwidth);
2102 }
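
/*
 * Equivalently, the average bandwidth computed above is
 *
 *      src_width * bytes_per_pixel * vsc / ((active_time + blank_time) / 1000)
 *
 * i.e. the bytes fetched per line, scaled by the vertical scale ratio,
 * divided by the line time converted from ns to us.
 */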
2103
2104 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2105 {
2106         /* First calculate the latency in ns */
2107         u32 mc_latency = 2000; /* 2000 ns. */
2108         u32 available_bandwidth = evergreen_available_bandwidth(wm);
2109         u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2110         u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2111         u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2112         u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2113                 (wm->num_heads * cursor_line_pair_return_time);
2114         u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2115         u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2116         fixed20_12 a, b, c;
2117
2118         if (wm->num_heads == 0)
2119                 return 0;
2120
2121         a.full = dfixed_const(2);
2122         b.full = dfixed_const(1);
2123         if ((wm->vsc.full > a.full) ||
2124             ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2125             (wm->vtaps >= 5) ||
2126             ((wm->vsc.full >= a.full) && wm->interlaced))
2127                 max_src_lines_per_dst_line = 4;
2128         else
2129                 max_src_lines_per_dst_line = 2;
2130
2131         a.full = dfixed_const(available_bandwidth);
2132         b.full = dfixed_const(wm->num_heads);
2133         a.full = dfixed_div(a, b);
2134
2135         b.full = dfixed_const(1000);
2136         c.full = dfixed_const(wm->disp_clk);
2137         b.full = dfixed_div(c, b);
2138         c.full = dfixed_const(wm->bytes_per_pixel);
2139         b.full = dfixed_mul(b, c);
2140
2141         lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2142
2143         a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2144         b.full = dfixed_const(1000);
2145         c.full = dfixed_const(lb_fill_bw);
2146         b.full = dfixed_div(c, b);
2147         a.full = dfixed_div(a, b);
2148         line_fill_time = dfixed_trunc(a);
2149
2150         if (line_fill_time < wm->active_time)
2151                 return latency;
2152         else
2153                 return latency + (line_fill_time - wm->active_time);
2154
2155 }
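
/*
 * Putting the watermark above into one expression:
 *
 *      latency = mc_latency + dc_latency
 *              + (num_heads + 1) * worst_chunk_return_time
 *              + num_heads * cursor_line_pair_return_time
 *
 * and if the line buffer cannot be refilled within the active portion of a
 * line (line_fill_time >= active_time), the shortfall is added on top.
 */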
2156
2157 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2158 {
2159         if (evergreen_average_bandwidth(wm) <=
2160             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2161                 return true;
2162         else
2163                 return false;
2164 }
2165
2166 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2167 {
2168         if (evergreen_average_bandwidth(wm) <=
2169             (evergreen_available_bandwidth(wm) / wm->num_heads))
2170                 return true;
2171         else
2172                 return false;
2173 }
2174
2175 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2176 {
2177         u32 lb_partitions = wm->lb_size / wm->src_width;
2178         u32 line_time = wm->active_time + wm->blank_time;
2179         u32 latency_tolerant_lines;
2180         u32 latency_hiding;
2181         fixed20_12 a;
2182
2183         a.full = dfixed_const(1);
2184         if (wm->vsc.full > a.full)
2185                 latency_tolerant_lines = 1;
2186         else {
2187                 if (lb_partitions <= (wm->vtaps + 1))
2188                         latency_tolerant_lines = 1;
2189                 else
2190                         latency_tolerant_lines = 2;
2191         }
2192
2193         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2194
2195         if (evergreen_latency_watermark(wm) <= latency_hiding)
2196                 return true;
2197         else
2198                 return false;
2199 }
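
/*
 * The check above passes when the latency watermark fits within
 *
 *      latency_tolerant_lines * line_time + blank_time
 *
 * where only one line of tolerance is assumed if the mode is vertically
 * downscaled (vsc > 1) or the line buffer holds no more than vtaps + 1
 * lines, and two lines otherwise.
 */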
2200
2201 static void evergreen_program_watermarks(struct radeon_device *rdev,
2202                                          struct radeon_crtc *radeon_crtc,
2203                                          u32 lb_size, u32 num_heads)
2204 {
2205         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2206         struct evergreen_wm_params wm_low, wm_high;
2207         u32 dram_channels;
2208         u32 pixel_period;
2209         u32 line_time = 0;
2210         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2211         u32 priority_a_mark = 0, priority_b_mark = 0;
2212         u32 priority_a_cnt = PRIORITY_OFF;
2213         u32 priority_b_cnt = PRIORITY_OFF;
2214         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2215         u32 tmp, arb_control3;
2216         fixed20_12 a, b, c;
2217
2218         if (radeon_crtc->base.enabled && num_heads && mode) {
2219                 pixel_period = 1000000 / (u32)mode->clock;
2220                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2221                 priority_a_cnt = 0;
2222                 priority_b_cnt = 0;
2223                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2224
2225                 /* watermark for high clocks */
2226                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2227                         wm_high.yclk =
2228                                 radeon_dpm_get_mclk(rdev, false) * 10;
2229                         wm_high.sclk =
2230                                 radeon_dpm_get_sclk(rdev, false) * 10;
2231                 } else {
2232                         wm_high.yclk = rdev->pm.current_mclk * 10;
2233                         wm_high.sclk = rdev->pm.current_sclk * 10;
2234                 }
2235
2236                 wm_high.disp_clk = mode->clock;
2237                 wm_high.src_width = mode->crtc_hdisplay;
2238                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2239                 wm_high.blank_time = line_time - wm_high.active_time;
2240                 wm_high.interlaced = false;
2241                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2242                         wm_high.interlaced = true;
2243                 wm_high.vsc = radeon_crtc->vsc;
2244                 wm_high.vtaps = 1;
2245                 if (radeon_crtc->rmx_type != RMX_OFF)
2246                         wm_high.vtaps = 2;
2247                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2248                 wm_high.lb_size = lb_size;
2249                 wm_high.dram_channels = dram_channels;
2250                 wm_high.num_heads = num_heads;
2251
2252                 /* watermark for low clocks */
2253                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2254                         wm_low.yclk =
2255                                 radeon_dpm_get_mclk(rdev, true) * 10;
2256                         wm_low.sclk =
2257                                 radeon_dpm_get_sclk(rdev, true) * 10;
2258                 } else {
2259                         wm_low.yclk = rdev->pm.current_mclk * 10;
2260                         wm_low.sclk = rdev->pm.current_sclk * 10;
2261                 }
2262
2263                 wm_low.disp_clk = mode->clock;
2264                 wm_low.src_width = mode->crtc_hdisplay;
2265                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2266                 wm_low.blank_time = line_time - wm_low.active_time;
2267                 wm_low.interlaced = false;
2268                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2269                         wm_low.interlaced = true;
2270                 wm_low.vsc = radeon_crtc->vsc;
2271                 wm_low.vtaps = 1;
2272                 if (radeon_crtc->rmx_type != RMX_OFF)
2273                         wm_low.vtaps = 2;
2274                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2275                 wm_low.lb_size = lb_size;
2276                 wm_low.dram_channels = dram_channels;
2277                 wm_low.num_heads = num_heads;
2278
2279                 /* set for high clocks */
2280                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2281                 /* set for low clocks */
2282                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2283
2284                 /* possibly force display priority to high */
2285                 /* should really do this at mode validation time... */
2286                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2287                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2288                     !evergreen_check_latency_hiding(&wm_high) ||
2289                     (rdev->disp_priority == 2)) {
2290                         DRM_DEBUG_KMS("force priority a to high\n");
2291                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2292                 }
2293                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2294                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2295                     !evergreen_check_latency_hiding(&wm_low) ||
2296                     (rdev->disp_priority == 2)) {
2297                         DRM_DEBUG_KMS("force priority b to high\n");
2298                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2299                 }
2300
2301                 a.full = dfixed_const(1000);
2302                 b.full = dfixed_const(mode->clock);
2303                 b.full = dfixed_div(b, a);
2304                 c.full = dfixed_const(latency_watermark_a);
2305                 c.full = dfixed_mul(c, b);
2306                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2307                 c.full = dfixed_div(c, a);
2308                 a.full = dfixed_const(16);
2309                 c.full = dfixed_div(c, a);
2310                 priority_a_mark = dfixed_trunc(c);
2311                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2312
2313                 a.full = dfixed_const(1000);
2314                 b.full = dfixed_const(mode->clock);
2315                 b.full = dfixed_div(b, a);
2316                 c.full = dfixed_const(latency_watermark_b);
2317                 c.full = dfixed_mul(c, b);
2318                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2319                 c.full = dfixed_div(c, a);
2320                 a.full = dfixed_const(16);
2321                 c.full = dfixed_div(c, a);
2322                 priority_b_mark = dfixed_trunc(c);
2323                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2324         }
2325
2326         /* select wm A */
2327         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2328         tmp = arb_control3;
2329         tmp &= ~LATENCY_WATERMARK_MASK(3);
2330         tmp |= LATENCY_WATERMARK_MASK(1);
2331         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2332         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2333                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2334                 LATENCY_HIGH_WATERMARK(line_time)));
2335         /* select wm B */
2336         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2337         tmp &= ~LATENCY_WATERMARK_MASK(3);
2338         tmp |= LATENCY_WATERMARK_MASK(2);
2339         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2340         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2341                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2342                 LATENCY_HIGH_WATERMARK(line_time)));
2343         /* restore original selection */
2344         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2345
2346         /* write the priority marks */
2347         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2348         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2349
2350         /* save values for DPM */
2351         radeon_crtc->line_time = line_time;
2352         radeon_crtc->wm_high = latency_watermark_a;
2353         radeon_crtc->wm_low = latency_watermark_b;
2354 }
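
/*
 * Derivation of the priority mark programmed above: with the watermark in ns
 * and mode->clock in kHz, the fixed point math reduces to
 *
 *      priority_mark = watermark * (mode->clock / 1000) * hsc / 1000 / 16
 *
 * i.e. the number of pixels scanned out during the latency window, in units
 * of 16 pixels, before being masked into the PRIORITY_A/B_CNT registers.
 */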
2355
2356 /**
2357  * evergreen_bandwidth_update - update display watermarks callback.
2358  *
2359  * @rdev: radeon_device pointer
2360  *
2361  * Update the display watermarks based on the requested mode(s)
2362  * (evergreen+).
2363  */
2364 void evergreen_bandwidth_update(struct radeon_device *rdev)
2365 {
2366         struct drm_display_mode *mode0 = NULL;
2367         struct drm_display_mode *mode1 = NULL;
2368         u32 num_heads = 0, lb_size;
2369         int i;
2370
2371         radeon_update_display_priority(rdev);
2372
2373         for (i = 0; i < rdev->num_crtc; i++) {
2374                 if (rdev->mode_info.crtcs[i]->base.enabled)
2375                         num_heads++;
2376         }
2377         for (i = 0; i < rdev->num_crtc; i += 2) {
2378                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2379                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2380                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2381                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2382                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2383                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2384         }
2385 }
2386
2387 /**
2388  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2389  *
2390  * @rdev: radeon_device pointer
2391  *
2392  * Wait for the MC (memory controller) to be idle.
2393  * (evergreen+).
2394  * Returns 0 if the MC is idle, -1 if not.
2395  */
2396 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2397 {
2398         unsigned i;
2399         u32 tmp;
2400
2401         for (i = 0; i < rdev->usec_timeout; i++) {
2402                 /* read the MC busy bits from SRBM_STATUS */
2403                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2404                 if (!tmp)
2405                         return 0;
2406                 udelay(1);
2407         }
2408         return -1;
2409 }
2410
2411 /*
2412  * GART
2413  */
2414 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2415 {
2416         unsigned i;
2417         u32 tmp;
2418
2419         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2420
2421         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2422         for (i = 0; i < rdev->usec_timeout; i++) {
2423                 /* read the VM context0 request response */
2424                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2425                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2426                 if (tmp == 2) {
2427                         printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
2428                         return;
2429                 }
2430                 if (tmp) {
2431                         return;
2432                 }
2433                 udelay(1);
2434         }
2435 }
2436
2437 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2438 {
2439         u32 tmp;
2440         int r;
2441
2442         if (rdev->gart.robj == NULL) {
2443                 dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2444                 return -EINVAL;
2445         }
2446         r = radeon_gart_table_vram_pin(rdev);
2447         if (r)
2448                 return r;
2449         radeon_gart_restore(rdev);
2450         /* Setup L2 cache */
2451         WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2452                                 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2453                                 EFFECTIVE_L2_QUEUE_SIZE(7));
2454         WREG32(VM_L2_CNTL2, 0);
2455         WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2456         /* Setup TLB control */
2457         tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2458                 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2459                 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2460                 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2461         if (rdev->flags & RADEON_IS_IGP) {
2462                 WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2463                 WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2464                 WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2465         } else {
2466                 WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2467                 WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2468                 WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2469                 if ((rdev->family == CHIP_JUNIPER) ||
2470                     (rdev->family == CHIP_CYPRESS) ||
2471                     (rdev->family == CHIP_HEMLOCK) ||
2472                     (rdev->family == CHIP_BARTS))
2473                         WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2474         }
2475         WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2476         WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2477         WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2478         WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2479         WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2480         WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2481         WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2482         WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2483                                 RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2484         WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2485                         (u32)(rdev->dummy_page.addr >> 12));
2486         WREG32(VM_CONTEXT1_CNTL, 0);
2487
2488         evergreen_pcie_gart_tlb_flush(rdev);
2489         DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2490                  (unsigned)(rdev->mc.gtt_size >> 20),
2491                  (unsigned long long)rdev->gart.table_addr);
2492         rdev->gart.ready = true;
2493         return 0;
2494 }
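
/*
 * Note: only VM context0 is used here; it maps the GTT aperture
 * (gtt_start .. gtt_end) through the table at gart.table_addr, with
 * protection faults redirected to the dummy page.  Context1 is explicitly
 * left disabled.
 */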
2495
2496 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2497 {
2498         u32 tmp;
2499
2500         /* Disable all tables */
2501         WREG32(VM_CONTEXT0_CNTL, 0);
2502         WREG32(VM_CONTEXT1_CNTL, 0);
2503
2504         /* Setup L2 cache */
2505         WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2506                                 EFFECTIVE_L2_QUEUE_SIZE(7));
2507         WREG32(VM_L2_CNTL2, 0);
2508         WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2509         /* Setup TLB control */
2510         tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2511         WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2512         WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2513         WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2514         WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2515         WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2516         WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2517         WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2518         radeon_gart_table_vram_unpin(rdev);
2519 }
2520
2521 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2522 {
2523         evergreen_pcie_gart_disable(rdev);
2524         radeon_gart_table_vram_free(rdev);
2525         radeon_gart_fini(rdev);
2526 }
2527
2528
2529 static void evergreen_agp_enable(struct radeon_device *rdev)
2530 {
2531         u32 tmp;
2532
2533         /* Setup L2 cache */
2534         WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2535                                 ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2536                                 EFFECTIVE_L2_QUEUE_SIZE(7));
2537         WREG32(VM_L2_CNTL2, 0);
2538         WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2539         /* Setup TLB control */
2540         tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2541                 SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2542                 SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2543                 EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2544         WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2545         WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2546         WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2547         WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2548         WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2549         WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2550         WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2551         WREG32(VM_CONTEXT0_CNTL, 0);
2552         WREG32(VM_CONTEXT1_CNTL, 0);
2553 }
2554
2555 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2556 {
2557         u32 crtc_enabled, tmp, frame_count, blackout;
2558         int i, j;
2559
2560         if (!ASIC_IS_NODCE(rdev)) {
2561                 save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2562                 save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2563
2564                 /* disable VGA render */
2565                 WREG32(VGA_RENDER_CONTROL, 0);
2566         }
2567         /* blank the display controllers */
2568         for (i = 0; i < rdev->num_crtc; i++) {
2569                 crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2570                 if (crtc_enabled) {
2571                         save->crtc_enabled[i] = true;
2572                         if (ASIC_IS_DCE6(rdev)) {
2573                                 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2574                                 if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2575                                         radeon_wait_for_vblank(rdev, i);
2576                                         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2577                                         tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2578                                         WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2579                                 }
2580                         } else {
2581                                 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2582                                 if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2583                                         radeon_wait_for_vblank(rdev, i);
2584                                         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2585                                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2586                                         WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2587                                         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2588                                 }
2589                         }
2590                         /* wait for the next frame */
2591                         frame_count = radeon_get_vblank_counter(rdev, i);
2592                         for (j = 0; j < rdev->usec_timeout; j++) {
2593                                 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2594                                         break;
2595                                 udelay(1);
2596                         }
2597
2598                         /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2599                         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2600                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2601                         tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2602                         WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2603                         WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2604                         save->crtc_enabled[i] = false;
2605                         /* ***** */
2606                 } else {
2607                         save->crtc_enabled[i] = false;
2608                 }
2609         }
2610
2611         radeon_mc_wait_for_idle(rdev);
2612
2613         blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2614         if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2615                 /* Block CPU access */
2616                 WREG32(BIF_FB_EN, 0);
2617                 /* blackout the MC */
2618                 blackout &= ~BLACKOUT_MODE_MASK;
2619                 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2620         }
2621         /* wait for the MC to settle */
2622         udelay(100);
2623
2624         /* lock double buffered regs */
2625         for (i = 0; i < rdev->num_crtc; i++) {
2626                 if (save->crtc_enabled[i]) {
2627                         tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2628                         if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2629                                 tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2630                                 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2631                         }
2632                         tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2633                         if (!(tmp & 1)) {
2634                                 tmp |= 1;
2635                                 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2636                         }
2637                 }
2638         }
2639 }
2640
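/**
 * evergreen_mc_resume - restore the display controllers after an MC blackout
 *
 * @rdev: radeon_device pointer
 * @save: state saved by evergreen_mc_stop
 *
 * Point the CRTC and VGA surfaces back at the start of VRAM, release the
 * double-buffered register locks, end the MC blackout, re-enable CPU
 * framebuffer access and unblank the CRTCs that were active before the
 * blackout (evergreen and newer).
 */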
2641 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2642 {
2643         u32 tmp, frame_count;
2644         int i, j;
2645
2646         /* update crtc base addresses */
2647         for (i = 0; i < rdev->num_crtc; i++) {
2648                 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2649                        upper_32_bits(rdev->mc.vram_start));
2650                 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2651                        upper_32_bits(rdev->mc.vram_start));
2652                 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2653                        (u32)rdev->mc.vram_start);
2654                 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2655                        (u32)rdev->mc.vram_start);
2656         }
2657
2658         if (!ASIC_IS_NODCE(rdev)) {
2659                 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2660                 WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2661         }
2662
2663         /* unlock regs and wait for update */
2664         for (i = 0; i < rdev->num_crtc; i++) {
2665                 if (save->crtc_enabled[i]) {
2666                         tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2667                         if ((tmp & 0x3) != 0) {
2668                                 tmp &= ~0x3;
2669                                 WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2670                         }
2671                         tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2672                         if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2673                                 tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2674                                 WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2675                         }
2676                         tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2677                         if (tmp & 1) {
2678                                 tmp &= ~1;
2679                                 WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2680                         }
2681                         for (j = 0; j < rdev->usec_timeout; j++) {
2682                                 tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2683                                 if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2684                                         break;
2685                                 udelay(1);
2686                         }
2687                 }
2688         }
2689
2690         /* unblackout the MC */
2691         tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2692         tmp &= ~BLACKOUT_MODE_MASK;
2693         WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2694         /* allow CPU access */
2695         WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2696
2697         for (i = 0; i < rdev->num_crtc; i++) {
2698                 if (save->crtc_enabled[i]) {
2699                         if (ASIC_IS_DCE6(rdev)) {
2700                                 tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2701                                 tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2702                                 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2703                                 WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2704                                 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2705                         } else {
2706                                 tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2707                                 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2708                                 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2709                                 WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2710                                 WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2711                         }
2712                         /* wait for the next frame */
2713                         frame_count = radeon_get_vblank_counter(rdev, i);
2714                         for (j = 0; j < rdev->usec_timeout; j++) {
2715                                 if (radeon_get_vblank_counter(rdev, i) != frame_count)
2716                                         break;
2717                                 udelay(1);
2718                         }
2719                 }
2720         }
2721         if (!ASIC_IS_NODCE(rdev)) {
2722                 /* Unlock vga access */
2723                 WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2724                 mdelay(1);
2725                 WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2726         }
2727 }
2728
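/**
 * evergreen_mc_program - program the GPU memory controller
 *
 * @rdev: radeon_device pointer
 *
 * Set the location of VRAM and the AGP/system apertures in the GPU's
 * address space. The displays are stopped around the update
 * (evergreen_mc_stop/evergreen_mc_resume) so the MC can be reprogrammed
 * safely, and the VGA renderer is disabled afterwards so it cannot
 * scribble over our objects in VRAM.
 */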
2729 void evergreen_mc_program(struct radeon_device *rdev)
2730 {
2731         struct evergreen_mc_save save;
2732         u32 tmp;
2733         int i, j;
2734
2735         /* Initialize HDP */
2736         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2737                 WREG32((0x2c14 + j), 0x00000000);
2738                 WREG32((0x2c18 + j), 0x00000000);
2739                 WREG32((0x2c1c + j), 0x00000000);
2740                 WREG32((0x2c20 + j), 0x00000000);
2741                 WREG32((0x2c24 + j), 0x00000000);
2742         }
2743         WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2744
2745         evergreen_mc_stop(rdev, &save);
2746         if (evergreen_mc_wait_for_idle(rdev)) {
2747                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2748         }
2749         /* Lock out access through the VGA aperture */
2750         WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2751         /* Update configuration */
2752         if (rdev->flags & RADEON_IS_AGP) {
2753                 if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2754                         /* VRAM before AGP */
2755                         WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2756                                 rdev->mc.vram_start >> 12);
2757                         WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2758                                 rdev->mc.gtt_end >> 12);
2759                 } else {
2760                         /* VRAM after AGP */
2761                         WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2762                                 rdev->mc.gtt_start >> 12);
2763                         WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2764                                 rdev->mc.vram_end >> 12);
2765                 }
2766         } else {
2767                 WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2768                         rdev->mc.vram_start >> 12);
2769                 WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2770                         rdev->mc.vram_end >> 12);
2771         }
2772         WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2773         /* llano/ontario only */
2774         if ((rdev->family == CHIP_PALM) ||
2775             (rdev->family == CHIP_SUMO) ||
2776             (rdev->family == CHIP_SUMO2)) {
2777                 tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2778                 tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2779                 tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2780                 WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2781         }
2782         tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2783         tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2784         WREG32(MC_VM_FB_LOCATION, tmp);
2785         WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2786         WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2787         WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2788         if (rdev->flags & RADEON_IS_AGP) {
2789                 WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2790                 WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2791                 WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2792         } else {
2793                 WREG32(MC_VM_AGP_BASE, 0);
2794                 WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2795                 WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2796         }
2797         if (evergreen_mc_wait_for_idle(rdev)) {
2798                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2799         }
2800         evergreen_mc_resume(rdev, &save);
2801         /* we need to own VRAM, so turn off the VGA renderer here
2802          * to stop it overwriting our objects */
2803         rv515_vga_render_disable(rdev);
2804 }
2805
2806 /*
2807  * CP.
2808  */
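/**
 * evergreen_ring_ib_execute - schedule an indirect buffer on the gfx ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Emit the packets needed to execute @ib on the gfx ring: switch the CP to
 * DX10/11 mode, optionally record the expected read pointer (either in the
 * rptr save register or via a MEM_WRITE to the writeback buffer), then emit
 * the INDIRECT_BUFFER packet pointing at the IB.
 */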
2809 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2810 {
2811         struct radeon_ring *ring = &rdev->ring[ib->ring];
2812         u32 next_rptr;
2813
2814         /* set to DX10/11 mode */
2815         radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2816         radeon_ring_write(ring, 1);
2817
2818         if (ring->rptr_save_reg) {
2819                 next_rptr = ring->wptr + 3 + 4;
2820                 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2821                 radeon_ring_write(ring, ((ring->rptr_save_reg - 
2822                                           PACKET3_SET_CONFIG_REG_START) >> 2));
2823                 radeon_ring_write(ring, next_rptr);
2824         } else if (rdev->wb.enabled) {
2825                 next_rptr = ring->wptr + 5 + 4;
2826                 radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2827                 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2828                 radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2829                 radeon_ring_write(ring, next_rptr);
2830                 radeon_ring_write(ring, 0);
2831         }
2832
2833         radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2834         radeon_ring_write(ring,
2835 #ifdef __BIG_ENDIAN
2836                           (2 << 0) |
2837 #endif
2838                           (ib->gpu_addr & 0xFFFFFFFC));
2839         radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2840         radeon_ring_write(ring, ib->length_dw);
2841 }
2842
2843
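/**
 * evergreen_cp_load_microcode - load the CP microcode
 *
 * @rdev: radeon_device pointer
 *
 * Stop the CP and load the PFP and ME microcode images fetched by the
 * firmware loader into the CP ucode RAMs.
 * Returns 0 on success, -EINVAL if the firmware is not loaded.
 */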
2844 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2845 {
2846         const __be32 *fw_data;
2847         int i;
2848
2849         if (!rdev->me_fw || !rdev->pfp_fw)
2850                 return -EINVAL;
2851
2852         r700_cp_stop(rdev);
2853         WREG32(CP_RB_CNTL,
2854 #ifdef __BIG_ENDIAN
2855                BUF_SWAP_32BIT |
2856 #endif
2857                RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2858
2859         fw_data = (const __be32 *)rdev->pfp_fw->data;
2860         WREG32(CP_PFP_UCODE_ADDR, 0);
2861         for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2862                 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2863         WREG32(CP_PFP_UCODE_ADDR, 0);
2864
2865         fw_data = (const __be32 *)rdev->me_fw->data;
2866         WREG32(CP_ME_RAM_WADDR, 0);
2867         for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2868                 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2869
2870         WREG32(CP_PFP_UCODE_ADDR, 0);
2871         WREG32(CP_ME_RAM_WADDR, 0);
2872         WREG32(CP_ME_RAM_RADDR, 0);
2873         return 0;
2874 }
2875
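/**
 * evergreen_cp_start - initialize the CP micro engine
 *
 * @rdev: radeon_device pointer
 *
 * Emit the ME_INITIALIZE packet, un-halt the CP and load the clear context
 * state plus a few initial register defaults on the gfx ring.
 * Returns 0 on success, an error code if the ring cannot be locked.
 */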
2876 static int evergreen_cp_start(struct radeon_device *rdev)
2877 {
2878         struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2879         int r, i;
2880         uint32_t cp_me;
2881
2882         r = radeon_ring_lock(rdev, ring, 7);
2883         if (r) {
2884                 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2885                 return r;
2886         }
2887         radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2888         radeon_ring_write(ring, 0x1);
2889         radeon_ring_write(ring, 0x0);
2890         radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2891         radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2892         radeon_ring_write(ring, 0);
2893         radeon_ring_write(ring, 0);
2894         radeon_ring_unlock_commit(rdev, ring);
2895
2896         cp_me = 0xff;
2897         WREG32(CP_ME_CNTL, cp_me);
2898
2899         r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2900         if (r) {
2901                 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2902                 return r;
2903         }
2904
2905         /* setup clear context state */
2906         radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2907         radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2908
2909         for (i = 0; i < evergreen_default_size; i++)
2910                 radeon_ring_write(ring, evergreen_default_state[i]);
2911
2912         radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2913         radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2914
2915         /* set clear context state */
2916         radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2917         radeon_ring_write(ring, 0);
2918
2919         /* SQ_VTX_BASE_VTX_LOC */
2920         radeon_ring_write(ring, 0xc0026f00);
2921         radeon_ring_write(ring, 0x00000000);
2922         radeon_ring_write(ring, 0x00000000);
2923         radeon_ring_write(ring, 0x00000000);
2924
2925         /* Clear consts */
2926         radeon_ring_write(ring, 0xc0036f00);
2927         radeon_ring_write(ring, 0x00000bc4);
2928         radeon_ring_write(ring, 0xffffffff);
2929         radeon_ring_write(ring, 0xffffffff);
2930         radeon_ring_write(ring, 0xffffffff);
2931
2932         radeon_ring_write(ring, 0xc0026900);
2933         radeon_ring_write(ring, 0x00000316);
2934         radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2935         radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
2936
2937         radeon_ring_unlock_commit(rdev, ring);
2938
2939         return 0;
2940 }
2941
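/**
 * evergreen_cp_resume - set up and start the CP ring buffer
 *
 * @rdev: radeon_device pointer
 *
 * Soft-reset the CP, program the gfx ring buffer size, read/write pointers,
 * writeback addresses and ring base, start the CP micro engine and run a
 * ring test.
 * Returns 0 on success, an error code on failure.
 */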
2942 static int evergreen_cp_resume(struct radeon_device *rdev)
2943 {
2944         struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2945         u32 tmp;
2946         u32 rb_bufsz;
2947         int r;
2948
2949         /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2950         WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2951                                  SOFT_RESET_PA |
2952                                  SOFT_RESET_SH |
2953                                  SOFT_RESET_VGT |
2954                                  SOFT_RESET_SPI |
2955                                  SOFT_RESET_SX));
2956         RREG32(GRBM_SOFT_RESET);
2957         mdelay(15);
2958         WREG32(GRBM_SOFT_RESET, 0);
2959         RREG32(GRBM_SOFT_RESET);
2960
2961         /* Set ring buffer size */
2962         rb_bufsz = order_base_2(ring->ring_size / 8);
2963         tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2964 #ifdef __BIG_ENDIAN
2965         tmp |= BUF_SWAP_32BIT;
2966 #endif
2967         WREG32(CP_RB_CNTL, tmp);
2968         WREG32(CP_SEM_WAIT_TIMER, 0x0);
2969         WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2970
2971         /* Set the write pointer delay */
2972         WREG32(CP_RB_WPTR_DELAY, 0);
2973
2974         /* Initialize the ring buffer's read and write pointers */
2975         WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2976         WREG32(CP_RB_RPTR_WR, 0);
2977         ring->wptr = 0;
2978         WREG32(CP_RB_WPTR, ring->wptr);
2979
2980         /* set the wb address whether it's enabled or not */
2981         WREG32(CP_RB_RPTR_ADDR,
2982                ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2983         WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2984         WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2985
2986         if (rdev->wb.enabled) {
2987                 WREG32(SCRATCH_UMSK, 0xff);
2988         } else {
2989                 tmp |= RB_NO_UPDATE;
2990                 WREG32(SCRATCH_UMSK, 0);
2991         }
2992
2993         mdelay(1);
2994         WREG32(CP_RB_CNTL, tmp);
2995
2996         WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2997         WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2998
2999         ring->rptr = RREG32(CP_RB_RPTR);
3000
3001         evergreen_cp_start(rdev);
3002         ring->ready = true;
3003         r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3004         if (r) {
3005                 ring->ready = false;
3006                 return r;
3007         }
3008         return 0;
3009 }
3010
3011 /*
3012  * Core functions
3013  */
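/**
 * evergreen_gpu_init - set up the gfx engine
 *
 * @rdev: radeon_device pointer
 *
 * Fill in the per-family shader/backend configuration, build the tiling
 * config dword, remap the render backends around any disabled RBs and
 * program the hardware defaults for the 3D engine (SQ/SX/PA/VGT/CB setup).
 */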
3014 static void evergreen_gpu_init(struct radeon_device *rdev)
3015 {
3016         u32 gb_addr_config;
3017         u32 mc_shared_chmap, mc_arb_ramcfg;
3018         u32 sx_debug_1;
3019         u32 smx_dc_ctl0;
3020         u32 sq_config;
3021         u32 sq_lds_resource_mgmt;
3022         u32 sq_gpr_resource_mgmt_1;
3023         u32 sq_gpr_resource_mgmt_2;
3024         u32 sq_gpr_resource_mgmt_3;
3025         u32 sq_thread_resource_mgmt;
3026         u32 sq_thread_resource_mgmt_2;
3027         u32 sq_stack_resource_mgmt_1;
3028         u32 sq_stack_resource_mgmt_2;
3029         u32 sq_stack_resource_mgmt_3;
3030         u32 vgt_cache_invalidation;
3031         u32 hdp_host_path_cntl, tmp;
3032         u32 disabled_rb_mask;
3033         int i, j, num_shader_engines, ps_thread_count;
3034
3035         switch (rdev->family) {
3036         case CHIP_CYPRESS:
3037         case CHIP_HEMLOCK:
3038                 rdev->config.evergreen.num_ses = 2;
3039                 rdev->config.evergreen.max_pipes = 4;
3040                 rdev->config.evergreen.max_tile_pipes = 8;
3041                 rdev->config.evergreen.max_simds = 10;
3042                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3043                 rdev->config.evergreen.max_gprs = 256;
3044                 rdev->config.evergreen.max_threads = 248;
3045                 rdev->config.evergreen.max_gs_threads = 32;
3046                 rdev->config.evergreen.max_stack_entries = 512;
3047                 rdev->config.evergreen.sx_num_of_sets = 4;
3048                 rdev->config.evergreen.sx_max_export_size = 256;
3049                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3050                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3051                 rdev->config.evergreen.max_hw_contexts = 8;
3052                 rdev->config.evergreen.sq_num_cf_insts = 2;
3053
3054                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3055                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3056                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3057                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3058                 break;
3059         case CHIP_JUNIPER:
3060                 rdev->config.evergreen.num_ses = 1;
3061                 rdev->config.evergreen.max_pipes = 4;
3062                 rdev->config.evergreen.max_tile_pipes = 4;
3063                 rdev->config.evergreen.max_simds = 10;
3064                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3065                 rdev->config.evergreen.max_gprs = 256;
3066                 rdev->config.evergreen.max_threads = 248;
3067                 rdev->config.evergreen.max_gs_threads = 32;
3068                 rdev->config.evergreen.max_stack_entries = 512;
3069                 rdev->config.evergreen.sx_num_of_sets = 4;
3070                 rdev->config.evergreen.sx_max_export_size = 256;
3071                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3072                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3073                 rdev->config.evergreen.max_hw_contexts = 8;
3074                 rdev->config.evergreen.sq_num_cf_insts = 2;
3075
3076                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3077                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3078                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3079                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3080                 break;
3081         case CHIP_REDWOOD:
3082                 rdev->config.evergreen.num_ses = 1;
3083                 rdev->config.evergreen.max_pipes = 4;
3084                 rdev->config.evergreen.max_tile_pipes = 4;
3085                 rdev->config.evergreen.max_simds = 5;
3086                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3087                 rdev->config.evergreen.max_gprs = 256;
3088                 rdev->config.evergreen.max_threads = 248;
3089                 rdev->config.evergreen.max_gs_threads = 32;
3090                 rdev->config.evergreen.max_stack_entries = 256;
3091                 rdev->config.evergreen.sx_num_of_sets = 4;
3092                 rdev->config.evergreen.sx_max_export_size = 256;
3093                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3094                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3095                 rdev->config.evergreen.max_hw_contexts = 8;
3096                 rdev->config.evergreen.sq_num_cf_insts = 2;
3097
3098                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3099                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3100                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3101                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3102                 break;
3103         case CHIP_CEDAR:
3104         default:
3105                 rdev->config.evergreen.num_ses = 1;
3106                 rdev->config.evergreen.max_pipes = 2;
3107                 rdev->config.evergreen.max_tile_pipes = 2;
3108                 rdev->config.evergreen.max_simds = 2;
3109                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3110                 rdev->config.evergreen.max_gprs = 256;
3111                 rdev->config.evergreen.max_threads = 192;
3112                 rdev->config.evergreen.max_gs_threads = 16;
3113                 rdev->config.evergreen.max_stack_entries = 256;
3114                 rdev->config.evergreen.sx_num_of_sets = 4;
3115                 rdev->config.evergreen.sx_max_export_size = 128;
3116                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3117                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3118                 rdev->config.evergreen.max_hw_contexts = 4;
3119                 rdev->config.evergreen.sq_num_cf_insts = 1;
3120
3121                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3122                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3123                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3124                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3125                 break;
3126         case CHIP_PALM:
3127                 rdev->config.evergreen.num_ses = 1;
3128                 rdev->config.evergreen.max_pipes = 2;
3129                 rdev->config.evergreen.max_tile_pipes = 2;
3130                 rdev->config.evergreen.max_simds = 2;
3131                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3132                 rdev->config.evergreen.max_gprs = 256;
3133                 rdev->config.evergreen.max_threads = 192;
3134                 rdev->config.evergreen.max_gs_threads = 16;
3135                 rdev->config.evergreen.max_stack_entries = 256;
3136                 rdev->config.evergreen.sx_num_of_sets = 4;
3137                 rdev->config.evergreen.sx_max_export_size = 128;
3138                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3139                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3140                 rdev->config.evergreen.max_hw_contexts = 4;
3141                 rdev->config.evergreen.sq_num_cf_insts = 1;
3142
3143                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3144                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3145                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3146                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3147                 break;
3148         case CHIP_SUMO:
3149                 rdev->config.evergreen.num_ses = 1;
3150                 rdev->config.evergreen.max_pipes = 4;
3151                 rdev->config.evergreen.max_tile_pipes = 4;
3152                 if (rdev->pdev->device == 0x9648)
3153                         rdev->config.evergreen.max_simds = 3;
3154                 else if ((rdev->pdev->device == 0x9647) ||
3155                          (rdev->pdev->device == 0x964a))
3156                         rdev->config.evergreen.max_simds = 4;
3157                 else
3158                         rdev->config.evergreen.max_simds = 5;
3159                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3160                 rdev->config.evergreen.max_gprs = 256;
3161                 rdev->config.evergreen.max_threads = 248;
3162                 rdev->config.evergreen.max_gs_threads = 32;
3163                 rdev->config.evergreen.max_stack_entries = 256;
3164                 rdev->config.evergreen.sx_num_of_sets = 4;
3165                 rdev->config.evergreen.sx_max_export_size = 256;
3166                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3167                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3168                 rdev->config.evergreen.max_hw_contexts = 8;
3169                 rdev->config.evergreen.sq_num_cf_insts = 2;
3170
3171                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3172                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3173                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3174                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3175                 break;
3176         case CHIP_SUMO2:
3177                 rdev->config.evergreen.num_ses = 1;
3178                 rdev->config.evergreen.max_pipes = 4;
3179                 rdev->config.evergreen.max_tile_pipes = 4;
3180                 rdev->config.evergreen.max_simds = 2;
3181                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3182                 rdev->config.evergreen.max_gprs = 256;
3183                 rdev->config.evergreen.max_threads = 248;
3184                 rdev->config.evergreen.max_gs_threads = 32;
3185                 rdev->config.evergreen.max_stack_entries = 512;
3186                 rdev->config.evergreen.sx_num_of_sets = 4;
3187                 rdev->config.evergreen.sx_max_export_size = 256;
3188                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3189                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3190                 rdev->config.evergreen.max_hw_contexts = 4;
3191                 rdev->config.evergreen.sq_num_cf_insts = 2;
3192
3193                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3194                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3195                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3196                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3197                 break;
3198         case CHIP_BARTS:
3199                 rdev->config.evergreen.num_ses = 2;
3200                 rdev->config.evergreen.max_pipes = 4;
3201                 rdev->config.evergreen.max_tile_pipes = 8;
3202                 rdev->config.evergreen.max_simds = 7;
3203                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3204                 rdev->config.evergreen.max_gprs = 256;
3205                 rdev->config.evergreen.max_threads = 248;
3206                 rdev->config.evergreen.max_gs_threads = 32;
3207                 rdev->config.evergreen.max_stack_entries = 512;
3208                 rdev->config.evergreen.sx_num_of_sets = 4;
3209                 rdev->config.evergreen.sx_max_export_size = 256;
3210                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3211                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3212                 rdev->config.evergreen.max_hw_contexts = 8;
3213                 rdev->config.evergreen.sq_num_cf_insts = 2;
3214
3215                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3216                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3217                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3218                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3219                 break;
3220         case CHIP_TURKS:
3221                 rdev->config.evergreen.num_ses = 1;
3222                 rdev->config.evergreen.max_pipes = 4;
3223                 rdev->config.evergreen.max_tile_pipes = 4;
3224                 rdev->config.evergreen.max_simds = 6;
3225                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3226                 rdev->config.evergreen.max_gprs = 256;
3227                 rdev->config.evergreen.max_threads = 248;
3228                 rdev->config.evergreen.max_gs_threads = 32;
3229                 rdev->config.evergreen.max_stack_entries = 256;
3230                 rdev->config.evergreen.sx_num_of_sets = 4;
3231                 rdev->config.evergreen.sx_max_export_size = 256;
3232                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3233                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3234                 rdev->config.evergreen.max_hw_contexts = 8;
3235                 rdev->config.evergreen.sq_num_cf_insts = 2;
3236
3237                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3238                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3239                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3240                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3241                 break;
3242         case CHIP_CAICOS:
3243                 rdev->config.evergreen.num_ses = 1;
3244                 rdev->config.evergreen.max_pipes = 2;
3245                 rdev->config.evergreen.max_tile_pipes = 2;
3246                 rdev->config.evergreen.max_simds = 2;
3247                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3248                 rdev->config.evergreen.max_gprs = 256;
3249                 rdev->config.evergreen.max_threads = 192;
3250                 rdev->config.evergreen.max_gs_threads = 16;
3251                 rdev->config.evergreen.max_stack_entries = 256;
3252                 rdev->config.evergreen.sx_num_of_sets = 4;
3253                 rdev->config.evergreen.sx_max_export_size = 128;
3254                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3255                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3256                 rdev->config.evergreen.max_hw_contexts = 4;
3257                 rdev->config.evergreen.sq_num_cf_insts = 1;
3258
3259                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3260                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3261                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3262                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3263                 break;
3264         }
3265
3266         /* Initialize HDP */
3267         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3268                 WREG32((0x2c14 + j), 0x00000000);
3269                 WREG32((0x2c18 + j), 0x00000000);
3270                 WREG32((0x2c1c + j), 0x00000000);
3271                 WREG32((0x2c20 + j), 0x00000000);
3272                 WREG32((0x2c24 + j), 0x00000000);
3273         }
3274
3275         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3276
3277         evergreen_fix_pci_max_read_req_size(rdev);
3278
3279         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3280         if ((rdev->family == CHIP_PALM) ||
3281             (rdev->family == CHIP_SUMO) ||
3282             (rdev->family == CHIP_SUMO2))
3283                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3284         else
3285                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3286
3287         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3288          * not have bank info, so create a custom tiling dword.
3289          * bits 3:0   num_pipes
3290          * bits 7:4   num_banks
3291          * bits 11:8  group_size
3292          * bits 15:12 row_size
3293          */
3294         rdev->config.evergreen.tile_config = 0;
3295         switch (rdev->config.evergreen.max_tile_pipes) {
3296         case 1:
3297         default:
3298                 rdev->config.evergreen.tile_config |= (0 << 0);
3299                 break;
3300         case 2:
3301                 rdev->config.evergreen.tile_config |= (1 << 0);
3302                 break;
3303         case 4:
3304                 rdev->config.evergreen.tile_config |= (2 << 0);
3305                 break;
3306         case 8:
3307                 rdev->config.evergreen.tile_config |= (3 << 0);
3308                 break;
3309         }
3310         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3311         if (rdev->flags & RADEON_IS_IGP)
3312                 rdev->config.evergreen.tile_config |= 1 << 4;
3313         else {
3314                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3315                 case 0: /* four banks */
3316                         rdev->config.evergreen.tile_config |= 0 << 4;
3317                         break;
3318                 case 1: /* eight banks */
3319                         rdev->config.evergreen.tile_config |= 1 << 4;
3320                         break;
3321                 case 2: /* sixteen banks */
3322                 default:
3323                         rdev->config.evergreen.tile_config |= 2 << 4;
3324                         break;
3325                 }
3326         }
3327         rdev->config.evergreen.tile_config |= 0 << 8;
3328         rdev->config.evergreen.tile_config |=
3329                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3330
3331         num_shader_engines = ((gb_addr_config & NUM_SHADER_ENGINES(3)) >> 12) + 1;
3332
3333         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3334                 u32 efuse_straps_4;
3335                 u32 efuse_straps_3;
3336
3337                 efuse_straps_4 = RREG32_RCU(0x204);
3338                 efuse_straps_3 = RREG32_RCU(0x203);
3339                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3340                       ((efuse_straps_3 & 0xf0000000) >> 28));
3341         } else {
3342                 tmp = 0;
3343                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3344                         u32 rb_disable_bitmap;
3345
3346                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3347                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3348                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3349                         tmp <<= 4;
3350                         tmp |= rb_disable_bitmap;
3351                 }
3352         }
3353         /* the enabled RBs are simply the ones that are not disabled */
3354         disabled_rb_mask = tmp;
3355         tmp = 0;
3356         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3357                 tmp |= (1 << i);
3358         /* if all the backends are disabled, fix it up here */
3359         if ((disabled_rb_mask & tmp) == tmp) {
3360                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3361                         disabled_rb_mask &= ~(1 << i);
3362         }
3363
3364         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3365         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3366
3367         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3368         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3369         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3370         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3371         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3372         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3373         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3374
3375         if ((rdev->config.evergreen.max_backends == 1) &&
3376             (rdev->flags & RADEON_IS_IGP)) {
3377                 if ((disabled_rb_mask & 3) == 1) {
3378                         /* RB0 disabled, RB1 enabled */
3379                         tmp = 0x11111111;
3380                 } else {
3381                         /* RB1 disabled, RB0 enabled */
3382                         tmp = 0x00000000;
3383                 }
3384         } else {
3385                 tmp = gb_addr_config & NUM_PIPES_MASK;
3386                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3387                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3388         }
3389         WREG32(GB_BACKEND_MAP, tmp);
3390
3391         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3392         WREG32(CGTS_TCC_DISABLE, 0);
3393         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3394         WREG32(CGTS_USER_TCC_DISABLE, 0);
3395
3396         /* set HW defaults for 3D engine */
3397         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3398                                      ROQ_IB2_START(0x2b)));
3399
3400         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3401
3402         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3403                              SYNC_GRADIENT |
3404                              SYNC_WALKER |
3405                              SYNC_ALIGNER));
3406
3407         sx_debug_1 = RREG32(SX_DEBUG_1);
3408         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3409         WREG32(SX_DEBUG_1, sx_debug_1);
3410
3411
3412         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3413         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3414         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3415         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3416
3417         if (rdev->family <= CHIP_SUMO2)
3418                 WREG32(SMX_SAR_CTL0, 0x00010000);
3419
3420         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3421                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3422                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3423
3424         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3425                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3426                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3427
3428         WREG32(VGT_NUM_INSTANCES, 1);
3429         WREG32(SPI_CONFIG_CNTL, 0);
3430         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3431         WREG32(CP_PERFMON_CNTL, 0);
3432
3433         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3434                                   FETCH_FIFO_HIWATER(0x4) |
3435                                   DONE_FIFO_HIWATER(0xe0) |
3436                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3437
3438         sq_config = RREG32(SQ_CONFIG);
3439         sq_config &= ~(PS_PRIO(3) |
3440                        VS_PRIO(3) |
3441                        GS_PRIO(3) |
3442                        ES_PRIO(3));
3443         sq_config |= (VC_ENABLE |
3444                       EXPORT_SRC_C |
3445                       PS_PRIO(0) |
3446                       VS_PRIO(1) |
3447                       GS_PRIO(2) |
3448                       ES_PRIO(3));
3449
3450         switch (rdev->family) {
3451         case CHIP_CEDAR:
3452         case CHIP_PALM:
3453         case CHIP_SUMO:
3454         case CHIP_SUMO2:
3455         case CHIP_CAICOS:
3456                 /* no vertex cache */
3457                 sq_config &= ~VC_ENABLE;
3458                 break;
3459         default:
3460                 break;
3461         }
3462
3463         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3464
3465         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
3466         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3467         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3468         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3469         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3470         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3471         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3472
3473         switch (rdev->family) {
3474         case CHIP_CEDAR:
3475         case CHIP_PALM:
3476         case CHIP_SUMO:
3477         case CHIP_SUMO2:
3478                 ps_thread_count = 96;
3479                 break;
3480         default:
3481                 ps_thread_count = 128;
3482                 break;
3483         }
3484
3485         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3486         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3487         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3488         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3489         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3490         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3491
3492         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3493         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3494         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3495         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3496         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3497         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3498
3499         WREG32(SQ_CONFIG, sq_config);
3500         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3501         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3502         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3503         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3504         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3505         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3506         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3507         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3508         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3509         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3510
3511         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3512                                           FORCE_EOV_MAX_REZ_CNT(255)));
3513
3514         switch (rdev->family) {
3515         case CHIP_CEDAR:
3516         case CHIP_PALM:
3517         case CHIP_SUMO:
3518         case CHIP_SUMO2:
3519         case CHIP_CAICOS:
3520                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3521                 break;
3522         default:
3523                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3524                 break;
3525         }
3526         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3527         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3528
3529         WREG32(VGT_GS_VERTEX_REUSE, 16);
3530         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3531         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3532
3533         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3534         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3535
3536         WREG32(CB_PERF_CTR0_SEL_0, 0);
3537         WREG32(CB_PERF_CTR0_SEL_1, 0);
3538         WREG32(CB_PERF_CTR1_SEL_0, 0);
3539         WREG32(CB_PERF_CTR1_SEL_1, 0);
3540         WREG32(CB_PERF_CTR2_SEL_0, 0);
3541         WREG32(CB_PERF_CTR2_SEL_1, 0);
3542         WREG32(CB_PERF_CTR3_SEL_0, 0);
3543         WREG32(CB_PERF_CTR3_SEL_1, 0);
3544
3545         /* clear render buffer base addresses */
3546         WREG32(CB_COLOR0_BASE, 0);
3547         WREG32(CB_COLOR1_BASE, 0);
3548         WREG32(CB_COLOR2_BASE, 0);
3549         WREG32(CB_COLOR3_BASE, 0);
3550         WREG32(CB_COLOR4_BASE, 0);
3551         WREG32(CB_COLOR5_BASE, 0);
3552         WREG32(CB_COLOR6_BASE, 0);
3553         WREG32(CB_COLOR7_BASE, 0);
3554         WREG32(CB_COLOR8_BASE, 0);
3555         WREG32(CB_COLOR9_BASE, 0);
3556         WREG32(CB_COLOR10_BASE, 0);
3557         WREG32(CB_COLOR11_BASE, 0);
3558
3559         /* set the shader const cache sizes to 0 */
3560         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3561                 WREG32(i, 0);
3562         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3563                 WREG32(i, 0);
3564
3565         tmp = RREG32(HDP_MISC_CNTL);
3566         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3567         WREG32(HDP_MISC_CNTL, tmp);
3568
3569         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3570         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3571
3572         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3573
3574         udelay(50);
3575
3576 }
3577
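/**
 * evergreen_mc_init - initialize the memory controller driver state
 *
 * @rdev: radeon_device pointer
 *
 * Determine the VRAM width and size (taking the fusion/IGP variants into
 * account), record the aperture base and size, and place VRAM and GTT in
 * the GPU address space.
 * Returns 0 (always succeeds).
 */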
3578 int evergreen_mc_init(struct radeon_device *rdev)
3579 {
3580         u32 tmp;
3581         int chansize, numchan;
3582
3583         /* Get VRAM information */
3584         rdev->mc.vram_is_ddr = true;
3585         if ((rdev->family == CHIP_PALM) ||
3586             (rdev->family == CHIP_SUMO) ||
3587             (rdev->family == CHIP_SUMO2))
3588                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3589         else
3590                 tmp = RREG32(MC_ARB_RAMCFG);
3591         if (tmp & CHANSIZE_OVERRIDE) {
3592                 chansize = 16;
3593         } else if (tmp & CHANSIZE_MASK) {
3594                 chansize = 64;
3595         } else {
3596                 chansize = 32;
3597         }
3598         tmp = RREG32(MC_SHARED_CHMAP);
3599         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3600         case 0:
3601         default:
3602                 numchan = 1;
3603                 break;
3604         case 1:
3605                 numchan = 2;
3606                 break;
3607         case 2:
3608                 numchan = 4;
3609                 break;
3610         case 3:
3611                 numchan = 8;
3612                 break;
3613         }
3614         rdev->mc.vram_width = numchan * chansize;
3615         /* Could the aperture size report 0? */
3616         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3617         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3618         /* Setup GPU memory space */
3619         if ((rdev->family == CHIP_PALM) ||
3620             (rdev->family == CHIP_SUMO) ||
3621             (rdev->family == CHIP_SUMO2)) {
3622                 /* size in bytes on fusion */
3623                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3624                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3625         } else {
3626                 /* size in MB on evergreen/cayman/tn */
3627                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3628                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3629         }
3630         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3631         r700_vram_gtt_location(rdev, &rdev->mc);
3632         radeon_update_bandwidth_info(rdev);
3633
3634         return 0;
3635 }
3636
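/**
 * evergreen_print_gpu_status_regs - dump the GPU status registers
 *
 * @rdev: radeon_device pointer
 *
 * Print the GRBM, SRBM, CP and DMA status registers to the kernel log to
 * help diagnose GPU hangs.
 */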
3637 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3638 {
3639         dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3640                 RREG32(GRBM_STATUS));
3641         dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3642                 RREG32(GRBM_STATUS_SE0));
3643         dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3644                 RREG32(GRBM_STATUS_SE1));
3645         dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3646                 RREG32(SRBM_STATUS));
3647         dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3648                 RREG32(SRBM_STATUS2));
3649         dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3650                 RREG32(CP_STALLED_STAT1));
3651         dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3652                 RREG32(CP_STALLED_STAT2));
3653         dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3654                 RREG32(CP_BUSY_STAT));
3655         dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3656                 RREG32(CP_STAT));
3657         dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3658                 RREG32(DMA_STATUS_REG));
3659         if (rdev->family >= CHIP_CAYMAN) {
3660                 dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3661                          RREG32(DMA_STATUS_REG + 0x800));
3662         }
3663 }
3664
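/**
 * evergreen_is_display_hung - check if the display engine is hung
 *
 * @rdev: radeon_device pointer
 *
 * Sample the H/V counters of every enabled CRTC over roughly a millisecond;
 * a CRTC whose counter never advances is considered stuck.
 * Returns true if any enabled CRTC appears stuck, false otherwise.
 */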
3665 bool evergreen_is_display_hung(struct radeon_device *rdev)
3666 {
3667         u32 crtc_hung = 0;
3668         u32 crtc_status[6];
3669         u32 i, j, tmp;
3670
3671         for (i = 0; i < rdev->num_crtc; i++) {
3672                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3673                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3674                         crtc_hung |= (1 << i);
3675                 }
3676         }
3677
3678         for (j = 0; j < 10; j++) {
3679                 for (i = 0; i < rdev->num_crtc; i++) {
3680                         if (crtc_hung & (1 << i)) {
3681                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3682                                 if (tmp != crtc_status[i])
3683                                         crtc_hung &= ~(1 << i);
3684                         }
3685                 }
3686                 if (crtc_hung == 0)
3687                         return false;
3688                 udelay(100);
3689         }
3690
3691         return true;
3692 }
3693
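/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 *
 * @rdev: radeon_device pointer
 *
 * Inspect the GRBM, SRBM, DMA, VM and display status registers and build a
 * mask of RADEON_RESET_* flags for the blocks that appear busy or hung.
 * A busy MC is filtered out of the mask since it is most likely just busy
 * rather than hung.
 * Returns the reset mask (0 if nothing appears hung).
 */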
3694 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3695 {
3696         u32 reset_mask = 0;
3697         u32 tmp;
3698
3699         /* GRBM_STATUS */
3700         tmp = RREG32(GRBM_STATUS);
3701         if (tmp & (PA_BUSY | SC_BUSY |
3702                    SH_BUSY | SX_BUSY |
3703                    TA_BUSY | VGT_BUSY |
3704                    DB_BUSY | CB_BUSY |
3705                    SPI_BUSY | VGT_BUSY_NO_DMA))
3706                 reset_mask |= RADEON_RESET_GFX;
3707
3708         if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3709                    CP_BUSY | CP_COHERENCY_BUSY))
3710                 reset_mask |= RADEON_RESET_CP;
3711
3712         if (tmp & GRBM_EE_BUSY)
3713                 reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3714
3715         /* DMA_STATUS_REG */
3716         tmp = RREG32(DMA_STATUS_REG);
3717         if (!(tmp & DMA_IDLE))
3718                 reset_mask |= RADEON_RESET_DMA;
3719
3720         /* SRBM_STATUS2 */
3721         tmp = RREG32(SRBM_STATUS2);
3722         if (tmp & DMA_BUSY)
3723                 reset_mask |= RADEON_RESET_DMA;
3724
3725         /* SRBM_STATUS */
3726         tmp = RREG32(SRBM_STATUS);
3727         if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3728                 reset_mask |= RADEON_RESET_RLC;
3729
3730         if (tmp & IH_BUSY)
3731                 reset_mask |= RADEON_RESET_IH;
3732
3733         if (tmp & SEM_BUSY)
3734                 reset_mask |= RADEON_RESET_SEM;
3735
3736         if (tmp & GRBM_RQ_PENDING)
3737                 reset_mask |= RADEON_RESET_GRBM;
3738
3739         if (tmp & VMC_BUSY)
3740                 reset_mask |= RADEON_RESET_VMC;
3741
3742         if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3743                    MCC_BUSY | MCD_BUSY))
3744                 reset_mask |= RADEON_RESET_MC;
3745
3746         if (evergreen_is_display_hung(rdev))
3747                 reset_mask |= RADEON_RESET_DISPLAY;
3748
3749         /* VM_L2_STATUS */
3750         tmp = RREG32(VM_L2_STATUS);
3751         if (tmp & L2_BUSY)
3752                 reset_mask |= RADEON_RESET_VMC;
3753
3754         /* Skip MC reset as it's most likely not hung, just busy */
3755         if (reset_mask & RADEON_RESET_MC) {
3756                 DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3757                 reset_mask &= ~RADEON_RESET_MC;
3758         }
3759
3760         return reset_mask;
3761 }
3762
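/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting the blocks to reset
 *
 * Halt the CP and DMA engines, stop the MC, pulse the appropriate GRBM and
 * SRBM soft reset bits for the blocks in @reset_mask, then resume the MC.
 * The GPU status registers are dumped before and after the reset.
 */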
3763 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3764 {
3765         struct evergreen_mc_save save;
3766         u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3767         u32 tmp;
3768
3769         if (reset_mask == 0)
3770                 return;
3771
3772         dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3773
3774         evergreen_print_gpu_status_regs(rdev);
3775
3776         /* Disable CP parsing/prefetching */
3777         WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3778
3779         if (reset_mask & RADEON_RESET_DMA) {
3780                 /* Disable DMA */
3781                 tmp = RREG32(DMA_RB_CNTL);
3782                 tmp &= ~DMA_RB_ENABLE;
3783                 WREG32(DMA_RB_CNTL, tmp);
3784         }
3785
3786         udelay(50);
3787
3788         evergreen_mc_stop(rdev, &save);
3789         if (evergreen_mc_wait_for_idle(rdev)) {
3790                 dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3791         }
3792
3793         if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3794                 grbm_soft_reset |= SOFT_RESET_DB |
3795                         SOFT_RESET_CB |
3796                         SOFT_RESET_PA |
3797                         SOFT_RESET_SC |
3798                         SOFT_RESET_SPI |
3799                         SOFT_RESET_SX |
3800                         SOFT_RESET_SH |
3801                         SOFT_RESET_TC |
3802                         SOFT_RESET_TA |
3803                         SOFT_RESET_VC |
3804                         SOFT_RESET_VGT;
3805         }
3806
3807         if (reset_mask & RADEON_RESET_CP) {
3808                 grbm_soft_reset |= SOFT_RESET_CP |
3809                         SOFT_RESET_VGT;
3810
3811                 srbm_soft_reset |= SOFT_RESET_GRBM;
3812         }
3813
3814         if (reset_mask & RADEON_RESET_DMA)
3815                 srbm_soft_reset |= SOFT_RESET_DMA;
3816
3817         if (reset_mask & RADEON_RESET_DISPLAY)
3818                 srbm_soft_reset |= SOFT_RESET_DC;
3819
3820         if (reset_mask & RADEON_RESET_RLC)
3821                 srbm_soft_reset |= SOFT_RESET_RLC;
3822
3823         if (reset_mask & RADEON_RESET_SEM)
3824                 srbm_soft_reset |= SOFT_RESET_SEM;
3825
3826         if (reset_mask & RADEON_RESET_IH)
3827                 srbm_soft_reset |= SOFT_RESET_IH;
3828
3829         if (reset_mask & RADEON_RESET_GRBM)
3830                 srbm_soft_reset |= SOFT_RESET_GRBM;
3831
3832         if (reset_mask & RADEON_RESET_VMC)
3833                 srbm_soft_reset |= SOFT_RESET_VMC;
3834
3835         if (!(rdev->flags & RADEON_IS_IGP)) {
3836                 if (reset_mask & RADEON_RESET_MC)
3837                         srbm_soft_reset |= SOFT_RESET_MC;
3838         }
3839
3840         if (grbm_soft_reset) {
3841                 tmp = RREG32(GRBM_SOFT_RESET);
3842                 tmp |= grbm_soft_reset;
3843                 dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3844                 WREG32(GRBM_SOFT_RESET, tmp);
3845                 tmp = RREG32(GRBM_SOFT_RESET);
3846
3847                 udelay(50);
3848
3849                 tmp &= ~grbm_soft_reset;
3850                 WREG32(GRBM_SOFT_RESET, tmp);
3851                 tmp = RREG32(GRBM_SOFT_RESET);
3852         }
3853
3854         if (srbm_soft_reset) {
3855                 tmp = RREG32(SRBM_SOFT_RESET);
3856                 tmp |= srbm_soft_reset;
3857                 dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3858                 WREG32(SRBM_SOFT_RESET, tmp);
3859                 tmp = RREG32(SRBM_SOFT_RESET);
3860
3861                 udelay(50);
3862
3863                 tmp &= ~srbm_soft_reset;
3864                 WREG32(SRBM_SOFT_RESET, tmp);
3865                 tmp = RREG32(SRBM_SOFT_RESET);
3866         }
3867
3868         /* Wait a little for things to settle down */
3869         udelay(50);
3870
3871         evergreen_mc_resume(rdev, &save);
3872         udelay(50);
3873
3874         evergreen_print_gpu_status_regs(rdev);
3875 }
3876
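/**
 * evergreen_asic_reset - soft reset hung GPU blocks
 *
 * @rdev: radeon_device pointer
 *
 * Determine which blocks are hung, flag the engine as hung in the BIOS
 * scratch registers, soft reset the affected blocks, and clear the hung
 * flag again if the reset succeeded.
 * Returns 0.
 */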
3877 int evergreen_asic_reset(struct radeon_device *rdev)
3878 {
3879         u32 reset_mask;
3880
3881         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3882
3883         if (reset_mask)
3884                 r600_set_bios_scratch_engine_hung(rdev, true);
3885
3886         evergreen_gpu_soft_reset(rdev, reset_mask);
3887
3888         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3889
3890         if (!reset_mask)
3891                 r600_set_bios_scratch_engine_hung(rdev, false);
3892
3893         return 0;
3894 }
3895
3896 /**
3897  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3898  *
3899  * @rdev: radeon_device pointer
3900  * @ring: radeon_ring structure holding ring information
3901  *
3902  * Check if the GFX engine is locked up.
3903  * Returns true if the engine appears to be locked up, false if not.
3904  */
3905 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3906 {
3907         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3908
3909         if (!(reset_mask & (RADEON_RESET_GFX |
3910                             RADEON_RESET_COMPUTE |
3911                             RADEON_RESET_CP))) {
3912                 radeon_ring_lockup_update(ring);
3913                 return false;
3914         }
3915         /* force CP activities */
3916         radeon_ring_force_activity(rdev, ring);
3917         return radeon_ring_test_lockup(rdev, ring);
3918 }
3919
3920 /*
3921  * RLC
3922  */
3923 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3924 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3925
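/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpin and free the RLC save/restore, clear state and CP table
 * buffer objects, if they were allocated.
 */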
3926 void sumo_rlc_fini(struct radeon_device *rdev)
3927 {
3928         int r;
3929
3930         /* save restore block */
3931         if (rdev->rlc.save_restore_obj) {
3932                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3933                 if (unlikely(r != 0))
3934                         dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3935                 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3936                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3937
3938                 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3939                 rdev->rlc.save_restore_obj = NULL;
3940         }
3941
3942         /* clear state block */
3943         if (rdev->rlc.clear_state_obj) {
3944                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3945                 if (unlikely(r != 0))
3946                         dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3947                 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3948                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3949
3950                 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3951                 rdev->rlc.clear_state_obj = NULL;
3952         }
3953
3954         /* cp table block */
3955         if (rdev->rlc.cp_table_obj) {
3956                 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3957                 if (unlikely(r != 0))
3958                         dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3959                 radeon_bo_unpin(rdev->rlc.cp_table_obj);
3960                 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
3961
3962                 radeon_bo_unref(&rdev->rlc.cp_table_obj);
3963                 rdev->rlc.cp_table_obj = NULL;
3964         }
3965 }
3966
3967 #define CP_ME_TABLE_SIZE    96
3968
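/**
 * sumo_rlc_init - set up the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Allocate and pin the RLC save/restore and clear state buffers in VRAM
 * and fill them with the register list and clear state data appropriate
 * for the ASIC family; also set up the CP table used for power gating
 * when one is required.
 * Returns 0 on success, negative error code on failure.
 */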
3969 int sumo_rlc_init(struct radeon_device *rdev)
3970 {
3971         const u32 *src_ptr;
3972         volatile u32 *dst_ptr;
3973         u32 dws, data, i, j, k, reg_num;
3974         u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
3975         u64 reg_list_mc_addr;
3976         const struct cs_section_def *cs_data;
3977         int r;
3978
3979         src_ptr = rdev->rlc.reg_list;
3980         dws = rdev->rlc.reg_list_size;
3981         if (rdev->family >= CHIP_BONAIRE) {
3982                 dws += (5 * 16) + 48 + 48 + 64;
3983         }
3984         cs_data = rdev->rlc.cs_data;
3985
3986         if (src_ptr) {
3987                 /* save restore block */
3988                 if (rdev->rlc.save_restore_obj == NULL) {
3989                         r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
3990                                              RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
3991                         if (r) {
3992                                 dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
3993                                 return r;
3994                         }
3995                 }
3996
3997                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3998                 if (unlikely(r != 0)) {
3999                         sumo_rlc_fini(rdev);
4000                         return r;
4001                 }
4002                 r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4003                                   &rdev->rlc.save_restore_gpu_addr);
4004                 if (r) {
4005                         radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4006                         dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4007                         sumo_rlc_fini(rdev);
4008                         return r;
4009                 }
4010
4011                 r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4012                 if (r) {
4013                         dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4014                         sumo_rlc_fini(rdev);
4015                         return r;
4016                 }
4017                 /* write the sr buffer */
4018                 dst_ptr = rdev->rlc.sr_ptr;
4019                 if (rdev->family >= CHIP_TAHITI) {
4020                         /* SI */
4021                         for (i = 0; i < rdev->rlc.reg_list_size; i++)
4022                                 dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4023                 } else {
4024                         /* ON/LN/TN */
4025                         /* format:
4026                          * dw0: (reg2 << 16) | reg1
4027                          * dw1: reg1 save space
4028                          * dw2: reg2 save space
4029                          */
4030                         for (i = 0; i < dws; i++) {
4031                                 data = src_ptr[i] >> 2;
4032                                 i++;
4033                                 if (i < dws)
4034                                         data |= (src_ptr[i] >> 2) << 16;
4035                                 j = (((i - 1) * 3) / 2);
4036                                 dst_ptr[j] = cpu_to_le32(data);
4037                         }
4038                         j = ((i * 3) / 2);
4039                         dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4040                 }
4041                 radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4042                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4043         }
4044
4045         if (cs_data) {
4046                 /* clear state block */
4047                 if (rdev->family >= CHIP_BONAIRE) {
4048                         rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4049                 } else if (rdev->family >= CHIP_TAHITI) {
4050                         rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4051                         dws = rdev->rlc.clear_state_size + (256 / 4);
4052                 } else {
4053                         reg_list_num = 0;
4054                         dws = 0;
4055                         for (i = 0; cs_data[i].section != NULL; i++) {
4056                                 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4057                                         reg_list_num++;
4058                                         dws += cs_data[i].section[j].reg_count;
4059                                 }
4060                         }
4061                         reg_list_blk_index = (3 * reg_list_num + 2);
4062                         dws += reg_list_blk_index;
4063                         rdev->rlc.clear_state_size = dws;
4064                 }
4065
4066                 if (rdev->rlc.clear_state_obj == NULL) {
4067                         r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4068                                              RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
4069                         if (r) {
4070                                 dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4071                                 sumo_rlc_fini(rdev);
4072                                 return r;
4073                         }
4074                 }
4075                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4076                 if (unlikely(r != 0)) {
4077                         sumo_rlc_fini(rdev);
4078                         return r;
4079                 }
4080                 r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4081                                   &rdev->rlc.clear_state_gpu_addr);
4082                 if (r) {
4083                         radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4084                         dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4085                         sumo_rlc_fini(rdev);
4086                         return r;
4087                 }
4088
4089                 r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4090                 if (r) {
4091                         dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4092                         sumo_rlc_fini(rdev);
4093                         return r;
4094                 }
4095                 /* set up the cs buffer */
4096                 dst_ptr = rdev->rlc.cs_ptr;
4097                 if (rdev->family >= CHIP_BONAIRE) {
4098                         cik_get_csb_buffer(rdev, dst_ptr);
4099                 } else if (rdev->family >= CHIP_TAHITI) {
4100                         reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4101                         dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4102                         dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4103                         dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4104                         si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4105                 } else {
4106                         reg_list_hdr_blk_index = 0;
4107                         reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4108                         data = upper_32_bits(reg_list_mc_addr);
4109                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4110                         reg_list_hdr_blk_index++;
4111                         for (i = 0; cs_data[i].section != NULL; i++) {
4112                                 for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4113                                         reg_num = cs_data[i].section[j].reg_count;
4114                                         data = reg_list_mc_addr & 0xffffffff;
4115                                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4116                                         reg_list_hdr_blk_index++;
4117
4118                                         data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4119                                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4120                                         reg_list_hdr_blk_index++;
4121
4122                                         data = 0x08000000 | (reg_num * 4);
4123                                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4124                                         reg_list_hdr_blk_index++;
4125
4126                                         for (k = 0; k < reg_num; k++) {
4127                                                 data = cs_data[i].section[j].extent[k];
4128                                                 dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4129                                         }
4130                                         reg_list_mc_addr += reg_num * 4;
4131                                         reg_list_blk_index += reg_num;
4132                                 }
4133                         }
4134                         dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4135                 }
4136                 radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4137                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4138         }
4139
4140         if (rdev->rlc.cp_table_size) {
4141                 if (rdev->rlc.cp_table_obj == NULL) {
4142                         r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
4143                                              RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
4144                         if (r) {
4145                                 dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4146                                 sumo_rlc_fini(rdev);
4147                                 return r;
4148                         }
4149                 }
4150
4151                 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4152                 if (unlikely(r != 0)) {
4153                         dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4154                         sumo_rlc_fini(rdev);
4155                         return r;
4156                 }
4157                 r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4158                                   &rdev->rlc.cp_table_gpu_addr);
4159                 if (r) {
4160                         radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4161                         dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4162                         sumo_rlc_fini(rdev);
4163                         return r;
4164                 }
4165                 r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4166                 if (r) {
4167                         dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4168                         sumo_rlc_fini(rdev);
4169                         return r;
4170                 }
4171
4172                 cik_init_cp_pg_table(rdev);
4173
4174                 radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4175                 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4176
4177         }
4178
4179         return 0;
4180 }
4181
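/**
 * evergreen_rlc_start - enable the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Turn the RLC on; on IGP parts also enable GFX power gating.
 */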
4182 static void evergreen_rlc_start(struct radeon_device *rdev)
4183 {
4184         u32 mask = RLC_ENABLE;
4185
4186         if (rdev->flags & RADEON_IS_IGP) {
4187                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4188         }
4189
4190         WREG32(RLC_CNTL, mask);
4191 }
4192
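/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stop the RLC, program the RLC control registers (including the
 * save/restore and clear state bases on IGP parts), upload the RLC
 * microcode for the ASIC family and restart the RLC.
 * Returns 0 on success, -EINVAL if no RLC firmware is loaded.
 */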
4193 int evergreen_rlc_resume(struct radeon_device *rdev)
4194 {
4195         u32 i;
4196         const __be32 *fw_data;
4197
4198         if (!rdev->rlc_fw)
4199                 return -EINVAL;
4200
4201         r600_rlc_stop(rdev);
4202
4203         WREG32(RLC_HB_CNTL, 0);
4204
4205         if (rdev->flags & RADEON_IS_IGP) {
4206                 if (rdev->family == CHIP_ARUBA) {
4207                         u32 always_on_bitmap =
4208                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4209                         /* find out the number of active simds */
4210                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4211                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4212                         tmp = hweight32(~tmp);
4213                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4214                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4215                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4216                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4217                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4218                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4219                         }
4220                 } else {
4221                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4222                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4223                 }
4224                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4225                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4226         } else {
4227                 WREG32(RLC_HB_BASE, 0);
4228                 WREG32(RLC_HB_RPTR, 0);
4229                 WREG32(RLC_HB_WPTR, 0);
4230                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4231                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4232         }
4233         WREG32(RLC_MC_CNTL, 0);
4234         WREG32(RLC_UCODE_CNTL, 0);
4235
4236         fw_data = (const __be32 *)rdev->rlc_fw->data;
4237         if (rdev->family >= CHIP_ARUBA) {
4238                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4239                         WREG32(RLC_UCODE_ADDR, i);
4240                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4241                 }
4242         } else if (rdev->family >= CHIP_CAYMAN) {
4243                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4244                         WREG32(RLC_UCODE_ADDR, i);
4245                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4246                 }
4247         } else {
4248                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4249                         WREG32(RLC_UCODE_ADDR, i);
4250                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4251                 }
4252         }
4253         WREG32(RLC_UCODE_ADDR, 0);
4254
4255         evergreen_rlc_start(rdev);
4256
4257         return 0;
4258 }
4259
4260 /* Interrupts */
4261
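/**
 * evergreen_get_vblank_counter - get the frame count of a crtc
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to get the frame count from
 *
 * Returns the hardware frame counter of the requested crtc, or 0 if
 * the crtc is out of range.
 */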
4262 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4263 {
4264         if (crtc >= rdev->num_crtc)
4265                 return 0;
4266         else
4267                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4268 }
4269
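/**
 * evergreen_disable_interrupt_state - disable all interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Clear the CP, DMA, GRBM, CRTC, pageflip, DAC and HPD interrupt
 * enable bits so that no interrupt sources remain armed.
 */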
4270 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4271 {
4272         u32 tmp;
4273
4274         if (rdev->family >= CHIP_CAYMAN) {
4275                 cayman_cp_int_cntl_setup(rdev, 0,
4276                                          CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4277                 cayman_cp_int_cntl_setup(rdev, 1, 0);
4278                 cayman_cp_int_cntl_setup(rdev, 2, 0);
4279                 tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4280                 WREG32(CAYMAN_DMA1_CNTL, tmp);
4281         } else
4282                 WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4283         tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4284         WREG32(DMA_CNTL, tmp);
4285         WREG32(GRBM_INT_CNTL, 0);
4286         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4287         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4288         if (rdev->num_crtc >= 4) {
4289                 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4290                 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4291         }
4292         if (rdev->num_crtc >= 6) {
4293                 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4294                 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4295         }
4296
4297         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4298         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4299         if (rdev->num_crtc >= 4) {
4300                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4301                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4302         }
4303         if (rdev->num_crtc >= 6) {
4304                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4305                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4306         }
4307
4308         /* only one DAC on DCE6 */
4309         if (!ASIC_IS_DCE6(rdev))
4310                 WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4311         WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4312
4313         tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4314         WREG32(DC_HPD1_INT_CONTROL, tmp);
4315         tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4316         WREG32(DC_HPD2_INT_CONTROL, tmp);
4317         tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4318         WREG32(DC_HPD3_INT_CONTROL, tmp);
4319         tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4320         WREG32(DC_HPD4_INT_CONTROL, tmp);
4321         tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4322         WREG32(DC_HPD5_INT_CONTROL, tmp);
4323         tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4324         WREG32(DC_HPD6_INT_CONTROL, tmp);
4325
4326 }
4327
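/**
 * evergreen_irq_set - program the interrupt control registers
 *
 * @rdev: radeon_device pointer
 *
 * Enable or disable the CP/DMA ring, vblank/pageflip, hotplug, HDMI
 * audio and thermal interrupt sources according to the state tracked
 * in rdev->irq.
 * Returns 0 on success, -EINVAL if no interrupt handler is installed.
 */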
4328 int evergreen_irq_set(struct radeon_device *rdev)
4329 {
4330         u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4331         u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4332         u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4333         u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4334         u32 grbm_int_cntl = 0;
4335         u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
4336         u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4337         u32 dma_cntl, dma_cntl1 = 0;
4338         u32 thermal_int = 0;
4339
4340         if (!rdev->irq.installed) {
4341                 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4342                 return -EINVAL;
4343         }
4344         /* don't enable anything if the ih is disabled */
4345         if (!rdev->ih.enabled) {
4346                 r600_disable_interrupts(rdev);
4347                 /* force the active interrupt state to all disabled */
4348                 evergreen_disable_interrupt_state(rdev);
4349                 return 0;
4350         }
4351
4352         hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4353         hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4354         hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4355         hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4356         hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4357         hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
4358         if (rdev->family == CHIP_ARUBA)
4359                 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4360                         ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4361         else
4362                 thermal_int = RREG32(CG_THERMAL_INT) &
4363                         ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4364
4365         afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4366         afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4367         afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4368         afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4369         afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4370         afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4371
4372         dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4373
4374         if (rdev->family >= CHIP_CAYMAN) {
4375                 /* enable CP interrupts on all rings */
4376                 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4377                         DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4378                         cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4379                 }
4380                 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4381                         DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4382                         cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4383                 }
4384                 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4385                         DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4386                         cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4387                 }
4388         } else {
4389                 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4390                         DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4391                         cp_int_cntl |= RB_INT_ENABLE;
4392                         cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4393                 }
4394         }
4395
4396         if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4397                 DRM_DEBUG("evergreen_irq_set: sw int dma\n");
4398                 dma_cntl |= TRAP_ENABLE;
4399         }
4400
4401         if (rdev->family >= CHIP_CAYMAN) {
4402                 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4403                 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4404                         DRM_DEBUG("evergreen_irq_set: sw int dma1\n");
4405                         dma_cntl1 |= TRAP_ENABLE;
4406                 }
4407         }
4408
4409         if (rdev->irq.dpm_thermal) {
4410                 DRM_DEBUG("dpm thermal\n");
4411                 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4412         }
4413
4414         if (rdev->irq.crtc_vblank_int[0] ||
4415             atomic_read(&rdev->irq.pflip[0])) {
4416                 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4417                 crtc1 |= VBLANK_INT_MASK;
4418         }
4419         if (rdev->irq.crtc_vblank_int[1] ||
4420             atomic_read(&rdev->irq.pflip[1])) {
4421                 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4422                 crtc2 |= VBLANK_INT_MASK;
4423         }
4424         if (rdev->irq.crtc_vblank_int[2] ||
4425             atomic_read(&rdev->irq.pflip[2])) {
4426                 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4427                 crtc3 |= VBLANK_INT_MASK;
4428         }
4429         if (rdev->irq.crtc_vblank_int[3] ||
4430             atomic_read(&rdev->irq.pflip[3])) {
4431                 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4432                 crtc4 |= VBLANK_INT_MASK;
4433         }
4434         if (rdev->irq.crtc_vblank_int[4] ||
4435             atomic_read(&rdev->irq.pflip[4])) {
4436                 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4437                 crtc5 |= VBLANK_INT_MASK;
4438         }
4439         if (rdev->irq.crtc_vblank_int[5] ||
4440             atomic_read(&rdev->irq.pflip[5])) {
4441                 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4442                 crtc6 |= VBLANK_INT_MASK;
4443         }
4444         if (rdev->irq.hpd[0]) {
4445                 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4446                 hpd1 |= DC_HPDx_INT_EN;
4447         }
4448         if (rdev->irq.hpd[1]) {
4449                 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4450                 hpd2 |= DC_HPDx_INT_EN;
4451         }
4452         if (rdev->irq.hpd[2]) {
4453                 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4454                 hpd3 |= DC_HPDx_INT_EN;
4455         }
4456         if (rdev->irq.hpd[3]) {
4457                 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4458                 hpd4 |= DC_HPDx_INT_EN;
4459         }
4460         if (rdev->irq.hpd[4]) {
4461                 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4462                 hpd5 |= DC_HPDx_INT_EN;
4463         }
4464         if (rdev->irq.hpd[5]) {
4465                 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4466                 hpd6 |= DC_HPDx_INT_EN;
4467         }
4468         if (rdev->irq.afmt[0]) {
4469                 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4470                 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4471         }
4472         if (rdev->irq.afmt[1]) {
4473                 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4474                 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4475         }
4476         if (rdev->irq.afmt[2]) {
4477                 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4478                 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4479         }
4480         if (rdev->irq.afmt[3]) {
4481                 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4482                 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4483         }
4484         if (rdev->irq.afmt[4]) {
4485                 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4486                 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4487         }
4488         if (rdev->irq.afmt[5]) {
4489                 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4490                 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4491         }
4492
4493         if (rdev->family >= CHIP_CAYMAN) {
4494                 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4495                 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4496                 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4497         } else
4498                 WREG32(CP_INT_CNTL, cp_int_cntl);
4499
4500         WREG32(DMA_CNTL, dma_cntl);
4501
4502         if (rdev->family >= CHIP_CAYMAN)
4503                 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4504
4505         WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4506
4507         WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4508         WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4509         if (rdev->num_crtc >= 4) {
4510                 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4511                 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4512         }
4513         if (rdev->num_crtc >= 6) {
4514                 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4515                 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4516         }
4517
4518         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
4519         WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
4520         if (rdev->num_crtc >= 4) {
4521                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
4522                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
4523         }
4524         if (rdev->num_crtc >= 6) {
4525                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
4526                 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
4527         }
4528
4529         WREG32(DC_HPD1_INT_CONTROL, hpd1);
4530         WREG32(DC_HPD2_INT_CONTROL, hpd2);
4531         WREG32(DC_HPD3_INT_CONTROL, hpd3);
4532         WREG32(DC_HPD4_INT_CONTROL, hpd4);
4533         WREG32(DC_HPD5_INT_CONTROL, hpd5);
4534         WREG32(DC_HPD6_INT_CONTROL, hpd6);
4535         if (rdev->family == CHIP_ARUBA)
4536                 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4537         else
4538                 WREG32(CG_THERMAL_INT, thermal_int);
4539
4540         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4541         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4542         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4543         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4544         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4545         WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4546
4547         return 0;
4548 }
4549
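/**
 * evergreen_irq_ack - acknowledge pending display interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Read the display interrupt status registers into
 * rdev->irq.stat_regs.evergreen and acknowledge any pending pageflip,
 * vblank, vline, hotplug and HDMI audio interrupts.
 */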
4550 static void evergreen_irq_ack(struct radeon_device *rdev)
4551 {
4552         u32 tmp;
4553
4554         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4555         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4556         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4557         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4558         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4559         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4560         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4561         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4562         if (rdev->num_crtc >= 4) {
4563                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4564                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4565         }
4566         if (rdev->num_crtc >= 6) {
4567                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4568                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4569         }
4570
4571         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4572         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4573         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4574         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4575         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4576         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4577
4578         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4579                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4580         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4581                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4582         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4583                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4584         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4585                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4586         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4587                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4588         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4589                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4590
4591         if (rdev->num_crtc >= 4) {
4592                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4593                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4594                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4595                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4596                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4597                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4598                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4599                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4600                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4601                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4602                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4603                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4604         }
4605
4606         if (rdev->num_crtc >= 6) {
4607                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4608                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4609                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4610                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4611                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4612                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4613                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4614                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4615                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4616                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4617                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4618                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4619         }
4620
4621         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4622                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4623                 tmp |= DC_HPDx_INT_ACK;
4624                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4625         }
4626         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4627                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4628                 tmp |= DC_HPDx_INT_ACK;
4629                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4630         }
4631         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4632                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4633                 tmp |= DC_HPDx_INT_ACK;
4634                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4635         }
4636         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4637                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4638                 tmp |= DC_HPDx_INT_ACK;
4639                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4640         }
4641         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4642                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4643                 tmp |= DC_HPDx_INT_ACK;
4644                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4645         }
4646         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4647                 tmp = RREG32(DC_HPD6_INT_CONTROL);
4648                 tmp |= DC_HPDx_INT_ACK;
4649                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4650         }
4651         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4652                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4653                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4654                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4655         }
4656         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4657                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4658                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4659                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4660         }
4661         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4662                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4663                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4664                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4665         }
4666         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4667                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4668                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4669                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4670         }
4671         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4672                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4673                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4674                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4675         }
4676         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4677                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4678                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4679                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4680         }
4681 }
4682
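/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hw, then acknowledge and clear any that
 * were already pending.
 */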
4683 static void evergreen_irq_disable(struct radeon_device *rdev)
4684 {
4685         r600_disable_interrupts(rdev);
4686         /* Wait and acknowledge irq */
4687         mdelay(1);
4688         evergreen_irq_ack(rdev);
4689         evergreen_disable_interrupt_state(rdev);
4690 }
4691
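/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts and stop the RLC.
 */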
4692 void evergreen_irq_suspend(struct radeon_device *rdev)
4693 {
4694         evergreen_irq_disable(rdev);
4695         r600_rlc_stop(rdev);
4696 }
4697
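/**
 * evergreen_get_ih_wptr - get the IH ring buffer wptr
 *
 * @rdev: radeon_device pointer
 *
 * Fetch the IH ring buffer write pointer, either from the writeback
 * buffer or the register, handle a ring overflow by advancing the read
 * pointer past the overwritten entries, and return the masked wptr.
 */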
4698 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4699 {
4700         u32 wptr, tmp;
4701
4702         if (rdev->wb.enabled)
4703                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4704         else
4705                 wptr = RREG32(IH_RB_WPTR);
4706
4707         if (wptr & RB_OVERFLOW) {
4708                 /* When a ring buffer overflow happens, start parsing interrupts
4709                  * from the last non-overwritten vector (wptr + 16). Hopefully
4710                  * this allows us to catch up.
4711                  */
4712                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4713                         wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4714                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4715                 tmp = RREG32(IH_RB_CNTL);
4716                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4717                 WREG32(IH_RB_CNTL, tmp);
4718         }
4719         return (wptr & rdev->ih.ptr_mask);
4720 }
4721
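/**
 * evergreen_irq_process - process the pending interrupt vectors
 *
 * @rdev: radeon_device pointer
 *
 * Walk the IH ring between rptr and wptr and dispatch the decoded
 * interrupt sources (CRTC vblank/vline, pageflip, hotplug, HDMI
 * audio, ring and DMA interrupts, etc.).
 */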
4722 int evergreen_irq_process(struct radeon_device *rdev)
4723 {
4724         u32 wptr;
4725         u32 rptr;
4726         u32 src_id, src_data;
4727         u32 ring_index;
4728         bool queue_hotplug = false;
4729         bool queue_hdmi = false;
4730         bool queue_thermal = false;
4731         u32 status, addr;
4732
4733         if (!rdev->ih.enabled || rdev->shutdown)
4734                 return IRQ_NONE;
4735
4736         wptr = evergreen_get_ih_wptr(rdev);
4737
4738 restart_ih:
4739         /* is somebody else already processing irqs? */
4740         if (atomic_xchg(&rdev->ih.lock, 1))
4741                 return IRQ_NONE;
4742
4743         rptr = rdev->ih.rptr;
4744         DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4745
4746         /* Order reading of wptr vs. reading of IH ring data */
4747         rmb();
4748
4749         /* display interrupts */
4750         evergreen_irq_ack(rdev);
4751
4752         while (rptr != wptr) {
4753                 /* wptr/rptr are in bytes! */
4754                 ring_index = rptr / 4;
4755                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4756                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4757
4758                 switch (src_id) {
4759                 case 1: /* D1 vblank/vline */
4760                         switch (src_data) {
4761                         case 0: /* D1 vblank */
4762                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4763                                         if (rdev->irq.crtc_vblank_int[0]) {
4764                                                 drm_handle_vblank(rdev->ddev, 0);
4765                                                 rdev->pm.vblank_sync = true;
4766                                                 wake_up(&rdev->irq.vblank_queue);
4767                                         }
4768                                         if (atomic_read(&rdev->irq.pflip[0]))
4769                                                 radeon_crtc_handle_flip(rdev, 0);
4770                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4771                                         DRM_DEBUG("IH: D1 vblank\n");
4772                                 }
4773                                 break;
4774                         case 1: /* D1 vline */
4775                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4776                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4777                                         DRM_DEBUG("IH: D1 vline\n");
4778                                 }
4779                                 break;
4780                         default:
4781                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4782                                 break;
4783                         }
4784                         break;
4785                 case 2: /* D2 vblank/vline */
4786                         switch (src_data) {
4787                         case 0: /* D2 vblank */
4788                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4789                                         if (rdev->irq.crtc_vblank_int[1]) {
4790                                                 drm_handle_vblank(rdev->ddev, 1);
4791                                                 rdev->pm.vblank_sync = true;
4792                                                 wake_up(&rdev->irq.vblank_queue);
4793                                         }
4794                                         if (atomic_read(&rdev->irq.pflip[1]))
4795                                                 radeon_crtc_handle_flip(rdev, 1);
4796                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4797                                         DRM_DEBUG("IH: D2 vblank\n");
4798                                 }
4799                                 break;
4800                         case 1: /* D2 vline */
4801                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4802                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4803                                         DRM_DEBUG("IH: D2 vline\n");
4804                                 }
4805                                 break;
4806                         default:
4807                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4808                                 break;
4809                         }
4810                         break;
4811                 case 3: /* D3 vblank/vline */
4812                         switch (src_data) {
4813                         case 0: /* D3 vblank */
4814                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4815                                         if (rdev->irq.crtc_vblank_int[2]) {
4816                                                 drm_handle_vblank(rdev->ddev, 2);
4817                                                 rdev->pm.vblank_sync = true;
4818                                                 wake_up(&rdev->irq.vblank_queue);
4819                                         }
4820                                         if (atomic_read(&rdev->irq.pflip[2]))
4821                                                 radeon_crtc_handle_flip(rdev, 2);
4822                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4823                                         DRM_DEBUG("IH: D3 vblank\n");
4824                                 }
4825                                 break;
4826                         case 1: /* D3 vline */
4827                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4828                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4829                                         DRM_DEBUG("IH: D3 vline\n");
4830                                 }
4831                                 break;
4832                         default:
4833                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4834                                 break;
4835                         }
4836                         break;
4837                 case 4: /* D4 vblank/vline */
4838                         switch (src_data) {
4839                         case 0: /* D4 vblank */
4840                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4841                                         if (rdev->irq.crtc_vblank_int[3]) {
4842                                                 drm_handle_vblank(rdev->ddev, 3);
4843                                                 rdev->pm.vblank_sync = true;
4844                                                 wake_up(&rdev->irq.vblank_queue);
4845                                         }
4846                                         if (atomic_read(&rdev->irq.pflip[3]))
4847                                                 radeon_crtc_handle_flip(rdev, 3);
4848                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4849                                         DRM_DEBUG("IH: D4 vblank\n");
4850                                 }
4851                                 break;
4852                         case 1: /* D4 vline */
4853                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4854                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4855                                         DRM_DEBUG("IH: D4 vline\n");
4856                                 }
4857                                 break;
4858                         default:
4859                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4860                                 break;
4861                         }
4862                         break;
4863                 case 5: /* D5 vblank/vline */
4864                         switch (src_data) {
4865                         case 0: /* D5 vblank */
4866                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4867                                         if (rdev->irq.crtc_vblank_int[4]) {
4868                                                 drm_handle_vblank(rdev->ddev, 4);
4869                                                 rdev->pm.vblank_sync = true;
4870                                                 wake_up(&rdev->irq.vblank_queue);
4871                                         }
4872                                         if (atomic_read(&rdev->irq.pflip[4]))
4873                                                 radeon_crtc_handle_flip(rdev, 4);
4874                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4875                                         DRM_DEBUG("IH: D5 vblank\n");
4876                                 }
4877                                 break;
4878                         case 1: /* D5 vline */
4879                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4880                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4881                                         DRM_DEBUG("IH: D5 vline\n");
4882                                 }
4883                                 break;
4884                         default:
4885                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4886                                 break;
4887                         }
4888                         break;
4889                 case 6: /* D6 vblank/vline */
4890                         switch (src_data) {
4891                         case 0: /* D6 vblank */
4892                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4893                                         if (rdev->irq.crtc_vblank_int[5]) {
4894                                                 drm_handle_vblank(rdev->ddev, 5);
4895                                                 rdev->pm.vblank_sync = true;
4896                                                 wake_up(&rdev->irq.vblank_queue);
4897                                         }
4898                                         if (atomic_read(&rdev->irq.pflip[5]))
4899                                                 radeon_crtc_handle_flip(rdev, 5);
4900                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4901                                         DRM_DEBUG("IH: D6 vblank\n");
4902                                 }
4903                                 break;
4904                         case 1: /* D6 vline */
4905                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4906                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4907                                         DRM_DEBUG("IH: D6 vline\n");
4908                                 }
4909                                 break;
4910                         default:
4911                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4912                                 break;
4913                         }
4914                         break;
4915                 case 42: /* HPD hotplug */
4916                         switch (src_data) {
4917                         case 0:
4918                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4919                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4920                                         queue_hotplug = true;
4921                                         DRM_DEBUG("IH: HPD1\n");
4922                                 }
4923                                 break;
4924                         case 1:
4925                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4926                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4927                                         queue_hotplug = true;
4928                                         DRM_DEBUG("IH: HPD2\n");
4929                                 }
4930                                 break;
4931                         case 2:
4932                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4933                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4934                                         queue_hotplug = true;
4935                                         DRM_DEBUG("IH: HPD3\n");
4936                                 }
4937                                 break;
4938                         case 3:
4939                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4940                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4941                                         queue_hotplug = true;
4942                                         DRM_DEBUG("IH: HPD4\n");
4943                                 }
4944                                 break;
4945                         case 4:
4946                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4947                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4948                                         queue_hotplug = true;
4949                                         DRM_DEBUG("IH: HPD5\n");
4950                                 }
4951                                 break;
4952                         case 5:
4953                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4954                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4955                                         queue_hotplug = true;
4956                                         DRM_DEBUG("IH: HPD6\n");
4957                                 }
4958                                 break;
4959                         default:
4960                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4961                                 break;
4962                         }
4963                         break;
4964                 case 44: /* hdmi */
4965                         switch (src_data) {
4966                         case 0:
4967                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4968                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4969                                         queue_hdmi = true;
4970                                         DRM_DEBUG("IH: HDMI0\n");
4971                                 }
4972                                 break;
4973                         case 1:
4974                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4975                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4976                                         queue_hdmi = true;
4977                                         DRM_DEBUG("IH: HDMI1\n");
4978                                 }
4979                                 break;
4980                         case 2:
4981                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4982                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4983                                         queue_hdmi = true;
4984                                         DRM_DEBUG("IH: HDMI2\n");
4985                                 }
4986                                 break;
4987                         case 3:
4988                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4989                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4990                                         queue_hdmi = true;
4991                                         DRM_DEBUG("IH: HDMI3\n");
4992                                 }
4993                                 break;
4994                         case 4:
4995                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4996                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4997                                         queue_hdmi = true;
4998                                         DRM_DEBUG("IH: HDMI4\n");
4999                                 }
5000                                 break;
5001                         case 5:
5002                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5003                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5004                                         queue_hdmi = true;
5005                                         DRM_DEBUG("IH: HDMI5\n");
5006                                 }
5007                                 break;
5008                         default:
5009                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5010                                 break;
5011                         }
                             break;
5012                 case 124: /* UVD */
5013                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5014                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5015                         break;
5016                 case 146:
5017                 case 147:
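                             /* GPU VM page fault: report and decode it, then clear the fault registers */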
5018                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5019                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5020                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5021                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5022                                 addr);
5023                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5024                                 status);
5025                         cayman_vm_decode_fault(rdev, status, addr);
5026                         /* reset addr and status */
5027                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5028                         break;
5029                 case 176: /* CP_INT in ring buffer */
5030                 case 177: /* CP_INT in IB1 */
5031                 case 178: /* CP_INT in IB2 */
5032                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5033                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5034                         break;
5035                 case 181: /* CP EOP event */
5036                         DRM_DEBUG("IH: CP EOP\n");
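                             /* cayman and newer have three CP rings; src_data selects which ring's fences to process */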
5037                         if (rdev->family >= CHIP_CAYMAN) {
5038                                 switch (src_data) {
5039                                 case 0:
5040                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5041                                         break;
5042                                 case 1:
5043                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5044                                         break;
5045                                 case 2:
5046                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5047                                         break;
5048                                 }
5049                         } else
5050                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5051                         break;
5052                 case 224: /* DMA trap event */
5053                         DRM_DEBUG("IH: DMA trap\n");
5054                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5055                         break;
5056                 case 230: /* thermal low to high */
5057                         DRM_DEBUG("IH: thermal low to high\n");
5058                         rdev->pm.dpm.thermal.high_to_low = false;
5059                         queue_thermal = true;
5060                         break;
5061                 case 231: /* thermal high to low */
5062                         DRM_DEBUG("IH: thermal high to low\n");
5063                         rdev->pm.dpm.thermal.high_to_low = true;
5064                         queue_thermal = true;
5065                         break;
5066                 case 233: /* GUI IDLE */
5067                         DRM_DEBUG("IH: GUI idle\n");
5068                         break;
5069                 case 244: /* DMA1 trap event */
5070                         if (rdev->family >= CHIP_CAYMAN) {
5071                                 DRM_DEBUG("IH: DMA1 trap\n");
5072                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5073                         }
5074                         break;
5075                 default:
5076                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5077                         break;
5078                 }
5079
5080                 /* wptr/rptr are in bytes! */
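                     /* each IH ring entry is 128 bits (4 dwords), hence the 16 byte step */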
5081                 rptr += 16;
5082                 rptr &= rdev->ih.ptr_mask;
5083         }
5084         if (queue_hotplug)
5085                 schedule_work(&rdev->hotplug_work);
5086         if (queue_hdmi)
5087                 schedule_work(&rdev->audio_work);
5088         if (queue_thermal && rdev->pm.dpm_enabled)
5089                 schedule_work(&rdev->pm.dpm.thermal.work);
5090         rdev->ih.rptr = rptr;
5091         WREG32(IH_RB_RPTR, rdev->ih.rptr);
5092         atomic_set(&rdev->ih.lock, 0);
5093
5094         /* make sure wptr hasn't changed while processing */
5095         wptr = evergreen_get_ih_wptr(rdev);
5096         if (wptr != rptr)
5097                 goto restart_ih;
5098
5099         return IRQ_HANDLED;
5100 }
5101
5102 static int evergreen_startup(struct radeon_device *rdev)
5103 {
5104         struct radeon_ring *ring;
5105         int r;
5106
5107         /* enable pcie gen2 link */
5108         evergreen_pcie_gen2_enable(rdev);
5109         /* enable aspm */
5110         evergreen_program_aspm(rdev);
5111
5112         /* scratch needs to be initialized before MC */
5113         r = r600_vram_scratch_init(rdev);
5114         if (r)
5115                 return r;
5116
5117         evergreen_mc_program(rdev);
5118
5119         if (ASIC_IS_DCE5(rdev)) {
5120                 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5121                         r = ni_init_microcode(rdev);
5122                         if (r) {
5123                                 DRM_ERROR("Failed to load firmware!\n");
5124                                 return r;
5125                         }
5126                 }
5127                 r = ni_mc_load_microcode(rdev);
5128                 if (r) {
5129                         DRM_ERROR("Failed to load MC firmware!\n");
5130                         return r;
5131                 }
5132         } else {
5133                 if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5134                         r = r600_init_microcode(rdev);
5135                         if (r) {
5136                                 DRM_ERROR("Failed to load firmware!\n");
5137                                 return r;
5138                         }
5139                 }
5140         }
5141
5142         if (rdev->flags & RADEON_IS_AGP) {
5143                 evergreen_agp_enable(rdev);
5144         } else {
5145                 r = evergreen_pcie_gart_enable(rdev);
5146                 if (r)
5147                         return r;
5148         }
5149         evergreen_gpu_init(rdev);
5150
5151         /* allocate rlc buffers */
5152         if (rdev->flags & RADEON_IS_IGP) {
5153                 rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5154                 rdev->rlc.reg_list_size =
5155                         (u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5156                 rdev->rlc.cs_data = evergreen_cs_data;
5157                 r = sumo_rlc_init(rdev);
5158                 if (r) {
5159                         DRM_ERROR("Failed to init rlc BOs!\n");
5160                         return r;
5161                 }
5162         }
5163
5164         /* allocate wb buffer */
5165         r = radeon_wb_init(rdev);
5166         if (r)
5167                 return r;
5168
5169         r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5170         if (r) {
5171                 dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5172                 return r;
5173         }
5174
5175         r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5176         if (r) {
5177                 dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5178                 return r;
5179         }
5180
5181         r = uvd_v2_2_resume(rdev);
5182         if (!r) {
5183                 r = radeon_fence_driver_start_ring(rdev,
5184                                                    R600_RING_TYPE_UVD_INDEX);
5185                 if (r)
5186                         dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5187         }
5188
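             /* a UVD init failure is not fatal; a zero ring size disables the UVD ring below */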
5189         if (r)
5190                 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5191
5192         /* Enable IRQ */
5193         if (!rdev->irq.installed) {
5194                 r = radeon_irq_kms_init(rdev);
5195                 if (r)
5196                         return r;
5197         }
5198
5199         r = r600_irq_init(rdev);
5200         if (r) {
5201                 DRM_ERROR("radeon: IH init failed (%d).\n", r);
5202                 radeon_irq_kms_fini(rdev);
5203                 return r;
5204         }
5205         evergreen_irq_set(rdev);
5206
5207         ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5208         r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5209                              R600_CP_RB_RPTR, R600_CP_RB_WPTR,
5210                              RADEON_CP_PACKET2);
5211         if (r)
5212                 return r;
5213
5214         ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5215         r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5216                              DMA_RB_RPTR, DMA_RB_WPTR,
5217                              DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5218         if (r)
5219                 return r;
5220
5221         r = evergreen_cp_load_microcode(rdev);
5222         if (r)
5223                 return r;
5224         r = evergreen_cp_resume(rdev);
5225         if (r)
5226                 return r;
5227         r = r600_dma_resume(rdev);
5228         if (r)
5229                 return r;
5230
5231         ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5232         if (ring->ring_size) {
5233                 r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5234                                      UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
5235                                      RADEON_CP_PACKET2);
5236                 if (!r)
5237                         r = uvd_v1_0_init(rdev);
5238
5239                 if (r)
5240                         DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5241         }
5242
5243         r = radeon_ib_pool_init(rdev);
5244         if (r) {
5245                 dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5246                 return r;
5247         }
5248
5249         r = r600_audio_init(rdev);
5250         if (r) {
5251                 DRM_ERROR("radeon: audio init failed\n");
5252                 return r;
5253         }
5254
5255         return 0;
5256 }
5257
5258 int evergreen_resume(struct radeon_device *rdev)
5259 {
5260         int r;
5261
5262         /* reset the asic, the gfx blocks are often in a bad state
5263          * after the driver is unloaded or after a resume
5264          */
5265         if (radeon_asic_reset(rdev))
5266                 dev_warn(rdev->dev, "GPU reset failed!\n");
5267         /* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
5268          * posting performs the necessary tasks to bring the GPU back into
5269          * good shape.
5270          */
5271         /* post card */
5272         atom_asic_init(rdev->mode_info.atom_context);
5273
5274         /* init golden registers */
5275         evergreen_init_golden_registers(rdev);
5276
5277         rdev->accel_working = true;
5278         r = evergreen_startup(rdev);
5279         if (r) {
5280                 DRM_ERROR("evergreen startup failed on resume\n");
5281                 rdev->accel_working = false;
5282                 return r;
5283         }
5284
5285         return r;
5286
5287 }
5288
5289 int evergreen_suspend(struct radeon_device *rdev)
5290 {
5291         r600_audio_fini(rdev);
5292         uvd_v1_0_fini(rdev);
5293         radeon_uvd_suspend(rdev);
5294         r700_cp_stop(rdev);
5295         r600_dma_stop(rdev);
5296         evergreen_irq_suspend(rdev);
5297         radeon_wb_disable(rdev);
5298         evergreen_pcie_gart_disable(rdev);
5299
5300         return 0;
5301 }
5302
5303 /* The plan is to move initialization into this function and use
5304  * helper functions so that radeon_device_init does little more
5305  * than call asic specific functions. This should also allow us
5306  * to remove a bunch of callback functions
5307  * like vram_info.
5308  */
5309 int evergreen_init(struct radeon_device *rdev)
5310 {
5311         int r;
5312
5313         /* Read BIOS */
5314         if (!radeon_get_bios(rdev)) {
5315                 if (ASIC_IS_AVIVO(rdev))
5316                         return -EINVAL;
5317         }
5318         /* Must be an ATOMBIOS */
5319         if (!rdev->is_atom_bios) {
5320                 dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5321                 return -EINVAL;
5322         }
5323         r = radeon_atombios_init(rdev);
5324         if (r)
5325                 return r;
5326         /* reset the asic, the gfx blocks are often in a bad state
5327          * after the driver is unloaded or after a resume
5328          */
5329         if (radeon_asic_reset(rdev))
5330                 dev_warn(rdev->dev, "GPU reset failed!\n");
5331         /* Post card if necessary */
5332         if (!radeon_card_posted(rdev)) {
5333                 if (!rdev->bios) {
5334                         dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5335                         return -EINVAL;
5336                 }
5337                 DRM_INFO("GPU not posted. posting now...\n");
5338                 atom_asic_init(rdev->mode_info.atom_context);
5339         }
5340         /* init golden registers */
5341         evergreen_init_golden_registers(rdev);
5342         /* Initialize scratch registers */
5343         r600_scratch_init(rdev);
5344         /* Initialize surface registers */
5345         radeon_surface_init(rdev);
5346         /* Initialize clocks */
5347         radeon_get_clock_info(rdev->ddev);
5348         /* Fence driver */
5349         r = radeon_fence_driver_init(rdev);
5350         if (r)
5351                 return r;
5352         /* initialize AGP */
5353         if (rdev->flags & RADEON_IS_AGP) {
5354                 r = radeon_agp_init(rdev);
5355                 if (r)
5356                         radeon_agp_disable(rdev);
5357         }
5358         /* initialize memory controller */
5359         r = evergreen_mc_init(rdev);
5360         if (r)
5361                 return r;
5362         /* Memory manager */
5363         r = radeon_bo_init(rdev);
5364         if (r)
5365                 return r;
5366
5367         rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5368         r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5369
5370         rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5371         r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5372
5373         r = radeon_uvd_init(rdev);
5374         if (!r) {
5375                 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5376                 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5377                                4096);
5378         }
5379
5380         rdev->ih.ring_obj = NULL;
5381         r600_ih_ring_init(rdev, 64 * 1024);
5382
5383         r = r600_pcie_gart_init(rdev);
5384         if (r)
5385                 return r;
5386
5387         rdev->accel_working = true;
5388         r = evergreen_startup(rdev);
5389         if (r) {
5390                 dev_err(rdev->dev, "disabling GPU acceleration\n");
5391                 r700_cp_fini(rdev);
5392                 r600_dma_fini(rdev);
5393                 r600_irq_fini(rdev);
5394                 if (rdev->flags & RADEON_IS_IGP)
5395                         sumo_rlc_fini(rdev);
5396                 radeon_wb_fini(rdev);
5397                 radeon_ib_pool_fini(rdev);
5398                 radeon_irq_kms_fini(rdev);
5399                 evergreen_pcie_gart_fini(rdev);
5400                 rdev->accel_working = false;
5401         }
5402
5403         /* Don't start up if the MC ucode is missing on BTC parts.
5404          * The default clocks and voltages before the MC ucode
5405          * is loaded are not sufficient for advanced operations.
5406          */
5407         if (ASIC_IS_DCE5(rdev)) {
5408                 if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5409                         DRM_ERROR("radeon: MC ucode required for NI+.\n");
5410                         return -EINVAL;
5411                 }
5412         }
5413
5414         return 0;
5415 }
5416
5417 void evergreen_fini(struct radeon_device *rdev)
5418 {
5419         r600_audio_fini(rdev);
5420         r700_cp_fini(rdev);
5421         r600_dma_fini(rdev);
5422         r600_irq_fini(rdev);
5423         if (rdev->flags & RADEON_IS_IGP)
5424                 sumo_rlc_fini(rdev);
5425         radeon_wb_fini(rdev);
5426         radeon_ib_pool_fini(rdev);
5427         radeon_irq_kms_fini(rdev);
5428         evergreen_pcie_gart_fini(rdev);
5429         uvd_v1_0_fini(rdev);
5430         radeon_uvd_fini(rdev);
5431         r600_vram_scratch_fini(rdev);
5432         radeon_gem_fini(rdev);
5433         radeon_fence_driver_fini(rdev);
5434         radeon_agp_fini(rdev);
5435         radeon_bo_fini(rdev);
5436         radeon_atombios_fini(rdev);
5437         kfree(rdev->bios);
5438         rdev->bios = NULL;
5439 }
5440
5441 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5442 {
5443         u32 link_width_cntl, speed_cntl;
5444
5445         if (radeon_pcie_gen2 == 0)
5446                 return;
5447
5448         if (rdev->flags & RADEON_IS_IGP)
5449                 return;
5450
5451         if (!(rdev->flags & RADEON_IS_PCIE))
5452                 return;
5453
5454         /* x2 cards have a special sequence */
5455         if (ASIC_IS_X2(rdev))
5456                 return;
5457
5458         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5459                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5460                 return;
5461
5462         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5463         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5464                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5465                 return;
5466         }
5467
5468         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5469
5470         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5471             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5472
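                     /* allow upconfig, clear the speed override, pulse the failed speed change counter clear, then set the gen2 strap */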
5473                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5474                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5475                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5476
5477                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5478                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5479                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5480
5481                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5482                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5483                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5484
5485                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5486                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5487                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5488
5489                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5490                 speed_cntl |= LC_GEN2_EN_STRAP;
5491                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5492
5493         } else {
5494                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5495                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5496                 if (1)
5497                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5498                 else
5499                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5500                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5501         }
5502 }
5503
5504 void evergreen_program_aspm(struct radeon_device *rdev)
5505 {
5506         u32 data, orig;
5507         u32 pcie_lc_cntl, pcie_lc_cntl_old;
5508         bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5509         /* fusion_platform should be true
5510          * if the system is a fusion system
5511          * (an APU, or a dGPU in a fusion system).
5512          * todo: actually detect whether the system is a fusion platform.
5513          */
5514         bool fusion_platform = false;
5515
5516         if (radeon_aspm == 0)
5517                 return;
5518
5519         if (!(rdev->flags & RADEON_IS_PCIE))
5520                 return;
5521
5522         switch (rdev->family) {
5523         case CHIP_CYPRESS:
5524         case CHIP_HEMLOCK:
5525         case CHIP_JUNIPER:
5526         case CHIP_REDWOOD:
5527         case CHIP_CEDAR:
5528         case CHIP_SUMO:
5529         case CHIP_SUMO2:
5530         case CHIP_PALM:
5531         case CHIP_ARUBA:
5532                 disable_l0s = true;
5533                 break;
5534         default:
5535                 disable_l0s = false;
5536                 break;
5537         }
5538
5539         if (rdev->flags & RADEON_IS_IGP)
5540                 fusion_platform = true; /* XXX also dGPUs in a fusion system */
5541
5542         data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5543         if (fusion_platform)
5544                 data &= ~MULTI_PIF;
5545         else
5546                 data |= MULTI_PIF;
5547         if (data != orig)
5548                 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5549
5550         data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5551         if (fusion_platform)
5552                 data &= ~MULTI_PIF;
5553         else
5554                 data |= MULTI_PIF;
5555         if (data != orig)
5556                 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
5557
5558         pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5559         pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
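             /* re-program the L0s/L1 inactivity timeouts; barts and newer parts use different values */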
5560         if (!disable_l0s) {
5561                 if (rdev->family >= CHIP_BARTS)
5562                         pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5563                 else
5564                         pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5565         }
5566
5567         if (!disable_l1) {
5568                 if (rdev->family >= CHIP_BARTS)
5569                         pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5570                 else
5571                         pcie_lc_cntl |= LC_L1_INACTIVITY(8);
5572
5573                 if (!disable_plloff_in_l1) {
5574                         data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5575                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5576                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5577                         if (data != orig)
5578                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5579
5580                         data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5581                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5582                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5583                         if (data != orig)
5584                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5585
5586                         data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5587                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5588                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5589                         if (data != orig)
5590                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5591
5592                         data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5593                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5594                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5595                         if (data != orig)
5596                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5597
5598                         if (rdev->family >= CHIP_BARTS) {
5599                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5600                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5601                                 data |= PLL_RAMP_UP_TIME_0(4);
5602                                 if (data != orig)
5603                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5604
5605                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5606                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5607                                 data |= PLL_RAMP_UP_TIME_1(4);
5608                                 if (data != orig)
5609                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5610
5611                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5612                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5613                                 data |= PLL_RAMP_UP_TIME_0(4);
5614                                 if (data != orig)
5615                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5616
5617                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5618                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5619                                 data |= PLL_RAMP_UP_TIME_1(4);
5620                                 if (data != orig)
5621                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5622                         }
5623
5624                         data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5625                         data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5626                         data |= LC_DYN_LANES_PWR_STATE(3);
5627                         if (data != orig)
5628                                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5629
5630                         if (rdev->family >= CHIP_BARTS) {
5631                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5632                                 data &= ~LS2_EXIT_TIME_MASK;
5633                                 data |= LS2_EXIT_TIME(1);
5634                                 if (data != orig)
5635                                         WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5636
5637                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5638                                 data &= ~LS2_EXIT_TIME_MASK;
5639                                 data |= LS2_EXIT_TIME(1);
5640                                 if (data != orig)
5641                                         WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5642                         }
5643                 }
5644         }
5645
5646         /* evergreen parts only */
5647         if (rdev->family < CHIP_BARTS)
5648                 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
5649
5650         if (pcie_lc_cntl != pcie_lc_cntl_old)
5651                 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
5652 }