Merge branch 'drm-next' of git://people.freedesktop.org/~airlied/linux
[cascardo/linux.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
/*
 * MMIO base offsets of the register blocks for the six Evergreen display
 * controllers (CRTC0-CRTC5); indexed by CRTC id.
 */
static const u32 crtc_offsets[6] =
{
        EVERGREEN_CRTC0_REGISTER_OFFSET,
        EVERGREEN_CRTC1_REGISTER_OFFSET,
        EVERGREEN_CRTC2_REGISTER_OFFSET,
        EVERGREEN_CRTC3_REGISTER_OFFSET,
        EVERGREEN_CRTC4_REGISTER_OFFSET,
        EVERGREEN_CRTC5_REGISTER_OFFSET
};
47
48 #include "clearstate_evergreen.h"
49
/*
 * Raw MMIO register offsets for Sumo-class parts.
 * NOTE(review): per the identifier, this is the list of registers the RLC
 * microcontroller saves/restores (e.g. around power gating); the consumer
 * is not visible in this chunk — confirm against the RLC setup code.
 * Do not reorder or edit values without hardware documentation.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
        0x98fc,
        0x9830,
        0x9834,
        0x9838,
        0x9870,
        0x9874,
        0x8a14,
        0x8b24,
        0x8bcc,
        0x8b10,
        0x8d00,
        0x8d04,
        0x8c00,
        0x8c04,
        0x8c08,
        0x8c0c,
        0x8d8c,
        0x8c20,
        0x8c24,
        0x8c28,
        0x8c18,
        0x8c1c,
        0x8cf0,
        0x8e2c,
        0x8e38,
        0x8c30,
        0x9508,
        0x9688,
        0x9608,
        0x960c,
        0x9610,
        0x9614,
        0x88c4,
        0x88d4,
        0xa008,
        0x900c,
        0x9100,
        0x913c,
        0x98f8,
        0x98f4,
        0x9b7c,
        0x3f8c,
        0x8950,
        0x8954,
        0x8a18,
        0x8b28,
        0x9144,
        0x9148,
        0x914c,
        0x3f90,
        0x3f94,
        0x915c,
        0x9160,
        0x9178,
        0x917c,
        0x9180,
        0x918c,
        0x9190,
        0x9194,
        0x9198,
        0x919c,
        0x91a8,
        0x91ac,
        0x91b0,
        0x91b4,
        0x91b8,
        0x91c4,
        0x91c8,
        0x91cc,
        0x91d0,
        0x91d4,
        0x91e0,
        0x91e4,
        0x91ec,
        0x91f0,
        0x91f4,
        0x9200,
        0x9204,
        0x929c,
        0x9150,
        0x802c,
};
134
/* Forward declarations for this file's own routines. */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
/* Prototypes for helpers implemented in the cayman/si/cik ASIC files,
 * shared here because evergreen code paths also run on those parts. */
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
                                     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
                                   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

/* Clear-state buffer (CSB) sizing/fill helpers from the si/cik files. */
extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
149
/*
 * "Golden" register settings for baseline Evergreen parts.
 * NOTE(review): rows appear to be {MMIO offset, mask, value} triplets
 * applied by a register-sequence programmer elsewhere in the driver (not
 * visible in this chunk) — confirm the consumer before relying on layout.
 * Values are vendor-supplied magic numbers; do not edit.
 */
static const u32 evergreen_golden_registers[] =
{
        0x3f90, 0xffff0000, 0xff000000,
        0x9148, 0xffff0000, 0xff000000,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8a14, 0xffffffff, 0x00000007,
        0x8b10, 0xffffffff, 0x00000000,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000010,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0xffffffff, 0x001000f0,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x10830, 0xffffffff, 0x00000011,
        0x11430, 0xffffffff, 0x00000011,
        0x12030, 0xffffffff, 0x00000011,
        0x12c30, 0xffffffff, 0x00000011,
        0xd02c, 0xffffffff, 0x08421000,
        0x240c, 0xffffffff, 0x00000380,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x28a4c, 0x06000000, 0x06000000,
        0x10c, 0x00000001, 0x00000001,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8c04, 0xffffffff, 0x40600060,
        0x8c08, 0xffffffff, 0x001c001c,
        0x8cf0, 0xffffffff, 0x08e00620,
        0x8c20, 0xffffffff, 0x00800080,
        0x8c24, 0xffffffff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0xffffffff, 0x00001010,
        0x28350, 0xffffffff, 0x00000000,
        0xa008, 0xffffffff, 0x00010000,
        0x5cc, 0xffffffff, 0x00000001,
        0x9508, 0xffffffff, 0x00000002,
        0x913c, 0x0000000f, 0x0000000a
};
195
/*
 * Second golden-register set for Evergreen; same apparent {offset, mask,
 * value} row layout as evergreen_golden_registers above. Every row here
 * clears its register to zero (mask 0xffffffff, value 0).
 */
static const u32 evergreen_golden_registers2[] =
{
        0x2f4c, 0xffffffff, 0x00000000,
        0x54f4, 0xffffffff, 0x00000000,
        0x54f0, 0xffffffff, 0x00000000,
        0x5498, 0xffffffff, 0x00000000,
        0x549c, 0xffffffff, 0x00000000,
        0x5494, 0xffffffff, 0x00000000,
        0x53cc, 0xffffffff, 0x00000000,
        0x53c8, 0xffffffff, 0x00000000,
        0x53c4, 0xffffffff, 0x00000000,
        0x53c0, 0xffffffff, 0x00000000,
        0x53bc, 0xffffffff, 0x00000000,
        0x53b8, 0xffffffff, 0x00000000,
        0x53b4, 0xffffffff, 0x00000000,
        0x53b0, 0xffffffff, 0x00000000
};
213
/*
 * Clock-gating init sequence for Cypress ("mgcg" presumably medium-grain
 * clock gating — name-based, confirm). Same apparent {offset, mask, value}
 * row layout as the golden-register tables. Note that 0x802c is rewritten
 * several times (0xc0000000 / 0x40000000 / 0x40010000), each time followed
 * by a near-identical run of 0x91xx writes — it looks like an index/bank
 * select for that block; NOTE(review): confirm against the register spec.
 */
static const u32 cypress_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        /* first 0x802c selection, then a 0x91xx sequence */
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9208, 0xffffffff, 0x00070000,
        0x920c, 0xffffffff, 0x00030002,
        0x9210, 0xffffffff, 0x00050004,
        0x921c, 0xffffffff, 0x00010006,
        0x9220, 0xffffffff, 0x00090008,
        0x9224, 0xffffffff, 0x00070000,
        0x9228, 0xffffffff, 0x00030002,
        0x922c, 0xffffffff, 0x00050004,
        0x9238, 0xffffffff, 0x00010006,
        0x923c, 0xffffffff, 0x00090008,
        0x9240, 0xffffffff, 0x00070000,
        0x9244, 0xffffffff, 0x00030002,
        0x9248, 0xffffffff, 0x00050004,
        0x9254, 0xffffffff, 0x00010006,
        0x9258, 0xffffffff, 0x00090008,
        0x925c, 0xffffffff, 0x00070000,
        0x9260, 0xffffffff, 0x00030002,
        0x9264, 0xffffffff, 0x00050004,
        0x9270, 0xffffffff, 0x00010006,
        0x9274, 0xffffffff, 0x00090008,
        0x9278, 0xffffffff, 0x00070000,
        0x927c, 0xffffffff, 0x00030002,
        0x9280, 0xffffffff, 0x00050004,
        0x928c, 0xffffffff, 0x00010006,
        0x9290, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        /* second 0x802c selection, then the same 0x91xx sequence again */
        0x802c, 0xffffffff, 0x40010000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9208, 0xffffffff, 0x00070000,
        0x920c, 0xffffffff, 0x00030002,
        0x9210, 0xffffffff, 0x00050004,
        0x921c, 0xffffffff, 0x00010006,
        0x9220, 0xffffffff, 0x00090008,
        0x9224, 0xffffffff, 0x00070000,
        0x9228, 0xffffffff, 0x00030002,
        0x922c, 0xffffffff, 0x00050004,
        0x9238, 0xffffffff, 0x00010006,
        0x923c, 0xffffffff, 0x00090008,
        0x9240, 0xffffffff, 0x00070000,
        0x9244, 0xffffffff, 0x00030002,
        0x9248, 0xffffffff, 0x00050004,
        0x9254, 0xffffffff, 0x00010006,
        0x9258, 0xffffffff, 0x00090008,
        0x925c, 0xffffffff, 0x00070000,
        0x9260, 0xffffffff, 0x00030002,
        0x9264, 0xffffffff, 0x00050004,
        0x9270, 0xffffffff, 0x00010006,
        0x9274, 0xffffffff, 0x00090008,
        0x9278, 0xffffffff, 0x00070000,
        0x927c, 0xffffffff, 0x00030002,
        0x9280, 0xffffffff, 0x00050004,
        0x928c, 0xffffffff, 0x00010006,
        0x9290, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        /* restore 0x802c to its initial value */
        0x802c, 0xffffffff, 0xc0000000
};
366
/*
 * Clock-gating init sequence for Redwood. Same apparent {offset, mask,
 * value} row layout as cypress_mgcg_init; Redwood programs only one
 * (shorter) 0x91xx sub-sequence between the 0x802c writes.
 */
static const u32 redwood_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000
};
438
/*
 * Golden register settings for Cedar. Near-identical to
 * evergreen_golden_registers, with fewer display-related rows and a few
 * Cedar-specific values (e.g. 0x88d4, 0x8cf0); no trailing 0x913c row.
 * Same apparent {offset, mask, value} row layout.
 */
static const u32 cedar_golden_registers[] =
{
        0x3f90, 0xffff0000, 0xff000000,
        0x9148, 0xffff0000, 0xff000000,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8a14, 0xffffffff, 0x00000007,
        0x8b10, 0xffffffff, 0x00000000,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000000,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0xffffffff, 0x001000f0,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x10830, 0xffffffff, 0x00000011,
        0x11430, 0xffffffff, 0x00000011,
        0xd02c, 0xffffffff, 0x08421000,
        0x240c, 0xffffffff, 0x00000380,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x28a4c, 0x06000000, 0x06000000,
        0x10c, 0x00000001, 0x00000001,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8c04, 0xffffffff, 0x40600060,
        0x8c08, 0xffffffff, 0x001c001c,
        0x8cf0, 0xffffffff, 0x08e00410,
        0x8c20, 0xffffffff, 0x00800080,
        0x8c24, 0xffffffff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0xffffffff, 0x00001010,
        0x28350, 0xffffffff, 0x00000000,
        0xa008, 0xffffffff, 0x00010000,
        0x5cc, 0xffffffff, 0x00000001,
        0x9508, 0xffffffff, 0x00000002
};
481
/*
 * Clock-gating init sequence for Cedar. Same common prefix as
 * cypress_mgcg_init, followed by a short Cedar-specific 0x91xx
 * sub-sequence bracketed by the 0x802c writes.
 */
static const u32 cedar_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9178, 0xffffffff, 0x00050000,
        0x917c, 0xffffffff, 0x00030002,
        0x918c, 0xffffffff, 0x00010004,
        0x9190, 0xffffffff, 0x00070006,
        0x9194, 0xffffffff, 0x00050000,
        0x9198, 0xffffffff, 0x00030002,
        0x91a8, 0xffffffff, 0x00010004,
        0x91ac, 0xffffffff, 0x00070006,
        0x91e8, 0xffffffff, 0x00000000,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000
};
535
/*
 * Clock-gating init sequence for Juniper. Same apparent {offset, mask,
 * value} row layout as the other *_mgcg_init tables. Unlike Cypress, the
 * 0x977c/0x3f80/0xa21x/0x4dx/0x30cc group is programmed AFTER the 0x91xx
 * sequence here rather than before it.
 */
static const u32 juniper_mgcg_init[] =
{
        0x802c, 0xffffffff, 0xc0000000,
        0x5448, 0xffffffff, 0x00000100,
        0x55e4, 0xffffffff, 0x00000100,
        0x160c, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0xc164, 0xffffffff, 0x00000100,
        0x8a18, 0xffffffff, 0x00000100,
        0x897c, 0xffffffff, 0x06000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9a60, 0xffffffff, 0x00000100,
        0x9868, 0xffffffff, 0x00000100,
        0x8d58, 0xffffffff, 0x00000100,
        0x9510, 0xffffffff, 0x00000100,
        0x949c, 0xffffffff, 0x00000100,
        0x9654, 0xffffffff, 0x00000100,
        0x9030, 0xffffffff, 0x00000100,
        0x9034, 0xffffffff, 0x00000100,
        0x9038, 0xffffffff, 0x00000100,
        0x903c, 0xffffffff, 0x00000100,
        0x9040, 0xffffffff, 0x00000100,
        0xa200, 0xffffffff, 0x00000100,
        0xa204, 0xffffffff, 0x00000100,
        0xa208, 0xffffffff, 0x00000100,
        0xa20c, 0xffffffff, 0x00000100,
        0x971c, 0xffffffff, 0x00000100,
        0xd0c0, 0xffffffff, 0xff000100,
        0x802c, 0xffffffff, 0x40000000,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x9178, 0xffffffff, 0x00070000,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x918c, 0xffffffff, 0x00010006,
        0x9190, 0xffffffff, 0x00090008,
        0x9194, 0xffffffff, 0x00070000,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91a8, 0xffffffff, 0x00010006,
        0x91ac, 0xffffffff, 0x00090008,
        0x91b0, 0xffffffff, 0x00070000,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91c4, 0xffffffff, 0x00010006,
        0x91c8, 0xffffffff, 0x00090008,
        0x91cc, 0xffffffff, 0x00070000,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91e0, 0xffffffff, 0x00010006,
        0x91e4, 0xffffffff, 0x00090008,
        0x91e8, 0xffffffff, 0x00000000,
        0x91ec, 0xffffffff, 0x00070000,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x9200, 0xffffffff, 0x00010006,
        0x9204, 0xffffffff, 0x00090008,
        0x9208, 0xffffffff, 0x00070000,
        0x920c, 0xffffffff, 0x00030002,
        0x9210, 0xffffffff, 0x00050004,
        0x921c, 0xffffffff, 0x00010006,
        0x9220, 0xffffffff, 0x00090008,
        0x9224, 0xffffffff, 0x00070000,
        0x9228, 0xffffffff, 0x00030002,
        0x922c, 0xffffffff, 0x00050004,
        0x9238, 0xffffffff, 0x00010006,
        0x923c, 0xffffffff, 0x00090008,
        0x9240, 0xffffffff, 0x00070000,
        0x9244, 0xffffffff, 0x00030002,
        0x9248, 0xffffffff, 0x00050004,
        0x9254, 0xffffffff, 0x00010006,
        0x9258, 0xffffffff, 0x00090008,
        0x925c, 0xffffffff, 0x00070000,
        0x9260, 0xffffffff, 0x00030002,
        0x9264, 0xffffffff, 0x00050004,
        0x9270, 0xffffffff, 0x00010006,
        0x9274, 0xffffffff, 0x00090008,
        0x9278, 0xffffffff, 0x00070000,
        0x927c, 0xffffffff, 0x00030002,
        0x9280, 0xffffffff, 0x00050004,
        0x928c, 0xffffffff, 0x00010006,
        0x9290, 0xffffffff, 0x00090008,
        0x9294, 0xffffffff, 0x00000000,
        0x929c, 0xffffffff, 0x00000001,
        0x802c, 0xffffffff, 0xc0000000,
        0x977c, 0xffffffff, 0x00000100,
        0x3f80, 0xffffffff, 0x00000100,
        0xa210, 0xffffffff, 0x00000100,
        0xa214, 0xffffffff, 0x00000100,
        0x4d8, 0xffffffff, 0x00000100,
        0x9784, 0xffffffff, 0x00000100,
        0x9698, 0xffffffff, 0x00000100,
        0x4d4, 0xffffffff, 0x00000200,
        0x30cc, 0xffffffff, 0x00000100,
        0x802c, 0xffffffff, 0xc0000000
};
633
/*
 * Golden register settings for SuperSumo (Trinity-class fusion APU per the
 * name — confirm). Same apparent {offset, mask, value} row layout as the
 * other golden-register tables.
 */
static const u32 supersumo_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5cc, 0xffffffff, 0x00000001,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x8c04, 0xffffffff, 0x40600060,
        0x8c08, 0xffffffff, 0x001c001c,
        0x8c20, 0xffffffff, 0x00800080,
        0x8c24, 0xffffffff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0xffffffff, 0x00001010,
        0x918c, 0xffffffff, 0x00010006,
        0x91a8, 0xffffffff, 0x00010006,
        0x91c4, 0xffffffff, 0x00010006,
        0x91e0, 0xffffffff, 0x00010006,
        0x9200, 0xffffffff, 0x00010006,
        0x9150, 0xffffffff, 0x6e944040,
        0x917c, 0xffffffff, 0x00030002,
        0x9180, 0xffffffff, 0x00050004,
        0x9198, 0xffffffff, 0x00030002,
        0x919c, 0xffffffff, 0x00050004,
        0x91b4, 0xffffffff, 0x00030002,
        0x91b8, 0xffffffff, 0x00050004,
        0x91d0, 0xffffffff, 0x00030002,
        0x91d4, 0xffffffff, 0x00050004,
        0x91f0, 0xffffffff, 0x00030002,
        0x91f4, 0xffffffff, 0x00050004,
        0x915c, 0xffffffff, 0x00010000,
        0x9160, 0xffffffff, 0x00030002,
        0x3f90, 0xffff0000, 0xff000000,
        0x9178, 0xffffffff, 0x00070000,
        0x9194, 0xffffffff, 0x00070000,
        0x91b0, 0xffffffff, 0x00070000,
        0x91cc, 0xffffffff, 0x00070000,
        0x91ec, 0xffffffff, 0x00070000,
        0x9148, 0xffff0000, 0xff000000,
        0x9190, 0xffffffff, 0x00090008,
        0x91ac, 0xffffffff, 0x00090008,
        0x91c8, 0xffffffff, 0x00090008,
        0x91e4, 0xffffffff, 0x00090008,
        0x9204, 0xffffffff, 0x00090008,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x929c, 0xffffffff, 0x00000001,
        0x8a18, 0xffffffff, 0x00000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x5644, 0xffffffff, 0x00000100,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8030, 0xffffffff, 0x0000100a,
        0x8a14, 0xffffffff, 0x00000007,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x8b10, 0xffffffff, 0x00000000,
        0x28a4c, 0x06000000, 0x06000000,
        0x4d8, 0xffffffff, 0x00000100,
        0x913c, 0xffff000f, 0x0100000a,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000010,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5e78, 0xffffffff, 0x001000f0,
        0xd02c, 0xffffffff, 0x08421000,
        0xa008, 0xffffffff, 0x00010000,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8cf0, 0x1fffffff, 0x08e00620,
        0x28350, 0xffffffff, 0x00000000,
        0x9508, 0xffffffff, 0x00000002
};
708
/*
 * Sumo-specific golden-register overrides, applied in addition to the
 * SuperSumo set (name-based inference — confirm at the apply site).
 * Same apparent {offset, mask, value} row layout.
 */
static const u32 sumo_golden_registers[] =
{
        0x900c, 0x00ffffff, 0x0017071f,
        0x8c18, 0xffffffff, 0x10101060,
        0x8c1c, 0xffffffff, 0x00001010,
        0x8c30, 0x0000000f, 0x00000005,
        0x9688, 0x0000000f, 0x00000007
};
717
/*
 * Golden register settings for Wrestler (Ontario/Zacate APU per the
 * codename — confirm). Mostly the SuperSumo values with a reduced 0x91xx
 * set and a few different values (0x8a14, 0x8cf0, trailing 0x900c/0x8c18
 * rows). Same apparent {offset, mask, value} row layout.
 */
static const u32 wrestler_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5cc, 0xffffffff, 0x00000001,
        0x7030, 0xffffffff, 0x00000011,
        0x7c30, 0xffffffff, 0x00000011,
        0x6104, 0x01000300, 0x00000000,
        0x5bc0, 0x00300000, 0x00000000,
        0x918c, 0xffffffff, 0x00010006,
        0x91a8, 0xffffffff, 0x00010006,
        0x9150, 0xffffffff, 0x6e944040,
        0x917c, 0xffffffff, 0x00030002,
        0x9198, 0xffffffff, 0x00030002,
        0x915c, 0xffffffff, 0x00010000,
        0x3f90, 0xffff0000, 0xff000000,
        0x9178, 0xffffffff, 0x00070000,
        0x9194, 0xffffffff, 0x00070000,
        0x9148, 0xffff0000, 0xff000000,
        0x9190, 0xffffffff, 0x00090008,
        0x91ac, 0xffffffff, 0x00090008,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0x929c, 0xffffffff, 0x00000001,
        0x8a18, 0xffffffff, 0x00000100,
        0x8b28, 0xffffffff, 0x00000100,
        0x9144, 0xffffffff, 0x00000100,
        0x9b7c, 0xffffffff, 0x00000000,
        0x8030, 0xffffffff, 0x0000100a,
        0x8a14, 0xffffffff, 0x00000001,
        0x8b24, 0xffffffff, 0x00ff0fff,
        0x8b10, 0xffffffff, 0x00000000,
        0x28a4c, 0x06000000, 0x06000000,
        0x4d8, 0xffffffff, 0x00000100,
        0x913c, 0xffff000f, 0x0100000a,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0xffffffff, 0x000000c2,
        0x88d4, 0xffffffff, 0x00000010,
        0x8974, 0xffffffff, 0x00000000,
        0xc78, 0x00000080, 0x00000080,
        0x5e78, 0xffffffff, 0x001000f0,
        0xd02c, 0xffffffff, 0x08421000,
        0xa008, 0xffffffff, 0x00010000,
        0x8d00, 0xffffffff, 0x100e4848,
        0x8d04, 0xffffffff, 0x00164745,
        0x8c00, 0xffffffff, 0xe4000003,
        0x8cf0, 0x1fffffff, 0x08e00410,
        0x28350, 0xffffffff, 0x00000000,
        0x9508, 0xffffffff, 0x00000002,
        0x900c, 0xffffffff, 0x0017071f,
        0x8c18, 0xffffffff, 0x10101060,
        0x8c1c, 0xffffffff, 0x00001010
};
770
/*
 * Golden register settings for Barts (Northern Islands). Note that unlike
 * the Evergreen tables, most rows here use partial masks rather than
 * 0xffffffff, i.e. only selected bit fields are rewritten. Same apparent
 * {offset, mask, value} row layout.
 */
static const u32 barts_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0x8f311ff1, 0x001000f0,
        0x3f90, 0xffff0000, 0xff000000,
        0x9148, 0xffff0000, 0xff000000,
        0x3f94, 0xffff0000, 0xff000000,
        0x914c, 0xffff0000, 0xff000000,
        0xc78, 0x00000080, 0x00000080,
        0xbd4, 0x70073777, 0x00010001,
        0xd02c, 0xbfffff1f, 0x08421000,
        0xd0b8, 0x03773777, 0x02011003,
        0x5bc0, 0x00200000, 0x50100000,
        0x98f8, 0x33773777, 0x02011003,
        0x98fc, 0xffffffff, 0x76543210,
        0x7030, 0x31000311, 0x00000011,
        0x2f48, 0x00000007, 0x02011003,
        0x6b28, 0x00000010, 0x00000012,
        0x7728, 0x00000010, 0x00000012,
        0x10328, 0x00000010, 0x00000012,
        0x10f28, 0x00000010, 0x00000012,
        0x11b28, 0x00000010, 0x00000012,
        0x12728, 0x00000010, 0x00000012,
        0x240c, 0x000007ff, 0x00000380,
        0x8a14, 0xf000001f, 0x00000007,
        0x8b24, 0x3fff3fff, 0x00ff0fff,
        0x8b10, 0x0000ff0f, 0x00000000,
        0x28a4c, 0x07ffffff, 0x06000000,
        0x10c, 0x00000001, 0x00010003,
        0xa02c, 0xffffffff, 0x0000009b,
        0x913c, 0x0000000f, 0x0100000a,
        0x8d00, 0xffff7f7f, 0x100e4848,
        0x8d04, 0x00ffffff, 0x00164745,
        0x8c00, 0xfffc0003, 0xe4000003,
        0x8c04, 0xf8ff00ff, 0x40600060,
        0x8c08, 0x00ff00ff, 0x001c001c,
        0x8cf0, 0x1fff1fff, 0x08e00620,
        0x8c20, 0x0fff0fff, 0x00800080,
        0x8c24, 0x0fff0fff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0x0000ffff, 0x00001010,
        0x28350, 0x00000f01, 0x00000000,
        0x9508, 0x3700001f, 0x00000002,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0x001f3ae3, 0x000000c2,
        0x88d4, 0x0000001f, 0x00000010,
        0x8974, 0xffffffff, 0x00000000
};
819
/* Golden register sequence for Turks (NI); consumed by
 * radeon_program_register_sequence() from evergreen_init_golden_registers().
 * Entries appear to be (offset, mask, value) triplets — TODO confirm against
 * radeon_program_register_sequence().
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
870
/* Golden register sequence for Caicos (NI); consumed by
 * radeon_program_register_sequence() from evergreen_init_golden_registers().
 * Entries appear to be (offset, mask, value) triplets — TODO confirm against
 * radeon_program_register_sequence().
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
921
922 static void evergreen_init_golden_registers(struct radeon_device *rdev)
923 {
924         switch (rdev->family) {
925         case CHIP_CYPRESS:
926         case CHIP_HEMLOCK:
927                 radeon_program_register_sequence(rdev,
928                                                  evergreen_golden_registers,
929                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
930                 radeon_program_register_sequence(rdev,
931                                                  evergreen_golden_registers2,
932                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
933                 radeon_program_register_sequence(rdev,
934                                                  cypress_mgcg_init,
935                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
936                 break;
937         case CHIP_JUNIPER:
938                 radeon_program_register_sequence(rdev,
939                                                  evergreen_golden_registers,
940                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
941                 radeon_program_register_sequence(rdev,
942                                                  evergreen_golden_registers2,
943                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
944                 radeon_program_register_sequence(rdev,
945                                                  juniper_mgcg_init,
946                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
947                 break;
948         case CHIP_REDWOOD:
949                 radeon_program_register_sequence(rdev,
950                                                  evergreen_golden_registers,
951                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
952                 radeon_program_register_sequence(rdev,
953                                                  evergreen_golden_registers2,
954                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
955                 radeon_program_register_sequence(rdev,
956                                                  redwood_mgcg_init,
957                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
958                 break;
959         case CHIP_CEDAR:
960                 radeon_program_register_sequence(rdev,
961                                                  cedar_golden_registers,
962                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
963                 radeon_program_register_sequence(rdev,
964                                                  evergreen_golden_registers2,
965                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
966                 radeon_program_register_sequence(rdev,
967                                                  cedar_mgcg_init,
968                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
969                 break;
970         case CHIP_PALM:
971                 radeon_program_register_sequence(rdev,
972                                                  wrestler_golden_registers,
973                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
974                 break;
975         case CHIP_SUMO:
976                 radeon_program_register_sequence(rdev,
977                                                  supersumo_golden_registers,
978                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
979                 break;
980         case CHIP_SUMO2:
981                 radeon_program_register_sequence(rdev,
982                                                  supersumo_golden_registers,
983                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
984                 radeon_program_register_sequence(rdev,
985                                                  sumo_golden_registers,
986                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
987                 break;
988         case CHIP_BARTS:
989                 radeon_program_register_sequence(rdev,
990                                                  barts_golden_registers,
991                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
992                 break;
993         case CHIP_TURKS:
994                 radeon_program_register_sequence(rdev,
995                                                  turks_golden_registers,
996                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
997                 break;
998         case CHIP_CAICOS:
999                 radeon_program_register_sequence(rdev,
1000                                                  caicos_golden_registers,
1001                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1002                 break;
1003         default:
1004                 break;
1005         }
1006 }
1007
1008 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1009                              unsigned *bankh, unsigned *mtaspect,
1010                              unsigned *tile_split)
1011 {
1012         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1013         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1014         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1015         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1016         switch (*bankw) {
1017         default:
1018         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1019         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1020         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1021         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1022         }
1023         switch (*bankh) {
1024         default:
1025         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1026         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1027         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1028         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1029         }
1030         switch (*mtaspect) {
1031         default:
1032         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1033         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1034         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1035         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1036         }
1037 }
1038
1039 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1040                               u32 cntl_reg, u32 status_reg)
1041 {
1042         int r, i;
1043         struct atom_clock_dividers dividers;
1044
1045         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1046                                            clock, false, &dividers);
1047         if (r)
1048                 return r;
1049
1050         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1051
1052         for (i = 0; i < 100; i++) {
1053                 if (RREG32(status_reg) & DCLK_STATUS)
1054                         break;
1055                 mdelay(10);
1056         }
1057         if (i == 100)
1058                 return -ETIMEDOUT;
1059
1060         return 0;
1061 }
1062
1063 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064 {
1065         int r = 0;
1066         u32 cg_scratch = RREG32(CG_SCRATCH1);
1067
1068         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1069         if (r)
1070                 goto done;
1071         cg_scratch &= 0xffff0000;
1072         cg_scratch |= vclk / 100; /* Mhz */
1073
1074         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1075         if (r)
1076                 goto done;
1077         cg_scratch &= 0x0000ffff;
1078         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1079
1080 done:
1081         WREG32(CG_SCRATCH1, cg_scratch);
1082
1083         return r;
1084 }
1085
/**
 * evergreen_set_uvd_clocks - reprogram the UPLL for UVD clocks (evergreen+)
 *
 * @rdev: radeon_device pointer
 * @vclk: target video clock (presumably 10 kHz units, cf. vclk / 100 -> MHz
 *        in sumo_set_uvd_clocks — TODO confirm)
 * @dclk: target decoder clock; if either clock is 0 the PLL is simply
 *        bypassed and put to sleep
 *
 * Switches VCLK/DCLK onto the bypass clock, puts the UPLL in bypass,
 * computes and programs new dividers, brings the PLL back up, and finally
 * switches the clock sources back to the PLL outputs.  The exact register
 * write ordering and delays below are part of the hardware programming
 * sequence and must not be reordered.  Returns 0 on success or a negative
 * error from divider calculation / PLL control requests.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects between two VCO ranges based on fb_div */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1174
1175 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1176 {
1177         int readrq;
1178         u16 v;
1179
1180         readrq = pcie_get_readrq(rdev->pdev);
1181         v = ffs(readrq) - 8;
1182         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1183          * to avoid hangs or perfomance issues
1184          */
1185         if ((v == 0) || (v == 6) || (v == 7))
1186                 pcie_set_readrq(rdev->pdev, 512);
1187 }
1188
/**
 * dce4_program_fmt - program the FMT block (dither/truncate) for an encoder
 *
 * @encoder: drm encoder whose crtc's FMT_BIT_DEPTH_CONTROL is programmed
 *
 * Chooses truncation or spatial/random dithering based on the monitor's
 * bpc and the connector's dither setting, then writes the result to the
 * crtc's FMT register.  Bails out early for LVDS/eDP (atom handles it),
 * analog DACs, and unknown bpc.  10 bpc and above need no FMT processing,
 * so those paths write tmp == 0 (FMT disabled).
 */
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private; /* NOTE: consumed by the WREG32() macro below */
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	/* unknown bpc: leave the FMT block as-is */
	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}
1244
1245 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1246 {
1247         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1248                 return true;
1249         else
1250                 return false;
1251 }
1252
1253 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1254 {
1255         u32 pos1, pos2;
1256
1257         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1258         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1259
1260         if (pos1 != pos2)
1261                 return true;
1262         else
1263                 return false;
1264 }
1265
1266 /**
1267  * dce4_wait_for_vblank - vblank wait asic callback.
1268  *
1269  * @rdev: radeon_device pointer
1270  * @crtc: crtc to wait for vblank on
1271  *
1272  * Wait for vblank on the requested crtc (evergreen+).
1273  */
1274 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1275 {
1276         unsigned i = 0;
1277
1278         if (crtc >= rdev->num_crtc)
1279                 return;
1280
1281         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1282                 return;
1283
1284         /* depending on when we hit vblank, we may be close to active; if so,
1285          * wait for another frame.
1286          */
1287         while (dce4_is_in_vblank(rdev, crtc)) {
1288                 if (i++ % 100 == 0) {
1289                         if (!dce4_is_counter_moving(rdev, crtc))
1290                                 break;
1291                 }
1292         }
1293
1294         while (!dce4_is_in_vblank(rdev, crtc)) {
1295                 if (i++ % 100 == 0) {
1296                         if (!dce4_is_counter_moving(rdev, crtc))
1297                                 break;
1298                 }
1299         }
1300 }
1301
1302 /**
1303  * radeon_irq_kms_pflip_irq_get - pre-pageflip callback.
1304  *
1305  * @rdev: radeon_device pointer
1306  * @crtc: crtc to prepare for pageflip on
1307  *
1308  * Pre-pageflip callback (evergreen+).
1309  * Enables the pageflip irq (vblank irq).
1310  */
1311 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1312 {
1313         /* enable the pflip int */
1314         radeon_irq_kms_pflip_irq_get(rdev, crtc);
1315 }
1316
1317 /**
1318  * evergreen_post_page_flip - pos-pageflip callback.
1319  *
1320  * @rdev: radeon_device pointer
1321  * @crtc: crtc to cleanup pageflip on
1322  *
1323  * Post-pageflip callback (evergreen+).
1324  * Disables the pageflip irq (vblank irq).
1325  */
1326 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1327 {
1328         /* disable the pflip int */
1329         radeon_irq_kms_pflip_irq_put(rdev, crtc);
1330 }
1331
1332 /**
1333  * evergreen_page_flip - pageflip callback.
1334  *
1335  * @rdev: radeon_device pointer
1336  * @crtc_id: crtc to cleanup pageflip on
1337  * @crtc_base: new address of the crtc (GPU MC address)
1338  *
1339  * Does the actual pageflip (evergreen+).
1340  * During vblank we take the crtc lock and wait for the update_pending
1341  * bit to go high, when it does, we release the lock, and allow the
1342  * double buffered update to take place.
1343  * Returns the current update pending status.
1344  */
1345 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1346 {
1347         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1348         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1349         int i;
1350
1351         /* Lock the graphics update lock */
1352         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1353         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1354
1355         /* update the scanout addresses */
1356         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1357                upper_32_bits(crtc_base));
1358         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1359                (u32)crtc_base);
1360
1361         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1362                upper_32_bits(crtc_base));
1363         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1364                (u32)crtc_base);
1365
1366         /* Wait for update_pending to go high. */
1367         for (i = 0; i < rdev->usec_timeout; i++) {
1368                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1369                         break;
1370                 udelay(1);
1371         }
1372         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1373
1374         /* Unlock the lock, so double-buffering can take place inside vblank */
1375         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1376         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1377
1378         /* Return current update_pending status: */
1379         return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1380 }
1381
/* get temperature in millidegrees
 *
 * Juniper reports a raw ADC value plus a 9-bit signed calibration offset;
 * other evergreen parts report a signed fixed-point value (half-degree
 * units) in CG_MULT_THERMAL_STATUS.  Either way the result is converted
 * to millidegrees C.
 */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* bit 8 set: the 9-bit offset is negative (two's complement) */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* clamp out-of-range readings, sign-extend negative ones */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* negative: set all bits above the low 9 */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* value is in half-degree units */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1420
1421 int sumo_get_temp(struct radeon_device *rdev)
1422 {
1423         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1424         int actual_temp = temp - 49;
1425
1426         return actual_temp * 1000;
1427 }
1428
1429 /**
1430  * sumo_pm_init_profile - Initialize power profiles callback.
1431  *
1432  * @rdev: radeon_device pointer
1433  *
1434  * Initialize the power states used in profile mode
1435  * (sumo, trinity, SI).
1436  * Used for profile mode only.
1437  */
1438 void sumo_pm_init_profile(struct radeon_device *rdev)
1439 {
1440         int idx;
1441
1442         /* default */
1443         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1444         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1445         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1446         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1447
1448         /* low,mid sh/mh */
1449         if (rdev->flags & RADEON_IS_MOBILITY)
1450                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1451         else
1452                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1453
1454         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1455         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1456         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1457         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1458
1459         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1460         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1461         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1462         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1463
1464         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1465         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1466         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1467         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1468
1469         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1470         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1471         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1472         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1473
1474         /* high sh/mh */
1475         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1476         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1477         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1478         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1479         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1480                 rdev->pm.power_state[idx].num_clock_modes - 1;
1481
1482         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1483         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1484         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1485         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1486                 rdev->pm.power_state[idx].num_clock_modes - 1;
1487 }
1488
1489 /**
1490  * btc_pm_init_profile - Initialize power profiles callback.
1491  *
1492  * @rdev: radeon_device pointer
1493  *
1494  * Initialize the power states used in profile mode
1495  * (BTC, cayman).
1496  * Used for profile mode only.
1497  */
1498 void btc_pm_init_profile(struct radeon_device *rdev)
1499 {
1500         int idx;
1501
1502         /* default */
1503         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1504         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1505         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1506         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1507         /* starting with BTC, there is one state that is used for both
1508          * MH and SH.  Difference is that we always use the high clock index for
1509          * mclk.
1510          */
1511         if (rdev->flags & RADEON_IS_MOBILITY)
1512                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1513         else
1514                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1515         /* low sh */
1516         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1517         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1518         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1519         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1520         /* mid sh */
1521         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1522         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1523         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1524         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1525         /* high sh */
1526         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1527         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1528         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1529         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1530         /* low mh */
1531         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1532         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1533         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1534         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1535         /* mid mh */
1536         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1537         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1538         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1539         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1540         /* high mh */
1541         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1542         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1543         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1544         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1545 }
1546
/**
 * evergreen_pm_misc - set additional pm hw parameters callback.
 *
 * @rdev: radeon_device pointer
 *
 * Set non-clock parameters associated with a power state
 * (voltage, etc.) (evergreen+).
 */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	/* only software-controlled voltage is programmed from here */
	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* skip the atom call if we are already at the requested vddc */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.  So for the MH profiles, redirect the voltage
		 * pointer to the high-MH clock mode before programming vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* skip the atom call if we are already at the requested vddci */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1594
1595 /**
1596  * evergreen_pm_prepare - pre-power state change callback.
1597  *
1598  * @rdev: radeon_device pointer
1599  *
1600  * Prepare for a power state change (evergreen+).
1601  */
1602 void evergreen_pm_prepare(struct radeon_device *rdev)
1603 {
1604         struct drm_device *ddev = rdev->ddev;
1605         struct drm_crtc *crtc;
1606         struct radeon_crtc *radeon_crtc;
1607         u32 tmp;
1608
1609         /* disable any active CRTCs */
1610         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1611                 radeon_crtc = to_radeon_crtc(crtc);
1612                 if (radeon_crtc->enabled) {
1613                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1614                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1615                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1616                 }
1617         }
1618 }
1619
1620 /**
1621  * evergreen_pm_finish - post-power state change callback.
1622  *
1623  * @rdev: radeon_device pointer
1624  *
1625  * Clean up after a power state change (evergreen+).
1626  */
1627 void evergreen_pm_finish(struct radeon_device *rdev)
1628 {
1629         struct drm_device *ddev = rdev->ddev;
1630         struct drm_crtc *crtc;
1631         struct radeon_crtc *radeon_crtc;
1632         u32 tmp;
1633
1634         /* enable any active CRTCs */
1635         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1636                 radeon_crtc = to_radeon_crtc(crtc);
1637                 if (radeon_crtc->enabled) {
1638                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1639                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1640                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1641                 }
1642         }
1643 }
1644
1645 /**
1646  * evergreen_hpd_sense - hpd sense callback.
1647  *
1648  * @rdev: radeon_device pointer
1649  * @hpd: hpd (hotplug detect) pin
1650  *
1651  * Checks if a digital monitor is connected (evergreen+).
1652  * Returns true if connected, false if not connected.
1653  */
1654 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1655 {
1656         bool connected = false;
1657
1658         switch (hpd) {
1659         case RADEON_HPD_1:
1660                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1661                         connected = true;
1662                 break;
1663         case RADEON_HPD_2:
1664                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1665                         connected = true;
1666                 break;
1667         case RADEON_HPD_3:
1668                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1669                         connected = true;
1670                 break;
1671         case RADEON_HPD_4:
1672                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1673                         connected = true;
1674                 break;
1675         case RADEON_HPD_5:
1676                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1677                         connected = true;
1678                 break;
1679         case RADEON_HPD_6:
1680                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1681                         connected = true;
1682                         break;
1683         default:
1684                 break;
1685         }
1686
1687         return connected;
1688 }
1689
1690 /**
1691  * evergreen_hpd_set_polarity - hpd set polarity callback.
1692  *
1693  * @rdev: radeon_device pointer
1694  * @hpd: hpd (hotplug detect) pin
1695  *
1696  * Set the polarity of the hpd pin (evergreen+).
1697  */
1698 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1699                                 enum radeon_hpd_id hpd)
1700 {
1701         u32 tmp;
1702         bool connected = evergreen_hpd_sense(rdev, hpd);
1703
1704         switch (hpd) {
1705         case RADEON_HPD_1:
1706                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1707                 if (connected)
1708                         tmp &= ~DC_HPDx_INT_POLARITY;
1709                 else
1710                         tmp |= DC_HPDx_INT_POLARITY;
1711                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1712                 break;
1713         case RADEON_HPD_2:
1714                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1715                 if (connected)
1716                         tmp &= ~DC_HPDx_INT_POLARITY;
1717                 else
1718                         tmp |= DC_HPDx_INT_POLARITY;
1719                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1720                 break;
1721         case RADEON_HPD_3:
1722                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1723                 if (connected)
1724                         tmp &= ~DC_HPDx_INT_POLARITY;
1725                 else
1726                         tmp |= DC_HPDx_INT_POLARITY;
1727                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1728                 break;
1729         case RADEON_HPD_4:
1730                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1731                 if (connected)
1732                         tmp &= ~DC_HPDx_INT_POLARITY;
1733                 else
1734                         tmp |= DC_HPDx_INT_POLARITY;
1735                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1736                 break;
1737         case RADEON_HPD_5:
1738                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1739                 if (connected)
1740                         tmp &= ~DC_HPDx_INT_POLARITY;
1741                 else
1742                         tmp |= DC_HPDx_INT_POLARITY;
1743                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1744                         break;
1745         case RADEON_HPD_6:
1746                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1747                 if (connected)
1748                         tmp &= ~DC_HPDx_INT_POLARITY;
1749                 else
1750                         tmp |= DC_HPDx_INT_POLARITY;
1751                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1752                 break;
1753         default:
1754                 break;
1755         }
1756 }
1757
1758 /**
1759  * evergreen_hpd_init - hpd setup callback.
1760  *
1761  * @rdev: radeon_device pointer
1762  *
1763  * Setup the hpd pins used by the card (evergreen+).
1764  * Enable the pin, set the polarity, and enable the hpd interrupts.
1765  */
1766 void evergreen_hpd_init(struct radeon_device *rdev)
1767 {
1768         struct drm_device *dev = rdev->ddev;
1769         struct drm_connector *connector;
1770         unsigned enabled = 0;
1771         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1772                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1773
1774         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1775                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1776
1777                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1778                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1779                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1780                          * aux dp channel on imac and help (but not completely fix)
1781                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1782                          * also avoid interrupt storms during dpms.
1783                          */
1784                         continue;
1785                 }
1786                 switch (radeon_connector->hpd.hpd) {
1787                 case RADEON_HPD_1:
1788                         WREG32(DC_HPD1_CONTROL, tmp);
1789                         break;
1790                 case RADEON_HPD_2:
1791                         WREG32(DC_HPD2_CONTROL, tmp);
1792                         break;
1793                 case RADEON_HPD_3:
1794                         WREG32(DC_HPD3_CONTROL, tmp);
1795                         break;
1796                 case RADEON_HPD_4:
1797                         WREG32(DC_HPD4_CONTROL, tmp);
1798                         break;
1799                 case RADEON_HPD_5:
1800                         WREG32(DC_HPD5_CONTROL, tmp);
1801                         break;
1802                 case RADEON_HPD_6:
1803                         WREG32(DC_HPD6_CONTROL, tmp);
1804                         break;
1805                 default:
1806                         break;
1807                 }
1808                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1809                 enabled |= 1 << radeon_connector->hpd.hpd;
1810         }
1811         radeon_irq_kms_enable_hpd(rdev, enabled);
1812 }
1813
1814 /**
1815  * evergreen_hpd_fini - hpd tear down callback.
1816  *
1817  * @rdev: radeon_device pointer
1818  *
1819  * Tear down the hpd pins used by the card (evergreen+).
1820  * Disable the hpd interrupts.
1821  */
1822 void evergreen_hpd_fini(struct radeon_device *rdev)
1823 {
1824         struct drm_device *dev = rdev->ddev;
1825         struct drm_connector *connector;
1826         unsigned disabled = 0;
1827
1828         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1829                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1830                 switch (radeon_connector->hpd.hpd) {
1831                 case RADEON_HPD_1:
1832                         WREG32(DC_HPD1_CONTROL, 0);
1833                         break;
1834                 case RADEON_HPD_2:
1835                         WREG32(DC_HPD2_CONTROL, 0);
1836                         break;
1837                 case RADEON_HPD_3:
1838                         WREG32(DC_HPD3_CONTROL, 0);
1839                         break;
1840                 case RADEON_HPD_4:
1841                         WREG32(DC_HPD4_CONTROL, 0);
1842                         break;
1843                 case RADEON_HPD_5:
1844                         WREG32(DC_HPD5_CONTROL, 0);
1845                         break;
1846                 case RADEON_HPD_6:
1847                         WREG32(DC_HPD6_CONTROL, 0);
1848                         break;
1849                 default:
1850                         break;
1851                 }
1852                 disabled |= 1 << radeon_connector->hpd.hpd;
1853         }
1854         radeon_irq_kms_disable_hpd(rdev, disabled);
1855 }
1856
1857 /* watermark setup */
1858
/* Pick a line buffer partition for @radeon_crtc and program it.
 * @mode is the crtc's mode (NULL/disabled means no lb is needed);
 * @other_mode is the mode on the paired crtc sharing the same lb.
 * Returns the number of lb entries allocated to this crtc (0 if the
 * controller is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also need the DMIF buffers allocated; poll until the
	 * hw acks the allocation (bounded by usec_timeout)
	 */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the chosen partition back to an lb size in entries;
	 * DCE5 line buffers are larger than DCE4 ones
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1949
1950 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1951 {
1952         u32 tmp = RREG32(MC_SHARED_CHMAP);
1953
1954         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1955         case 0:
1956         default:
1957                 return 1;
1958         case 1:
1959                 return 2;
1960         case 2:
1961                 return 4;
1962         case 3:
1963                 return 8;
1964         }
1965 }
1966
/* Inputs to the display watermark calculations below; filled in by
 * evergreen_program_watermarks() from the current mode and clocks.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1982
1983 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1984 {
1985         /* Calculate DRAM Bandwidth and the part allocated to display. */
1986         fixed20_12 dram_efficiency; /* 0.7 */
1987         fixed20_12 yclk, dram_channels, bandwidth;
1988         fixed20_12 a;
1989
1990         a.full = dfixed_const(1000);
1991         yclk.full = dfixed_const(wm->yclk);
1992         yclk.full = dfixed_div(yclk, a);
1993         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1994         a.full = dfixed_const(10);
1995         dram_efficiency.full = dfixed_const(7);
1996         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1997         bandwidth.full = dfixed_mul(dram_channels, yclk);
1998         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1999
2000         return dfixed_trunc(bandwidth);
2001 }
2002
2003 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2004 {
2005         /* Calculate DRAM Bandwidth and the part allocated to display. */
2006         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2007         fixed20_12 yclk, dram_channels, bandwidth;
2008         fixed20_12 a;
2009
2010         a.full = dfixed_const(1000);
2011         yclk.full = dfixed_const(wm->yclk);
2012         yclk.full = dfixed_div(yclk, a);
2013         dram_channels.full = dfixed_const(wm->dram_channels * 4);
2014         a.full = dfixed_const(10);
2015         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2016         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2017         bandwidth.full = dfixed_mul(dram_channels, yclk);
2018         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2019
2020         return dfixed_trunc(bandwidth);
2021 }
2022
2023 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2024 {
2025         /* Calculate the display Data return Bandwidth */
2026         fixed20_12 return_efficiency; /* 0.8 */
2027         fixed20_12 sclk, bandwidth;
2028         fixed20_12 a;
2029
2030         a.full = dfixed_const(1000);
2031         sclk.full = dfixed_const(wm->sclk);
2032         sclk.full = dfixed_div(sclk, a);
2033         a.full = dfixed_const(10);
2034         return_efficiency.full = dfixed_const(8);
2035         return_efficiency.full = dfixed_div(return_efficiency, a);
2036         a.full = dfixed_const(32);
2037         bandwidth.full = dfixed_mul(a, sclk);
2038         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2039
2040         return dfixed_trunc(bandwidth);
2041 }
2042
2043 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2044 {
2045         /* Calculate the DMIF Request Bandwidth */
2046         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2047         fixed20_12 disp_clk, bandwidth;
2048         fixed20_12 a;
2049
2050         a.full = dfixed_const(1000);
2051         disp_clk.full = dfixed_const(wm->disp_clk);
2052         disp_clk.full = dfixed_div(disp_clk, a);
2053         a.full = dfixed_const(10);
2054         disp_clk_request_efficiency.full = dfixed_const(8);
2055         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2056         a.full = dfixed_const(32);
2057         bandwidth.full = dfixed_mul(a, disp_clk);
2058         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2059
2060         return dfixed_trunc(bandwidth);
2061 }
2062
2063 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2064 {
2065         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2066         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2067         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2068         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2069
2070         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2071 }
2072
2073 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2074 {
2075         /* Calculate the display mode Average Bandwidth
2076          * DisplayMode should contain the source and destination dimensions,
2077          * timing, etc.
2078          */
2079         fixed20_12 bpp;
2080         fixed20_12 line_time;
2081         fixed20_12 src_width;
2082         fixed20_12 bandwidth;
2083         fixed20_12 a;
2084
2085         a.full = dfixed_const(1000);
2086         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2087         line_time.full = dfixed_div(line_time, a);
2088         bpp.full = dfixed_const(wm->bytes_per_pixel);
2089         src_width.full = dfixed_const(wm->src_width);
2090         bandwidth.full = dfixed_mul(src_width, bpp);
2091         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2092         bandwidth.full = dfixed_div(bandwidth, line_time);
2093
2094         return dfixed_trunc(bandwidth);
2095 }
2096
/* Compute the latency watermark in ns for the mode described by @wm:
 * the worst-case memory latency plus any extra time needed to refill
 * the line buffer if it fills slower than the mode drains it.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	/* NOTE(review): the divisions below run before the num_heads == 0
	 * early-out; presumably available_bandwidth is never 0 here — confirm.
	 */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many scaler taps, or interlace with vsc >= 2
	 * needs up to 4 source lines per output line; otherwise 2
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill bandwidth: available bandwidth shared across heads ... */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* ... capped by what disp_clk can push (bytes_per_pixel per clock) */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the worst-case number of source lines at that rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the lb refills faster than the active period drains it, the
	 * base latency is enough; otherwise pad by the shortfall
	 */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2149
2150 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2151 {
2152         if (evergreen_average_bandwidth(wm) <=
2153             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2154                 return true;
2155         else
2156                 return false;
2157 };
2158
2159 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2160 {
2161         if (evergreen_average_bandwidth(wm) <=
2162             (evergreen_available_bandwidth(wm) / wm->num_heads))
2163                 return true;
2164         else
2165                 return false;
2166 };
2167
2168 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2169 {
2170         u32 lb_partitions = wm->lb_size / wm->src_width;
2171         u32 line_time = wm->active_time + wm->blank_time;
2172         u32 latency_tolerant_lines;
2173         u32 latency_hiding;
2174         fixed20_12 a;
2175
2176         a.full = dfixed_const(1);
2177         if (wm->vsc.full > a.full)
2178                 latency_tolerant_lines = 1;
2179         else {
2180                 if (lb_partitions <= (wm->vtaps + 1))
2181                         latency_tolerant_lines = 1;
2182                 else
2183                         latency_tolerant_lines = 2;
2184         }
2185
2186         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2187
2188         if (evergreen_latency_watermark(wm) <= latency_hiding)
2189                 return true;
2190         else
2191                 return false;
2192 }
2193
2194 static void evergreen_program_watermarks(struct radeon_device *rdev,
2195                                          struct radeon_crtc *radeon_crtc,
2196                                          u32 lb_size, u32 num_heads)
2197 {
2198         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2199         struct evergreen_wm_params wm_low, wm_high;
2200         u32 dram_channels;
2201         u32 pixel_period;
2202         u32 line_time = 0;
2203         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2204         u32 priority_a_mark = 0, priority_b_mark = 0;
2205         u32 priority_a_cnt = PRIORITY_OFF;
2206         u32 priority_b_cnt = PRIORITY_OFF;
2207         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2208         u32 tmp, arb_control3;
2209         fixed20_12 a, b, c;
2210
2211         if (radeon_crtc->base.enabled && num_heads && mode) {
2212                 pixel_period = 1000000 / (u32)mode->clock;
2213                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2214                 priority_a_cnt = 0;
2215                 priority_b_cnt = 0;
2216                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2217
2218                 /* watermark for high clocks */
2219                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2220                         wm_high.yclk =
2221                                 radeon_dpm_get_mclk(rdev, false) * 10;
2222                         wm_high.sclk =
2223                                 radeon_dpm_get_sclk(rdev, false) * 10;
2224                 } else {
2225                         wm_high.yclk = rdev->pm.current_mclk * 10;
2226                         wm_high.sclk = rdev->pm.current_sclk * 10;
2227                 }
2228
2229                 wm_high.disp_clk = mode->clock;
2230                 wm_high.src_width = mode->crtc_hdisplay;
2231                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2232                 wm_high.blank_time = line_time - wm_high.active_time;
2233                 wm_high.interlaced = false;
2234                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2235                         wm_high.interlaced = true;
2236                 wm_high.vsc = radeon_crtc->vsc;
2237                 wm_high.vtaps = 1;
2238                 if (radeon_crtc->rmx_type != RMX_OFF)
2239                         wm_high.vtaps = 2;
2240                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2241                 wm_high.lb_size = lb_size;
2242                 wm_high.dram_channels = dram_channels;
2243                 wm_high.num_heads = num_heads;
2244
2245                 /* watermark for low clocks */
2246                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2247                         wm_low.yclk =
2248                                 radeon_dpm_get_mclk(rdev, true) * 10;
2249                         wm_low.sclk =
2250                                 radeon_dpm_get_sclk(rdev, true) * 10;
2251                 } else {
2252                         wm_low.yclk = rdev->pm.current_mclk * 10;
2253                         wm_low.sclk = rdev->pm.current_sclk * 10;
2254                 }
2255
2256                 wm_low.disp_clk = mode->clock;
2257                 wm_low.src_width = mode->crtc_hdisplay;
2258                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2259                 wm_low.blank_time = line_time - wm_low.active_time;
2260                 wm_low.interlaced = false;
2261                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2262                         wm_low.interlaced = true;
2263                 wm_low.vsc = radeon_crtc->vsc;
2264                 wm_low.vtaps = 1;
2265                 if (radeon_crtc->rmx_type != RMX_OFF)
2266                         wm_low.vtaps = 2;
2267                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2268                 wm_low.lb_size = lb_size;
2269                 wm_low.dram_channels = dram_channels;
2270                 wm_low.num_heads = num_heads;
2271
2272                 /* set for high clocks */
2273                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2274                 /* set for low clocks */
2275                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2276
2277                 /* possibly force display priority to high */
2278                 /* should really do this at mode validation time... */
2279                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2280                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2281                     !evergreen_check_latency_hiding(&wm_high) ||
2282                     (rdev->disp_priority == 2)) {
2283                         DRM_DEBUG_KMS("force priority a to high\n");
2284                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2285                 }
2286                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2287                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2288                     !evergreen_check_latency_hiding(&wm_low) ||
2289                     (rdev->disp_priority == 2)) {
2290                         DRM_DEBUG_KMS("force priority b to high\n");
2291                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2292                 }
2293
2294                 a.full = dfixed_const(1000);
2295                 b.full = dfixed_const(mode->clock);
2296                 b.full = dfixed_div(b, a);
2297                 c.full = dfixed_const(latency_watermark_a);
2298                 c.full = dfixed_mul(c, b);
2299                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2300                 c.full = dfixed_div(c, a);
2301                 a.full = dfixed_const(16);
2302                 c.full = dfixed_div(c, a);
2303                 priority_a_mark = dfixed_trunc(c);
2304                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2305
2306                 a.full = dfixed_const(1000);
2307                 b.full = dfixed_const(mode->clock);
2308                 b.full = dfixed_div(b, a);
2309                 c.full = dfixed_const(latency_watermark_b);
2310                 c.full = dfixed_mul(c, b);
2311                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2312                 c.full = dfixed_div(c, a);
2313                 a.full = dfixed_const(16);
2314                 c.full = dfixed_div(c, a);
2315                 priority_b_mark = dfixed_trunc(c);
2316                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2317         }
2318
2319         /* select wm A */
2320         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2321         tmp = arb_control3;
2322         tmp &= ~LATENCY_WATERMARK_MASK(3);
2323         tmp |= LATENCY_WATERMARK_MASK(1);
2324         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2325         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2326                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2327                 LATENCY_HIGH_WATERMARK(line_time)));
2328         /* select wm B */
2329         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2330         tmp &= ~LATENCY_WATERMARK_MASK(3);
2331         tmp |= LATENCY_WATERMARK_MASK(2);
2332         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2333         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2334                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2335                 LATENCY_HIGH_WATERMARK(line_time)));
2336         /* restore original selection */
2337         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2338
2339         /* write the priority marks */
2340         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2341         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2342
2343         /* save values for DPM */
2344         radeon_crtc->line_time = line_time;
2345         radeon_crtc->wm_high = latency_watermark_a;
2346         radeon_crtc->wm_low = latency_watermark_b;
2347 }
2348
2349 /**
2350  * evergreen_bandwidth_update - update display watermarks callback.
2351  *
2352  * @rdev: radeon_device pointer
2353  *
2354  * Update the display watermarks based on the requested mode(s)
2355  * (evergreen+).
2356  */
2357 void evergreen_bandwidth_update(struct radeon_device *rdev)
2358 {
2359         struct drm_display_mode *mode0 = NULL;
2360         struct drm_display_mode *mode1 = NULL;
2361         u32 num_heads = 0, lb_size;
2362         int i;
2363
2364         radeon_update_display_priority(rdev);
2365
2366         for (i = 0; i < rdev->num_crtc; i++) {
2367                 if (rdev->mode_info.crtcs[i]->base.enabled)
2368                         num_heads++;
2369         }
2370         for (i = 0; i < rdev->num_crtc; i += 2) {
2371                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2372                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2373                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2374                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2375                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2376                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2377         }
2378 }
2379
2380 /**
2381  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2382  *
2383  * @rdev: radeon_device pointer
2384  *
2385  * Wait for the MC (memory controller) to be idle.
2386  * (evergreen+).
2387  * Returns 0 if the MC is idle, -1 if not.
2388  */
2389 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2390 {
2391         unsigned i;
2392         u32 tmp;
2393
2394         for (i = 0; i < rdev->usec_timeout; i++) {
2395                 /* read MC_STATUS */
2396                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2397                 if (!tmp)
2398                         return 0;
2399                 udelay(1);
2400         }
2401         return -1;
2402 }
2403
2404 /*
2405  * GART
2406  */
2407 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2408 {
2409         unsigned i;
2410         u32 tmp;
2411
2412         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2413
2414         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2415         for (i = 0; i < rdev->usec_timeout; i++) {
2416                 /* read MC_STATUS */
2417                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2418                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2419                 if (tmp == 2) {
2420                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2421                         return;
2422                 }
2423                 if (tmp) {
2424                         return;
2425                 }
2426                 udelay(1);
2427         }
2428 }
2429
/*
 * Enable the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLB registers, point VM context 0 at the GTT aperture
 * and page table, then flush the TLB.
 *
 * Returns 0 on success, -EINVAL if no page table object exists, or the
 * error from pinning the table.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* IGP (Fusion) parts use the FUS_ variants of the MD TLB regs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* only these discrete families have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* VM context 0 covers the GTT range; page table lives in VRAM */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults on unmapped pages are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2488
/*
 * Disable the PCIE GART: turn off both VM contexts, reprogram the L2
 * cache without the enable bit, leave the L1 TLBs disabled, and unpin
 * the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache (ENABLE_L2_CACHE deliberately not set) */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (ENABLE_L1_TLB not set, so TLBs stay off) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2513
/*
 * Tear down the PCIE GART: disable translation, free the VRAM page
 * table object, then release the GART bookkeeping structures.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2520
2521
/*
 * Configure the MC for AGP operation: enable the VM L2 cache and L1
 * TLBs but leave both VM contexts disabled, so no page-table
 * translation takes place.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* both VM contexts off: no GART translation in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2547
/*
 * Quiesce all MC (memory controller) clients before reprogramming the
 * MC: save VGA state, blank/disable every active display controller,
 * black out the MC, and lock the double-buffered display registers.
 * State needed by evergreen_mc_resume() is recorded in @save.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		/* save VGA state for evergreen_mc_resume() */
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via the BLANK_DATA_EN bit */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame so the blank takes effect */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* master was force-disabled above, so record the crtc as
			 * disabled so evergreen_mc_resume() won't try to restore it */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2633
/*
 * Undo evergreen_mc_stop(): repoint all scanout surfaces at the (new)
 * start of VRAM, unlock the double-buffered display registers, lift
 * the MC blackout, re-enable/unblank the display controllers that were
 * active, and restore the saved VGA state from @save.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the pending surface update is latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via BLANK_DATA_EN */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2721
/*
 * Program the memory controller's view of VRAM and the system/AGP
 * apertures.  Display and CPU clients are stopped around the update
 * (evergreen_mc_stop/evergreen_mc_resume) since the FB location
 * changes under them.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and the AGP range */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start in low 16 bits, end in high 16, 24-bit shifted */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2798
2799 /*
2800  * CP.
2801  */
/*
 * Schedule an indirect buffer on the gfx ring: emit the mode-control
 * packet, record the read pointer past this submission (via the
 * rptr_save scratch register or a writeback MEM_WRITE), then emit the
 * INDIRECT_BUFFER packet itself.  The packet dword order below is the
 * hardware command format — do not reorder.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for the MEM_WRITE packet + 4 for the IB packet */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2835
2836
2837 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2838 {
2839         const __be32 *fw_data;
2840         int i;
2841
2842         if (!rdev->me_fw || !rdev->pfp_fw)
2843                 return -EINVAL;
2844
2845         r700_cp_stop(rdev);
2846         WREG32(CP_RB_CNTL,
2847 #ifdef __BIG_ENDIAN
2848                BUF_SWAP_32BIT |
2849 #endif
2850                RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2851
2852         fw_data = (const __be32 *)rdev->pfp_fw->data;
2853         WREG32(CP_PFP_UCODE_ADDR, 0);
2854         for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2855                 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2856         WREG32(CP_PFP_UCODE_ADDR, 0);
2857
2858         fw_data = (const __be32 *)rdev->me_fw->data;
2859         WREG32(CP_ME_RAM_WADDR, 0);
2860         for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2861                 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2862
2863         WREG32(CP_PFP_UCODE_ADDR, 0);
2864         WREG32(CP_ME_RAM_WADDR, 0);
2865         WREG32(CP_ME_RAM_RADDR, 0);
2866         return 0;
2867 }
2868
/*
 * Bring up the CP: emit the ME_INITIALIZE packet, start the micro
 * engine, then emit the clear-state preamble (default register state,
 * CLEAR_STATE, vertex base and const clears).  The dword sequences are
 * hardware PM4 packets — do not reorder.
 *
 * Returns 0 on success or the error from locking the ring.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	/* ME_INITIALIZE: 5 payload dwords */
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* NOTE(review): presumably this releases the ME from halt, matching
	 * the r600-family CP bring-up sequence — confirm against register spec */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* 19 = fixed dwords below, plus the default state payload */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2934
/**
 * evergreen_cp_resume - reset and start the CP gfx ring
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (together with the PA/SH/VGT/SPI/SX blocks that
 * must be reset along with it), programs the ring buffer size, read and
 * write pointers, writeback addresses and ring base, then starts the CP
 * via evergreen_cp_start() and runs a ring test.
 *
 * Returns 0 on success, or the (negative) ring-test error code on
 * failure, in which case the ring is marked not ready.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write before the delay */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in dwords/8) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers.
	 * RB_RPTR_WR_ENA is set temporarily so CP_RB_RPTR_WR takes effect. */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: CP must not update the rptr copy in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* drop RB_RPTR_WR_ENA again by rewriting the saved CNTL value */
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3003
3004 /*
3005  * Core functions
3006  */
3007 static void evergreen_gpu_init(struct radeon_device *rdev)
3008 {
3009         u32 gb_addr_config;
3010         u32 mc_shared_chmap, mc_arb_ramcfg;
3011         u32 sx_debug_1;
3012         u32 smx_dc_ctl0;
3013         u32 sq_config;
3014         u32 sq_lds_resource_mgmt;
3015         u32 sq_gpr_resource_mgmt_1;
3016         u32 sq_gpr_resource_mgmt_2;
3017         u32 sq_gpr_resource_mgmt_3;
3018         u32 sq_thread_resource_mgmt;
3019         u32 sq_thread_resource_mgmt_2;
3020         u32 sq_stack_resource_mgmt_1;
3021         u32 sq_stack_resource_mgmt_2;
3022         u32 sq_stack_resource_mgmt_3;
3023         u32 vgt_cache_invalidation;
3024         u32 hdp_host_path_cntl, tmp;
3025         u32 disabled_rb_mask;
3026         int i, j, num_shader_engines, ps_thread_count;
3027
3028         switch (rdev->family) {
3029         case CHIP_CYPRESS:
3030         case CHIP_HEMLOCK:
3031                 rdev->config.evergreen.num_ses = 2;
3032                 rdev->config.evergreen.max_pipes = 4;
3033                 rdev->config.evergreen.max_tile_pipes = 8;
3034                 rdev->config.evergreen.max_simds = 10;
3035                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3036                 rdev->config.evergreen.max_gprs = 256;
3037                 rdev->config.evergreen.max_threads = 248;
3038                 rdev->config.evergreen.max_gs_threads = 32;
3039                 rdev->config.evergreen.max_stack_entries = 512;
3040                 rdev->config.evergreen.sx_num_of_sets = 4;
3041                 rdev->config.evergreen.sx_max_export_size = 256;
3042                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3043                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3044                 rdev->config.evergreen.max_hw_contexts = 8;
3045                 rdev->config.evergreen.sq_num_cf_insts = 2;
3046
3047                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3048                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3049                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3050                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3051                 break;
3052         case CHIP_JUNIPER:
3053                 rdev->config.evergreen.num_ses = 1;
3054                 rdev->config.evergreen.max_pipes = 4;
3055                 rdev->config.evergreen.max_tile_pipes = 4;
3056                 rdev->config.evergreen.max_simds = 10;
3057                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3058                 rdev->config.evergreen.max_gprs = 256;
3059                 rdev->config.evergreen.max_threads = 248;
3060                 rdev->config.evergreen.max_gs_threads = 32;
3061                 rdev->config.evergreen.max_stack_entries = 512;
3062                 rdev->config.evergreen.sx_num_of_sets = 4;
3063                 rdev->config.evergreen.sx_max_export_size = 256;
3064                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3065                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3066                 rdev->config.evergreen.max_hw_contexts = 8;
3067                 rdev->config.evergreen.sq_num_cf_insts = 2;
3068
3069                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3070                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3071                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3072                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3073                 break;
3074         case CHIP_REDWOOD:
3075                 rdev->config.evergreen.num_ses = 1;
3076                 rdev->config.evergreen.max_pipes = 4;
3077                 rdev->config.evergreen.max_tile_pipes = 4;
3078                 rdev->config.evergreen.max_simds = 5;
3079                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3080                 rdev->config.evergreen.max_gprs = 256;
3081                 rdev->config.evergreen.max_threads = 248;
3082                 rdev->config.evergreen.max_gs_threads = 32;
3083                 rdev->config.evergreen.max_stack_entries = 256;
3084                 rdev->config.evergreen.sx_num_of_sets = 4;
3085                 rdev->config.evergreen.sx_max_export_size = 256;
3086                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3087                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3088                 rdev->config.evergreen.max_hw_contexts = 8;
3089                 rdev->config.evergreen.sq_num_cf_insts = 2;
3090
3091                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3092                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3093                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3094                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3095                 break;
3096         case CHIP_CEDAR:
3097         default:
3098                 rdev->config.evergreen.num_ses = 1;
3099                 rdev->config.evergreen.max_pipes = 2;
3100                 rdev->config.evergreen.max_tile_pipes = 2;
3101                 rdev->config.evergreen.max_simds = 2;
3102                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3103                 rdev->config.evergreen.max_gprs = 256;
3104                 rdev->config.evergreen.max_threads = 192;
3105                 rdev->config.evergreen.max_gs_threads = 16;
3106                 rdev->config.evergreen.max_stack_entries = 256;
3107                 rdev->config.evergreen.sx_num_of_sets = 4;
3108                 rdev->config.evergreen.sx_max_export_size = 128;
3109                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3110                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3111                 rdev->config.evergreen.max_hw_contexts = 4;
3112                 rdev->config.evergreen.sq_num_cf_insts = 1;
3113
3114                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3115                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3116                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3117                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3118                 break;
3119         case CHIP_PALM:
3120                 rdev->config.evergreen.num_ses = 1;
3121                 rdev->config.evergreen.max_pipes = 2;
3122                 rdev->config.evergreen.max_tile_pipes = 2;
3123                 rdev->config.evergreen.max_simds = 2;
3124                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3125                 rdev->config.evergreen.max_gprs = 256;
3126                 rdev->config.evergreen.max_threads = 192;
3127                 rdev->config.evergreen.max_gs_threads = 16;
3128                 rdev->config.evergreen.max_stack_entries = 256;
3129                 rdev->config.evergreen.sx_num_of_sets = 4;
3130                 rdev->config.evergreen.sx_max_export_size = 128;
3131                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3132                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3133                 rdev->config.evergreen.max_hw_contexts = 4;
3134                 rdev->config.evergreen.sq_num_cf_insts = 1;
3135
3136                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3137                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3138                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3139                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3140                 break;
3141         case CHIP_SUMO:
3142                 rdev->config.evergreen.num_ses = 1;
3143                 rdev->config.evergreen.max_pipes = 4;
3144                 rdev->config.evergreen.max_tile_pipes = 4;
3145                 if (rdev->pdev->device == 0x9648)
3146                         rdev->config.evergreen.max_simds = 3;
3147                 else if ((rdev->pdev->device == 0x9647) ||
3148                          (rdev->pdev->device == 0x964a))
3149                         rdev->config.evergreen.max_simds = 4;
3150                 else
3151                         rdev->config.evergreen.max_simds = 5;
3152                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3153                 rdev->config.evergreen.max_gprs = 256;
3154                 rdev->config.evergreen.max_threads = 248;
3155                 rdev->config.evergreen.max_gs_threads = 32;
3156                 rdev->config.evergreen.max_stack_entries = 256;
3157                 rdev->config.evergreen.sx_num_of_sets = 4;
3158                 rdev->config.evergreen.sx_max_export_size = 256;
3159                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3160                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3161                 rdev->config.evergreen.max_hw_contexts = 8;
3162                 rdev->config.evergreen.sq_num_cf_insts = 2;
3163
3164                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3165                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3166                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3167                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3168                 break;
3169         case CHIP_SUMO2:
3170                 rdev->config.evergreen.num_ses = 1;
3171                 rdev->config.evergreen.max_pipes = 4;
3172                 rdev->config.evergreen.max_tile_pipes = 4;
3173                 rdev->config.evergreen.max_simds = 2;
3174                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3175                 rdev->config.evergreen.max_gprs = 256;
3176                 rdev->config.evergreen.max_threads = 248;
3177                 rdev->config.evergreen.max_gs_threads = 32;
3178                 rdev->config.evergreen.max_stack_entries = 512;
3179                 rdev->config.evergreen.sx_num_of_sets = 4;
3180                 rdev->config.evergreen.sx_max_export_size = 256;
3181                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3182                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3183                 rdev->config.evergreen.max_hw_contexts = 4;
3184                 rdev->config.evergreen.sq_num_cf_insts = 2;
3185
3186                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3187                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3188                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3189                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3190                 break;
3191         case CHIP_BARTS:
3192                 rdev->config.evergreen.num_ses = 2;
3193                 rdev->config.evergreen.max_pipes = 4;
3194                 rdev->config.evergreen.max_tile_pipes = 8;
3195                 rdev->config.evergreen.max_simds = 7;
3196                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3197                 rdev->config.evergreen.max_gprs = 256;
3198                 rdev->config.evergreen.max_threads = 248;
3199                 rdev->config.evergreen.max_gs_threads = 32;
3200                 rdev->config.evergreen.max_stack_entries = 512;
3201                 rdev->config.evergreen.sx_num_of_sets = 4;
3202                 rdev->config.evergreen.sx_max_export_size = 256;
3203                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3204                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3205                 rdev->config.evergreen.max_hw_contexts = 8;
3206                 rdev->config.evergreen.sq_num_cf_insts = 2;
3207
3208                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3209                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3210                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3211                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3212                 break;
3213         case CHIP_TURKS:
3214                 rdev->config.evergreen.num_ses = 1;
3215                 rdev->config.evergreen.max_pipes = 4;
3216                 rdev->config.evergreen.max_tile_pipes = 4;
3217                 rdev->config.evergreen.max_simds = 6;
3218                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3219                 rdev->config.evergreen.max_gprs = 256;
3220                 rdev->config.evergreen.max_threads = 248;
3221                 rdev->config.evergreen.max_gs_threads = 32;
3222                 rdev->config.evergreen.max_stack_entries = 256;
3223                 rdev->config.evergreen.sx_num_of_sets = 4;
3224                 rdev->config.evergreen.sx_max_export_size = 256;
3225                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3226                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3227                 rdev->config.evergreen.max_hw_contexts = 8;
3228                 rdev->config.evergreen.sq_num_cf_insts = 2;
3229
3230                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3231                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3232                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3233                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3234                 break;
3235         case CHIP_CAICOS:
3236                 rdev->config.evergreen.num_ses = 1;
3237                 rdev->config.evergreen.max_pipes = 2;
3238                 rdev->config.evergreen.max_tile_pipes = 2;
3239                 rdev->config.evergreen.max_simds = 2;
3240                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3241                 rdev->config.evergreen.max_gprs = 256;
3242                 rdev->config.evergreen.max_threads = 192;
3243                 rdev->config.evergreen.max_gs_threads = 16;
3244                 rdev->config.evergreen.max_stack_entries = 256;
3245                 rdev->config.evergreen.sx_num_of_sets = 4;
3246                 rdev->config.evergreen.sx_max_export_size = 128;
3247                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3248                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3249                 rdev->config.evergreen.max_hw_contexts = 4;
3250                 rdev->config.evergreen.sq_num_cf_insts = 1;
3251
3252                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3253                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3254                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3255                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3256                 break;
3257         }
3258
3259         /* Initialize HDP */
3260         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3261                 WREG32((0x2c14 + j), 0x00000000);
3262                 WREG32((0x2c18 + j), 0x00000000);
3263                 WREG32((0x2c1c + j), 0x00000000);
3264                 WREG32((0x2c20 + j), 0x00000000);
3265                 WREG32((0x2c24 + j), 0x00000000);
3266         }
3267
3268         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3269
3270         evergreen_fix_pci_max_read_req_size(rdev);
3271
3272         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3273         if ((rdev->family == CHIP_PALM) ||
3274             (rdev->family == CHIP_SUMO) ||
3275             (rdev->family == CHIP_SUMO2))
3276                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3277         else
3278                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3279
3280         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3281          * not have bank info, so create a custom tiling dword.
3282          * bits 3:0   num_pipes
3283          * bits 7:4   num_banks
3284          * bits 11:8  group_size
3285          * bits 15:12 row_size
3286          */
3287         rdev->config.evergreen.tile_config = 0;
3288         switch (rdev->config.evergreen.max_tile_pipes) {
3289         case 1:
3290         default:
3291                 rdev->config.evergreen.tile_config |= (0 << 0);
3292                 break;
3293         case 2:
3294                 rdev->config.evergreen.tile_config |= (1 << 0);
3295                 break;
3296         case 4:
3297                 rdev->config.evergreen.tile_config |= (2 << 0);
3298                 break;
3299         case 8:
3300                 rdev->config.evergreen.tile_config |= (3 << 0);
3301                 break;
3302         }
3303         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3304         if (rdev->flags & RADEON_IS_IGP)
3305                 rdev->config.evergreen.tile_config |= 1 << 4;
3306         else {
3307                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3308                 case 0: /* four banks */
3309                         rdev->config.evergreen.tile_config |= 0 << 4;
3310                         break;
3311                 case 1: /* eight banks */
3312                         rdev->config.evergreen.tile_config |= 1 << 4;
3313                         break;
3314                 case 2: /* sixteen banks */
3315                 default:
3316                         rdev->config.evergreen.tile_config |= 2 << 4;
3317                         break;
3318                 }
3319         }
3320         rdev->config.evergreen.tile_config |= 0 << 8;
3321         rdev->config.evergreen.tile_config |=
3322                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3323
3324         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3325
3326         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3327                 u32 efuse_straps_4;
3328                 u32 efuse_straps_3;
3329
3330                 efuse_straps_4 = RREG32_RCU(0x204);
3331                 efuse_straps_3 = RREG32_RCU(0x203);
3332                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3333                       ((efuse_straps_3 & 0xf0000000) >> 28));
3334         } else {
3335                 tmp = 0;
3336                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3337                         u32 rb_disable_bitmap;
3338
3339                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3340                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3341                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3342                         tmp <<= 4;
3343                         tmp |= rb_disable_bitmap;
3344                 }
3345         }
3346         /* enabled rb are just the one not disabled :) */
3347         disabled_rb_mask = tmp;
3348         tmp = 0;
3349         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3350                 tmp |= (1 << i);
3351         /* if all the backends are disabled, fix it up here */
3352         if ((disabled_rb_mask & tmp) == tmp) {
3353                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3354                         disabled_rb_mask &= ~(1 << i);
3355         }
3356
3357         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3358         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3359
3360         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3361         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3362         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3363         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3364         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3365         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3366         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3367
3368         if ((rdev->config.evergreen.max_backends == 1) &&
3369             (rdev->flags & RADEON_IS_IGP)) {
3370                 if ((disabled_rb_mask & 3) == 1) {
3371                         /* RB0 disabled, RB1 enabled */
3372                         tmp = 0x11111111;
3373                 } else {
3374                         /* RB1 disabled, RB0 enabled */
3375                         tmp = 0x00000000;
3376                 }
3377         } else {
3378                 tmp = gb_addr_config & NUM_PIPES_MASK;
3379                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3380                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3381         }
3382         WREG32(GB_BACKEND_MAP, tmp);
3383
3384         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3385         WREG32(CGTS_TCC_DISABLE, 0);
3386         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3387         WREG32(CGTS_USER_TCC_DISABLE, 0);
3388
3389         /* set HW defaults for 3D engine */
3390         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3391                                      ROQ_IB2_START(0x2b)));
3392
3393         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3394
3395         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3396                              SYNC_GRADIENT |
3397                              SYNC_WALKER |
3398                              SYNC_ALIGNER));
3399
3400         sx_debug_1 = RREG32(SX_DEBUG_1);
3401         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3402         WREG32(SX_DEBUG_1, sx_debug_1);
3403
3404
3405         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3406         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3407         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3408         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3409
3410         if (rdev->family <= CHIP_SUMO2)
3411                 WREG32(SMX_SAR_CTL0, 0x00010000);
3412
3413         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3414                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3415                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3416
3417         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3418                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3419                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3420
3421         WREG32(VGT_NUM_INSTANCES, 1);
3422         WREG32(SPI_CONFIG_CNTL, 0);
3423         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3424         WREG32(CP_PERFMON_CNTL, 0);
3425
3426         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3427                                   FETCH_FIFO_HIWATER(0x4) |
3428                                   DONE_FIFO_HIWATER(0xe0) |
3429                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3430
3431         sq_config = RREG32(SQ_CONFIG);
3432         sq_config &= ~(PS_PRIO(3) |
3433                        VS_PRIO(3) |
3434                        GS_PRIO(3) |
3435                        ES_PRIO(3));
3436         sq_config |= (VC_ENABLE |
3437                       EXPORT_SRC_C |
3438                       PS_PRIO(0) |
3439                       VS_PRIO(1) |
3440                       GS_PRIO(2) |
3441                       ES_PRIO(3));
3442
3443         switch (rdev->family) {
3444         case CHIP_CEDAR:
3445         case CHIP_PALM:
3446         case CHIP_SUMO:
3447         case CHIP_SUMO2:
3448         case CHIP_CAICOS:
3449                 /* no vertex cache */
3450                 sq_config &= ~VC_ENABLE;
3451                 break;
3452         default:
3453                 break;
3454         }
3455
3456         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3457
3458         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3459         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3460         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3461         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3462         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3463         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3464         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3465
3466         switch (rdev->family) {
3467         case CHIP_CEDAR:
3468         case CHIP_PALM:
3469         case CHIP_SUMO:
3470         case CHIP_SUMO2:
3471                 ps_thread_count = 96;
3472                 break;
3473         default:
3474                 ps_thread_count = 128;
3475                 break;
3476         }
3477
3478         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3479         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3480         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3481         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3482         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3483         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3484
3485         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3486         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3487         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3488         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3489         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3490         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3491
3492         WREG32(SQ_CONFIG, sq_config);
3493         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3494         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3495         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3496         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3497         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3498         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3499         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3500         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3501         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3502         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3503
3504         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3505                                           FORCE_EOV_MAX_REZ_CNT(255)));
3506
3507         switch (rdev->family) {
3508         case CHIP_CEDAR:
3509         case CHIP_PALM:
3510         case CHIP_SUMO:
3511         case CHIP_SUMO2:
3512         case CHIP_CAICOS:
3513                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3514                 break;
3515         default:
3516                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3517                 break;
3518         }
3519         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3520         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3521
3522         WREG32(VGT_GS_VERTEX_REUSE, 16);
3523         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3524         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3525
3526         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3527         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3528
3529         WREG32(CB_PERF_CTR0_SEL_0, 0);
3530         WREG32(CB_PERF_CTR0_SEL_1, 0);
3531         WREG32(CB_PERF_CTR1_SEL_0, 0);
3532         WREG32(CB_PERF_CTR1_SEL_1, 0);
3533         WREG32(CB_PERF_CTR2_SEL_0, 0);
3534         WREG32(CB_PERF_CTR2_SEL_1, 0);
3535         WREG32(CB_PERF_CTR3_SEL_0, 0);
3536         WREG32(CB_PERF_CTR3_SEL_1, 0);
3537
3538         /* clear render buffer base addresses */
3539         WREG32(CB_COLOR0_BASE, 0);
3540         WREG32(CB_COLOR1_BASE, 0);
3541         WREG32(CB_COLOR2_BASE, 0);
3542         WREG32(CB_COLOR3_BASE, 0);
3543         WREG32(CB_COLOR4_BASE, 0);
3544         WREG32(CB_COLOR5_BASE, 0);
3545         WREG32(CB_COLOR6_BASE, 0);
3546         WREG32(CB_COLOR7_BASE, 0);
3547         WREG32(CB_COLOR8_BASE, 0);
3548         WREG32(CB_COLOR9_BASE, 0);
3549         WREG32(CB_COLOR10_BASE, 0);
3550         WREG32(CB_COLOR11_BASE, 0);
3551
3552         /* set the shader const cache sizes to 0 */
3553         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3554                 WREG32(i, 0);
3555         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3556                 WREG32(i, 0);
3557
3558         tmp = RREG32(HDP_MISC_CNTL);
3559         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3560         WREG32(HDP_MISC_CNTL, tmp);
3561
3562         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3563         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3564
3565         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3566
3567         udelay(50);
3568
3569 }
3570
3571 int evergreen_mc_init(struct radeon_device *rdev)
3572 {
3573         u32 tmp;
3574         int chansize, numchan;
3575
3576         /* Get VRAM informations */
3577         rdev->mc.vram_is_ddr = true;
3578         if ((rdev->family == CHIP_PALM) ||
3579             (rdev->family == CHIP_SUMO) ||
3580             (rdev->family == CHIP_SUMO2))
3581                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3582         else
3583                 tmp = RREG32(MC_ARB_RAMCFG);
3584         if (tmp & CHANSIZE_OVERRIDE) {
3585                 chansize = 16;
3586         } else if (tmp & CHANSIZE_MASK) {
3587                 chansize = 64;
3588         } else {
3589                 chansize = 32;
3590         }
3591         tmp = RREG32(MC_SHARED_CHMAP);
3592         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3593         case 0:
3594         default:
3595                 numchan = 1;
3596                 break;
3597         case 1:
3598                 numchan = 2;
3599                 break;
3600         case 2:
3601                 numchan = 4;
3602                 break;
3603         case 3:
3604                 numchan = 8;
3605                 break;
3606         }
3607         rdev->mc.vram_width = numchan * chansize;
3608         /* Could aper size report 0 ? */
3609         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3610         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3611         /* Setup GPU memory space */
3612         if ((rdev->family == CHIP_PALM) ||
3613             (rdev->family == CHIP_SUMO) ||
3614             (rdev->family == CHIP_SUMO2)) {
3615                 /* size in bytes on fusion */
3616                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3617                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3618         } else {
3619                 /* size in MB on evergreen/cayman/tn */
3620                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3621                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3622         }
3623         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3624         r700_vram_gtt_location(rdev, &rdev->mc);
3625         radeon_update_bandwidth_info(rdev);
3626
3627         return 0;
3628 }
3629
/**
 * evergreen_print_gpu_status_regs - dump the GPU status registers
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM status registers, the CP stall/busy status
 * registers and the DMA engine status via dev_info, for debugging
 * hung-GPU situations.  Cayman and later parts have a second DMA
 * engine at a +0x800 register offset, dumped as well.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
        dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
                RREG32(GRBM_STATUS));
        dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
                RREG32(GRBM_STATUS_SE0));
        dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
                RREG32(GRBM_STATUS_SE1));
        dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
                RREG32(SRBM_STATUS));
        dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
                RREG32(SRBM_STATUS2));
        dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
                RREG32(CP_STALLED_STAT1));
        dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
                RREG32(CP_STALLED_STAT2));
        dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
                RREG32(CP_BUSY_STAT));
        dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
                RREG32(CP_STAT));
        dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
                RREG32(DMA_STATUS_REG));
        if (rdev->family >= CHIP_CAYMAN) {
                /* second DMA engine on cayman+ */
                dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
                         RREG32(DMA_STATUS_REG + 0x800));
        }
}
3657
3658 bool evergreen_is_display_hung(struct radeon_device *rdev)
3659 {
3660         u32 crtc_hung = 0;
3661         u32 crtc_status[6];
3662         u32 i, j, tmp;
3663
3664         for (i = 0; i < rdev->num_crtc; i++) {
3665                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3666                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3667                         crtc_hung |= (1 << i);
3668                 }
3669         }
3670
3671         for (j = 0; j < 10; j++) {
3672                 for (i = 0; i < rdev->num_crtc; i++) {
3673                         if (crtc_hung & (1 << i)) {
3674                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3675                                 if (tmp != crtc_status[i])
3676                                         crtc_hung &= ~(1 << i);
3677                         }
3678                 }
3679                 if (crtc_hung == 0)
3680                         return false;
3681                 udelay(100);
3682         }
3683
3684         return true;
3685 }
3686
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 * @rdev: radeon_device pointer
 *
 * Reads GRBM_STATUS, DMA_STATUS_REG, SRBM_STATUS/SRBM_STATUS2 and
 * VM_L2_STATUS and accumulates a mask of RADEON_RESET_* flags for
 * every block that reports busy or has requests pending.  A busy MC
 * is deliberately dropped from the mask (most likely just busy, not
 * hung).  Returns the mask; non-zero means a soft reset is needed.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
        u32 reset_mask = 0;
        u32 tmp;

        /* GRBM_STATUS */
        tmp = RREG32(GRBM_STATUS);
        if (tmp & (PA_BUSY | SC_BUSY |
                   SH_BUSY | SX_BUSY |
                   TA_BUSY | VGT_BUSY |
                   DB_BUSY | CB_BUSY |
                   SPI_BUSY | VGT_BUSY_NO_DMA))
                reset_mask |= RADEON_RESET_GFX;

        if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
                   CP_BUSY | CP_COHERENCY_BUSY))
                reset_mask |= RADEON_RESET_CP;

        /* an event-engine hang implicates GRBM, GFX and CP together */
        if (tmp & GRBM_EE_BUSY)
                reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

        /* DMA_STATUS_REG */
        tmp = RREG32(DMA_STATUS_REG);
        if (!(tmp & DMA_IDLE))
                reset_mask |= RADEON_RESET_DMA;

        /* SRBM_STATUS2 */
        tmp = RREG32(SRBM_STATUS2);
        if (tmp & DMA_BUSY)
                reset_mask |= RADEON_RESET_DMA;

        /* SRBM_STATUS */
        tmp = RREG32(SRBM_STATUS);
        if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
                reset_mask |= RADEON_RESET_RLC;

        if (tmp & IH_BUSY)
                reset_mask |= RADEON_RESET_IH;

        if (tmp & SEM_BUSY)
                reset_mask |= RADEON_RESET_SEM;

        if (tmp & GRBM_RQ_PENDING)
                reset_mask |= RADEON_RESET_GRBM;

        if (tmp & VMC_BUSY)
                reset_mask |= RADEON_RESET_VMC;

        if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
                   MCC_BUSY | MCD_BUSY))
                reset_mask |= RADEON_RESET_MC;

        if (evergreen_is_display_hung(rdev))
                reset_mask |= RADEON_RESET_DISPLAY;

        /* VM_L2_STATUS */
        tmp = RREG32(VM_L2_STATUS);
        if (tmp & L2_BUSY)
                reset_mask |= RADEON_RESET_VMC;

        /* Skip MC reset as it's mostly likely not hung, just busy */
        if (reset_mask & RADEON_RESET_MC) {
                DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
                reset_mask &= ~RADEON_RESET_MC;
        }

        return reset_mask;
}
3755
/**
 * evergreen_gpu_soft_reset - soft reset the selected GPU blocks
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting the blocks to reset
 *
 * Halts CP parsing/prefetching (and the DMA ring if requested), stops
 * the memory controller, then builds and pulses the matching bits in
 * GRBM_SOFT_RESET and SRBM_SOFT_RESET (assert, delay, de-assert, with
 * read-backs to post the writes), and finally resumes the MC.  The GPU
 * status registers are dumped before and after for debugging.  The
 * exact ordering of these steps matters; do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
        struct evergreen_mc_save save;
        u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
        u32 tmp;

        if (reset_mask == 0)
                return;

        dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

        evergreen_print_gpu_status_regs(rdev);

        /* Disable CP parsing/prefetching */
        WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

        if (reset_mask & RADEON_RESET_DMA) {
                /* Disable DMA */
                tmp = RREG32(DMA_RB_CNTL);
                tmp &= ~DMA_RB_ENABLE;
                WREG32(DMA_RB_CNTL, tmp);
        }

        udelay(50);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
        }

        /* translate the reset mask into GRBM/SRBM soft reset bits */
        if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
                grbm_soft_reset |= SOFT_RESET_DB |
                        SOFT_RESET_CB |
                        SOFT_RESET_PA |
                        SOFT_RESET_SC |
                        SOFT_RESET_SPI |
                        SOFT_RESET_SX |
                        SOFT_RESET_SH |
                        SOFT_RESET_TC |
                        SOFT_RESET_TA |
                        SOFT_RESET_VC |
                        SOFT_RESET_VGT;
        }

        if (reset_mask & RADEON_RESET_CP) {
                grbm_soft_reset |= SOFT_RESET_CP |
                        SOFT_RESET_VGT;

                srbm_soft_reset |= SOFT_RESET_GRBM;
        }

        if (reset_mask & RADEON_RESET_DMA)
                srbm_soft_reset |= SOFT_RESET_DMA;

        if (reset_mask & RADEON_RESET_DISPLAY)
                srbm_soft_reset |= SOFT_RESET_DC;

        if (reset_mask & RADEON_RESET_RLC)
                srbm_soft_reset |= SOFT_RESET_RLC;

        if (reset_mask & RADEON_RESET_SEM)
                srbm_soft_reset |= SOFT_RESET_SEM;

        if (reset_mask & RADEON_RESET_IH)
                srbm_soft_reset |= SOFT_RESET_IH;

        if (reset_mask & RADEON_RESET_GRBM)
                srbm_soft_reset |= SOFT_RESET_GRBM;

        if (reset_mask & RADEON_RESET_VMC)
                srbm_soft_reset |= SOFT_RESET_VMC;

        /* IGP parts have no separate MC to reset */
        if (!(rdev->flags & RADEON_IS_IGP)) {
                if (reset_mask & RADEON_RESET_MC)
                        srbm_soft_reset |= SOFT_RESET_MC;
        }

        if (grbm_soft_reset) {
                tmp = RREG32(GRBM_SOFT_RESET);
                tmp |= grbm_soft_reset;
                dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
                WREG32(GRBM_SOFT_RESET, tmp);
                tmp = RREG32(GRBM_SOFT_RESET);  /* read back to post the write */

                udelay(50);

                tmp &= ~grbm_soft_reset;
                WREG32(GRBM_SOFT_RESET, tmp);
                tmp = RREG32(GRBM_SOFT_RESET);
        }

        if (srbm_soft_reset) {
                tmp = RREG32(SRBM_SOFT_RESET);
                tmp |= srbm_soft_reset;
                dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
                WREG32(SRBM_SOFT_RESET, tmp);
                tmp = RREG32(SRBM_SOFT_RESET);  /* read back to post the write */

                udelay(50);

                tmp &= ~srbm_soft_reset;
                WREG32(SRBM_SOFT_RESET, tmp);
                tmp = RREG32(SRBM_SOFT_RESET);
        }

        /* Wait a little for things to settle down */
        udelay(50);

        evergreen_mc_resume(rdev, &save);
        udelay(50);

        evergreen_print_gpu_status_regs(rdev);
}
3869
3870 int evergreen_asic_reset(struct radeon_device *rdev)
3871 {
3872         u32 reset_mask;
3873
3874         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3875
3876         if (reset_mask)
3877                 r600_set_bios_scratch_engine_hung(rdev, true);
3878
3879         evergreen_gpu_soft_reset(rdev, reset_mask);
3880
3881         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3882
3883         if (!reset_mask)
3884                 r600_set_bios_scratch_engine_hung(rdev, false);
3885
3886         return 0;
3887 }
3888
3889 /**
3890  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3891  *
3892  * @rdev: radeon_device pointer
3893  * @ring: radeon_ring structure holding ring information
3894  *
3895  * Check if the GFX engine is locked up.
3896  * Returns true if the engine appears to be locked up, false if not.
3897  */
3898 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3899 {
3900         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3901
3902         if (!(reset_mask & (RADEON_RESET_GFX |
3903                             RADEON_RESET_COMPUTE |
3904                             RADEON_RESET_CP))) {
3905                 radeon_ring_lockup_update(ring);
3906                 return false;
3907         }
3908         /* force CP activities */
3909         radeon_ring_force_activity(rdev, ring);
3910         return radeon_ring_test_lockup(rdev, ring);
3911 }
3912
3913 /*
3914  * RLC
3915  */
3916 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3917 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3918
3919 void sumo_rlc_fini(struct radeon_device *rdev)
3920 {
3921         int r;
3922
3923         /* save restore block */
3924         if (rdev->rlc.save_restore_obj) {
3925                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3926                 if (unlikely(r != 0))
3927                         dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3928                 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3929                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3930
3931                 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3932                 rdev->rlc.save_restore_obj = NULL;
3933         }
3934
3935         /* clear state block */
3936         if (rdev->rlc.clear_state_obj) {
3937                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3938                 if (unlikely(r != 0))
3939                         dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3940                 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3941                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3942
3943                 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3944                 rdev->rlc.clear_state_obj = NULL;
3945         }
3946
3947         /* clear state block */
3948         if (rdev->rlc.cp_table_obj) {
3949                 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3950                 if (unlikely(r != 0))
3951                         dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3952                 radeon_bo_unpin(rdev->rlc.cp_table_obj);
3953                 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
3954
3955                 radeon_bo_unref(&rdev->rlc.cp_table_obj);
3956                 rdev->rlc.cp_table_obj = NULL;
3957         }
3958 }
3959
3960 #define CP_ME_TABLE_SIZE    96
3961
/**
 * sumo_rlc_init - allocate and fill the RLC buffers
 * @rdev: radeon_device pointer
 *
 * Creates (or reuses), pins, maps and fills up to three VRAM buffer
 * objects used by the RLC:
 *  - the register save/restore list (from rdev->rlc.reg_list),
 *  - the clear state block (from rdev->rlc.cs_data),
 *  - the CP page table (sized by rdev->rlc.cp_table_size).
 * Buffer layout differs per generation (BONAIRE+/TAHITI+/earlier).
 * On any failure everything allocated so far is torn down via
 * sumo_rlc_fini().  Returns 0 on success or a negative error code.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
        const u32 *src_ptr;
        volatile u32 *dst_ptr;
        u32 dws, data, i, j, k, reg_num;
        u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
        u64 reg_list_mc_addr;
        const struct cs_section_def *cs_data;
        int r;

        src_ptr = rdev->rlc.reg_list;
        dws = rdev->rlc.reg_list_size;
        if (rdev->family >= CHIP_BONAIRE) {
                /* CIK parts need extra dwords beyond the raw register list */
                dws += (5 * 16) + 48 + 48 + 64;
        }
        cs_data = rdev->rlc.cs_data;

        if (src_ptr) {
                /* save restore block */
                if (rdev->rlc.save_restore_obj == NULL) {
                        r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
                                             RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
                        if (r) {
                                dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
                                return r;
                        }
                }

                r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
                if (unlikely(r != 0)) {
                        sumo_rlc_fini(rdev);
                        return r;
                }
                r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
                                  &rdev->rlc.save_restore_gpu_addr);
                if (r) {
                        radeon_bo_unreserve(rdev->rlc.save_restore_obj);
                        dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }

                r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
                if (r) {
                        dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }
                /* write the sr buffer */
                dst_ptr = rdev->rlc.sr_ptr;
                if (rdev->family >= CHIP_TAHITI) {
                        /* SI */
                        /* SI takes the register list verbatim */
                        for (i = 0; i < rdev->rlc.reg_list_size; i++)
                                dst_ptr[i] = cpu_to_le32(src_ptr[i]);
                } else {
                        /* ON/LN/TN */
                        /* format:
                         * dw0: (reg2 << 16) | reg1
                         * dw1: reg1 save space
                         * dw2: reg2 save space
                         */
                        /* pack register offsets (>> 2: dword offsets) two per
                         * header dword, leaving one save slot per register */
                        for (i = 0; i < dws; i++) {
                                data = src_ptr[i] >> 2;
                                i++;
                                if (i < dws)
                                        data |= (src_ptr[i] >> 2) << 16;
                                j = (((i - 1) * 3) / 2);
                                dst_ptr[j] = cpu_to_le32(data);
                        }
                        j = ((i * 3) / 2);
                        dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
                }
                radeon_bo_kunmap(rdev->rlc.save_restore_obj);
                radeon_bo_unreserve(rdev->rlc.save_restore_obj);
        }

        if (cs_data) {
                /* clear state block */
                if (rdev->family >= CHIP_BONAIRE) {
                        rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
                } else if (rdev->family >= CHIP_TAHITI) {
                        /* SI reserves a 256-byte header before the CSB proper */
                        rdev->rlc.clear_state_size = si_get_csb_size(rdev);
                        dws = rdev->rlc.clear_state_size + (256 / 4);
                } else {
                        /* pre-SI: size = header (3 dwords per extent + 2
                         * address dwords) + all register payload dwords */
                        reg_list_num = 0;
                        dws = 0;
                        for (i = 0; cs_data[i].section != NULL; i++) {
                                for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
                                        reg_list_num++;
                                        dws += cs_data[i].section[j].reg_count;
                                }
                        }
                        reg_list_blk_index = (3 * reg_list_num + 2);
                        dws += reg_list_blk_index;
                        rdev->rlc.clear_state_size = dws;
                }

                if (rdev->rlc.clear_state_obj == NULL) {
                        r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
                                             RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
                        if (r) {
                                dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
                                sumo_rlc_fini(rdev);
                                return r;
                        }
                }
                r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
                if (unlikely(r != 0)) {
                        sumo_rlc_fini(rdev);
                        return r;
                }
                r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
                                  &rdev->rlc.clear_state_gpu_addr);
                if (r) {
                        radeon_bo_unreserve(rdev->rlc.clear_state_obj);
                        dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }

                r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
                if (r) {
                        dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }
                /* set up the cs buffer */
                dst_ptr = rdev->rlc.cs_ptr;
                if (rdev->family >= CHIP_BONAIRE) {
                        cik_get_csb_buffer(rdev, dst_ptr);
                } else if (rdev->family >= CHIP_TAHITI) {
                        /* SI header: GPU address + size, then the CSB payload
                         * starting at byte offset 256 */
                        reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
                        dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
                        dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
                        dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
                        si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
                } else {
                        /* pre-SI: build the header block (one 3-dword entry
                         * per extent) followed by the register data block */
                        reg_list_hdr_blk_index = 0;
                        reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
                        data = upper_32_bits(reg_list_mc_addr);
                        dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
                        reg_list_hdr_blk_index++;
                        for (i = 0; cs_data[i].section != NULL; i++) {
                                for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
                                        reg_num = cs_data[i].section[j].reg_count;
                                        /* header entry: payload GPU address low bits */
                                        data = reg_list_mc_addr & 0xffffffff;
                                        dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
                                        reg_list_hdr_blk_index++;

                                        /* header entry: starting register byte offset */
                                        data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
                                        dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
                                        reg_list_hdr_blk_index++;

                                        /* header entry: flags | payload byte length */
                                        data = 0x08000000 | (reg_num * 4);
                                        dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
                                        reg_list_hdr_blk_index++;

                                        /* register values themselves */
                                        for (k = 0; k < reg_num; k++) {
                                                data = cs_data[i].section[j].extent[k];
                                                dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
                                        }
                                        reg_list_mc_addr += reg_num * 4;
                                        reg_list_blk_index += reg_num;
                                }
                        }
                        dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
                }
                radeon_bo_kunmap(rdev->rlc.clear_state_obj);
                radeon_bo_unreserve(rdev->rlc.clear_state_obj);
        }

        if (rdev->rlc.cp_table_size) {
                /* cp table block (CIK power-gating page table) */
                if (rdev->rlc.cp_table_obj == NULL) {
                        r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
                                             RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
                        if (r) {
                                dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
                                sumo_rlc_fini(rdev);
                                return r;
                        }
                }

                r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
                if (unlikely(r != 0)) {
                        dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }
                r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
                                  &rdev->rlc.cp_table_gpu_addr);
                if (r) {
                        radeon_bo_unreserve(rdev->rlc.cp_table_obj);
                        dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }
                r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
                if (r) {
                        dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
                        sumo_rlc_fini(rdev);
                        return r;
                }

                cik_init_cp_pg_table(rdev);

                radeon_bo_kunmap(rdev->rlc.cp_table_obj);
                radeon_bo_unreserve(rdev->rlc.cp_table_obj);

        }

        return 0;
}
4174
4175 static void evergreen_rlc_start(struct radeon_device *rdev)
4176 {
4177         u32 mask = RLC_ENABLE;
4178
4179         if (rdev->flags & RADEON_IS_IGP) {
4180                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4181         }
4182
4183         WREG32(RLC_CNTL, mask);
4184 }
4185
/**
 * evergreen_rlc_resume - load the RLC ucode and start the RLC
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, programs the per-family RLC setup registers (ARUBA
 * additionally configures load balancing based on the active SIMD
 * count), uploads the big-endian RLC microcode image from
 * rdev->rlc_fw (image size depends on family) and restarts the RLC.
 * Returns 0 on success, -EINVAL if no RLC firmware is loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
        u32 i;
        const __be32 *fw_data;

        if (!rdev->rlc_fw)
                return -EINVAL;

        r600_rlc_stop(rdev);

        WREG32(RLC_HB_CNTL, 0);

        if (rdev->flags & RADEON_IS_IGP) {
                if (rdev->family == CHIP_ARUBA) {
                        u32 always_on_bitmap =
                                3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
                        /* find out the number of active simds */
                        u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
                        tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
                        tmp = hweight32(~tmp);
                        /* only enable LB when all SIMDs are active */
                        if (tmp == rdev->config.cayman.max_simds_per_se) {
                                WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
                                WREG32(TN_RLC_LB_PARAMS, 0x00601004);
                                WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
                                WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
                                WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
                        }
                } else {
                        WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
                        WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
                }
                /* point the RLC at the sumo_rlc_init() buffers */
                WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
                WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
        } else {
                WREG32(RLC_HB_BASE, 0);
                WREG32(RLC_HB_RPTR, 0);
                WREG32(RLC_HB_WPTR, 0);
                WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
                WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
        }
        WREG32(RLC_MC_CNTL, 0);
        WREG32(RLC_UCODE_CNTL, 0);

        /* upload the ucode one dword at a time via the ADDR/DATA pair */
        fw_data = (const __be32 *)rdev->rlc_fw->data;
        if (rdev->family >= CHIP_ARUBA) {
                for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
                        WREG32(RLC_UCODE_ADDR, i);
                        WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
                }
        } else if (rdev->family >= CHIP_CAYMAN) {
                for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
                        WREG32(RLC_UCODE_ADDR, i);
                        WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
                }
        } else {
                for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
                        WREG32(RLC_UCODE_ADDR, i);
                        WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
                }
        }
        WREG32(RLC_UCODE_ADDR, 0);

        evergreen_rlc_start(rdev);

        return 0;
}
4252
4253 /* Interrupts */
4254
4255 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4256 {
4257         if (crtc >= rdev->num_crtc)
4258                 return 0;
4259         else
4260                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4261 }
4262
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 * @rdev: radeon_device pointer
 *
 * Clears the CP ring interrupt enables (per-ring on cayman+), the DMA
 * trap enables, GRBM interrupts, the per-CRTC vblank/vline masks and
 * pageflip (GRPH) interrupts, the DAC autodetect interrupts, and the
 * HPD interrupt enables (preserving only each pad's polarity bit).
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
        u32 tmp;

        if (rdev->family >= CHIP_CAYMAN) {
                cayman_cp_int_cntl_setup(rdev, 0,
                                         CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
                cayman_cp_int_cntl_setup(rdev, 1, 0);
                cayman_cp_int_cntl_setup(rdev, 2, 0);
                /* second DMA engine on cayman+ */
                tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
                WREG32(CAYMAN_DMA1_CNTL, tmp);
        } else
                WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
        tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
        WREG32(DMA_CNTL, tmp);
        WREG32(GRBM_INT_CNTL, 0);
        /* per-CRTC interrupt masks; only CRTC 0/1 exist on all parts */
        WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        if (rdev->num_crtc >= 4) {
                WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
                WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
                WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        }

        /* pageflip interrupts */
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
        WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
        if (rdev->num_crtc >= 4) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
        }
        if (rdev->num_crtc >= 6) {
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
                WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
        }

        /* only one DAC on DCE6 */
        if (!ASIC_IS_DCE6(rdev))
                WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
        WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

        /* disable HPD interrupts but keep the programmed polarity bit */
        tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD1_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD2_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD3_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD4_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD5_INT_CONTROL, tmp);
        tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
        WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4320
/**
 * evergreen_irq_set - program the interrupt enable registers from SW state
 * @rdev: radeon_device pointer
 *
 * Translates the driver's software interrupt state (rdev->irq.*) into the
 * hardware enable registers: CP ring interrupts (one ring pre-Cayman, three
 * on Cayman+), DMA trap interrupts, per-CRTC vblank, HPD hotplug, AFMT/HDMI
 * audio triggers and thermal interrupts.
 *
 * Returns 0 on success, -EINVAL if no IRQ handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* Read-modify-write: start from the current register values with the
	 * enable bits masked off, then OR the enables back in below.
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* ARUBA (TN) uses a different thermal interrupt control register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* Cayman+ has a second async DMA engine */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank interrupts: enabled if either the vblank path or a pending
	 * page flip on the crtc needs them */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	/* hotplug detect interrupts */
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	/* HDMI/audio format write trigger interrupts */
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* commit the computed enable masks to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	/* crtcs 2/3 only exist on asics with >= 4 crtcs */
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4542
4543 static void evergreen_irq_ack(struct radeon_device *rdev)
4544 {
4545         u32 tmp;
4546
4547         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4548         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4549         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4550         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4551         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4552         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4553         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4554         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4555         if (rdev->num_crtc >= 4) {
4556                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4557                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4558         }
4559         if (rdev->num_crtc >= 6) {
4560                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4561                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4562         }
4563
4564         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4565         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4566         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4567         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4568         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4569         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4570
4571         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4572                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4573         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4574                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4575         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4576                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4577         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4578                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4579         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4580                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4581         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4582                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4583
4584         if (rdev->num_crtc >= 4) {
4585                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4586                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4587                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4588                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4589                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4590                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4591                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4592                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4593                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4594                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4595                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4596                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4597         }
4598
4599         if (rdev->num_crtc >= 6) {
4600                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4601                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4602                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4603                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4604                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4605                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4606                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4607                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4608                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4609                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4610                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4611                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4612         }
4613
4614         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4615                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4616                 tmp |= DC_HPDx_INT_ACK;
4617                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4618         }
4619         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4620                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4621                 tmp |= DC_HPDx_INT_ACK;
4622                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4623         }
4624         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4625                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4626                 tmp |= DC_HPDx_INT_ACK;
4627                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4628         }
4629         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4630                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4631                 tmp |= DC_HPDx_INT_ACK;
4632                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4633         }
4634         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4635                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4636                 tmp |= DC_HPDx_INT_ACK;
4637                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4638         }
4639         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4640                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4641                 tmp |= DC_HPDx_INT_ACK;
4642                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4643         }
4644         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4645                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4646                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4647                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4648         }
4649         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4650                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4651                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4652                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4653         }
4654         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4655                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4656                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4657                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4658         }
4659         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4660                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4661                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4662                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4663         }
4664         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4665                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4666                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4667                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4668         }
4669         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4670                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4671                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4672                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4673         }
4674 }
4675
/* Fully disable interrupts: mask them in the IH, give in-flight interrupts
 * a moment to land, ack anything pending, then force all display/CP enable
 * registers to their disabled state.  The call order is hardware-mandated.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4684
/* Suspend-time teardown: disable and ack all interrupts, then stop the RLC
 * so no interrupt sources remain live across the suspend. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4690
4691 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4692 {
4693         u32 wptr, tmp;
4694
4695         if (rdev->wb.enabled)
4696                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4697         else
4698                 wptr = RREG32(IH_RB_WPTR);
4699
4700         if (wptr & RB_OVERFLOW) {
4701                 /* When a ring buffer overflow happen start parsing interrupt
4702                  * from the last not overwritten vector (wptr + 16). Hopefully
4703                  * this should allow us to catchup.
4704                  */
4705                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4706                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4707                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4708                 tmp = RREG32(IH_RB_CNTL);
4709                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4710                 WREG32(IH_RB_CNTL, tmp);
4711         }
4712         return (wptr & rdev->ih.ptr_mask);
4713 }
4714
4715 int evergreen_irq_process(struct radeon_device *rdev)
4716 {
4717         u32 wptr;
4718         u32 rptr;
4719         u32 src_id, src_data;
4720         u32 ring_index;
4721         bool queue_hotplug = false;
4722         bool queue_hdmi = false;
4723         bool queue_thermal = false;
4724         u32 status, addr;
4725
4726         if (!rdev->ih.enabled || rdev->shutdown)
4727                 return IRQ_NONE;
4728
4729         wptr = evergreen_get_ih_wptr(rdev);
4730
4731 restart_ih:
4732         /* is somebody else already processing irqs? */
4733         if (atomic_xchg(&rdev->ih.lock, 1))
4734                 return IRQ_NONE;
4735
4736         rptr = rdev->ih.rptr;
4737         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4738
4739         /* Order reading of wptr vs. reading of IH ring data */
4740         rmb();
4741
4742         /* display interrupts */
4743         evergreen_irq_ack(rdev);
4744
4745         while (rptr != wptr) {
4746                 /* wptr/rptr are in bytes! */
4747                 ring_index = rptr / 4;
4748                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4749                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4750
4751                 switch (src_id) {
4752                 case 1: /* D1 vblank/vline */
4753                         switch (src_data) {
4754                         case 0: /* D1 vblank */
4755                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4756                                         if (rdev->irq.crtc_vblank_int[0]) {
4757                                                 drm_handle_vblank(rdev->ddev, 0);
4758                                                 rdev->pm.vblank_sync = true;
4759                                                 wake_up(&rdev->irq.vblank_queue);
4760                                         }
4761                                         if (atomic_read(&rdev->irq.pflip[0]))
4762                                                 radeon_crtc_handle_flip(rdev, 0);
4763                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4764                                         DRM_DEBUG("IH: D1 vblank\n");
4765                                 }
4766                                 break;
4767                         case 1: /* D1 vline */
4768                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4769                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4770                                         DRM_DEBUG("IH: D1 vline\n");
4771                                 }
4772                                 break;
4773                         default:
4774                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4775                                 break;
4776                         }
4777                         break;
4778                 case 2: /* D2 vblank/vline */
4779                         switch (src_data) {
4780                         case 0: /* D2 vblank */
4781                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4782                                         if (rdev->irq.crtc_vblank_int[1]) {
4783                                                 drm_handle_vblank(rdev->ddev, 1);
4784                                                 rdev->pm.vblank_sync = true;
4785                                                 wake_up(&rdev->irq.vblank_queue);
4786                                         }
4787                                         if (atomic_read(&rdev->irq.pflip[1]))
4788                                                 radeon_crtc_handle_flip(rdev, 1);
4789                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4790                                         DRM_DEBUG("IH: D2 vblank\n");
4791                                 }
4792                                 break;
4793                         case 1: /* D2 vline */
4794                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4795                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4796                                         DRM_DEBUG("IH: D2 vline\n");
4797                                 }
4798                                 break;
4799                         default:
4800                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4801                                 break;
4802                         }
4803                         break;
4804                 case 3: /* D3 vblank/vline */
4805                         switch (src_data) {
4806                         case 0: /* D3 vblank */
4807                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4808                                         if (rdev->irq.crtc_vblank_int[2]) {
4809                                                 drm_handle_vblank(rdev->ddev, 2);
4810                                                 rdev->pm.vblank_sync = true;
4811                                                 wake_up(&rdev->irq.vblank_queue);
4812                                         }
4813                                         if (atomic_read(&rdev->irq.pflip[2]))
4814                                                 radeon_crtc_handle_flip(rdev, 2);
4815                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4816                                         DRM_DEBUG("IH: D3 vblank\n");
4817                                 }
4818                                 break;
4819                         case 1: /* D3 vline */
4820                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4821                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4822                                         DRM_DEBUG("IH: D3 vline\n");
4823                                 }
4824                                 break;
4825                         default:
4826                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4827                                 break;
4828                         }
4829                         break;
4830                 case 4: /* D4 vblank/vline */
4831                         switch (src_data) {
4832                         case 0: /* D4 vblank */
4833                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4834                                         if (rdev->irq.crtc_vblank_int[3]) {
4835                                                 drm_handle_vblank(rdev->ddev, 3);
4836                                                 rdev->pm.vblank_sync = true;
4837                                                 wake_up(&rdev->irq.vblank_queue);
4838                                         }
4839                                         if (atomic_read(&rdev->irq.pflip[3]))
4840                                                 radeon_crtc_handle_flip(rdev, 3);
4841                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4842                                         DRM_DEBUG("IH: D4 vblank\n");
4843                                 }
4844                                 break;
4845                         case 1: /* D4 vline */
4846                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4847                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4848                                         DRM_DEBUG("IH: D4 vline\n");
4849                                 }
4850                                 break;
4851                         default:
4852                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4853                                 break;
4854                         }
4855                         break;
4856                 case 5: /* D5 vblank/vline */
4857                         switch (src_data) {
4858                         case 0: /* D5 vblank */
4859                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4860                                         if (rdev->irq.crtc_vblank_int[4]) {
4861                                                 drm_handle_vblank(rdev->ddev, 4);
4862                                                 rdev->pm.vblank_sync = true;
4863                                                 wake_up(&rdev->irq.vblank_queue);
4864                                         }
4865                                         if (atomic_read(&rdev->irq.pflip[4]))
4866                                                 radeon_crtc_handle_flip(rdev, 4);
4867                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4868                                         DRM_DEBUG("IH: D5 vblank\n");
4869                                 }
4870                                 break;
4871                         case 1: /* D5 vline */
4872                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4873                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4874                                         DRM_DEBUG("IH: D5 vline\n");
4875                                 }
4876                                 break;
4877                         default:
4878                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4879                                 break;
4880                         }
4881                         break;
4882                 case 6: /* D6 vblank/vline */
4883                         switch (src_data) {
4884                         case 0: /* D6 vblank */
4885                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4886                                         if (rdev->irq.crtc_vblank_int[5]) {
4887                                                 drm_handle_vblank(rdev->ddev, 5);
4888                                                 rdev->pm.vblank_sync = true;
4889                                                 wake_up(&rdev->irq.vblank_queue);
4890                                         }
4891                                         if (atomic_read(&rdev->irq.pflip[5]))
4892                                                 radeon_crtc_handle_flip(rdev, 5);
4893                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4894                                         DRM_DEBUG("IH: D6 vblank\n");
4895                                 }
4896                                 break;
4897                         case 1: /* D6 vline */
4898                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4899                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4900                                         DRM_DEBUG("IH: D6 vline\n");
4901                                 }
4902                                 break;
4903                         default:
4904                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4905                                 break;
4906                         }
4907                         break;
4908                 case 42: /* HPD hotplug */
4909                         switch (src_data) {
4910                         case 0:
4911                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4912                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4913                                         queue_hotplug = true;
4914                                         DRM_DEBUG("IH: HPD1\n");
4915                                 }
4916                                 break;
4917                         case 1:
4918                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4919                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4920                                         queue_hotplug = true;
4921                                         DRM_DEBUG("IH: HPD2\n");
4922                                 }
4923                                 break;
4924                         case 2:
4925                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4926                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4927                                         queue_hotplug = true;
4928                                         DRM_DEBUG("IH: HPD3\n");
4929                                 }
4930                                 break;
4931                         case 3:
4932                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4933                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4934                                         queue_hotplug = true;
4935                                         DRM_DEBUG("IH: HPD4\n");
4936                                 }
4937                                 break;
4938                         case 4:
4939                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4940                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4941                                         queue_hotplug = true;
4942                                         DRM_DEBUG("IH: HPD5\n");
4943                                 }
4944                                 break;
4945                         case 5:
4946                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4947                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4948                                         queue_hotplug = true;
4949                                         DRM_DEBUG("IH: HPD6\n");
4950                                 }
4951                                 break;
4952                         default:
4953                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4954                                 break;
4955                         }
4956                         break;
4957                 case 44: /* hdmi */
4958                         switch (src_data) {
4959                         case 0:
4960                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4961                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4962                                         queue_hdmi = true;
4963                                         DRM_DEBUG("IH: HDMI0\n");
4964                                 }
4965                                 break;
4966                         case 1:
4967                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4968                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4969                                         queue_hdmi = true;
4970                                         DRM_DEBUG("IH: HDMI1\n");
4971                                 }
4972                                 break;
4973                         case 2:
4974                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4975                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4976                                         queue_hdmi = true;
4977                                         DRM_DEBUG("IH: HDMI2\n");
4978                                 }
4979                                 break;
4980                         case 3:
4981                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4982                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4983                                         queue_hdmi = true;
4984                                         DRM_DEBUG("IH: HDMI3\n");
4985                                 }
4986                                 break;
4987                         case 4:
4988                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4989                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4990                                         queue_hdmi = true;
4991                                         DRM_DEBUG("IH: HDMI4\n");
4992                                 }
4993                                 break;
4994                         case 5:
4995                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4996                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4997                                         queue_hdmi = true;
4998                                         DRM_DEBUG("IH: HDMI5\n");
4999                                 }
5000                                 break;
5001                         default:
5002                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5003                                 break;
5004                         }
5005                 case 124: /* UVD */
5006                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5007                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5008                         break;
5009                 case 146:
5010                 case 147:
5011                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5012                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5013                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5014                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5015                                 addr);
5016                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5017                                 status);
5018                         cayman_vm_decode_fault(rdev, status, addr);
5019                         /* reset addr and status */
5020                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5021                         break;
5022                 case 176: /* CP_INT in ring buffer */
5023                 case 177: /* CP_INT in IB1 */
5024                 case 178: /* CP_INT in IB2 */
5025                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5026                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5027                         break;
5028                 case 181: /* CP EOP event */
5029                         DRM_DEBUG("IH: CP EOP\n");
5030                         if (rdev->family >= CHIP_CAYMAN) {
5031                                 switch (src_data) {
5032                                 case 0:
5033                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5034                                         break;
5035                                 case 1:
5036                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5037                                         break;
5038                                 case 2:
5039                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5040                                         break;
5041                                 }
5042                         } else
5043                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5044                         break;
5045                 case 224: /* DMA trap event */
5046                         DRM_DEBUG("IH: DMA trap\n");
5047                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5048                         break;
5049                 case 230: /* thermal low to high */
5050                         DRM_DEBUG("IH: thermal low to high\n");
5051                         rdev->pm.dpm.thermal.high_to_low = false;
5052                         queue_thermal = true;
5053                         break;
5054                 case 231: /* thermal high to low */
5055                         DRM_DEBUG("IH: thermal high to low\n");
5056                         rdev->pm.dpm.thermal.high_to_low = true;
5057                         queue_thermal = true;
5058                         break;
5059                 case 233: /* GUI IDLE */
5060                         DRM_DEBUG("IH: GUI idle\n");
5061                         break;
5062                 case 244: /* DMA trap event */
5063                         if (rdev->family >= CHIP_CAYMAN) {
5064                                 DRM_DEBUG("IH: DMA1 trap\n");
5065                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5066                         }
5067                         break;
5068                 default:
5069                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5070                         break;
5071                 }
5072
5073                 /* wptr/rptr are in bytes! */
5074                 rptr += 16;
5075                 rptr &= rdev->ih.ptr_mask;
5076         }
5077         if (queue_hotplug)
5078                 schedule_work(&rdev->hotplug_work);
5079         if (queue_hdmi)
5080                 schedule_work(&rdev->audio_work);
5081         if (queue_thermal && rdev->pm.dpm_enabled)
5082                 schedule_work(&rdev->pm.dpm.thermal.work);
5083         rdev->ih.rptr = rptr;
5084         WREG32(IH_RB_RPTR, rdev->ih.rptr);
5085         atomic_set(&rdev->ih.lock, 0);
5086
5087         /* make sure wptr hasn't changed while processing */
5088         wptr = evergreen_get_ih_wptr(rdev);
5089         if (wptr != rptr)
5090                 goto restart_ih;
5091
5092         return IRQ_HANDLED;
5093 }
5094
/**
 * evergreen_startup - program the asic to a running state
 * @rdev: radeon_device pointer
 *
 * Shared by init and resume: loads any missing microcode, programs the
 * MC and GART, allocates RLC/writeback buffers, starts the fence
 * drivers, enables interrupts and finally brings up the GFX, DMA and
 * (optionally) UVD rings plus the IB pool and audio.
 *
 * The statement order below matters: scratch is set up before the MC
 * is programmed, microcode is loaded before the CP/DMA engines are
 * resumed, and the IH is initialized before the rings are started.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts also require MC ucode; fetch whatever firmware
	 * was not already loaded, then push the MC ucode to the hw.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* bring up the aperture: AGP if present, otherwise the PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: if it cannot be resumed or its fence driver
	 * fails to start, disable its ring instead of failing startup.
	 */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was disabled above (or never set up) */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		/* UVD failure is logged but deliberately not fatal */
		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5250
5251 int evergreen_resume(struct radeon_device *rdev)
5252 {
5253         int r;
5254
5255         /* reset the asic, the gfx blocks are often in a bad state
5256          * after the driver is unloaded or after a resume
5257          */
5258         if (radeon_asic_reset(rdev))
5259                 dev_warn(rdev->dev, "GPU reset failed !\n");
5260         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5261          * posting will perform necessary task to bring back GPU into good
5262          * shape.
5263          */
5264         /* post card */
5265         atom_asic_init(rdev->mode_info.atom_context);
5266
5267         /* init golden registers */
5268         evergreen_init_golden_registers(rdev);
5269
5270         rdev->accel_working = true;
5271         r = evergreen_startup(rdev);
5272         if (r) {
5273                 DRM_ERROR("evergreen startup failed on resume\n");
5274                 rdev->accel_working = false;
5275                 return r;
5276         }
5277
5278         return r;
5279
5280 }
5281
/**
 * evergreen_suspend - suspend the asic
 * @rdev: radeon_device pointer
 *
 * Quiesce the hardware before suspend: tear down audio, stop the UVD,
 * CP and DMA engines, disable interrupts, then disable writeback and
 * the PCIE GART.  The teardown order here deliberately mirrors the
 * bring-up order in evergreen_startup() in reverse.
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5295
5296 /* Plan is to move initialization in that function and use
5297  * helper function so that radeon_device_init pretty much
5298  * do nothing more than calling asic specific function. This
5299  * should also allow to remove a bunch of callback function
5300  * like vram_info.
5301  */
/**
 * evergreen_init - one-time asic initialization
 * @rdev: radeon_device pointer
 *
 * Reads and validates the (ATOM) BIOS, resets and posts the card if
 * needed, sets up clocks, fences, AGP, the memory controller and the
 * memory manager, sizes the GFX/DMA/UVD/IH rings, initializes the
 * GART and finally calls evergreen_startup().
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, we just fall back to PCI(E) */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* size the rings; the buffers are allocated later in startup */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional; if its init fails the UVD ring is never set up */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is not fatal here: acceleration is
		 * disabled and torn down, but init continues so that
		 * modesetting can still be used.
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5409
/**
 * evergreen_fini - final teardown of the asic
 * @rdev: radeon_device pointer
 *
 * Counterpart of evergreen_init(): stops the engines and interrupts,
 * then frees every resource acquired during init (rings, RLC,
 * writeback, IB pool, IRQs, GART, UVD, scratch, GEM, fences, AGP,
 * buffer objects, atombios state and the BIOS copy), roughly in
 * reverse order of creation.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5433
5434 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5435 {
5436         u32 link_width_cntl, speed_cntl;
5437
5438         if (radeon_pcie_gen2 == 0)
5439                 return;
5440
5441         if (rdev->flags & RADEON_IS_IGP)
5442                 return;
5443
5444         if (!(rdev->flags & RADEON_IS_PCIE))
5445                 return;
5446
5447         /* x2 cards have a special sequence */
5448         if (ASIC_IS_X2(rdev))
5449                 return;
5450
5451         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5452                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5453                 return;
5454
5455         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5456         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5457                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5458                 return;
5459         }
5460
5461         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5462
5463         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5464             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5465
5466                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5467                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5468                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5469
5470                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5471                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5472                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5473
5474                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5475                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5476                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5477
5478                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5479                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5480                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5481
5482                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5483                 speed_cntl |= LC_GEN2_EN_STRAP;
5484                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5485
5486         } else {
5487                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5488                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5489                 if (1)
5490                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5491                 else
5492                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5493                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5494         }
5495 }
5496
5497 void evergreen_program_aspm(struct radeon_device *rdev)
5498 {
5499         u32 data, orig;
5500         u32 pcie_lc_cntl, pcie_lc_cntl_old;
5501         bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5502         /* fusion_platform = true
5503          * if the system is a fusion system
5504          * (APU or DGPU in a fusion system).
5505          * todo: check if the system is a fusion platform.
5506          */
5507         bool fusion_platform = false;
5508
5509         if (radeon_aspm == 0)
5510                 return;
5511
5512         if (!(rdev->flags & RADEON_IS_PCIE))
5513                 return;
5514
5515         switch (rdev->family) {
5516         case CHIP_CYPRESS:
5517         case CHIP_HEMLOCK:
5518         case CHIP_JUNIPER:
5519         case CHIP_REDWOOD:
5520         case CHIP_CEDAR:
5521         case CHIP_SUMO:
5522         case CHIP_SUMO2:
5523         case CHIP_PALM:
5524         case CHIP_ARUBA:
5525                 disable_l0s = true;
5526                 break;
5527         default:
5528                 disable_l0s = false;
5529                 break;
5530         }
5531
5532         if (rdev->flags & RADEON_IS_IGP)
5533                 fusion_platform = true; /* XXX also dGPUs in a fusion system */
5534
5535         data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5536         if (fusion_platform)
5537                 data &= ~MULTI_PIF;
5538         else
5539                 data |= MULTI_PIF;
5540         if (data != orig)
5541                 WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5542
5543         data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5544         if (fusion_platform)
5545                 data &= ~MULTI_PIF;
5546         else
5547                 data |= MULTI_PIF;
5548         if (data != orig)
5549                 WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
5550
5551         pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5552         pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
5553         if (!disable_l0s) {
5554                 if (rdev->family >= CHIP_BARTS)
5555                         pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5556                 else
5557                         pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5558         }
5559
5560         if (!disable_l1) {
5561                 if (rdev->family >= CHIP_BARTS)
5562                         pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5563                 else
5564                         pcie_lc_cntl |= LC_L1_INACTIVITY(8);
5565
5566                 if (!disable_plloff_in_l1) {
5567                         data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5568                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5569                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5570                         if (data != orig)
5571                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5572
5573                         data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5574                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5575                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5576                         if (data != orig)
5577                                 WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5578
5579                         data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5580                         data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5581                         data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5582                         if (data != orig)
5583                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5584
5585                         data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5586                         data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5587                         data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5588                         if (data != orig)
5589                                 WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5590
5591                         if (rdev->family >= CHIP_BARTS) {
5592                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5593                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5594                                 data |= PLL_RAMP_UP_TIME_0(4);
5595                                 if (data != orig)
5596                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5597
5598                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5599                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5600                                 data |= PLL_RAMP_UP_TIME_1(4);
5601                                 if (data != orig)
5602                                         WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5603
5604                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5605                                 data &= ~PLL_RAMP_UP_TIME_0_MASK;
5606                                 data |= PLL_RAMP_UP_TIME_0(4);
5607                                 if (data != orig)
5608                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5609
5610                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5611                                 data &= ~PLL_RAMP_UP_TIME_1_MASK;
5612                                 data |= PLL_RAMP_UP_TIME_1(4);
5613                                 if (data != orig)
5614                                         WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5615                         }
5616
5617                         data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5618                         data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5619                         data |= LC_DYN_LANES_PWR_STATE(3);
5620                         if (data != orig)
5621                                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5622
5623                         if (rdev->family >= CHIP_BARTS) {
5624                                 data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5625                                 data &= ~LS2_EXIT_TIME_MASK;
5626                                 data |= LS2_EXIT_TIME(1);
5627                                 if (data != orig)
5628                                         WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5629
5630                                 data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5631                                 data &= ~LS2_EXIT_TIME_MASK;
5632                                 data |= LS2_EXIT_TIME(1);
5633                                 if (data != orig)
5634                                         WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5635                         }
5636                 }
5637         }
5638
5639         /* evergreen parts only */
5640         if (rdev->family < CHIP_BARTS)
5641                 pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
5642
5643         if (pcie_lc_cntl != pcie_lc_cntl_old)
5644                 WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
5645 }