/* drivers/gpu/drm/radeon/evergreen.c */
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include <drm/radeon_drm.h>
30 #include "evergreend.h"
31 #include "atom.h"
32 #include "avivod.h"
33 #include "evergreen_reg.h"
34 #include "evergreen_blit_shaders.h"
35 #include "radeon_ucode.h"
36
/* Per-CRTC MMIO register block base offsets, indexed by CRTC id (0-5). */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
46
47 #include "clearstate_evergreen.h"
48
/*
 * Flat list of register offsets for the RLC save/restore mechanism on
 * Sumo-class parts.  Presumably consumed when building the RLC
 * save/restore buffer — confirm against the RLC init code elsewhere in
 * this file.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
133
/* Forward declarations for routines defined later in this file. */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
/* Helpers shared with the cayman/NI code (defined in ni.c). */
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

/* Clear-state buffer (CSB) helpers borrowed from the SI/CIK code. */
extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
149
/*
 * "Golden" register settings for Evergreen.  Entries are triplets:
 * { register offset, and_mask, or_value } — presumably applied in order
 * via radeon_program_register_sequence(); confirm against the caller.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
195
/*
 * Second Evergreen golden-register table ({ offset, and_mask, or_value }
 * triplets); all entries here clear the target registers to zero.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
213
/*
 * Medium-grain clock gating (MGCG) init sequence for Cypress.
 * { offset, and_mask, or_value } triplets applied in order — ordering is
 * significant: 0x802c is rewritten several times to switch banks/modes
 * between groups of writes.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
366
/*
 * Medium-grain clock gating (MGCG) init sequence for Redwood.
 * { offset, and_mask, or_value } triplets applied in order; 0x802c is
 * repeated to switch modes between write groups, so order matters.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
438
/*
 * Golden register settings for Cedar ({ offset, and_mask, or_value }
 * triplets).  Similar to evergreen_golden_registers but with
 * Cedar-specific values (e.g. 0x88d4, 0x8cf0) and fewer CRTC entries.
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
481
/*
 * Medium-grain clock gating (MGCG) init sequence for Cedar
 * ({ offset, and_mask, or_value } triplets, applied in order).
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
535
/*
 * Medium-grain clock gating (MGCG) init sequence for Juniper
 * ({ offset, and_mask, or_value } triplets, applied in order).
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
633
/*
 * Golden register settings for SuperSumo APUs
 * ({ offset, and_mask, or_value } triplets, applied in order).
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
708
/*
 * Sumo-specific golden register overrides
 * ({ offset, and_mask, or_value } triplets) applied on top of the
 * supersumo table — presumably; confirm against the init code.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
717
/*
 * Golden register settings for Wrestler APUs
 * ({ offset, and_mask, or_value } triplets, applied in order).
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
770
/*
 * Golden register settings for Barts (Northern Islands)
 * ({ offset, and_mask, or_value } triplets, applied in order).
 * Note these use narrower and-masks than the Evergreen tables, so
 * unrelated bits in each register are preserved.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
819
/* TURKS "golden" register overrides applied once at init.
 * NOTE(review): entries appear to be {reg offset, and-mask, or-value}
 * triplets consumed by radeon_program_register_sequence() — confirm
 * against that helper's definition.
 */
static const u32 turks_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0x8f311ff1, 0x001000f0,
        0x8c8, 0x00003000, 0x00001070,
        0x8cc, 0x000fffff, 0x00040035,
        0x3f90, 0xffff0000, 0xfff00000,
        0x9148, 0xffff0000, 0xfff00000,
        0x3f94, 0xffff0000, 0xfff00000,
        0x914c, 0xffff0000, 0xfff00000,
        0xc78, 0x00000080, 0x00000080,
        0xbd4, 0x00073007, 0x00010002,
        0xd02c, 0xbfffff1f, 0x08421000,
        0xd0b8, 0x03773777, 0x02010002,
        0x5bc0, 0x00200000, 0x50100000,
        0x98f8, 0x33773777, 0x00010002,
        0x98fc, 0xffffffff, 0x33221100,
        0x7030, 0x31000311, 0x00000011,
        0x2f48, 0x33773777, 0x00010002,
        0x6b28, 0x00000010, 0x00000012,
        0x7728, 0x00000010, 0x00000012,
        0x10328, 0x00000010, 0x00000012,
        0x10f28, 0x00000010, 0x00000012,
        0x11b28, 0x00000010, 0x00000012,
        0x12728, 0x00000010, 0x00000012,
        0x240c, 0x000007ff, 0x00000380,
        0x8a14, 0xf000001f, 0x00000007,
        0x8b24, 0x3fff3fff, 0x00ff0fff,
        0x8b10, 0x0000ff0f, 0x00000000,
        0x28a4c, 0x07ffffff, 0x06000000,
        0x10c, 0x00000001, 0x00010003,
        0xa02c, 0xffffffff, 0x0000009b,
        0x913c, 0x0000000f, 0x0100000a,
        0x8d00, 0xffff7f7f, 0x100e4848,
        0x8d04, 0x00ffffff, 0x00164745,
        0x8c00, 0xfffc0003, 0xe4000003,
        0x8c04, 0xf8ff00ff, 0x40600060,
        0x8c08, 0x00ff00ff, 0x001c001c,
        0x8cf0, 0x1fff1fff, 0x08e00410,
        0x8c20, 0x0fff0fff, 0x00800080,
        0x8c24, 0x0fff0fff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0x0000ffff, 0x00001010,
        0x28350, 0x00000f01, 0x00000000,
        0x9508, 0x3700001f, 0x00000002,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0x001f3ae3, 0x000000c2,
        0x88d4, 0x0000001f, 0x00000010,
        0x8974, 0xffffffff, 0x00000000
};
870
/* CAICOS "golden" register overrides applied once at init.
 * NOTE(review): entries appear to be {reg offset, and-mask, or-value}
 * triplets consumed by radeon_program_register_sequence() — confirm
 * against that helper's definition.
 */
static const u32 caicos_golden_registers[] =
{
        0x5eb4, 0xffffffff, 0x00000002,
        0x5e78, 0x8f311ff1, 0x001000f0,
        0x8c8, 0x00003420, 0x00001450,
        0x8cc, 0x000fffff, 0x00040035,
        0x3f90, 0xffff0000, 0xfffc0000,
        0x9148, 0xffff0000, 0xfffc0000,
        0x3f94, 0xffff0000, 0xfffc0000,
        0x914c, 0xffff0000, 0xfffc0000,
        0xc78, 0x00000080, 0x00000080,
        0xbd4, 0x00073007, 0x00010001,
        0xd02c, 0xbfffff1f, 0x08421000,
        0xd0b8, 0x03773777, 0x02010001,
        0x5bc0, 0x00200000, 0x50100000,
        0x98f8, 0x33773777, 0x02010001,
        0x98fc, 0xffffffff, 0x33221100,
        0x7030, 0x31000311, 0x00000011,
        0x2f48, 0x33773777, 0x02010001,
        0x6b28, 0x00000010, 0x00000012,
        0x7728, 0x00000010, 0x00000012,
        0x10328, 0x00000010, 0x00000012,
        0x10f28, 0x00000010, 0x00000012,
        0x11b28, 0x00000010, 0x00000012,
        0x12728, 0x00000010, 0x00000012,
        0x240c, 0x000007ff, 0x00000380,
        0x8a14, 0xf000001f, 0x00000001,
        0x8b24, 0x3fff3fff, 0x00ff0fff,
        0x8b10, 0x0000ff0f, 0x00000000,
        0x28a4c, 0x07ffffff, 0x06000000,
        0x10c, 0x00000001, 0x00010003,
        0xa02c, 0xffffffff, 0x0000009b,
        0x913c, 0x0000000f, 0x0100000a,
        0x8d00, 0xffff7f7f, 0x100e4848,
        0x8d04, 0x00ffffff, 0x00164745,
        0x8c00, 0xfffc0003, 0xe4000003,
        0x8c04, 0xf8ff00ff, 0x40600060,
        0x8c08, 0x00ff00ff, 0x001c001c,
        0x8cf0, 0x1fff1fff, 0x08e00410,
        0x8c20, 0x0fff0fff, 0x00800080,
        0x8c24, 0x0fff0fff, 0x00800080,
        0x8c18, 0xffffffff, 0x20202078,
        0x8c1c, 0x0000ffff, 0x00001010,
        0x28350, 0x00000f01, 0x00000000,
        0x9508, 0x3700001f, 0x00000002,
        0x960c, 0xffffffff, 0x54763210,
        0x88c4, 0x001f3ae3, 0x000000c2,
        0x88d4, 0x0000001f, 0x00000010,
        0x8974, 0xffffffff, 0x00000000
};
921
/**
 * evergreen_init_golden_registers - apply per-family "golden" register setup
 *
 * @rdev: radeon_device pointer
 *
 * Programs the family-specific golden register tables (and, for dGPUs, the
 * mgcg tables — presumably medium-grain clock gating; TODO confirm) via
 * radeon_program_register_sequence().  Families without a table are left
 * untouched.
 */
static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
        switch (rdev->family) {
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
                /* Cypress and Hemlock (dual-Cypress) share everything but
                 * get their own mgcg table. */
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers));
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers2,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
                radeon_program_register_sequence(rdev,
                                                 cypress_mgcg_init,
                                                 (const u32)ARRAY_SIZE(cypress_mgcg_init));
                break;
        case CHIP_JUNIPER:
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers));
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers2,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
                radeon_program_register_sequence(rdev,
                                                 juniper_mgcg_init,
                                                 (const u32)ARRAY_SIZE(juniper_mgcg_init));
                break;
        case CHIP_REDWOOD:
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers));
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers2,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
                radeon_program_register_sequence(rdev,
                                                 redwood_mgcg_init,
                                                 (const u32)ARRAY_SIZE(redwood_mgcg_init));
                break;
        case CHIP_CEDAR:
                /* Cedar uses its own first table but shares registers2. */
                radeon_program_register_sequence(rdev,
                                                 cedar_golden_registers,
                                                 (const u32)ARRAY_SIZE(cedar_golden_registers));
                radeon_program_register_sequence(rdev,
                                                 evergreen_golden_registers2,
                                                 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
                radeon_program_register_sequence(rdev,
                                                 cedar_mgcg_init,
                                                 (const u32)ARRAY_SIZE(cedar_mgcg_init));
                break;
        case CHIP_PALM:
                /* APUs: single table, no separate mgcg sequence. */
                radeon_program_register_sequence(rdev,
                                                 wrestler_golden_registers,
                                                 (const u32)ARRAY_SIZE(wrestler_golden_registers));
                break;
        case CHIP_SUMO:
                radeon_program_register_sequence(rdev,
                                                 supersumo_golden_registers,
                                                 (const u32)ARRAY_SIZE(supersumo_golden_registers));
                break;
        case CHIP_SUMO2:
                /* Sumo2 applies the Supersumo table first, then its own. */
                radeon_program_register_sequence(rdev,
                                                 supersumo_golden_registers,
                                                 (const u32)ARRAY_SIZE(supersumo_golden_registers));
                radeon_program_register_sequence(rdev,
                                                 sumo_golden_registers,
                                                 (const u32)ARRAY_SIZE(sumo_golden_registers));
                break;
        case CHIP_BARTS:
                radeon_program_register_sequence(rdev,
                                                 barts_golden_registers,
                                                 (const u32)ARRAY_SIZE(barts_golden_registers));
                break;
        case CHIP_TURKS:
                radeon_program_register_sequence(rdev,
                                                 turks_golden_registers,
                                                 (const u32)ARRAY_SIZE(turks_golden_registers));
                break;
        case CHIP_CAICOS:
                radeon_program_register_sequence(rdev,
                                                 caicos_golden_registers,
                                                 (const u32)ARRAY_SIZE(caicos_golden_registers));
                break;
        default:
                break;
        }
}
1007
1008 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1009                              unsigned *bankh, unsigned *mtaspect,
1010                              unsigned *tile_split)
1011 {
1012         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1013         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1014         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1015         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1016         switch (*bankw) {
1017         default:
1018         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1019         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1020         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1021         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1022         }
1023         switch (*bankh) {
1024         default:
1025         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1026         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1027         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1028         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1029         }
1030         switch (*mtaspect) {
1031         default:
1032         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1033         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1034         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1035         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1036         }
1037 }
1038
1039 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1040                               u32 cntl_reg, u32 status_reg)
1041 {
1042         int r, i;
1043         struct atom_clock_dividers dividers;
1044
1045         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1046                                            clock, false, &dividers);
1047         if (r)
1048                 return r;
1049
1050         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1051
1052         for (i = 0; i < 100; i++) {
1053                 if (RREG32(status_reg) & DCLK_STATUS)
1054                         break;
1055                 mdelay(10);
1056         }
1057         if (i == 100)
1058                 return -ETIMEDOUT;
1059
1060         return 0;
1061 }
1062
1063 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064 {
1065         int r = 0;
1066         u32 cg_scratch = RREG32(CG_SCRATCH1);
1067
1068         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1069         if (r)
1070                 goto done;
1071         cg_scratch &= 0xffff0000;
1072         cg_scratch |= vclk / 100; /* Mhz */
1073
1074         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1075         if (r)
1076                 goto done;
1077         cg_scratch &= 0x0000ffff;
1078         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1079
1080 done:
1081         WREG32(CG_SCRATCH1, cg_scratch);
1082
1083         return r;
1084 }
1085
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL) for vclk/dclk
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock (0 = power PLL down)
 * @dclk: requested decode clock (0 = power PLL down)
 *
 * Bypasses both clocks to bclk, puts the PLL through a reset/reprogram
 * sequence with the dividers computed by radeon_uvd_calc_upll_dividers(),
 * then switches vclk/dclk back to the PLL outputs.  The register write
 * ordering here is a hardware programming sequence and must not be
 * rearranged.  Returns 0 on success or a negative error code.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
        /* start off with something large */
        unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
        int r;

        /* bypass vclk and dclk with bclk */
        WREG32_P(CG_UPLL_FUNC_CNTL_2,
                VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
                ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

        /* put PLL in bypass mode */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

        if (!vclk || !dclk) {
                /* keep the Bypass mode, put PLL to sleep */
                WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
                return 0;
        }

        /* NOTE(review): the numeric limits (125000/250000 min/max, 16384
         * fb-div max, etc.) are presumably in 10 kHz units — confirm against
         * radeon_uvd_calc_upll_dividers(). */
        r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
                                          16384, 0x03FFFFFF, 0, 128, 5,
                                          &fb_div, &vclk_div, &dclk_div);
        if (r)
                return r;

        /* set VCO_MODE to 1 */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

        /* toggle UPLL_SLEEP to 1 then back to 0 */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

        /* deassert UPLL_RESET */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

        mdelay(1);

        /* handshake with the SMC before reprogramming */
        r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
        if (r)
                return r;

        /* assert UPLL_RESET again */
        WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

        /* disable spread spectrum. */
        WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

        /* set feedback divider */
        WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

        /* set ref divider to 0 */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

        /* ISPARE9 selection depends on the feedback divider magnitude */
        if (fb_div < 307200)
                WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
        else
                WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

        /* set PDIV_A and PDIV_B */
        WREG32_P(CG_UPLL_FUNC_CNTL_2,
                UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
                ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

        /* give the PLL some time to settle */
        mdelay(15);

        /* deassert PLL_RESET */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

        mdelay(15);

        /* switch from bypass mode to normal mode */
        WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

        r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
        if (r)
                return r;

        /* switch VCLK and DCLK selection */
        WREG32_P(CG_UPLL_FUNC_CNTL_2,
                VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
                ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

        mdelay(100);

        return 0;
}
1174
1175 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1176 {
1177         int readrq;
1178         u16 v;
1179
1180         readrq = pcie_get_readrq(rdev->pdev);
1181         v = ffs(readrq) - 8;
1182         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1183          * to avoid hangs or perfomance issues
1184          */
1185         if ((v == 0) || (v == 6) || (v == 7))
1186                 pcie_set_readrq(rdev->pdev, 512);
1187 }
1188
1189 void dce4_program_fmt(struct drm_encoder *encoder)
1190 {
1191         struct drm_device *dev = encoder->dev;
1192         struct radeon_device *rdev = dev->dev_private;
1193         struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1194         struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1195         struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1196         int bpc = 0;
1197         u32 tmp = 0;
1198         enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1199
1200         if (connector) {
1201                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1202                 bpc = radeon_get_monitor_bpc(connector);
1203                 dither = radeon_connector->dither;
1204         }
1205
1206         /* LVDS/eDP FMT is set up by atom */
1207         if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1208                 return;
1209
1210         /* not needed for analog */
1211         if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1212             (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1213                 return;
1214
1215         if (bpc == 0)
1216                 return;
1217
1218         switch (bpc) {
1219         case 6:
1220                 if (dither == RADEON_FMT_DITHER_ENABLE)
1221                         /* XXX sort out optimal dither settings */
1222                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1223                                 FMT_SPATIAL_DITHER_EN);
1224                 else
1225                         tmp |= FMT_TRUNCATE_EN;
1226                 break;
1227         case 8:
1228                 if (dither == RADEON_FMT_DITHER_ENABLE)
1229                         /* XXX sort out optimal dither settings */
1230                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1231                                 FMT_RGB_RANDOM_ENABLE |
1232                                 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1233                 else
1234                         tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1235                 break;
1236         case 10:
1237         default:
1238                 /* not needed */
1239                 break;
1240         }
1241
1242         WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1243 }
1244
1245 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1246 {
1247         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1248                 return true;
1249         else
1250                 return false;
1251 }
1252
1253 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1254 {
1255         u32 pos1, pos2;
1256
1257         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1258         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1259
1260         if (pos1 != pos2)
1261                 return true;
1262         else
1263                 return false;
1264 }
1265
1266 /**
1267  * dce4_wait_for_vblank - vblank wait asic callback.
1268  *
1269  * @rdev: radeon_device pointer
1270  * @crtc: crtc to wait for vblank on
1271  *
1272  * Wait for vblank on the requested crtc (evergreen+).
1273  */
1274 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1275 {
1276         unsigned i = 0;
1277
1278         if (crtc >= rdev->num_crtc)
1279                 return;
1280
1281         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1282                 return;
1283
1284         /* depending on when we hit vblank, we may be close to active; if so,
1285          * wait for another frame.
1286          */
1287         while (dce4_is_in_vblank(rdev, crtc)) {
1288                 if (i++ % 100 == 0) {
1289                         if (!dce4_is_counter_moving(rdev, crtc))
1290                                 break;
1291                 }
1292         }
1293
1294         while (!dce4_is_in_vblank(rdev, crtc)) {
1295                 if (i++ % 100 == 0) {
1296                         if (!dce4_is_counter_moving(rdev, crtc))
1297                                 break;
1298                 }
1299         }
1300 }
1301
1302 /**
1303  * evergreen_page_flip - pageflip callback.
1304  *
1305  * @rdev: radeon_device pointer
1306  * @crtc_id: crtc to cleanup pageflip on
1307  * @crtc_base: new address of the crtc (GPU MC address)
1308  *
1309  * Does the actual pageflip (evergreen+).
1310  * During vblank we take the crtc lock and wait for the update_pending
1311  * bit to go high, when it does, we release the lock, and allow the
1312  * double buffered update to take place.
1313  * Returns the current update pending status.
1314  */
1315 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1316 {
1317         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1318         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1319         int i;
1320
1321         /* Lock the graphics update lock */
1322         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1323         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1324
1325         /* update the scanout addresses */
1326         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1327                upper_32_bits(crtc_base));
1328         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1329                (u32)crtc_base);
1330
1331         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1332                upper_32_bits(crtc_base));
1333         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1334                (u32)crtc_base);
1335
1336         /* Wait for update_pending to go high. */
1337         for (i = 0; i < rdev->usec_timeout; i++) {
1338                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1339                         break;
1340                 udelay(1);
1341         }
1342         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1343
1344         /* Unlock the lock, so double-buffering can take place inside vblank */
1345         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1346         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1347 }
1348
1349 /**
1350  * evergreen_page_flip_pending - check if page flip is still pending
1351  *
1352  * @rdev: radeon_device pointer
1353  * @crtc_id: crtc to check
1354  *
1355  * Returns the current update pending status.
1356  */
1357 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1358 {
1359         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1360
1361         /* Return current update_pending status: */
1362         return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1363                 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1364 }
1365
1366 /* get temperature in millidegrees */
1367 int evergreen_get_temp(struct radeon_device *rdev)
1368 {
1369         u32 temp, toffset;
1370         int actual_temp = 0;
1371
1372         if (rdev->family == CHIP_JUNIPER) {
1373                 toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1374                         TOFFSET_SHIFT;
1375                 temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1376                         TS0_ADC_DOUT_SHIFT;
1377
1378                 if (toffset & 0x100)
1379                         actual_temp = temp / 2 - (0x200 - toffset);
1380                 else
1381                         actual_temp = temp / 2 + toffset;
1382
1383                 actual_temp = actual_temp * 1000;
1384
1385         } else {
1386                 temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1387                         ASIC_T_SHIFT;
1388
1389                 if (temp & 0x400)
1390                         actual_temp = -256;
1391                 else if (temp & 0x200)
1392                         actual_temp = 255;
1393                 else if (temp & 0x100) {
1394                         actual_temp = temp & 0x1ff;
1395                         actual_temp |= ~0x1ff;
1396                 } else
1397                         actual_temp = temp & 0xff;
1398
1399                 actual_temp = (actual_temp * 1000) / 2;
1400         }
1401
1402         return actual_temp;
1403 }
1404
1405 int sumo_get_temp(struct radeon_device *rdev)
1406 {
1407         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1408         int actual_temp = temp - 49;
1409
1410         return actual_temp * 1000;
1411 }
1412
1413 /**
1414  * sumo_pm_init_profile - Initialize power profiles callback.
1415  *
1416  * @rdev: radeon_device pointer
1417  *
1418  * Initialize the power states used in profile mode
1419  * (sumo, trinity, SI).
1420  * Used for profile mode only.
1421  */
1422 void sumo_pm_init_profile(struct radeon_device *rdev)
1423 {
1424         int idx;
1425
1426         /* default */
1427         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1428         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1429         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1430         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1431
1432         /* low,mid sh/mh */
1433         if (rdev->flags & RADEON_IS_MOBILITY)
1434                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1435         else
1436                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1437
1438         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1439         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1440         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1441         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1442
1443         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1444         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1445         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1446         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1447
1448         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1449         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1450         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1451         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1452
1453         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1454         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1455         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1456         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1457
1458         /* high sh/mh */
1459         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1460         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1461         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1462         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1463         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1464                 rdev->pm.power_state[idx].num_clock_modes - 1;
1465
1466         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1467         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1468         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1469         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1470                 rdev->pm.power_state[idx].num_clock_modes - 1;
1471 }
1472
1473 /**
1474  * btc_pm_init_profile - Initialize power profiles callback.
1475  *
1476  * @rdev: radeon_device pointer
1477  *
1478  * Initialize the power states used in profile mode
1479  * (BTC, cayman).
1480  * Used for profile mode only.
1481  */
1482 void btc_pm_init_profile(struct radeon_device *rdev)
1483 {
1484         int idx;
1485
1486         /* default */
1487         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1488         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1489         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1490         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1491         /* starting with BTC, there is one state that is used for both
1492          * MH and SH.  Difference is that we always use the high clock index for
1493          * mclk.
1494          */
1495         if (rdev->flags & RADEON_IS_MOBILITY)
1496                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1497         else
1498                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1499         /* low sh */
1500         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1501         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1502         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1503         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1504         /* mid sh */
1505         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1506         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1507         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1508         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1509         /* high sh */
1510         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1511         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1512         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1513         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1514         /* low mh */
1515         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1516         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1517         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1518         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1519         /* mid mh */
1520         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1521         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1522         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1523         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1524         /* high mh */
1525         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1526         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1527         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1528         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1529 }
1530
1531 /**
1532  * evergreen_pm_misc - set additional pm hw parameters callback.
1533  *
1534  * @rdev: radeon_device pointer
1535  *
1536  * Set non-clock parameters associated with a power state
1537  * (voltage, etc.) (evergreen+).
1538  */
1539 void evergreen_pm_misc(struct radeon_device *rdev)
1540 {
1541         int req_ps_idx = rdev->pm.requested_power_state_index;
1542         int req_cm_idx = rdev->pm.requested_clock_mode_index;
1543         struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1544         struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1545
1546         if (voltage->type == VOLTAGE_SW) {
1547                 /* 0xff0x are flags rather then an actual voltage */
1548                 if ((voltage->voltage & 0xff00) == 0xff00)
1549                         return;
1550                 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1551                         radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1552                         rdev->pm.current_vddc = voltage->voltage;
1553                         DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1554                 }
1555
1556                 /* starting with BTC, there is one state that is used for both
1557                  * MH and SH.  Difference is that we always use the high clock index for
1558                  * mclk and vddci.
1559                  */
1560                 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1561                     (rdev->family >= CHIP_BARTS) &&
1562                     rdev->pm.active_crtc_count &&
1563                     ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1564                      (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1565                         voltage = &rdev->pm.power_state[req_ps_idx].
1566                                 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1567
1568                 /* 0xff0x are flags rather then an actual voltage */
1569                 if ((voltage->vddci & 0xff00) == 0xff00)
1570                         return;
1571                 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1572                         radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1573                         rdev->pm.current_vddci = voltage->vddci;
1574                         DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1575                 }
1576         }
1577 }
1578
1579 /**
1580  * evergreen_pm_prepare - pre-power state change callback.
1581  *
1582  * @rdev: radeon_device pointer
1583  *
1584  * Prepare for a power state change (evergreen+).
1585  */
1586 void evergreen_pm_prepare(struct radeon_device *rdev)
1587 {
1588         struct drm_device *ddev = rdev->ddev;
1589         struct drm_crtc *crtc;
1590         struct radeon_crtc *radeon_crtc;
1591         u32 tmp;
1592
1593         /* disable any active CRTCs */
1594         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1595                 radeon_crtc = to_radeon_crtc(crtc);
1596                 if (radeon_crtc->enabled) {
1597                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1598                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1599                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1600                 }
1601         }
1602 }
1603
1604 /**
1605  * evergreen_pm_finish - post-power state change callback.
1606  *
1607  * @rdev: radeon_device pointer
1608  *
1609  * Clean up after a power state change (evergreen+).
1610  */
1611 void evergreen_pm_finish(struct radeon_device *rdev)
1612 {
1613         struct drm_device *ddev = rdev->ddev;
1614         struct drm_crtc *crtc;
1615         struct radeon_crtc *radeon_crtc;
1616         u32 tmp;
1617
1618         /* enable any active CRTCs */
1619         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1620                 radeon_crtc = to_radeon_crtc(crtc);
1621                 if (radeon_crtc->enabled) {
1622                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1623                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1624                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1625                 }
1626         }
1627 }
1628
1629 /**
1630  * evergreen_hpd_sense - hpd sense callback.
1631  *
1632  * @rdev: radeon_device pointer
1633  * @hpd: hpd (hotplug detect) pin
1634  *
1635  * Checks if a digital monitor is connected (evergreen+).
1636  * Returns true if connected, false if not connected.
1637  */
1638 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1639 {
1640         bool connected = false;
1641
1642         switch (hpd) {
1643         case RADEON_HPD_1:
1644                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1645                         connected = true;
1646                 break;
1647         case RADEON_HPD_2:
1648                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1649                         connected = true;
1650                 break;
1651         case RADEON_HPD_3:
1652                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1653                         connected = true;
1654                 break;
1655         case RADEON_HPD_4:
1656                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1657                         connected = true;
1658                 break;
1659         case RADEON_HPD_5:
1660                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1661                         connected = true;
1662                 break;
1663         case RADEON_HPD_6:
1664                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1665                         connected = true;
1666                 break;
1667         default:
1668                 break;
1669         }
1670
1671         return connected;
1672 }
1673
1674 /**
1675  * evergreen_hpd_set_polarity - hpd set polarity callback.
1676  *
1677  * @rdev: radeon_device pointer
1678  * @hpd: hpd (hotplug detect) pin
1679  *
1680  * Set the polarity of the hpd pin (evergreen+).
1681  */
1682 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1683                                 enum radeon_hpd_id hpd)
1684 {
1685         u32 tmp;
1686         bool connected = evergreen_hpd_sense(rdev, hpd);
1687
1688         switch (hpd) {
1689         case RADEON_HPD_1:
1690                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1691                 if (connected)
1692                         tmp &= ~DC_HPDx_INT_POLARITY;
1693                 else
1694                         tmp |= DC_HPDx_INT_POLARITY;
1695                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1696                 break;
1697         case RADEON_HPD_2:
1698                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1699                 if (connected)
1700                         tmp &= ~DC_HPDx_INT_POLARITY;
1701                 else
1702                         tmp |= DC_HPDx_INT_POLARITY;
1703                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1704                 break;
1705         case RADEON_HPD_3:
1706                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1707                 if (connected)
1708                         tmp &= ~DC_HPDx_INT_POLARITY;
1709                 else
1710                         tmp |= DC_HPDx_INT_POLARITY;
1711                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1712                 break;
1713         case RADEON_HPD_4:
1714                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1715                 if (connected)
1716                         tmp &= ~DC_HPDx_INT_POLARITY;
1717                 else
1718                         tmp |= DC_HPDx_INT_POLARITY;
1719                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1720                 break;
1721         case RADEON_HPD_5:
1722                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1723                 if (connected)
1724                         tmp &= ~DC_HPDx_INT_POLARITY;
1725                 else
1726                         tmp |= DC_HPDx_INT_POLARITY;
1727                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1728                         break;
1729         case RADEON_HPD_6:
1730                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1731                 if (connected)
1732                         tmp &= ~DC_HPDx_INT_POLARITY;
1733                 else
1734                         tmp |= DC_HPDx_INT_POLARITY;
1735                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1736                 break;
1737         default:
1738                 break;
1739         }
1740 }
1741
1742 /**
1743  * evergreen_hpd_init - hpd setup callback.
1744  *
1745  * @rdev: radeon_device pointer
1746  *
1747  * Setup the hpd pins used by the card (evergreen+).
1748  * Enable the pin, set the polarity, and enable the hpd interrupts.
1749  */
1750 void evergreen_hpd_init(struct radeon_device *rdev)
1751 {
1752         struct drm_device *dev = rdev->ddev;
1753         struct drm_connector *connector;
1754         unsigned enabled = 0;
1755         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1756                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1757
1758         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1759                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1760
1761                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1762                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1763                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1764                          * aux dp channel on imac and help (but not completely fix)
1765                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1766                          * also avoid interrupt storms during dpms.
1767                          */
1768                         continue;
1769                 }
1770                 switch (radeon_connector->hpd.hpd) {
1771                 case RADEON_HPD_1:
1772                         WREG32(DC_HPD1_CONTROL, tmp);
1773                         break;
1774                 case RADEON_HPD_2:
1775                         WREG32(DC_HPD2_CONTROL, tmp);
1776                         break;
1777                 case RADEON_HPD_3:
1778                         WREG32(DC_HPD3_CONTROL, tmp);
1779                         break;
1780                 case RADEON_HPD_4:
1781                         WREG32(DC_HPD4_CONTROL, tmp);
1782                         break;
1783                 case RADEON_HPD_5:
1784                         WREG32(DC_HPD5_CONTROL, tmp);
1785                         break;
1786                 case RADEON_HPD_6:
1787                         WREG32(DC_HPD6_CONTROL, tmp);
1788                         break;
1789                 default:
1790                         break;
1791                 }
1792                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1793                 enabled |= 1 << radeon_connector->hpd.hpd;
1794         }
1795         radeon_irq_kms_enable_hpd(rdev, enabled);
1796 }
1797
1798 /**
1799  * evergreen_hpd_fini - hpd tear down callback.
1800  *
1801  * @rdev: radeon_device pointer
1802  *
1803  * Tear down the hpd pins used by the card (evergreen+).
1804  * Disable the hpd interrupts.
1805  */
1806 void evergreen_hpd_fini(struct radeon_device *rdev)
1807 {
1808         struct drm_device *dev = rdev->ddev;
1809         struct drm_connector *connector;
1810         unsigned disabled = 0;
1811
1812         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1813                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1814                 switch (radeon_connector->hpd.hpd) {
1815                 case RADEON_HPD_1:
1816                         WREG32(DC_HPD1_CONTROL, 0);
1817                         break;
1818                 case RADEON_HPD_2:
1819                         WREG32(DC_HPD2_CONTROL, 0);
1820                         break;
1821                 case RADEON_HPD_3:
1822                         WREG32(DC_HPD3_CONTROL, 0);
1823                         break;
1824                 case RADEON_HPD_4:
1825                         WREG32(DC_HPD4_CONTROL, 0);
1826                         break;
1827                 case RADEON_HPD_5:
1828                         WREG32(DC_HPD5_CONTROL, 0);
1829                         break;
1830                 case RADEON_HPD_6:
1831                         WREG32(DC_HPD6_CONTROL, 0);
1832                         break;
1833                 default:
1834                         break;
1835                 }
1836                 disabled |= 1 << radeon_connector->hpd.hpd;
1837         }
1838         radeon_irq_kms_disable_hpd(rdev, disabled);
1839 }
1840
1841 /* watermark setup */
1842
/**
 * evergreen_line_buffer_adjust - set up the line buffer allocation
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: the selected display controller
 * @mode: the current display mode on the selected display controller
 * @other_mode: the display mode of the other display controller that
 *              shares the same line buffer (may be NULL)
 *
 * Program the line buffer split between the crtc pair and, on DCE4.1/
 * DCE5, allocate the DMIF buffers.
 * Returns the line buffer size allocated to this crtc, in pixels.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		/* crtc disabled: no line buffer needed */
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also need the DMIF buffers allocated per pipe;
	 * poll until the hardware acknowledges the allocation.
	 */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the programmed split back into a size in pixels;
	 * DCE5 parts have a larger line buffer than earlier asics.
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1933
1934 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1935 {
1936         u32 tmp = RREG32(MC_SHARED_CHMAP);
1937
1938         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1939         case 0:
1940         default:
1941                 return 1;
1942         case 1:
1943                 return 2;
1944         case 2:
1945                 return 4;
1946         case 3:
1947                 return 8;
1948         }
1949 }
1950
/* Mode and clock parameters that feed the display watermark
 * calculations below; filled in from the current mode and clock
 * state by evergreen_program_watermarks().
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1966
1967 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1968 {
1969         /* Calculate DRAM Bandwidth and the part allocated to display. */
1970         fixed20_12 dram_efficiency; /* 0.7 */
1971         fixed20_12 yclk, dram_channels, bandwidth;
1972         fixed20_12 a;
1973
1974         a.full = dfixed_const(1000);
1975         yclk.full = dfixed_const(wm->yclk);
1976         yclk.full = dfixed_div(yclk, a);
1977         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1978         a.full = dfixed_const(10);
1979         dram_efficiency.full = dfixed_const(7);
1980         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1981         bandwidth.full = dfixed_mul(dram_channels, yclk);
1982         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1983
1984         return dfixed_trunc(bandwidth);
1985 }
1986
1987 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1988 {
1989         /* Calculate DRAM Bandwidth and the part allocated to display. */
1990         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1991         fixed20_12 yclk, dram_channels, bandwidth;
1992         fixed20_12 a;
1993
1994         a.full = dfixed_const(1000);
1995         yclk.full = dfixed_const(wm->yclk);
1996         yclk.full = dfixed_div(yclk, a);
1997         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1998         a.full = dfixed_const(10);
1999         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2000         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2001         bandwidth.full = dfixed_mul(dram_channels, yclk);
2002         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2003
2004         return dfixed_trunc(bandwidth);
2005 }
2006
2007 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2008 {
2009         /* Calculate the display Data return Bandwidth */
2010         fixed20_12 return_efficiency; /* 0.8 */
2011         fixed20_12 sclk, bandwidth;
2012         fixed20_12 a;
2013
2014         a.full = dfixed_const(1000);
2015         sclk.full = dfixed_const(wm->sclk);
2016         sclk.full = dfixed_div(sclk, a);
2017         a.full = dfixed_const(10);
2018         return_efficiency.full = dfixed_const(8);
2019         return_efficiency.full = dfixed_div(return_efficiency, a);
2020         a.full = dfixed_const(32);
2021         bandwidth.full = dfixed_mul(a, sclk);
2022         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2023
2024         return dfixed_trunc(bandwidth);
2025 }
2026
2027 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2028 {
2029         /* Calculate the DMIF Request Bandwidth */
2030         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2031         fixed20_12 disp_clk, bandwidth;
2032         fixed20_12 a;
2033
2034         a.full = dfixed_const(1000);
2035         disp_clk.full = dfixed_const(wm->disp_clk);
2036         disp_clk.full = dfixed_div(disp_clk, a);
2037         a.full = dfixed_const(10);
2038         disp_clk_request_efficiency.full = dfixed_const(8);
2039         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2040         a.full = dfixed_const(32);
2041         bandwidth.full = dfixed_mul(a, disp_clk);
2042         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2043
2044         return dfixed_trunc(bandwidth);
2045 }
2046
2047 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2048 {
2049         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2050         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2051         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2052         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2053
2054         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2055 }
2056
2057 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2058 {
2059         /* Calculate the display mode Average Bandwidth
2060          * DisplayMode should contain the source and destination dimensions,
2061          * timing, etc.
2062          */
2063         fixed20_12 bpp;
2064         fixed20_12 line_time;
2065         fixed20_12 src_width;
2066         fixed20_12 bandwidth;
2067         fixed20_12 a;
2068
2069         a.full = dfixed_const(1000);
2070         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2071         line_time.full = dfixed_div(line_time, a);
2072         bpp.full = dfixed_const(wm->bytes_per_pixel);
2073         src_width.full = dfixed_const(wm->src_width);
2074         bandwidth.full = dfixed_mul(src_width, bpp);
2075         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2076         bandwidth.full = dfixed_div(bandwidth, line_time);
2077
2078         return dfixed_trunc(bandwidth);
2079 }
2080
/* Compute the latency watermark in ns for the given display/clock
 * parameters: the worst-case memory latency seen by this head, plus
 * any extra time needed when the line buffer cannot be filled within
 * one active line.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* NOTE(review): these divisions happen before the num_heads guard
	 * below and assume available_bandwidth and disp_clk are non-zero -
	 * TODO confirm callers guarantee that.
	 */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* time spent serving the other heads' chunk and cursor requests */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling or many scaler taps requires fetching more
	 * source lines per destination line
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* rate at which the display pipe can consume data:
	 * (disp_clk / 1000) * bytes_per_pixel
	 */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill rate is limited by the slower of the two */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the worst-case number of source lines */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the fill spills past the active period, pad the watermark */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2133
2134 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2135 {
2136         if (evergreen_average_bandwidth(wm) <=
2137             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2138                 return true;
2139         else
2140                 return false;
2141 };
2142
2143 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2144 {
2145         if (evergreen_average_bandwidth(wm) <=
2146             (evergreen_available_bandwidth(wm) / wm->num_heads))
2147                 return true;
2148         else
2149                 return false;
2150 };
2151
2152 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2153 {
2154         u32 lb_partitions = wm->lb_size / wm->src_width;
2155         u32 line_time = wm->active_time + wm->blank_time;
2156         u32 latency_tolerant_lines;
2157         u32 latency_hiding;
2158         fixed20_12 a;
2159
2160         a.full = dfixed_const(1);
2161         if (wm->vsc.full > a.full)
2162                 latency_tolerant_lines = 1;
2163         else {
2164                 if (lb_partitions <= (wm->vtaps + 1))
2165                         latency_tolerant_lines = 1;
2166                 else
2167                         latency_tolerant_lines = 2;
2168         }
2169
2170         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2171
2172         if (evergreen_latency_watermark(wm) <= latency_hiding)
2173                 return true;
2174         else
2175                 return false;
2176 }
2177
2178 static void evergreen_program_watermarks(struct radeon_device *rdev,
2179                                          struct radeon_crtc *radeon_crtc,
2180                                          u32 lb_size, u32 num_heads)
2181 {
2182         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2183         struct evergreen_wm_params wm_low, wm_high;
2184         u32 dram_channels;
2185         u32 pixel_period;
2186         u32 line_time = 0;
2187         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2188         u32 priority_a_mark = 0, priority_b_mark = 0;
2189         u32 priority_a_cnt = PRIORITY_OFF;
2190         u32 priority_b_cnt = PRIORITY_OFF;
2191         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2192         u32 tmp, arb_control3;
2193         fixed20_12 a, b, c;
2194
2195         if (radeon_crtc->base.enabled && num_heads && mode) {
2196                 pixel_period = 1000000 / (u32)mode->clock;
2197                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2198                 priority_a_cnt = 0;
2199                 priority_b_cnt = 0;
2200                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2201
2202                 /* watermark for high clocks */
2203                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2204                         wm_high.yclk =
2205                                 radeon_dpm_get_mclk(rdev, false) * 10;
2206                         wm_high.sclk =
2207                                 radeon_dpm_get_sclk(rdev, false) * 10;
2208                 } else {
2209                         wm_high.yclk = rdev->pm.current_mclk * 10;
2210                         wm_high.sclk = rdev->pm.current_sclk * 10;
2211                 }
2212
2213                 wm_high.disp_clk = mode->clock;
2214                 wm_high.src_width = mode->crtc_hdisplay;
2215                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2216                 wm_high.blank_time = line_time - wm_high.active_time;
2217                 wm_high.interlaced = false;
2218                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2219                         wm_high.interlaced = true;
2220                 wm_high.vsc = radeon_crtc->vsc;
2221                 wm_high.vtaps = 1;
2222                 if (radeon_crtc->rmx_type != RMX_OFF)
2223                         wm_high.vtaps = 2;
2224                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2225                 wm_high.lb_size = lb_size;
2226                 wm_high.dram_channels = dram_channels;
2227                 wm_high.num_heads = num_heads;
2228
2229                 /* watermark for low clocks */
2230                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2231                         wm_low.yclk =
2232                                 radeon_dpm_get_mclk(rdev, true) * 10;
2233                         wm_low.sclk =
2234                                 radeon_dpm_get_sclk(rdev, true) * 10;
2235                 } else {
2236                         wm_low.yclk = rdev->pm.current_mclk * 10;
2237                         wm_low.sclk = rdev->pm.current_sclk * 10;
2238                 }
2239
2240                 wm_low.disp_clk = mode->clock;
2241                 wm_low.src_width = mode->crtc_hdisplay;
2242                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2243                 wm_low.blank_time = line_time - wm_low.active_time;
2244                 wm_low.interlaced = false;
2245                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2246                         wm_low.interlaced = true;
2247                 wm_low.vsc = radeon_crtc->vsc;
2248                 wm_low.vtaps = 1;
2249                 if (radeon_crtc->rmx_type != RMX_OFF)
2250                         wm_low.vtaps = 2;
2251                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2252                 wm_low.lb_size = lb_size;
2253                 wm_low.dram_channels = dram_channels;
2254                 wm_low.num_heads = num_heads;
2255
2256                 /* set for high clocks */
2257                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2258                 /* set for low clocks */
2259                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2260
2261                 /* possibly force display priority to high */
2262                 /* should really do this at mode validation time... */
2263                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2264                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2265                     !evergreen_check_latency_hiding(&wm_high) ||
2266                     (rdev->disp_priority == 2)) {
2267                         DRM_DEBUG_KMS("force priority a to high\n");
2268                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2269                 }
2270                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2271                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2272                     !evergreen_check_latency_hiding(&wm_low) ||
2273                     (rdev->disp_priority == 2)) {
2274                         DRM_DEBUG_KMS("force priority b to high\n");
2275                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2276                 }
2277
2278                 a.full = dfixed_const(1000);
2279                 b.full = dfixed_const(mode->clock);
2280                 b.full = dfixed_div(b, a);
2281                 c.full = dfixed_const(latency_watermark_a);
2282                 c.full = dfixed_mul(c, b);
2283                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2284                 c.full = dfixed_div(c, a);
2285                 a.full = dfixed_const(16);
2286                 c.full = dfixed_div(c, a);
2287                 priority_a_mark = dfixed_trunc(c);
2288                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2289
2290                 a.full = dfixed_const(1000);
2291                 b.full = dfixed_const(mode->clock);
2292                 b.full = dfixed_div(b, a);
2293                 c.full = dfixed_const(latency_watermark_b);
2294                 c.full = dfixed_mul(c, b);
2295                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2296                 c.full = dfixed_div(c, a);
2297                 a.full = dfixed_const(16);
2298                 c.full = dfixed_div(c, a);
2299                 priority_b_mark = dfixed_trunc(c);
2300                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2301         }
2302
2303         /* select wm A */
2304         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2305         tmp = arb_control3;
2306         tmp &= ~LATENCY_WATERMARK_MASK(3);
2307         tmp |= LATENCY_WATERMARK_MASK(1);
2308         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2309         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2310                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2311                 LATENCY_HIGH_WATERMARK(line_time)));
2312         /* select wm B */
2313         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2314         tmp &= ~LATENCY_WATERMARK_MASK(3);
2315         tmp |= LATENCY_WATERMARK_MASK(2);
2316         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2317         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2318                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2319                 LATENCY_HIGH_WATERMARK(line_time)));
2320         /* restore original selection */
2321         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2322
2323         /* write the priority marks */
2324         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2325         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2326
2327         /* save values for DPM */
2328         radeon_crtc->line_time = line_time;
2329         radeon_crtc->wm_high = latency_watermark_a;
2330         radeon_crtc->wm_low = latency_watermark_b;
2331 }
2332
2333 /**
2334  * evergreen_bandwidth_update - update display watermarks callback.
2335  *
2336  * @rdev: radeon_device pointer
2337  *
2338  * Update the display watermarks based on the requested mode(s)
2339  * (evergreen+).
2340  */
2341 void evergreen_bandwidth_update(struct radeon_device *rdev)
2342 {
2343         struct drm_display_mode *mode0 = NULL;
2344         struct drm_display_mode *mode1 = NULL;
2345         u32 num_heads = 0, lb_size;
2346         int i;
2347
2348         radeon_update_display_priority(rdev);
2349
2350         for (i = 0; i < rdev->num_crtc; i++) {
2351                 if (rdev->mode_info.crtcs[i]->base.enabled)
2352                         num_heads++;
2353         }
2354         for (i = 0; i < rdev->num_crtc; i += 2) {
2355                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2356                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2357                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2358                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2359                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2360                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2361         }
2362 }
2363
2364 /**
2365  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2366  *
2367  * @rdev: radeon_device pointer
2368  *
2369  * Wait for the MC (memory controller) to be idle.
2370  * (evergreen+).
2371  * Returns 0 if the MC is idle, -1 if not.
2372  */
2373 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2374 {
2375         unsigned i;
2376         u32 tmp;
2377
2378         for (i = 0; i < rdev->usec_timeout; i++) {
2379                 /* read MC_STATUS */
2380                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2381                 if (!tmp)
2382                         return 0;
2383                 udelay(1);
2384         }
2385         return -1;
2386 }
2387
2388 /*
2389  * GART
2390  */
2391 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2392 {
2393         unsigned i;
2394         u32 tmp;
2395
2396         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2397
2398         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2399         for (i = 0; i < rdev->usec_timeout; i++) {
2400                 /* read MC_STATUS */
2401                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2402                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2403                 if (tmp == 2) {
2404                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2405                         return;
2406                 }
2407                 if (tmp) {
2408                         return;
2409                 }
2410                 udelay(1);
2411         }
2412 }
2413
/*
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART.
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and the
 * L1 TLB control registers (IGP parts use the FUS_* mirror registers),
 * points VM context 0 at the GTT range and page table, and flushes the
 * TLB.  Returns 0 on success, -EINVAL if no GART table object exists,
 * or the error from radeon_gart_table_vram_pin().
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion/IGP parts use the FUS_* register block instead */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these discrete chips have a fourth MD L1 TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the whole GTT; addresses are in 4k pages (>> 12) */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses get redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	/* context 1 stays disabled here */
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2471
/*
 * evergreen_pcie_gart_disable - disable the PCIE GART.
 *
 * Disables both VM contexts, drops the L1/L2 cache enable bits while
 * keeping the queue-size configuration, and unpins the GART table from
 * VRAM.  Counterpart of evergreen_pcie_gart_enable().
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: sizes only, ENABLE_L1_TLB deliberately clear */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2496
/*
 * evergreen_pcie_gart_fini - final GART teardown.
 *
 * Disables the GART, frees the VRAM-backed page table and tears down
 * the common GART state.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2503
2504
/*
 * evergreen_agp_enable - configure the VM block for AGP operation.
 *
 * Programs the same L2 cache and L1 TLB setup as the PCIE GART path,
 * but leaves both VM contexts disabled so no page table is used
 * (system access passes through the aperture).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page tables: both VM contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2530
/*
 * evergreen_mc_stop - stop display access to the MC.
 *
 * Saves VGA state into @save, disables VGA rendering, blanks/disables
 * every active display controller, waits for a full frame on each,
 * then blacks out the memory controller so VRAM can be reprogrammed
 * safely.  Counterpart of evergreen_mc_resume().
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the data path instead of halting reads */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
				/* NOTE(review): unlike the pre-DCE6 branch, the
				 * update lock is not released here — confirm
				 * whether that is intentional */
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* NOTE(review): the hack above forces crtc_enabled[i]
			 * back to false, so the double-buffer locking loop
			 * below (and the matching unlock path in
			 * evergreen_mc_resume()) never runs — verify this is
			 * the intended side effect of the EFI workaround */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2616
/*
 * evergreen_mc_resume - restore display access to the MC.
 *
 * Repoints all CRTC scanout surfaces and the VGA aperture at the new
 * VRAM base, unlocks the double-buffered registers, lifts the MC
 * blackout, re-enables the display controllers that were active and
 * restores the VGA state saved by evergreen_mc_stop().
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force the update mode field to 3 before unlocking */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the pending surface update has latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	/* re-enable the controllers that evergreen_mc_stop() blanked */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2705
/*
 * evergreen_mc_program - program the memory controller address map.
 *
 * Stops display access, reprograms the system/VRAM/AGP apertures and
 * the FB location to match rdev->mc, then resumes the displays.
 * Finishes by disabling the VGA renderer so it cannot scribble over
 * driver-owned VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must span both VRAM and AGP,
		 * whichever order they sit in the address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location packs start/end in 16MB units (>> 24) */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT > TOP disables the AGP aperture */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2782
2783 /*
2784  * CP.
2785  */
/*
 * evergreen_ring_ib_execute - schedule an indirect buffer on the GFX ring.
 *
 * Emits the PM4 packets that switch the CP to DX10/11 mode, optionally
 * record the post-IB read pointer (via the rptr save register or a
 * writeback memory write), and finally launch @ib with an
 * INDIRECT_BUFFER packet.  Caller must already hold the ring with
 * enough space reserved.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this packet + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2819
2820
/*
 * evergreen_cp_load_microcode - upload the CP firmware.
 *
 * Stops the CP, then writes the PFP and ME microcode images
 * (big-endian blobs from the firmware loader) into the CP's internal
 * RAM one dword at a time, resetting the write/read addresses around
 * each upload.  Returns 0 on success, -EINVAL if either firmware image
 * has not been loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload the PFP microcode */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload the ME (PM4) microcode */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2852
/*
 * evergreen_cp_start - initialize the CP and emit initial GPU state.
 *
 * Sends the ME_INITIALIZE packet, un-halts the CP (CP_ME_CNTL), then
 * emits the default clear-state context followed by a few fixed
 * register writes.  Returns 0 on success or the error from
 * radeon_ring_lock().
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	/* ME_INITIALIZE: 5 payload dwords */
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state + 19 dwords of fixed packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
2918
/**
 * evergreen_cp_resume - reset and (re)start the gfx ring command processor
 * @rdev: radeon device structure
 *
 * Soft-resets the CP together with the blocks that must be reset along
 * with it (PA/SH/VGT/SPI/SX), programs the ring buffer control, base,
 * read/write pointers and the rptr writeback address, kicks the CP via
 * evergreen_cp_start() and verifies the ring with a ring test.
 *
 * Returns 0 on success, or the negative error code from the ring test
 * (in which case the ring is marked not ready).
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the reset write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);	/* release the reset */
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	/* rb_bufsz = log2 of the ring size in 8-byte units */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	/* RB_RPTR_WR_ENA is set only transiently so CP_RB_RPTR_WR takes effect */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback available: stop the CP from writing rptr to memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* final CP_RB_CNTL value; RB_RPTR_WR_ENA was never OR'd into tmp,
	 * so this write also clears it.
	 */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);	/* base in 256-byte units */
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2985
/*
 * Core functions
 */

/**
 * evergreen_gpu_init - one-time configuration of the gfx engine
 * @rdev: radeon device structure
 *
 * Selects the per-ASIC limits (pipes, SIMDs, render backends, GPR/
 * thread/stack budgets, FIFO sizes and the golden GB_ADDR_CONFIG
 * value), builds the driver-visible tile_config word, remaps the
 * render backends around harvested units, and programs the SQ/SX/SC/
 * VGT resource-management registers to their HW defaults.
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, ps_thread_count;

	/* Per-family hardware limits and the golden tiling config. */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		/* SIMD count differs between SUMO SKUs, keyed on PCI device id */
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_BARTS:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 7;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_TURKS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 6;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CAICOS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	/* NOTE(review): mc_shared_chmap is read here but never used below */
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	/* fusion APUs expose the MC arbiter config at a different register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	/* group_size fixed to 0; row_size taken from gb_addr_config bits 29:28 */
	rdev->config.evergreen.tile_config |= 0 << 8;
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;

	/* Determine the render-backend harvest configuration: DCE4 discrete
	 * parts (Cedar..Hemlock) carry it in efuse straps; everything else
	 * reports it per shader engine in CC_RB_BACKEND_DISABLE.
	 */
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		efuse_straps_4 = RREG32_RCU(0x204);
		efuse_straps_3 = RREG32_RCU(0x203);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the one not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	/* collect per-SE SIMD disable bits and count the active SIMDs */
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 simd_disable_bitmap;

		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
		tmp <<= 16;
		tmp |= simd_disable_bitmap;
	}
	rdev->config.evergreen.active_simds = hweight32(~tmp);

	/* restore broadcast addressing after the per-SE reads above */
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	/* mirror the golden address config into the other address-config users */
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	/* enable all texture cache channels */
	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);


	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	/* SMX_SAR_CTL0 is only programmed on evergreen-class parts (<= SUMO2) */
	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	/* shader stage priorities: PS highest, ES lowest */
	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	/* split the GPRs (minus 2*4 clause temps) between the shader stages:
	 * PS 12/32, VS 6/32, GS/ES 4/32 each, HS/LS 3/32 each.
	 */
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

	/* remaining threads are split evenly (in multiples of 8) among the
	 * five non-PS stages.
	 */
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	/* stack entries split evenly (1/6 each) among the six stages */
	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	/* parts without a vertex cache invalidate only the texture cache */
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	/* reset CB performance counter selects */
	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	/* read-modify-write with no modification: re-posts the current value */
	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	/* let the hw settle before further init */
	udelay(50);

}
3562
3563 int evergreen_mc_init(struct radeon_device *rdev)
3564 {
3565         u32 tmp;
3566         int chansize, numchan;
3567
3568         /* Get VRAM informations */
3569         rdev->mc.vram_is_ddr = true;
3570         if ((rdev->family == CHIP_PALM) ||
3571             (rdev->family == CHIP_SUMO) ||
3572             (rdev->family == CHIP_SUMO2))
3573                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3574         else
3575                 tmp = RREG32(MC_ARB_RAMCFG);
3576         if (tmp & CHANSIZE_OVERRIDE) {
3577                 chansize = 16;
3578         } else if (tmp & CHANSIZE_MASK) {
3579                 chansize = 64;
3580         } else {
3581                 chansize = 32;
3582         }
3583         tmp = RREG32(MC_SHARED_CHMAP);
3584         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3585         case 0:
3586         default:
3587                 numchan = 1;
3588                 break;
3589         case 1:
3590                 numchan = 2;
3591                 break;
3592         case 2:
3593                 numchan = 4;
3594                 break;
3595         case 3:
3596                 numchan = 8;
3597                 break;
3598         }
3599         rdev->mc.vram_width = numchan * chansize;
3600         /* Could aper size report 0 ? */
3601         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3602         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3603         /* Setup GPU memory space */
3604         if ((rdev->family == CHIP_PALM) ||
3605             (rdev->family == CHIP_SUMO) ||
3606             (rdev->family == CHIP_SUMO2)) {
3607                 /* size in bytes on fusion */
3608                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3609                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3610         } else {
3611                 /* size in MB on evergreen/cayman/tn */
3612                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3613                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3614         }
3615         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3616         r700_vram_gtt_location(rdev, &rdev->mc);
3617         radeon_update_bandwidth_info(rdev);
3618
3619         return 0;
3620 }
3621
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM status registers plus the CP and DMA status
 * registers.  Cayman and later have a second DMA engine whose status
 * register sits 0x800 past the first, so its value is logged too.
 * Used when diagnosing a hang before/after a soft reset.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine on cayman+ */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3649
3650 bool evergreen_is_display_hung(struct radeon_device *rdev)
3651 {
3652         u32 crtc_hung = 0;
3653         u32 crtc_status[6];
3654         u32 i, j, tmp;
3655
3656         for (i = 0; i < rdev->num_crtc; i++) {
3657                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3658                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3659                         crtc_hung |= (1 << i);
3660                 }
3661         }
3662
3663         for (j = 0; j < 10; j++) {
3664                 for (i = 0; i < rdev->num_crtc; i++) {
3665                         if (crtc_hung & (1 << i)) {
3666                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3667                                 if (tmp != crtc_status[i])
3668                                         crtc_hung &= ~(1 << i);
3669                         }
3670                 }
3671                 if (crtc_hung == 0)
3672                         return false;
3673                 udelay(100);
3674         }
3675
3676         return true;
3677 }
3678
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are busy/hung
 *
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM, DMA, SRBM and VM L2 status registers and translates
 * their busy bits into a mask of RADEON_RESET_* flags naming the
 * blocks that would need a soft reset.  An MC reset request is
 * deliberately dropped at the end (the MC is most likely just busy,
 * not hung).  Returns the reset mask; 0 means the GPU looks idle.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3747
/**
 * evergreen_gpu_soft_reset - soft reset the blocks selected by reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags (from
 *              evergreen_gpu_check_soft_reset()) naming the blocks to reset
 *
 * Halts the CP (and the DMA ring if requested), stops MC access,
 * then pulses the matching bits in GRBM_SOFT_RESET / SRBM_SOFT_RESET
 * and restores MC access.  The status registers are dumped before
 * and after so the effect of the reset is visible in the log.
 * The statement order here is hardware-mandated; do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC access before touching the reset registers */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM/SRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* no MC soft reset on IGPs */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back so the write is posted before the delay */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		/* read back so the write is posted before the delay */
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3861
/**
 * evergreen_gpu_pci_config_reset - reset the asic via the PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier-weight fallback used when a soft reset did not clear the
 * hang.  Quiesces the engines (CP, DMA, RLC), switches the clocks to
 * bypass, disables bus mastering and MC access, then performs the
 * PCI config reset and busy-waits until CONFIG_MEMSIZE reads back
 * something other than all-ones (i.e. the asic responds again).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* register reads return all-ones while the asic is in reset */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3903
3904 int evergreen_asic_reset(struct radeon_device *rdev)
3905 {
3906         u32 reset_mask;
3907
3908         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3909
3910         if (reset_mask)
3911                 r600_set_bios_scratch_engine_hung(rdev, true);
3912
3913         /* try soft reset */
3914         evergreen_gpu_soft_reset(rdev, reset_mask);
3915
3916         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3917
3918         /* try pci config reset */
3919         if (reset_mask && radeon_hard_reset)
3920                 evergreen_gpu_pci_config_reset(rdev);
3921
3922         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3923
3924         if (!reset_mask)
3925                 r600_set_bios_scratch_engine_hung(rdev, false);
3926
3927         return 0;
3928 }
3929
3930 /**
3931  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3932  *
3933  * @rdev: radeon_device pointer
3934  * @ring: radeon_ring structure holding ring information
3935  *
3936  * Check if the GFX engine is locked up.
3937  * Returns true if the engine appears to be locked up, false if not.
3938  */
3939 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3940 {
3941         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3942
3943         if (!(reset_mask & (RADEON_RESET_GFX |
3944                             RADEON_RESET_COMPUTE |
3945                             RADEON_RESET_CP))) {
3946                 radeon_ring_lockup_update(rdev, ring);
3947                 return false;
3948         }
3949         return radeon_ring_test_lockup(rdev, ring);
3950 }
3951
3952 /*
3953  * RLC
3954  */
3955 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3956 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3957
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the save/restore, clear-state and CP table buffer
 * objects (whichever are allocated) and NULLs the pointers so the
 * function is safe to call again.  A failed reserve is only warned
 * about; teardown continues regardless.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
3998
3999 #define CP_ME_TABLE_SIZE    96
4000
/**
 * sumo_rlc_init - allocate and populate the RLC buffers
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills (as applicable for the asic family) the
 * RLC save/restore buffer, the clear-state buffer, and the CP table
 * buffer.  Any failure tears down everything via sumo_rlc_fini()
 * and returns the error code; returns 0 on success.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* extra dwords for the CIK-format save/restore list */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI */
			/* SI takes the register list verbatim, little-endian */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* each pass consumes TWO source registers (note the
			 * extra i++ inside the loop body) and emits one
			 * packed dword followed by two save slots
			 */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		/* clear-state buffer size depends on the asic generation */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			/* +256 bytes of header before the CSB proper */
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* pre-SI: count registers in every extent, plus the
			 * header block (3 dwords per extent + 2 for the
			 * leading address dword and the end marker)
			 */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: 64-bit GPU address of the CSB, then its size */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* pre-SI layout: header entries at the front, the
			 * register payload after reg_list_blk_index dwords
			 */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* header: payload address low 32 bits */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header: byte offset of the first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header: flag | payload length in bytes */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* payload: the register values themselves */
					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4217
4218 static void evergreen_rlc_start(struct radeon_device *rdev)
4219 {
4220         u32 mask = RLC_ENABLE;
4221
4222         if (rdev->flags & RADEON_IS_IGP) {
4223                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4224         }
4225
4226         WREG32(RLC_CNTL, mask);
4227 }
4228
4229 int evergreen_rlc_resume(struct radeon_device *rdev)
4230 {
4231         u32 i;
4232         const __be32 *fw_data;
4233
4234         if (!rdev->rlc_fw)
4235                 return -EINVAL;
4236
4237         r600_rlc_stop(rdev);
4238
4239         WREG32(RLC_HB_CNTL, 0);
4240
4241         if (rdev->flags & RADEON_IS_IGP) {
4242                 if (rdev->family == CHIP_ARUBA) {
4243                         u32 always_on_bitmap =
4244                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4245                         /* find out the number of active simds */
4246                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4247                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4248                         tmp = hweight32(~tmp);
4249                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4250                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4251                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4252                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4253                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4254                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4255                         }
4256                 } else {
4257                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4258                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4259                 }
4260                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4261                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4262         } else {
4263                 WREG32(RLC_HB_BASE, 0);
4264                 WREG32(RLC_HB_RPTR, 0);
4265                 WREG32(RLC_HB_WPTR, 0);
4266                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4267                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4268         }
4269         WREG32(RLC_MC_CNTL, 0);
4270         WREG32(RLC_UCODE_CNTL, 0);
4271
4272         fw_data = (const __be32 *)rdev->rlc_fw->data;
4273         if (rdev->family >= CHIP_ARUBA) {
4274                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4275                         WREG32(RLC_UCODE_ADDR, i);
4276                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4277                 }
4278         } else if (rdev->family >= CHIP_CAYMAN) {
4279                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4280                         WREG32(RLC_UCODE_ADDR, i);
4281                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4282                 }
4283         } else {
4284                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4285                         WREG32(RLC_UCODE_ADDR, i);
4286                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4287                 }
4288         }
4289         WREG32(RLC_UCODE_ADDR, 0);
4290
4291         evergreen_rlc_start(rdev);
4292
4293         return 0;
4294 }
4295
4296 /* Interrupts */
4297
4298 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4299 {
4300         if (crtc >= rdev->num_crtc)
4301                 return 0;
4302         else
4303                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4304 }
4305
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 * @rdev: radeon_device pointer
 *
 * Clears the enable bits for CP, DMA, GRBM, per-crtc vblank/pageflip,
 * DAC autodetect and hotplug interrupts so the hardware raises nothing,
 * regardless of the software irq state.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	/* CP and DMA engine interrupts.  Cayman and newer have three CP
	 * rings and a second DMA engine that each need disabling.
	 */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* per-crtc vblank/vline interrupt masks; only touch crtcs that
	 * exist on this asic (2, 4 or 6 depending on family)
	 */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-crtc pageflip interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* clear the HPD enable bits but keep the configured polarity */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4363
/**
 * evergreen_irq_set - program the hw interrupt enable registers
 * @rdev: radeon_device pointer
 *
 * Builds enable masks for the CP rings, DMA engines, vblank, hotplug,
 * HDMI/audio and thermal interrupts from the software state in rdev->irq,
 * then writes them all to the hardware in one pass.
 *
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from the current hw state with the enable bits cleared;
	 * they are OR'ed back in below for the sources that are wanted
	 */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* ARUBA (TN) uses a different thermal interrupt control register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	/* second DMA engine exists on Cayman and newer only */
	if (rdev->family >= CHIP_CAYMAN) {
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank: enabled either for the vblank irq proper or for pageflips */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* now write all the accumulated masks out to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts are always left enabled */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4590
4591 static void evergreen_irq_ack(struct radeon_device *rdev)
4592 {
4593         u32 tmp;
4594
4595         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4596         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4597         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4598         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4599         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4600         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4601         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4602         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4603         if (rdev->num_crtc >= 4) {
4604                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4605                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4606         }
4607         if (rdev->num_crtc >= 6) {
4608                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4609                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4610         }
4611
4612         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4613         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4614         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4615         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4616         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4617         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4618
4619         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4620                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4621         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4622                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4623         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4624                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4625         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4626                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4627         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4628                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4629         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4630                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4631
4632         if (rdev->num_crtc >= 4) {
4633                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4634                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4635                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4636                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4637                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4638                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4639                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4640                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4641                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4642                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4643                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4644                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4645         }
4646
4647         if (rdev->num_crtc >= 6) {
4648                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4649                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4650                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4651                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4652                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4653                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4654                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4655                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4656                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4657                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4658                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4659                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4660         }
4661
4662         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4663                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4664                 tmp |= DC_HPDx_INT_ACK;
4665                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4666         }
4667         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4668                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4669                 tmp |= DC_HPDx_INT_ACK;
4670                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4671         }
4672         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4673                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4674                 tmp |= DC_HPDx_INT_ACK;
4675                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4676         }
4677         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4678                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4679                 tmp |= DC_HPDx_INT_ACK;
4680                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4681         }
4682         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4683                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4684                 tmp |= DC_HPDx_INT_ACK;
4685                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4686         }
4687         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4688                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4689                 tmp |= DC_HPDx_INT_ACK;
4690                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4691         }
4692         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4693                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4694                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4695                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4696         }
4697         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4698                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4699                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4700                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4701         }
4702         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4703                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4704                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4705                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4706         }
4707         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4708                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4709                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4710                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4711         }
4712         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4713                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4714                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4715                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4716         }
4717         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4718                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4719                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4720                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4721         }
4722 }
4723
/**
 * evergreen_irq_disable - disable the IH and quiesce display interrupts
 * @rdev: radeon_device pointer
 *
 * Disables interrupt delivery, waits briefly for in-flight interrupts,
 * acks any pending display sources, then forces all enable bits off.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4732
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 * @rdev: radeon_device pointer
 *
 * Disables all interrupt sources and stops the RLC ahead of suspend.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4738
4739 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4740 {
4741         u32 wptr, tmp;
4742
4743         if (rdev->wb.enabled)
4744                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4745         else
4746                 wptr = RREG32(IH_RB_WPTR);
4747
4748         if (wptr & RB_OVERFLOW) {
4749                 wptr &= ~RB_OVERFLOW;
4750                 /* When a ring buffer overflow happen start parsing interrupt
4751                  * from the last not overwritten vector (wptr + 16). Hopefully
4752                  * this should allow us to catchup.
4753                  */
4754                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4755                          wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4756                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4757                 tmp = RREG32(IH_RB_CNTL);
4758                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4759                 WREG32(IH_RB_CNTL, tmp);
4760         }
4761         return (wptr & rdev->ih.ptr_mask);
4762 }
4763
4764 int evergreen_irq_process(struct radeon_device *rdev)
4765 {
4766         u32 wptr;
4767         u32 rptr;
4768         u32 src_id, src_data;
4769         u32 ring_index;
4770         bool queue_hotplug = false;
4771         bool queue_hdmi = false;
4772         bool queue_thermal = false;
4773         u32 status, addr;
4774
4775         if (!rdev->ih.enabled || rdev->shutdown)
4776                 return IRQ_NONE;
4777
4778         wptr = evergreen_get_ih_wptr(rdev);
4779
4780 restart_ih:
4781         /* is somebody else already processing irqs? */
4782         if (atomic_xchg(&rdev->ih.lock, 1))
4783                 return IRQ_NONE;
4784
4785         rptr = rdev->ih.rptr;
4786         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4787
4788         /* Order reading of wptr vs. reading of IH ring data */
4789         rmb();
4790
4791         /* display interrupts */
4792         evergreen_irq_ack(rdev);
4793
4794         while (rptr != wptr) {
4795                 /* wptr/rptr are in bytes! */
4796                 ring_index = rptr / 4;
4797                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4798                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4799
4800                 switch (src_id) {
4801                 case 1: /* D1 vblank/vline */
4802                         switch (src_data) {
4803                         case 0: /* D1 vblank */
4804                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4805                                         if (rdev->irq.crtc_vblank_int[0]) {
4806                                                 drm_handle_vblank(rdev->ddev, 0);
4807                                                 rdev->pm.vblank_sync = true;
4808                                                 wake_up(&rdev->irq.vblank_queue);
4809                                         }
4810                                         if (atomic_read(&rdev->irq.pflip[0]))
4811                                                 radeon_crtc_handle_vblank(rdev, 0);
4812                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4813                                         DRM_DEBUG("IH: D1 vblank\n");
4814                                 }
4815                                 break;
4816                         case 1: /* D1 vline */
4817                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4818                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4819                                         DRM_DEBUG("IH: D1 vline\n");
4820                                 }
4821                                 break;
4822                         default:
4823                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4824                                 break;
4825                         }
4826                         break;
4827                 case 2: /* D2 vblank/vline */
4828                         switch (src_data) {
4829                         case 0: /* D2 vblank */
4830                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4831                                         if (rdev->irq.crtc_vblank_int[1]) {
4832                                                 drm_handle_vblank(rdev->ddev, 1);
4833                                                 rdev->pm.vblank_sync = true;
4834                                                 wake_up(&rdev->irq.vblank_queue);
4835                                         }
4836                                         if (atomic_read(&rdev->irq.pflip[1]))
4837                                                 radeon_crtc_handle_vblank(rdev, 1);
4838                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4839                                         DRM_DEBUG("IH: D2 vblank\n");
4840                                 }
4841                                 break;
4842                         case 1: /* D2 vline */
4843                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4844                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4845                                         DRM_DEBUG("IH: D2 vline\n");
4846                                 }
4847                                 break;
4848                         default:
4849                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4850                                 break;
4851                         }
4852                         break;
4853                 case 3: /* D3 vblank/vline */
4854                         switch (src_data) {
4855                         case 0: /* D3 vblank */
4856                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4857                                         if (rdev->irq.crtc_vblank_int[2]) {
4858                                                 drm_handle_vblank(rdev->ddev, 2);
4859                                                 rdev->pm.vblank_sync = true;
4860                                                 wake_up(&rdev->irq.vblank_queue);
4861                                         }
4862                                         if (atomic_read(&rdev->irq.pflip[2]))
4863                                                 radeon_crtc_handle_vblank(rdev, 2);
4864                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4865                                         DRM_DEBUG("IH: D3 vblank\n");
4866                                 }
4867                                 break;
4868                         case 1: /* D3 vline */
4869                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4870                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4871                                         DRM_DEBUG("IH: D3 vline\n");
4872                                 }
4873                                 break;
4874                         default:
4875                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4876                                 break;
4877                         }
4878                         break;
4879                 case 4: /* D4 vblank/vline */
4880                         switch (src_data) {
4881                         case 0: /* D4 vblank */
4882                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4883                                         if (rdev->irq.crtc_vblank_int[3]) {
4884                                                 drm_handle_vblank(rdev->ddev, 3);
4885                                                 rdev->pm.vblank_sync = true;
4886                                                 wake_up(&rdev->irq.vblank_queue);
4887                                         }
4888                                         if (atomic_read(&rdev->irq.pflip[3]))
4889                                                 radeon_crtc_handle_vblank(rdev, 3);
4890                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4891                                         DRM_DEBUG("IH: D4 vblank\n");
4892                                 }
4893                                 break;
4894                         case 1: /* D4 vline */
4895                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4896                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4897                                         DRM_DEBUG("IH: D4 vline\n");
4898                                 }
4899                                 break;
4900                         default:
4901                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4902                                 break;
4903                         }
4904                         break;
4905                 case 5: /* D5 vblank/vline */
4906                         switch (src_data) {
4907                         case 0: /* D5 vblank */
4908                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4909                                         if (rdev->irq.crtc_vblank_int[4]) {
4910                                                 drm_handle_vblank(rdev->ddev, 4);
4911                                                 rdev->pm.vblank_sync = true;
4912                                                 wake_up(&rdev->irq.vblank_queue);
4913                                         }
4914                                         if (atomic_read(&rdev->irq.pflip[4]))
4915                                                 radeon_crtc_handle_vblank(rdev, 4);
4916                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4917                                         DRM_DEBUG("IH: D5 vblank\n");
4918                                 }
4919                                 break;
4920                         case 1: /* D5 vline */
4921                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4922                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4923                                         DRM_DEBUG("IH: D5 vline\n");
4924                                 }
4925                                 break;
4926                         default:
4927                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4928                                 break;
4929                         }
4930                         break;
4931                 case 6: /* D6 vblank/vline */
4932                         switch (src_data) {
4933                         case 0: /* D6 vblank */
4934                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4935                                         if (rdev->irq.crtc_vblank_int[5]) {
4936                                                 drm_handle_vblank(rdev->ddev, 5);
4937                                                 rdev->pm.vblank_sync = true;
4938                                                 wake_up(&rdev->irq.vblank_queue);
4939                                         }
4940                                         if (atomic_read(&rdev->irq.pflip[5]))
4941                                                 radeon_crtc_handle_vblank(rdev, 5);
4942                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4943                                         DRM_DEBUG("IH: D6 vblank\n");
4944                                 }
4945                                 break;
4946                         case 1: /* D6 vline */
4947                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4948                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4949                                         DRM_DEBUG("IH: D6 vline\n");
4950                                 }
4951                                 break;
4952                         default:
4953                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4954                                 break;
4955                         }
4956                         break;
4957                 case 8: /* D1 page flip */
4958                 case 10: /* D2 page flip */
4959                 case 12: /* D3 page flip */
4960                 case 14: /* D4 page flip */
4961                 case 16: /* D5 page flip */
4962                 case 18: /* D6 page flip */
4963                         DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4964                         if (radeon_use_pflipirq > 0)
4965                                 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4966                         break;
4967                 case 42: /* HPD hotplug */
4968                         switch (src_data) {
4969                         case 0:
4970                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4971                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4972                                         queue_hotplug = true;
4973                                         DRM_DEBUG("IH: HPD1\n");
4974                                 }
4975                                 break;
4976                         case 1:
4977                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4978                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4979                                         queue_hotplug = true;
4980                                         DRM_DEBUG("IH: HPD2\n");
4981                                 }
4982                                 break;
4983                         case 2:
4984                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4985                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4986                                         queue_hotplug = true;
4987                                         DRM_DEBUG("IH: HPD3\n");
4988                                 }
4989                                 break;
4990                         case 3:
4991                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4992                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4993                                         queue_hotplug = true;
4994                                         DRM_DEBUG("IH: HPD4\n");
4995                                 }
4996                                 break;
4997                         case 4:
4998                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4999                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5000                                         queue_hotplug = true;
5001                                         DRM_DEBUG("IH: HPD5\n");
5002                                 }
5003                                 break;
5004                         case 5:
5005                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5006                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5007                                         queue_hotplug = true;
5008                                         DRM_DEBUG("IH: HPD6\n");
5009                                 }
5010                                 break;
5011                         default:
5012                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5013                                 break;
5014                         }
5015                         break;
5016                 case 44: /* hdmi */
5017                         switch (src_data) {
5018                         case 0:
5019                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
5020                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5021                                         queue_hdmi = true;
5022                                         DRM_DEBUG("IH: HDMI0\n");
5023                                 }
5024                                 break;
5025                         case 1:
5026                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
5027                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5028                                         queue_hdmi = true;
5029                                         DRM_DEBUG("IH: HDMI1\n");
5030                                 }
5031                                 break;
5032                         case 2:
5033                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
5034                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5035                                         queue_hdmi = true;
5036                                         DRM_DEBUG("IH: HDMI2\n");
5037                                 }
5038                                 break;
5039                         case 3:
5040                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5041                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5042                                         queue_hdmi = true;
5043                                         DRM_DEBUG("IH: HDMI3\n");
5044                                 }
5045                                 break;
5046                         case 4:
5047                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5048                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5049                                         queue_hdmi = true;
5050                                         DRM_DEBUG("IH: HDMI4\n");
5051                                 }
5052                                 break;
5053                         case 5:
5054                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5055                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5056                                         queue_hdmi = true;
5057                                         DRM_DEBUG("IH: HDMI5\n");
5058                                 }
5059                                 break;
5060                         default:
5061                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5062                                 break;
5063                         }
5064                 case 124: /* UVD */
5065                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5066                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5067                         break;
5068                 case 146:
5069                 case 147:
5070                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5071                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5072                         /* reset addr and status */
5073                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5074                         if (addr == 0x0 && status == 0x0)
5075                                 break;
5076                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5077                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5078                                 addr);
5079                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5080                                 status);
5081                         cayman_vm_decode_fault(rdev, status, addr);
5082                         break;
5083                 case 176: /* CP_INT in ring buffer */
5084                 case 177: /* CP_INT in IB1 */
5085                 case 178: /* CP_INT in IB2 */
5086                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5087                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5088                         break;
5089                 case 181: /* CP EOP event */
5090                         DRM_DEBUG("IH: CP EOP\n");
5091                         if (rdev->family >= CHIP_CAYMAN) {
5092                                 switch (src_data) {
5093                                 case 0:
5094                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5095                                         break;
5096                                 case 1:
5097                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5098                                         break;
5099                                 case 2:
5100                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5101                                         break;
5102                                 }
5103                         } else
5104                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5105                         break;
5106                 case 224: /* DMA trap event */
5107                         DRM_DEBUG("IH: DMA trap\n");
5108                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5109                         break;
5110                 case 230: /* thermal low to high */
5111                         DRM_DEBUG("IH: thermal low to high\n");
5112                         rdev->pm.dpm.thermal.high_to_low = false;
5113                         queue_thermal = true;
5114                         break;
5115                 case 231: /* thermal high to low */
5116                         DRM_DEBUG("IH: thermal high to low\n");
5117                         rdev->pm.dpm.thermal.high_to_low = true;
5118                         queue_thermal = true;
5119                         break;
5120                 case 233: /* GUI IDLE */
5121                         DRM_DEBUG("IH: GUI idle\n");
5122                         break;
5123                 case 244: /* DMA trap event */
5124                         if (rdev->family >= CHIP_CAYMAN) {
5125                                 DRM_DEBUG("IH: DMA1 trap\n");
5126                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5127                         }
5128                         break;
5129                 default:
5130                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5131                         break;
5132                 }
5133
5134                 /* wptr/rptr are in bytes! */
5135                 rptr += 16;
5136                 rptr &= rdev->ih.ptr_mask;
5137                 WREG32(IH_RB_RPTR, rptr);
5138         }
5139         if (queue_hotplug)
5140                 schedule_work(&rdev->hotplug_work);
5141         if (queue_hdmi)
5142                 schedule_work(&rdev->audio_work);
5143         if (queue_thermal && rdev->pm.dpm_enabled)
5144                 schedule_work(&rdev->pm.dpm.thermal.work);
5145         rdev->ih.rptr = rptr;
5146         atomic_set(&rdev->ih.lock, 0);
5147
5148         /* make sure wptr hasn't changed while processing */
5149         wptr = evergreen_get_ih_wptr(rdev);
5150         if (wptr != rptr)
5151                 goto restart_ih;
5152
5153         return IRQ_HANDLED;
5154 }
5155
/**
 * evergreen_startup - program the GPU and start the requested rings
 *
 * @rdev: radeon_device pointer
 *
 * Common hardware bring-up path shared by init and resume: programs the
 * MC, enables GART (or AGP), loads microcode, allocates the RLC/WB
 * buffers, initializes interrupts and brings up the GFX, DMA and
 * (optionally) UVD rings.  The call ordering below follows hardware
 * dependencies and must be preserved.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts need MC ucode; when dpm is enabled it loads
	 * the MC ucode itself, so only load it here otherwise.
	 */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional; failure here only disables the UVD ring
	 * (ring_size = 0 below) rather than failing startup.
	 */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD resume failed above; skip the ring */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5293
5294 int evergreen_resume(struct radeon_device *rdev)
5295 {
5296         int r;
5297
5298         /* reset the asic, the gfx blocks are often in a bad state
5299          * after the driver is unloaded or after a resume
5300          */
5301         if (radeon_asic_reset(rdev))
5302                 dev_warn(rdev->dev, "GPU reset failed !\n");
5303         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5304          * posting will perform necessary task to bring back GPU into good
5305          * shape.
5306          */
5307         /* post card */
5308         atom_asic_init(rdev->mode_info.atom_context);
5309
5310         /* init golden registers */
5311         evergreen_init_golden_registers(rdev);
5312
5313         if (rdev->pm.pm_method == PM_METHOD_DPM)
5314                 radeon_pm_resume(rdev);
5315
5316         rdev->accel_working = true;
5317         r = evergreen_startup(rdev);
5318         if (r) {
5319                 DRM_ERROR("evergreen startup failed on resume\n");
5320                 rdev->accel_working = false;
5321                 return r;
5322         }
5323
5324         return r;
5325
5326 }
5327
/**
 * evergreen_suspend - disable the asic for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Quiesces the hardware roughly in the reverse order of
 * evergreen_startup(): stop audio and UVD, halt the CP and DMA
 * engines, suspend interrupts, then disable writeback and GART.
 * The call ordering is significant and must be preserved.
 *
 * Returns 0 (cannot fail).
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5342
5343 /* Plan is to move initialization in that function and use
5344  * helper function so that radeon_device_init pretty much
5345  * do nothing more than calling asic specific function. This
5346  * should also allow to remove a bunch of callback function
5347  * like vram_info.
5348  */
/**
 * evergreen_init - one-time driver/hardware initialization
 *
 * @rdev: radeon_device pointer
 *
 * Reads and validates the (ATOM) BIOS, posts the card if needed,
 * initializes clocks, fences, AGP, the memory controller and the
 * memory manager, loads microcode, sets up the ring/IH bookkeeping
 * and finally runs evergreen_startup().  On startup failure,
 * acceleration is disabled but init still returns 0 so that modeset
 * keeps working.  Call ordering is significant.
 *
 * Returns 0 on success, negative error code on fatal failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* DCE5 (NI) parts additionally need MC ucode */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional; only set up its ring if the block is present */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear down acceleration but keep going
		 * so modesetting still works
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5477
/**
 * evergreen_fini - final driver/hardware teardown
 *
 * @rdev: radeon_device pointer
 *
 * Releases everything acquired by evergreen_init()/evergreen_startup()
 * in reverse dependency order: engines and interrupts first, then UVD,
 * GART, memory manager, fences, AGP, atombios state and finally the
 * BIOS image.  The call ordering must be preserved.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* BIOS image was kmalloc'ed by radeon_get_bios(); release it and
	 * clear the pointer to guard against use-after-free.
	 */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5502
5503 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5504 {
5505         u32 link_width_cntl, speed_cntl;
5506
5507         if (radeon_pcie_gen2 == 0)
5508                 return;
5509
5510         if (rdev->flags & RADEON_IS_IGP)
5511                 return;
5512
5513         if (!(rdev->flags & RADEON_IS_PCIE))
5514                 return;
5515
5516         /* x2 cards have a special sequence */
5517         if (ASIC_IS_X2(rdev))
5518                 return;
5519
5520         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5521                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5522                 return;
5523
5524         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5525         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5526                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5527                 return;
5528         }
5529
5530         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5531
5532         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5533             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5534
5535                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5536                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5537                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5538
5539                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5540                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5541                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5542
5543                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5544                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5545                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5546
5547                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5548                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5549                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5550
5551                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5552                 speed_cntl |= LC_GEN2_EN_STRAP;
5553                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5554
5555         } else {
5556                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5557                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5558                 if (1)
5559                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5560                 else
5561                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5562                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5563         }
5564 }
5565
/**
 * evergreen_program_aspm - configure PCIe ASPM (Active State Power Management)
 * @rdev: radeon_device pointer
 *
 * Programs the PCIe link power-saving controls (L0s/L1 inactivity timers)
 * and the PIF PHY PLL power-down behaviour for evergreen and newer asics.
 * Registers are only written back when the computed value differs from the
 * value read, to avoid redundant register traffic.
 *
 * Does nothing when ASPM is disabled via the radeon_aspm module parameter
 * or when the device is not on a PCIe bus.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
        u32 data, orig;
        u32 pcie_lc_cntl, pcie_lc_cntl_old;
        bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
        /* fusion_platform = true
         * if the system is a fusion system
         * (APU or DGPU in a fusion system).
         * todo: check if the system is a fusion platform.
         */
        bool fusion_platform = false;

        /* module parameter: radeon_aspm == 0 means ASPM is globally disabled */
        if (radeon_aspm == 0)
                return;

        /* ASPM only applies to PCIe devices */
        if (!(rdev->flags & RADEON_IS_PCIE))
                return;

        /* these families run with L0s left disabled; the others get an
         * L0s inactivity timer programmed below.  (Reason for the split
         * is not visible here — presumably L0s is unreliable on these
         * parts.)
         */
        switch (rdev->family) {
        case CHIP_CYPRESS:
        case CHIP_HEMLOCK:
        case CHIP_JUNIPER:
        case CHIP_REDWOOD:
        case CHIP_CEDAR:
        case CHIP_SUMO:
        case CHIP_SUMO2:
        case CHIP_PALM:
        case CHIP_ARUBA:
                disable_l0s = true;
                break;
        default:
                disable_l0s = false;
                break;
        }

        if (rdev->flags & RADEON_IS_IGP)
                fusion_platform = true; /* XXX also dGPUs in a fusion system */

        /* PIF pairing: clear MULTI_PIF on fusion systems, set it otherwise.
         * Applied identically to both PHY pairs (PB0 and PB1).
         */
        data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
        if (fusion_platform)
                data &= ~MULTI_PIF;
        else
                data |= MULTI_PIF;
        if (data != orig)
                WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

        data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
        if (fusion_platform)
                data &= ~MULTI_PIF;
        else
                data |= MULTI_PIF;
        if (data != orig)
                WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

        /* start from the current LC_CNTL with both inactivity fields cleared;
         * the final value is written back once at the end of the function.
         */
        pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
        pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
        if (!disable_l0s) {
                /* L0s inactivity timer; NI (BARTS+) parts use a different
                 * encoding/value than evergreen parts.
                 */
                if (rdev->family >= CHIP_BARTS)
                        pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
                else
                        pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
        }

        if (!disable_l1) {
                /* L1 inactivity timer, again family dependent */
                if (rdev->family >= CHIP_BARTS)
                        pcie_lc_cntl |= LC_L1_INACTIVITY(7);
                else
                        pcie_lc_cntl |= LC_L1_INACTIVITY(8);

                if (!disable_plloff_in_l1) {
                        /* allow the PHY PLLs to power down while the link is
                         * in L1: program the OFF/TXS2 power states on both
                         * lanes of both PHY pairs.
                         */
                        data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
                        data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
                        if (data != orig)
                                WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

                        data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
                        data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
                        if (data != orig)
                                WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

                        data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
                        data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
                        if (data != orig)
                                WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

                        data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
                        data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
                        data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
                        if (data != orig)
                                WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

                        /* NI (BARTS+) parts also program a PLL ramp-up time
                         * on each lane of both PHY pairs.
                         */
                        if (rdev->family >= CHIP_BARTS) {
                                data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
                                data &= ~PLL_RAMP_UP_TIME_0_MASK;
                                data |= PLL_RAMP_UP_TIME_0(4);
                                if (data != orig)
                                        WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

                                data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
                                data &= ~PLL_RAMP_UP_TIME_1_MASK;
                                data |= PLL_RAMP_UP_TIME_1(4);
                                if (data != orig)
                                        WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

                                data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
                                data &= ~PLL_RAMP_UP_TIME_0_MASK;
                                data |= PLL_RAMP_UP_TIME_0(4);
                                if (data != orig)
                                        WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

                                data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
                                data &= ~PLL_RAMP_UP_TIME_1_MASK;
                                data |= PLL_RAMP_UP_TIME_1(4);
                                if (data != orig)
                                        WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
                        }

                        /* set the dynamic lane power state field in the link
                         * width control register
                         */
                        data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
                        data &= ~LC_DYN_LANES_PWR_STATE_MASK;
                        data |= LC_DYN_LANES_PWR_STATE(3);
                        if (data != orig)
                                WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

                        /* NI (BARTS+) only: LS2 exit time on both PHY pairs */
                        if (rdev->family >= CHIP_BARTS) {
                                data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
                                data &= ~LS2_EXIT_TIME_MASK;
                                data |= LS2_EXIT_TIME(1);
                                if (data != orig)
                                        WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

                                data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
                                data &= ~LS2_EXIT_TIME_MASK;
                                data |= LS2_EXIT_TIME(1);
                                if (data != orig)
                                        WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
                        }
                }
        }

        /* evergreen parts only */
        if (rdev->family < CHIP_BARTS)
                pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

        /* single deferred write-back of the accumulated LC_CNTL value */
        if (pcie_lc_cntl != pcie_lc_cntl_old)
                WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}