Merge branch 'for-linus-dma-masks' of git://git.linaro.org/people/rmk/linux-arm
[cascardo/linux.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
/*
 * MMIO register-block offsets for the six display controllers (CRTCs)
 * on Evergreen-class GPUs, indexed by CRTC id 0-5.
 */
38 static const u32 crtc_offsets[6] =
39 {
40         EVERGREEN_CRTC0_REGISTER_OFFSET,
41         EVERGREEN_CRTC1_REGISTER_OFFSET,
42         EVERGREEN_CRTC2_REGISTER_OFFSET,
43         EVERGREEN_CRTC3_REGISTER_OFFSET,
44         EVERGREEN_CRTC4_REGISTER_OFFSET,
45         EVERGREEN_CRTC5_REGISTER_OFFSET
46 };
47
48 #include "clearstate_evergreen.h"
49
/*
 * Register offsets the RLC microcontroller saves and restores on Sumo
 * parts.  NOTE(review): the name suggests this is consumed when building
 * the RLC save/restore buffer (presumably around power gating) — confirm
 * against the RLC setup code, which is outside this chunk.
 */
50 static const u32 sumo_rlc_save_restore_register_list[] =
51 {
52         0x98fc,
53         0x9830,
54         0x9834,
55         0x9838,
56         0x9870,
57         0x9874,
58         0x8a14,
59         0x8b24,
60         0x8bcc,
61         0x8b10,
62         0x8d00,
63         0x8d04,
64         0x8c00,
65         0x8c04,
66         0x8c08,
67         0x8c0c,
68         0x8d8c,
69         0x8c20,
70         0x8c24,
71         0x8c28,
72         0x8c18,
73         0x8c1c,
74         0x8cf0,
75         0x8e2c,
76         0x8e38,
77         0x8c30,
78         0x9508,
79         0x9688,
80         0x9608,
81         0x960c,
82         0x9610,
83         0x9614,
84         0x88c4,
85         0x88d4,
86         0xa008,
87         0x900c,
88         0x9100,
89         0x913c,
90         0x98f8,
91         0x98f4,
92         0x9b7c,
93         0x3f8c,
94         0x8950,
95         0x8954,
96         0x8a18,
97         0x8b28,
98         0x9144,
99         0x9148,
100         0x914c,
101         0x3f90,
102         0x3f94,
103         0x915c,
104         0x9160,
105         0x9178,
106         0x917c,
107         0x9180,
108         0x918c,
109         0x9190,
110         0x9194,
111         0x9198,
112         0x919c,
113         0x91a8,
114         0x91ac,
115         0x91b0,
116         0x91b4,
117         0x91b8,
118         0x91c4,
119         0x91c8,
120         0x91cc,
121         0x91d0,
122         0x91d4,
123         0x91e0,
124         0x91e4,
125         0x91ec,
126         0x91f0,
127         0x91f4,
128         0x9200,
129         0x9204,
130         0x929c,
131         0x9150,
132         0x802c,
133 };
134
/*
 * Forward declarations for local helpers and extern declarations for
 * routines shared with other ASIC files (cayman, si, cik).  The CSB
 * (clear-state buffer) helpers are borrowed from the SI/CIK code.
 */
135 static void evergreen_gpu_init(struct radeon_device *rdev);
136 void evergreen_fini(struct radeon_device *rdev);
137 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
138 void evergreen_program_aspm(struct radeon_device *rdev);
139 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
140                                      int ring, u32 cp_int_cntl);
141 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
142                                    u32 status, u32 addr);
143 void cik_init_cp_pg_table(struct radeon_device *rdev);
144
145 extern u32 si_get_csb_size(struct radeon_device *rdev);
146 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
147 extern u32 cik_get_csb_size(struct radeon_device *rdev);
148 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
149
/*
 * "Golden" register settings for Evergreen.  The table is consumed three
 * u32s at a time; each triple appears to be {register offset, mask,
 * value} — presumably applied read-modify-write during ASIC init (TODO:
 * confirm against the code that walks these tables; it is outside this
 * chunk).  Values come from AMD and are not independently verifiable.
 */
150 static const u32 evergreen_golden_registers[] =
151 {
152         0x3f90, 0xffff0000, 0xff000000,
153         0x9148, 0xffff0000, 0xff000000,
154         0x3f94, 0xffff0000, 0xff000000,
155         0x914c, 0xffff0000, 0xff000000,
156         0x9b7c, 0xffffffff, 0x00000000,
157         0x8a14, 0xffffffff, 0x00000007,
158         0x8b10, 0xffffffff, 0x00000000,
159         0x960c, 0xffffffff, 0x54763210,
160         0x88c4, 0xffffffff, 0x000000c2,
161         0x88d4, 0xffffffff, 0x00000010,
162         0x8974, 0xffffffff, 0x00000000,
163         0xc78, 0x00000080, 0x00000080,
164         0x5eb4, 0xffffffff, 0x00000002,
165         0x5e78, 0xffffffff, 0x001000f0,
166         0x6104, 0x01000300, 0x00000000,
167         0x5bc0, 0x00300000, 0x00000000,
168         0x7030, 0xffffffff, 0x00000011,
169         0x7c30, 0xffffffff, 0x00000011,
170         0x10830, 0xffffffff, 0x00000011,
171         0x11430, 0xffffffff, 0x00000011,
172         0x12030, 0xffffffff, 0x00000011,
173         0x12c30, 0xffffffff, 0x00000011,
174         0xd02c, 0xffffffff, 0x08421000,
175         0x240c, 0xffffffff, 0x00000380,
176         0x8b24, 0xffffffff, 0x00ff0fff,
177         0x28a4c, 0x06000000, 0x06000000,
178         0x10c, 0x00000001, 0x00000001,
179         0x8d00, 0xffffffff, 0x100e4848,
180         0x8d04, 0xffffffff, 0x00164745,
181         0x8c00, 0xffffffff, 0xe4000003,
182         0x8c04, 0xffffffff, 0x40600060,
183         0x8c08, 0xffffffff, 0x001c001c,
184         0x8cf0, 0xffffffff, 0x08e00620,
185         0x8c20, 0xffffffff, 0x00800080,
186         0x8c24, 0xffffffff, 0x00800080,
187         0x8c18, 0xffffffff, 0x20202078,
188         0x8c1c, 0xffffffff, 0x00001010,
189         0x28350, 0xffffffff, 0x00000000,
190         0xa008, 0xffffffff, 0x00010000,
191         0x5cc, 0xffffffff, 0x00000001,
192         0x9508, 0xffffffff, 0x00000002,
193         0x913c, 0x0000000f, 0x0000000a
194 };
195
/*
 * Second Evergreen golden-register table; same apparent {offset, mask,
 * value} triple layout as evergreen_golden_registers above.  All entries
 * here zero their target registers.
 */
196 static const u32 evergreen_golden_registers2[] =
197 {
198         0x2f4c, 0xffffffff, 0x00000000,
199         0x54f4, 0xffffffff, 0x00000000,
200         0x54f0, 0xffffffff, 0x00000000,
201         0x5498, 0xffffffff, 0x00000000,
202         0x549c, 0xffffffff, 0x00000000,
203         0x5494, 0xffffffff, 0x00000000,
204         0x53cc, 0xffffffff, 0x00000000,
205         0x53c8, 0xffffffff, 0x00000000,
206         0x53c4, 0xffffffff, 0x00000000,
207         0x53c0, 0xffffffff, 0x00000000,
208         0x53bc, 0xffffffff, 0x00000000,
209         0x53b8, 0xffffffff, 0x00000000,
210         0x53b4, 0xffffffff, 0x00000000,
211         0x53b0, 0xffffffff, 0x00000000
212 };
213
/*
 * Clock-gating init sequence for Cypress ("mgcg" presumably =
 * medium-grain clock gating — confirm against the clock-gating code
 * that consumes this table).  Same apparent {offset, mask, value}
 * triple layout as the golden-register tables.  Note 0x802c is written
 * several times to select banks between runs of 0x91xx/0x92xx writes.
 */
214 static const u32 cypress_mgcg_init[] =
215 {
216         0x802c, 0xffffffff, 0xc0000000,
217         0x5448, 0xffffffff, 0x00000100,
218         0x55e4, 0xffffffff, 0x00000100,
219         0x160c, 0xffffffff, 0x00000100,
220         0x5644, 0xffffffff, 0x00000100,
221         0xc164, 0xffffffff, 0x00000100,
222         0x8a18, 0xffffffff, 0x00000100,
223         0x897c, 0xffffffff, 0x06000100,
224         0x8b28, 0xffffffff, 0x00000100,
225         0x9144, 0xffffffff, 0x00000100,
226         0x9a60, 0xffffffff, 0x00000100,
227         0x9868, 0xffffffff, 0x00000100,
228         0x8d58, 0xffffffff, 0x00000100,
229         0x9510, 0xffffffff, 0x00000100,
230         0x949c, 0xffffffff, 0x00000100,
231         0x9654, 0xffffffff, 0x00000100,
232         0x9030, 0xffffffff, 0x00000100,
233         0x9034, 0xffffffff, 0x00000100,
234         0x9038, 0xffffffff, 0x00000100,
235         0x903c, 0xffffffff, 0x00000100,
236         0x9040, 0xffffffff, 0x00000100,
237         0xa200, 0xffffffff, 0x00000100,
238         0xa204, 0xffffffff, 0x00000100,
239         0xa208, 0xffffffff, 0x00000100,
240         0xa20c, 0xffffffff, 0x00000100,
241         0x971c, 0xffffffff, 0x00000100,
242         0x977c, 0xffffffff, 0x00000100,
243         0x3f80, 0xffffffff, 0x00000100,
244         0xa210, 0xffffffff, 0x00000100,
245         0xa214, 0xffffffff, 0x00000100,
246         0x4d8, 0xffffffff, 0x00000100,
247         0x9784, 0xffffffff, 0x00000100,
248         0x9698, 0xffffffff, 0x00000100,
249         0x4d4, 0xffffffff, 0x00000200,
250         0x30cc, 0xffffffff, 0x00000100,
251         0xd0c0, 0xffffffff, 0xff000100,
252         0x802c, 0xffffffff, 0x40000000,
253         0x915c, 0xffffffff, 0x00010000,
254         0x9160, 0xffffffff, 0x00030002,
255         0x9178, 0xffffffff, 0x00070000,
256         0x917c, 0xffffffff, 0x00030002,
257         0x9180, 0xffffffff, 0x00050004,
258         0x918c, 0xffffffff, 0x00010006,
259         0x9190, 0xffffffff, 0x00090008,
260         0x9194, 0xffffffff, 0x00070000,
261         0x9198, 0xffffffff, 0x00030002,
262         0x919c, 0xffffffff, 0x00050004,
263         0x91a8, 0xffffffff, 0x00010006,
264         0x91ac, 0xffffffff, 0x00090008,
265         0x91b0, 0xffffffff, 0x00070000,
266         0x91b4, 0xffffffff, 0x00030002,
267         0x91b8, 0xffffffff, 0x00050004,
268         0x91c4, 0xffffffff, 0x00010006,
269         0x91c8, 0xffffffff, 0x00090008,
270         0x91cc, 0xffffffff, 0x00070000,
271         0x91d0, 0xffffffff, 0x00030002,
272         0x91d4, 0xffffffff, 0x00050004,
273         0x91e0, 0xffffffff, 0x00010006,
274         0x91e4, 0xffffffff, 0x00090008,
275         0x91e8, 0xffffffff, 0x00000000,
276         0x91ec, 0xffffffff, 0x00070000,
277         0x91f0, 0xffffffff, 0x00030002,
278         0x91f4, 0xffffffff, 0x00050004,
279         0x9200, 0xffffffff, 0x00010006,
280         0x9204, 0xffffffff, 0x00090008,
281         0x9208, 0xffffffff, 0x00070000,
282         0x920c, 0xffffffff, 0x00030002,
283         0x9210, 0xffffffff, 0x00050004,
284         0x921c, 0xffffffff, 0x00010006,
285         0x9220, 0xffffffff, 0x00090008,
286         0x9224, 0xffffffff, 0x00070000,
287         0x9228, 0xffffffff, 0x00030002,
288         0x922c, 0xffffffff, 0x00050004,
289         0x9238, 0xffffffff, 0x00010006,
290         0x923c, 0xffffffff, 0x00090008,
291         0x9240, 0xffffffff, 0x00070000,
292         0x9244, 0xffffffff, 0x00030002,
293         0x9248, 0xffffffff, 0x00050004,
294         0x9254, 0xffffffff, 0x00010006,
295         0x9258, 0xffffffff, 0x00090008,
296         0x925c, 0xffffffff, 0x00070000,
297         0x9260, 0xffffffff, 0x00030002,
298         0x9264, 0xffffffff, 0x00050004,
299         0x9270, 0xffffffff, 0x00010006,
300         0x9274, 0xffffffff, 0x00090008,
301         0x9278, 0xffffffff, 0x00070000,
302         0x927c, 0xffffffff, 0x00030002,
303         0x9280, 0xffffffff, 0x00050004,
304         0x928c, 0xffffffff, 0x00010006,
305         0x9290, 0xffffffff, 0x00090008,
306         0x9294, 0xffffffff, 0x00000000,
307         0x929c, 0xffffffff, 0x00000001,
308         0x802c, 0xffffffff, 0x40010000,
309         0x915c, 0xffffffff, 0x00010000,
310         0x9160, 0xffffffff, 0x00030002,
311         0x9178, 0xffffffff, 0x00070000,
312         0x917c, 0xffffffff, 0x00030002,
313         0x9180, 0xffffffff, 0x00050004,
314         0x918c, 0xffffffff, 0x00010006,
315         0x9190, 0xffffffff, 0x00090008,
316         0x9194, 0xffffffff, 0x00070000,
317         0x9198, 0xffffffff, 0x00030002,
318         0x919c, 0xffffffff, 0x00050004,
319         0x91a8, 0xffffffff, 0x00010006,
320         0x91ac, 0xffffffff, 0x00090008,
321         0x91b0, 0xffffffff, 0x00070000,
322         0x91b4, 0xffffffff, 0x00030002,
323         0x91b8, 0xffffffff, 0x00050004,
324         0x91c4, 0xffffffff, 0x00010006,
325         0x91c8, 0xffffffff, 0x00090008,
326         0x91cc, 0xffffffff, 0x00070000,
327         0x91d0, 0xffffffff, 0x00030002,
328         0x91d4, 0xffffffff, 0x00050004,
329         0x91e0, 0xffffffff, 0x00010006,
330         0x91e4, 0xffffffff, 0x00090008,
331         0x91e8, 0xffffffff, 0x00000000,
332         0x91ec, 0xffffffff, 0x00070000,
333         0x91f0, 0xffffffff, 0x00030002,
334         0x91f4, 0xffffffff, 0x00050004,
335         0x9200, 0xffffffff, 0x00010006,
336         0x9204, 0xffffffff, 0x00090008,
337         0x9208, 0xffffffff, 0x00070000,
338         0x920c, 0xffffffff, 0x00030002,
339         0x9210, 0xffffffff, 0x00050004,
340         0x921c, 0xffffffff, 0x00010006,
341         0x9220, 0xffffffff, 0x00090008,
342         0x9224, 0xffffffff, 0x00070000,
343         0x9228, 0xffffffff, 0x00030002,
344         0x922c, 0xffffffff, 0x00050004,
345         0x9238, 0xffffffff, 0x00010006,
346         0x923c, 0xffffffff, 0x00090008,
347         0x9240, 0xffffffff, 0x00070000,
348         0x9244, 0xffffffff, 0x00030002,
349         0x9248, 0xffffffff, 0x00050004,
350         0x9254, 0xffffffff, 0x00010006,
351         0x9258, 0xffffffff, 0x00090008,
352         0x925c, 0xffffffff, 0x00070000,
353         0x9260, 0xffffffff, 0x00030002,
354         0x9264, 0xffffffff, 0x00050004,
355         0x9270, 0xffffffff, 0x00010006,
356         0x9274, 0xffffffff, 0x00090008,
357         0x9278, 0xffffffff, 0x00070000,
358         0x927c, 0xffffffff, 0x00030002,
359         0x9280, 0xffffffff, 0x00050004,
360         0x928c, 0xffffffff, 0x00010006,
361         0x9290, 0xffffffff, 0x00090008,
362         0x9294, 0xffffffff, 0x00000000,
363         0x929c, 0xffffffff, 0x00000001,
364         0x802c, 0xffffffff, 0xc0000000
365 };
366
/*
 * Clock-gating init sequence for Redwood; same triple layout and same
 * overall shape as cypress_mgcg_init, but with only one 0x91xx/0x92xx
 * bank pass (Redwood has fewer shader engines than Cypress —
 * NOTE(review): inferred from the shorter table, confirm).
 */
367 static const u32 redwood_mgcg_init[] =
368 {
369         0x802c, 0xffffffff, 0xc0000000,
370         0x5448, 0xffffffff, 0x00000100,
371         0x55e4, 0xffffffff, 0x00000100,
372         0x160c, 0xffffffff, 0x00000100,
373         0x5644, 0xffffffff, 0x00000100,
374         0xc164, 0xffffffff, 0x00000100,
375         0x8a18, 0xffffffff, 0x00000100,
376         0x897c, 0xffffffff, 0x06000100,
377         0x8b28, 0xffffffff, 0x00000100,
378         0x9144, 0xffffffff, 0x00000100,
379         0x9a60, 0xffffffff, 0x00000100,
380         0x9868, 0xffffffff, 0x00000100,
381         0x8d58, 0xffffffff, 0x00000100,
382         0x9510, 0xffffffff, 0x00000100,
383         0x949c, 0xffffffff, 0x00000100,
384         0x9654, 0xffffffff, 0x00000100,
385         0x9030, 0xffffffff, 0x00000100,
386         0x9034, 0xffffffff, 0x00000100,
387         0x9038, 0xffffffff, 0x00000100,
388         0x903c, 0xffffffff, 0x00000100,
389         0x9040, 0xffffffff, 0x00000100,
390         0xa200, 0xffffffff, 0x00000100,
391         0xa204, 0xffffffff, 0x00000100,
392         0xa208, 0xffffffff, 0x00000100,
393         0xa20c, 0xffffffff, 0x00000100,
394         0x971c, 0xffffffff, 0x00000100,
395         0x977c, 0xffffffff, 0x00000100,
396         0x3f80, 0xffffffff, 0x00000100,
397         0xa210, 0xffffffff, 0x00000100,
398         0xa214, 0xffffffff, 0x00000100,
399         0x4d8, 0xffffffff, 0x00000100,
400         0x9784, 0xffffffff, 0x00000100,
401         0x9698, 0xffffffff, 0x00000100,
402         0x4d4, 0xffffffff, 0x00000200,
403         0x30cc, 0xffffffff, 0x00000100,
404         0xd0c0, 0xffffffff, 0xff000100,
405         0x802c, 0xffffffff, 0x40000000,
406         0x915c, 0xffffffff, 0x00010000,
407         0x9160, 0xffffffff, 0x00030002,
408         0x9178, 0xffffffff, 0x00070000,
409         0x917c, 0xffffffff, 0x00030002,
410         0x9180, 0xffffffff, 0x00050004,
411         0x918c, 0xffffffff, 0x00010006,
412         0x9190, 0xffffffff, 0x00090008,
413         0x9194, 0xffffffff, 0x00070000,
414         0x9198, 0xffffffff, 0x00030002,
415         0x919c, 0xffffffff, 0x00050004,
416         0x91a8, 0xffffffff, 0x00010006,
417         0x91ac, 0xffffffff, 0x00090008,
418         0x91b0, 0xffffffff, 0x00070000,
419         0x91b4, 0xffffffff, 0x00030002,
420         0x91b8, 0xffffffff, 0x00050004,
421         0x91c4, 0xffffffff, 0x00010006,
422         0x91c8, 0xffffffff, 0x00090008,
423         0x91cc, 0xffffffff, 0x00070000,
424         0x91d0, 0xffffffff, 0x00030002,
425         0x91d4, 0xffffffff, 0x00050004,
426         0x91e0, 0xffffffff, 0x00010006,
427         0x91e4, 0xffffffff, 0x00090008,
428         0x91e8, 0xffffffff, 0x00000000,
429         0x91ec, 0xffffffff, 0x00070000,
430         0x91f0, 0xffffffff, 0x00030002,
431         0x91f4, 0xffffffff, 0x00050004,
432         0x9200, 0xffffffff, 0x00010006,
433         0x9204, 0xffffffff, 0x00090008,
434         0x9294, 0xffffffff, 0x00000000,
435         0x929c, 0xffffffff, 0x00000001,
436         0x802c, 0xffffffff, 0xc0000000
437 };
438
/*
 * Golden register settings for Cedar; same apparent {offset, mask,
 * value} triple layout as evergreen_golden_registers, with
 * Cedar-specific values (e.g. 0x88d4 and 0x8cf0 differ).
 */
439 static const u32 cedar_golden_registers[] =
440 {
441         0x3f90, 0xffff0000, 0xff000000,
442         0x9148, 0xffff0000, 0xff000000,
443         0x3f94, 0xffff0000, 0xff000000,
444         0x914c, 0xffff0000, 0xff000000,
445         0x9b7c, 0xffffffff, 0x00000000,
446         0x8a14, 0xffffffff, 0x00000007,
447         0x8b10, 0xffffffff, 0x00000000,
448         0x960c, 0xffffffff, 0x54763210,
449         0x88c4, 0xffffffff, 0x000000c2,
450         0x88d4, 0xffffffff, 0x00000000,
451         0x8974, 0xffffffff, 0x00000000,
452         0xc78, 0x00000080, 0x00000080,
453         0x5eb4, 0xffffffff, 0x00000002,
454         0x5e78, 0xffffffff, 0x001000f0,
455         0x6104, 0x01000300, 0x00000000,
456         0x5bc0, 0x00300000, 0x00000000,
457         0x7030, 0xffffffff, 0x00000011,
458         0x7c30, 0xffffffff, 0x00000011,
459         0x10830, 0xffffffff, 0x00000011,
460         0x11430, 0xffffffff, 0x00000011,
461         0xd02c, 0xffffffff, 0x08421000,
462         0x240c, 0xffffffff, 0x00000380,
463         0x8b24, 0xffffffff, 0x00ff0fff,
464         0x28a4c, 0x06000000, 0x06000000,
465         0x10c, 0x00000001, 0x00000001,
466         0x8d00, 0xffffffff, 0x100e4848,
467         0x8d04, 0xffffffff, 0x00164745,
468         0x8c00, 0xffffffff, 0xe4000003,
469         0x8c04, 0xffffffff, 0x40600060,
470         0x8c08, 0xffffffff, 0x001c001c,
471         0x8cf0, 0xffffffff, 0x08e00410,
472         0x8c20, 0xffffffff, 0x00800080,
473         0x8c24, 0xffffffff, 0x00800080,
474         0x8c18, 0xffffffff, 0x20202078,
475         0x8c1c, 0xffffffff, 0x00001010,
476         0x28350, 0xffffffff, 0x00000000,
477         0xa008, 0xffffffff, 0x00010000,
478         0x5cc, 0xffffffff, 0x00000001,
479         0x9508, 0xffffffff, 0x00000002
480 };
481
/*
 * Clock-gating init sequence for Cedar; same triple layout as the other
 * *_mgcg_init tables, with a shorter 0x91xx bank section than Redwood
 * or Cypress.
 */
482 static const u32 cedar_mgcg_init[] =
483 {
484         0x802c, 0xffffffff, 0xc0000000,
485         0x5448, 0xffffffff, 0x00000100,
486         0x55e4, 0xffffffff, 0x00000100,
487         0x160c, 0xffffffff, 0x00000100,
488         0x5644, 0xffffffff, 0x00000100,
489         0xc164, 0xffffffff, 0x00000100,
490         0x8a18, 0xffffffff, 0x00000100,
491         0x897c, 0xffffffff, 0x06000100,
492         0x8b28, 0xffffffff, 0x00000100,
493         0x9144, 0xffffffff, 0x00000100,
494         0x9a60, 0xffffffff, 0x00000100,
495         0x9868, 0xffffffff, 0x00000100,
496         0x8d58, 0xffffffff, 0x00000100,
497         0x9510, 0xffffffff, 0x00000100,
498         0x949c, 0xffffffff, 0x00000100,
499         0x9654, 0xffffffff, 0x00000100,
500         0x9030, 0xffffffff, 0x00000100,
501         0x9034, 0xffffffff, 0x00000100,
502         0x9038, 0xffffffff, 0x00000100,
503         0x903c, 0xffffffff, 0x00000100,
504         0x9040, 0xffffffff, 0x00000100,
505         0xa200, 0xffffffff, 0x00000100,
506         0xa204, 0xffffffff, 0x00000100,
507         0xa208, 0xffffffff, 0x00000100,
508         0xa20c, 0xffffffff, 0x00000100,
509         0x971c, 0xffffffff, 0x00000100,
510         0x977c, 0xffffffff, 0x00000100,
511         0x3f80, 0xffffffff, 0x00000100,
512         0xa210, 0xffffffff, 0x00000100,
513         0xa214, 0xffffffff, 0x00000100,
514         0x4d8, 0xffffffff, 0x00000100,
515         0x9784, 0xffffffff, 0x00000100,
516         0x9698, 0xffffffff, 0x00000100,
517         0x4d4, 0xffffffff, 0x00000200,
518         0x30cc, 0xffffffff, 0x00000100,
519         0xd0c0, 0xffffffff, 0xff000100,
520         0x802c, 0xffffffff, 0x40000000,
521         0x915c, 0xffffffff, 0x00010000,
522         0x9178, 0xffffffff, 0x00050000,
523         0x917c, 0xffffffff, 0x00030002,
524         0x918c, 0xffffffff, 0x00010004,
525         0x9190, 0xffffffff, 0x00070006,
526         0x9194, 0xffffffff, 0x00050000,
527         0x9198, 0xffffffff, 0x00030002,
528         0x91a8, 0xffffffff, 0x00010004,
529         0x91ac, 0xffffffff, 0x00070006,
530         0x91e8, 0xffffffff, 0x00000000,
531         0x9294, 0xffffffff, 0x00000000,
532         0x929c, 0xffffffff, 0x00000001,
533         0x802c, 0xffffffff, 0xc0000000
534 };
535
/*
 * Clock-gating init sequence for Juniper; same triple layout as the
 * other *_mgcg_init tables.  Unlike Cypress/Redwood, several of the
 * 0x00000100 enables (0x977c, 0x3f80, 0xa210, ...) come after the
 * banked 0x91xx/0x92xx section rather than before it.
 */
536 static const u32 juniper_mgcg_init[] =
537 {
538         0x802c, 0xffffffff, 0xc0000000,
539         0x5448, 0xffffffff, 0x00000100,
540         0x55e4, 0xffffffff, 0x00000100,
541         0x160c, 0xffffffff, 0x00000100,
542         0x5644, 0xffffffff, 0x00000100,
543         0xc164, 0xffffffff, 0x00000100,
544         0x8a18, 0xffffffff, 0x00000100,
545         0x897c, 0xffffffff, 0x06000100,
546         0x8b28, 0xffffffff, 0x00000100,
547         0x9144, 0xffffffff, 0x00000100,
548         0x9a60, 0xffffffff, 0x00000100,
549         0x9868, 0xffffffff, 0x00000100,
550         0x8d58, 0xffffffff, 0x00000100,
551         0x9510, 0xffffffff, 0x00000100,
552         0x949c, 0xffffffff, 0x00000100,
553         0x9654, 0xffffffff, 0x00000100,
554         0x9030, 0xffffffff, 0x00000100,
555         0x9034, 0xffffffff, 0x00000100,
556         0x9038, 0xffffffff, 0x00000100,
557         0x903c, 0xffffffff, 0x00000100,
558         0x9040, 0xffffffff, 0x00000100,
559         0xa200, 0xffffffff, 0x00000100,
560         0xa204, 0xffffffff, 0x00000100,
561         0xa208, 0xffffffff, 0x00000100,
562         0xa20c, 0xffffffff, 0x00000100,
563         0x971c, 0xffffffff, 0x00000100,
564         0xd0c0, 0xffffffff, 0xff000100,
565         0x802c, 0xffffffff, 0x40000000,
566         0x915c, 0xffffffff, 0x00010000,
567         0x9160, 0xffffffff, 0x00030002,
568         0x9178, 0xffffffff, 0x00070000,
569         0x917c, 0xffffffff, 0x00030002,
570         0x9180, 0xffffffff, 0x00050004,
571         0x918c, 0xffffffff, 0x00010006,
572         0x9190, 0xffffffff, 0x00090008,
573         0x9194, 0xffffffff, 0x00070000,
574         0x9198, 0xffffffff, 0x00030002,
575         0x919c, 0xffffffff, 0x00050004,
576         0x91a8, 0xffffffff, 0x00010006,
577         0x91ac, 0xffffffff, 0x00090008,
578         0x91b0, 0xffffffff, 0x00070000,
579         0x91b4, 0xffffffff, 0x00030002,
580         0x91b8, 0xffffffff, 0x00050004,
581         0x91c4, 0xffffffff, 0x00010006,
582         0x91c8, 0xffffffff, 0x00090008,
583         0x91cc, 0xffffffff, 0x00070000,
584         0x91d0, 0xffffffff, 0x00030002,
585         0x91d4, 0xffffffff, 0x00050004,
586         0x91e0, 0xffffffff, 0x00010006,
587         0x91e4, 0xffffffff, 0x00090008,
588         0x91e8, 0xffffffff, 0x00000000,
589         0x91ec, 0xffffffff, 0x00070000,
590         0x91f0, 0xffffffff, 0x00030002,
591         0x91f4, 0xffffffff, 0x00050004,
592         0x9200, 0xffffffff, 0x00010006,
593         0x9204, 0xffffffff, 0x00090008,
594         0x9208, 0xffffffff, 0x00070000,
595         0x920c, 0xffffffff, 0x00030002,
596         0x9210, 0xffffffff, 0x00050004,
597         0x921c, 0xffffffff, 0x00010006,
598         0x9220, 0xffffffff, 0x00090008,
599         0x9224, 0xffffffff, 0x00070000,
600         0x9228, 0xffffffff, 0x00030002,
601         0x922c, 0xffffffff, 0x00050004,
602         0x9238, 0xffffffff, 0x00010006,
603         0x923c, 0xffffffff, 0x00090008,
604         0x9240, 0xffffffff, 0x00070000,
605         0x9244, 0xffffffff, 0x00030002,
606         0x9248, 0xffffffff, 0x00050004,
607         0x9254, 0xffffffff, 0x00010006,
608         0x9258, 0xffffffff, 0x00090008,
609         0x925c, 0xffffffff, 0x00070000,
610         0x9260, 0xffffffff, 0x00030002,
611         0x9264, 0xffffffff, 0x00050004,
612         0x9270, 0xffffffff, 0x00010006,
613         0x9274, 0xffffffff, 0x00090008,
614         0x9278, 0xffffffff, 0x00070000,
615         0x927c, 0xffffffff, 0x00030002,
616         0x9280, 0xffffffff, 0x00050004,
617         0x928c, 0xffffffff, 0x00010006,
618         0x9290, 0xffffffff, 0x00090008,
619         0x9294, 0xffffffff, 0x00000000,
620         0x929c, 0xffffffff, 0x00000001,
621         0x802c, 0xffffffff, 0xc0000000,
622         0x977c, 0xffffffff, 0x00000100,
623         0x3f80, 0xffffffff, 0x00000100,
624         0xa210, 0xffffffff, 0x00000100,
625         0xa214, 0xffffffff, 0x00000100,
626         0x4d8, 0xffffffff, 0x00000100,
627         0x9784, 0xffffffff, 0x00000100,
628         0x9698, 0xffffffff, 0x00000100,
629         0x4d4, 0xffffffff, 0x00000200,
630         0x30cc, 0xffffffff, 0x00000100,
631         0x802c, 0xffffffff, 0xc0000000
632 };
633
/*
 * Golden register settings for SuperSumo (Evergreen-derived APU); same
 * apparent {offset, mask, value} triple layout as the discrete-GPU
 * golden tables above.
 */
634 static const u32 supersumo_golden_registers[] =
635 {
636         0x5eb4, 0xffffffff, 0x00000002,
637         0x5cc, 0xffffffff, 0x00000001,
638         0x7030, 0xffffffff, 0x00000011,
639         0x7c30, 0xffffffff, 0x00000011,
640         0x6104, 0x01000300, 0x00000000,
641         0x5bc0, 0x00300000, 0x00000000,
642         0x8c04, 0xffffffff, 0x40600060,
643         0x8c08, 0xffffffff, 0x001c001c,
644         0x8c20, 0xffffffff, 0x00800080,
645         0x8c24, 0xffffffff, 0x00800080,
646         0x8c18, 0xffffffff, 0x20202078,
647         0x8c1c, 0xffffffff, 0x00001010,
648         0x918c, 0xffffffff, 0x00010006,
649         0x91a8, 0xffffffff, 0x00010006,
650         0x91c4, 0xffffffff, 0x00010006,
651         0x91e0, 0xffffffff, 0x00010006,
652         0x9200, 0xffffffff, 0x00010006,
653         0x9150, 0xffffffff, 0x6e944040,
654         0x917c, 0xffffffff, 0x00030002,
655         0x9180, 0xffffffff, 0x00050004,
656         0x9198, 0xffffffff, 0x00030002,
657         0x919c, 0xffffffff, 0x00050004,
658         0x91b4, 0xffffffff, 0x00030002,
659         0x91b8, 0xffffffff, 0x00050004,
660         0x91d0, 0xffffffff, 0x00030002,
661         0x91d4, 0xffffffff, 0x00050004,
662         0x91f0, 0xffffffff, 0x00030002,
663         0x91f4, 0xffffffff, 0x00050004,
664         0x915c, 0xffffffff, 0x00010000,
665         0x9160, 0xffffffff, 0x00030002,
666         0x3f90, 0xffff0000, 0xff000000,
667         0x9178, 0xffffffff, 0x00070000,
668         0x9194, 0xffffffff, 0x00070000,
669         0x91b0, 0xffffffff, 0x00070000,
670         0x91cc, 0xffffffff, 0x00070000,
671         0x91ec, 0xffffffff, 0x00070000,
672         0x9148, 0xffff0000, 0xff000000,
673         0x9190, 0xffffffff, 0x00090008,
674         0x91ac, 0xffffffff, 0x00090008,
675         0x91c8, 0xffffffff, 0x00090008,
676         0x91e4, 0xffffffff, 0x00090008,
677         0x9204, 0xffffffff, 0x00090008,
678         0x3f94, 0xffff0000, 0xff000000,
679         0x914c, 0xffff0000, 0xff000000,
680         0x929c, 0xffffffff, 0x00000001,
681         0x8a18, 0xffffffff, 0x00000100,
682         0x8b28, 0xffffffff, 0x00000100,
683         0x9144, 0xffffffff, 0x00000100,
684         0x5644, 0xffffffff, 0x00000100,
685         0x9b7c, 0xffffffff, 0x00000000,
686         0x8030, 0xffffffff, 0x0000100a,
687         0x8a14, 0xffffffff, 0x00000007,
688         0x8b24, 0xffffffff, 0x00ff0fff,
689         0x8b10, 0xffffffff, 0x00000000,
690         0x28a4c, 0x06000000, 0x06000000,
691         0x4d8, 0xffffffff, 0x00000100,
692         0x913c, 0xffff000f, 0x0100000a,
693         0x960c, 0xffffffff, 0x54763210,
694         0x88c4, 0xffffffff, 0x000000c2,
695         0x88d4, 0xffffffff, 0x00000010,
696         0x8974, 0xffffffff, 0x00000000,
697         0xc78, 0x00000080, 0x00000080,
698         0x5e78, 0xffffffff, 0x001000f0,
699         0xd02c, 0xffffffff, 0x08421000,
700         0xa008, 0xffffffff, 0x00010000,
701         0x8d00, 0xffffffff, 0x100e4848,
702         0x8d04, 0xffffffff, 0x00164745,
703         0x8c00, 0xffffffff, 0xe4000003,
704         0x8cf0, 0x1fffffff, 0x08e00620,
705         0x28350, 0xffffffff, 0x00000000,
706         0x9508, 0xffffffff, 0x00000002
707 };
708
/*
 * Additional Sumo-specific golden register settings; same {offset,
 * mask, value} triple layout.  Presumably applied on top of the shared
 * SuperSumo table — confirm against the init code consuming it.
 */
709 static const u32 sumo_golden_registers[] =
710 {
711         0x900c, 0x00ffffff, 0x0017071f,
712         0x8c18, 0xffffffff, 0x10101060,
713         0x8c1c, 0xffffffff, 0x00001010,
714         0x8c30, 0x0000000f, 0x00000005,
715         0x9688, 0x0000000f, 0x00000007
716 };
717
/*
 * Golden register settings for Wrestler (low-end Fusion APU); same
 * triple layout.  Fewer 0x91xx bank entries than SuperSumo and a
 * different 0x8a14/0x8cf0 configuration.
 */
718 static const u32 wrestler_golden_registers[] =
719 {
720         0x5eb4, 0xffffffff, 0x00000002,
721         0x5cc, 0xffffffff, 0x00000001,
722         0x7030, 0xffffffff, 0x00000011,
723         0x7c30, 0xffffffff, 0x00000011,
724         0x6104, 0x01000300, 0x00000000,
725         0x5bc0, 0x00300000, 0x00000000,
726         0x918c, 0xffffffff, 0x00010006,
727         0x91a8, 0xffffffff, 0x00010006,
728         0x9150, 0xffffffff, 0x6e944040,
729         0x917c, 0xffffffff, 0x00030002,
730         0x9198, 0xffffffff, 0x00030002,
731         0x915c, 0xffffffff, 0x00010000,
732         0x3f90, 0xffff0000, 0xff000000,
733         0x9178, 0xffffffff, 0x00070000,
734         0x9194, 0xffffffff, 0x00070000,
735         0x9148, 0xffff0000, 0xff000000,
736         0x9190, 0xffffffff, 0x00090008,
737         0x91ac, 0xffffffff, 0x00090008,
738         0x3f94, 0xffff0000, 0xff000000,
739         0x914c, 0xffff0000, 0xff000000,
740         0x929c, 0xffffffff, 0x00000001,
741         0x8a18, 0xffffffff, 0x00000100,
742         0x8b28, 0xffffffff, 0x00000100,
743         0x9144, 0xffffffff, 0x00000100,
744         0x9b7c, 0xffffffff, 0x00000000,
745         0x8030, 0xffffffff, 0x0000100a,
746         0x8a14, 0xffffffff, 0x00000001,
747         0x8b24, 0xffffffff, 0x00ff0fff,
748         0x8b10, 0xffffffff, 0x00000000,
749         0x28a4c, 0x06000000, 0x06000000,
750         0x4d8, 0xffffffff, 0x00000100,
751         0x913c, 0xffff000f, 0x0100000a,
752         0x960c, 0xffffffff, 0x54763210,
753         0x88c4, 0xffffffff, 0x000000c2,
754         0x88d4, 0xffffffff, 0x00000010,
755         0x8974, 0xffffffff, 0x00000000,
756         0xc78, 0x00000080, 0x00000080,
757         0x5e78, 0xffffffff, 0x001000f0,
758         0xd02c, 0xffffffff, 0x08421000,
759         0xa008, 0xffffffff, 0x00010000,
760         0x8d00, 0xffffffff, 0x100e4848,
761         0x8d04, 0xffffffff, 0x00164745,
762         0x8c00, 0xffffffff, 0xe4000003,
763         0x8cf0, 0x1fffffff, 0x08e00410,
764         0x28350, 0xffffffff, 0x00000000,
765         0x9508, 0xffffffff, 0x00000002,
766         0x900c, 0xffffffff, 0x0017071f,
767         0x8c18, 0xffffffff, 0x10101060,
768         0x8c1c, 0xffffffff, 0x00001010
769 };
770
/*
 * Golden register settings for Barts (Northern Islands).  Same triple
 * layout, but unlike the Evergreen tables most masks here are partial
 * rather than 0xffffffff, so untouched bits are preserved.
 */
771 static const u32 barts_golden_registers[] =
772 {
773         0x5eb4, 0xffffffff, 0x00000002,
774         0x5e78, 0x8f311ff1, 0x001000f0,
775         0x3f90, 0xffff0000, 0xff000000,
776         0x9148, 0xffff0000, 0xff000000,
777         0x3f94, 0xffff0000, 0xff000000,
778         0x914c, 0xffff0000, 0xff000000,
779         0xc78, 0x00000080, 0x00000080,
780         0xbd4, 0x70073777, 0x00010001,
781         0xd02c, 0xbfffff1f, 0x08421000,
782         0xd0b8, 0x03773777, 0x02011003,
783         0x5bc0, 0x00200000, 0x50100000,
784         0x98f8, 0x33773777, 0x02011003,
785         0x98fc, 0xffffffff, 0x76543210,
786         0x7030, 0x31000311, 0x00000011,
787         0x2f48, 0x00000007, 0x02011003,
788         0x6b28, 0x00000010, 0x00000012,
789         0x7728, 0x00000010, 0x00000012,
790         0x10328, 0x00000010, 0x00000012,
791         0x10f28, 0x00000010, 0x00000012,
792         0x11b28, 0x00000010, 0x00000012,
793         0x12728, 0x00000010, 0x00000012,
794         0x240c, 0x000007ff, 0x00000380,
795         0x8a14, 0xf000001f, 0x00000007,
796         0x8b24, 0x3fff3fff, 0x00ff0fff,
797         0x8b10, 0x0000ff0f, 0x00000000,
798         0x28a4c, 0x07ffffff, 0x06000000,
799         0x10c, 0x00000001, 0x00010003,
800         0xa02c, 0xffffffff, 0x0000009b,
801         0x913c, 0x0000000f, 0x0100000a,
802         0x8d00, 0xffff7f7f, 0x100e4848,
803         0x8d04, 0x00ffffff, 0x00164745,
804         0x8c00, 0xfffc0003, 0xe4000003,
805         0x8c04, 0xf8ff00ff, 0x40600060,
806         0x8c08, 0x00ff00ff, 0x001c001c,
807         0x8cf0, 0x1fff1fff, 0x08e00620,
808         0x8c20, 0x0fff0fff, 0x00800080,
809         0x8c24, 0x0fff0fff, 0x00800080,
810         0x8c18, 0xffffffff, 0x20202078,
811         0x8c1c, 0x0000ffff, 0x00001010,
812         0x28350, 0x00000f01, 0x00000000,
813         0x9508, 0x3700001f, 0x00000002,
814         0x960c, 0xffffffff, 0x54763210,
815         0x88c4, 0x001f3ae3, 0x000000c2,
816         0x88d4, 0x0000001f, 0x00000010,
817         0x8974, 0xffffffff, 0x00000000
818 };
819
/*
 * Golden register settings for TURKS boards.
 * Rows of {register offset, bitmask, value}, consumed by
 * radeon_program_register_sequence() in evergreen_init_golden_registers().
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
870
/*
 * Golden register settings for CAICOS boards.
 * Rows of {register offset, bitmask, value}, consumed by
 * radeon_program_register_sequence() in evergreen_init_golden_registers().
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
921
922 static void evergreen_init_golden_registers(struct radeon_device *rdev)
923 {
924         switch (rdev->family) {
925         case CHIP_CYPRESS:
926         case CHIP_HEMLOCK:
927                 radeon_program_register_sequence(rdev,
928                                                  evergreen_golden_registers,
929                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
930                 radeon_program_register_sequence(rdev,
931                                                  evergreen_golden_registers2,
932                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
933                 radeon_program_register_sequence(rdev,
934                                                  cypress_mgcg_init,
935                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
936                 break;
937         case CHIP_JUNIPER:
938                 radeon_program_register_sequence(rdev,
939                                                  evergreen_golden_registers,
940                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
941                 radeon_program_register_sequence(rdev,
942                                                  evergreen_golden_registers2,
943                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
944                 radeon_program_register_sequence(rdev,
945                                                  juniper_mgcg_init,
946                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
947                 break;
948         case CHIP_REDWOOD:
949                 radeon_program_register_sequence(rdev,
950                                                  evergreen_golden_registers,
951                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
952                 radeon_program_register_sequence(rdev,
953                                                  evergreen_golden_registers2,
954                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
955                 radeon_program_register_sequence(rdev,
956                                                  redwood_mgcg_init,
957                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
958                 break;
959         case CHIP_CEDAR:
960                 radeon_program_register_sequence(rdev,
961                                                  cedar_golden_registers,
962                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
963                 radeon_program_register_sequence(rdev,
964                                                  evergreen_golden_registers2,
965                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
966                 radeon_program_register_sequence(rdev,
967                                                  cedar_mgcg_init,
968                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
969                 break;
970         case CHIP_PALM:
971                 radeon_program_register_sequence(rdev,
972                                                  wrestler_golden_registers,
973                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
974                 break;
975         case CHIP_SUMO:
976                 radeon_program_register_sequence(rdev,
977                                                  supersumo_golden_registers,
978                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
979                 break;
980         case CHIP_SUMO2:
981                 radeon_program_register_sequence(rdev,
982                                                  supersumo_golden_registers,
983                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
984                 radeon_program_register_sequence(rdev,
985                                                  sumo_golden_registers,
986                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
987                 break;
988         case CHIP_BARTS:
989                 radeon_program_register_sequence(rdev,
990                                                  barts_golden_registers,
991                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
992                 break;
993         case CHIP_TURKS:
994                 radeon_program_register_sequence(rdev,
995                                                  turks_golden_registers,
996                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
997                 break;
998         case CHIP_CAICOS:
999                 radeon_program_register_sequence(rdev,
1000                                                  caicos_golden_registers,
1001                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1002                 break;
1003         default:
1004                 break;
1005         }
1006 }
1007
1008 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1009                              unsigned *bankh, unsigned *mtaspect,
1010                              unsigned *tile_split)
1011 {
1012         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1013         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1014         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1015         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1016         switch (*bankw) {
1017         default:
1018         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1019         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1020         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1021         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1022         }
1023         switch (*bankh) {
1024         default:
1025         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1026         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1027         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1028         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1029         }
1030         switch (*mtaspect) {
1031         default:
1032         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1033         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1034         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1035         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1036         }
1037 }
1038
1039 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1040                               u32 cntl_reg, u32 status_reg)
1041 {
1042         int r, i;
1043         struct atom_clock_dividers dividers;
1044
1045         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1046                                            clock, false, &dividers);
1047         if (r)
1048                 return r;
1049
1050         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1051
1052         for (i = 0; i < 100; i++) {
1053                 if (RREG32(status_reg) & DCLK_STATUS)
1054                         break;
1055                 mdelay(10);
1056         }
1057         if (i == 100)
1058                 return -ETIMEDOUT;
1059
1060         return 0;
1061 }
1062
1063 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1064 {
1065         int r = 0;
1066         u32 cg_scratch = RREG32(CG_SCRATCH1);
1067
1068         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1069         if (r)
1070                 goto done;
1071         cg_scratch &= 0xffff0000;
1072         cg_scratch |= vclk / 100; /* Mhz */
1073
1074         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1075         if (r)
1076                 goto done;
1077         cg_scratch &= 0x0000ffff;
1078         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1079
1080 done:
1081         WREG32(CG_SCRATCH1, cg_scratch);
1082
1083         return r;
1084 }
1085
/**
 * evergreen_set_uvd_clocks - program the UVD PLL for the requested clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock (0 leaves the PLL bypassed and asleep)
 * @dclk: requested decode clock (0 leaves the PLL bypassed and asleep)
 *
 * Switches VCLK/DCLK to bypass, puts the UPLL in bypass mode, computes
 * and programs new feedback/post dividers, brings the PLL back up, then
 * reselects the PLL outputs.  The write ordering and delays below follow
 * the required hardware bring-up sequence -- do not reorder.
 * Returns 0 on success or a negative error code.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb and post dividers for the requested vclk/dclk pair */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	/* handshake with the SMC before reprogramming the dividers */
	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* ISPARE9 selection depends on the feedback divider range;
	 * presumably a VCO range hint -- see the divider calc helper
	 */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1174
/**
 * evergreen_fix_pci_max_read_req_size - sanitize PCIe max read request size
 *
 * @rdev: radeon_device pointer
 *
 * Reads the PCIe Device Control register and, if the MAX_READ_REQUEST_SIZE
 * field holds an invalid or unsupported encoding (0, 6 or 7), rewrites it
 * to encoding 2 (512 bytes).
 */
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	u16 ctl, v;
	int err;

	err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
	if (err)
		return;

	/* MAX_READ_REQUEST_SIZE occupies bits 14:12 of DEVCTL */
	v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;

	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7)) {
		ctl &= ~PCI_EXP_DEVCTL_READRQ;
		ctl |= (2 << 12); /* encoding 2 = 512 bytes */
		pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
	}
}
1195
1196 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1197 {
1198         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1199                 return true;
1200         else
1201                 return false;
1202 }
1203
1204 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1205 {
1206         u32 pos1, pos2;
1207
1208         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1209         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1210
1211         if (pos1 != pos2)
1212                 return true;
1213         else
1214                 return false;
1215 }
1216
1217 /**
1218  * dce4_wait_for_vblank - vblank wait asic callback.
1219  *
1220  * @rdev: radeon_device pointer
1221  * @crtc: crtc to wait for vblank on
1222  *
1223  * Wait for vblank on the requested crtc (evergreen+).
1224  */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	/* ignore out-of-range crtc indices */
	if (crtc >= rdev->num_crtc)
		return;

	/* nothing to wait for if the crtc is not enabled */
	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			/* every 100 iterations verify the scanout position
			 * counter is still advancing; bail out if the crtc
			 * is stuck so we never spin forever
			 */
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	/* now busy-wait until the next vblank actually starts, with the
	 * same stuck-counter escape hatch as above
	 */
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}
1252
1253 /**
 * evergreen_pre_page_flip - pre-pageflip callback.
1255  *
1256  * @rdev: radeon_device pointer
1257  * @crtc: crtc to prepare for pageflip on
1258  *
1259  * Pre-pageflip callback (evergreen+).
1260  * Enables the pageflip irq (vblank irq).
1261  */
void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int; the _get/_put naming suggests a refcounted
	 * enable balanced by evergreen_post_page_flip()
	 */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}
1267
1268 /**
 * evergreen_post_page_flip - post-pageflip callback.
1270  *
1271  * @rdev: radeon_device pointer
1272  * @crtc: crtc to cleanup pageflip on
1273  *
1274  * Post-pageflip callback (evergreen+).
1275  * Disables the pageflip irq (vblank irq).
1276  */
void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int; drops the reference taken in
	 * evergreen_pre_page_flip()
	 */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}
1282
1283 /**
1284  * evergreen_page_flip - pageflip callback.
1285  *
1286  * @rdev: radeon_device pointer
 * @crtc_id: crtc to perform the pageflip on
1288  * @crtc_base: new address of the crtc (GPU MC address)
1289  *
1290  * Does the actual pageflip (evergreen+).
1291  * During vblank we take the crtc lock and wait for the update_pending
1292  * bit to go high, when it does, we release the lock, and allow the
1293  * double buffered update to take place.
1294  * Returns the current update pending status.
1295  */
u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
	int i;

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses; both primary and secondary surface
	 * registers are written to the same new base
	 */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high.  The loop gives up silently
	 * after rdev->usec_timeout iterations (1us each) rather than
	 * blocking forever on a wedged crtc.
	 */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
			break;
		udelay(1);
	}
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}
1332
1333 /* get temperature in millidegrees */
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper applies a calibration offset from CG_THERMAL_CTRL
		 * to the raw TS0 ADC reading
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* toffset looks like a 9-bit two's-complement value:
		 * bit 8 set means the offset is negative
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* decode the reading: bit 10 clamps to -256, bit 9 clamps
		 * to 255, bit 8 marks a negative value (sign-extend the
		 * low 9 bits), otherwise the low 8 bits are the value
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degree units */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1371
1372 int sumo_get_temp(struct radeon_device *rdev)
1373 {
1374         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1375         int actual_temp = temp - 49;
1376
1377         return actual_temp * 1000;
1378 }
1379
1380 /**
1381  * sumo_pm_init_profile - Initialize power profiles callback.
1382  *
1383  * @rdev: radeon_device pointer
1384  *
1385  * Initialize the power states used in profile mode
1386  * (sumo, trinity, SI).
1387  * Used for profile mode only.
1388  */
1389 void sumo_pm_init_profile(struct radeon_device *rdev)
1390 {
1391         int idx;
1392
1393         /* default */
1394         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1395         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1396         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1397         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1398
1399         /* low,mid sh/mh */
1400         if (rdev->flags & RADEON_IS_MOBILITY)
1401                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1402         else
1403                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1404
1405         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1406         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1407         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1408         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1409
1410         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1411         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1412         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1413         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1414
1415         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1416         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1417         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1418         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1419
1420         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1421         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1422         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1423         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1424
1425         /* high sh/mh */
1426         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1427         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1428         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1429         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1430         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1431                 rdev->pm.power_state[idx].num_clock_modes - 1;
1432
1433         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1434         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1435         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1436         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1437                 rdev->pm.power_state[idx].num_clock_modes - 1;
1438 }
1439
1440 /**
1441  * btc_pm_init_profile - Initialize power profiles callback.
1442  *
1443  * @rdev: radeon_device pointer
1444  *
1445  * Initialize the power states used in profile mode
1446  * (BTC, cayman).
1447  * Used for profile mode only.
1448  */
1449 void btc_pm_init_profile(struct radeon_device *rdev)
1450 {
1451         int idx;
1452
1453         /* default */
1454         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1455         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1456         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1457         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1458         /* starting with BTC, there is one state that is used for both
1459          * MH and SH.  Difference is that we always use the high clock index for
1460          * mclk.
1461          */
1462         if (rdev->flags & RADEON_IS_MOBILITY)
1463                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1464         else
1465                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1466         /* low sh */
1467         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1468         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1469         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1470         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1471         /* mid sh */
1472         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1473         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1474         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1475         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1476         /* high sh */
1477         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1478         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1479         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1480         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1481         /* low mh */
1482         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1483         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1484         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1485         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1486         /* mid mh */
1487         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1488         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1489         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1490         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1491         /* high mh */
1492         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1493         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1494         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1495         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1496 }
1497
1498 /**
1499  * evergreen_pm_misc - set additional pm hw parameters callback.
1500  *
1501  * @rdev: radeon_device pointer
1502  *
1503  * Set non-clock parameters associated with a power state
1504  * (voltage, etc.) (evergreen+).
1505  */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only touch the regulator when the target vddc differs
		 * from what is currently programmed
		 */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1545
1546 /**
1547  * evergreen_pm_prepare - pre-power state change callback.
1548  *
1549  * @rdev: radeon_device pointer
1550  *
1551  * Prepare for a power state change (evergreen+).
1552  */
1553 void evergreen_pm_prepare(struct radeon_device *rdev)
1554 {
1555         struct drm_device *ddev = rdev->ddev;
1556         struct drm_crtc *crtc;
1557         struct radeon_crtc *radeon_crtc;
1558         u32 tmp;
1559
1560         /* disable any active CRTCs */
1561         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1562                 radeon_crtc = to_radeon_crtc(crtc);
1563                 if (radeon_crtc->enabled) {
1564                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1565                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1566                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1567                 }
1568         }
1569 }
1570
1571 /**
1572  * evergreen_pm_finish - post-power state change callback.
1573  *
1574  * @rdev: radeon_device pointer
1575  *
1576  * Clean up after a power state change (evergreen+).
1577  */
1578 void evergreen_pm_finish(struct radeon_device *rdev)
1579 {
1580         struct drm_device *ddev = rdev->ddev;
1581         struct drm_crtc *crtc;
1582         struct radeon_crtc *radeon_crtc;
1583         u32 tmp;
1584
1585         /* enable any active CRTCs */
1586         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1587                 radeon_crtc = to_radeon_crtc(crtc);
1588                 if (radeon_crtc->enabled) {
1589                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1590                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1591                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1592                 }
1593         }
1594 }
1595
1596 /**
1597  * evergreen_hpd_sense - hpd sense callback.
1598  *
1599  * @rdev: radeon_device pointer
1600  * @hpd: hpd (hotplug detect) pin
1601  *
1602  * Checks if a digital monitor is connected (evergreen+).
1603  * Returns true if connected, false if not connected.
1604  */
1605 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1606 {
1607         bool connected = false;
1608
1609         switch (hpd) {
1610         case RADEON_HPD_1:
1611                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1612                         connected = true;
1613                 break;
1614         case RADEON_HPD_2:
1615                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1616                         connected = true;
1617                 break;
1618         case RADEON_HPD_3:
1619                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1620                         connected = true;
1621                 break;
1622         case RADEON_HPD_4:
1623                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1624                         connected = true;
1625                 break;
1626         case RADEON_HPD_5:
1627                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1628                         connected = true;
1629                 break;
1630         case RADEON_HPD_6:
1631                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1632                         connected = true;
1633                         break;
1634         default:
1635                 break;
1636         }
1637
1638         return connected;
1639 }
1640
1641 /**
1642  * evergreen_hpd_set_polarity - hpd set polarity callback.
1643  *
1644  * @rdev: radeon_device pointer
1645  * @hpd: hpd (hotplug detect) pin
1646  *
1647  * Set the polarity of the hpd pin (evergreen+).
1648  */
1649 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1650                                 enum radeon_hpd_id hpd)
1651 {
1652         u32 tmp;
1653         bool connected = evergreen_hpd_sense(rdev, hpd);
1654
1655         switch (hpd) {
1656         case RADEON_HPD_1:
1657                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1658                 if (connected)
1659                         tmp &= ~DC_HPDx_INT_POLARITY;
1660                 else
1661                         tmp |= DC_HPDx_INT_POLARITY;
1662                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1663                 break;
1664         case RADEON_HPD_2:
1665                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1666                 if (connected)
1667                         tmp &= ~DC_HPDx_INT_POLARITY;
1668                 else
1669                         tmp |= DC_HPDx_INT_POLARITY;
1670                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1671                 break;
1672         case RADEON_HPD_3:
1673                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1674                 if (connected)
1675                         tmp &= ~DC_HPDx_INT_POLARITY;
1676                 else
1677                         tmp |= DC_HPDx_INT_POLARITY;
1678                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1679                 break;
1680         case RADEON_HPD_4:
1681                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1682                 if (connected)
1683                         tmp &= ~DC_HPDx_INT_POLARITY;
1684                 else
1685                         tmp |= DC_HPDx_INT_POLARITY;
1686                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1687                 break;
1688         case RADEON_HPD_5:
1689                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1690                 if (connected)
1691                         tmp &= ~DC_HPDx_INT_POLARITY;
1692                 else
1693                         tmp |= DC_HPDx_INT_POLARITY;
1694                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1695                         break;
1696         case RADEON_HPD_6:
1697                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1698                 if (connected)
1699                         tmp &= ~DC_HPDx_INT_POLARITY;
1700                 else
1701                         tmp |= DC_HPDx_INT_POLARITY;
1702                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1703                 break;
1704         default:
1705                 break;
1706         }
1707 }
1708
1709 /**
1710  * evergreen_hpd_init - hpd setup callback.
1711  *
1712  * @rdev: radeon_device pointer
1713  *
1714  * Setup the hpd pins used by the card (evergreen+).
1715  * Enable the pin, set the polarity, and enable the hpd interrupts.
1716  */
1717 void evergreen_hpd_init(struct radeon_device *rdev)
1718 {
1719         struct drm_device *dev = rdev->ddev;
1720         struct drm_connector *connector;
1721         unsigned enabled = 0;
1722         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1723                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1724
1725         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1726                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1727
1728                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1729                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1730                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1731                          * aux dp channel on imac and help (but not completely fix)
1732                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1733                          * also avoid interrupt storms during dpms.
1734                          */
1735                         continue;
1736                 }
1737                 switch (radeon_connector->hpd.hpd) {
1738                 case RADEON_HPD_1:
1739                         WREG32(DC_HPD1_CONTROL, tmp);
1740                         break;
1741                 case RADEON_HPD_2:
1742                         WREG32(DC_HPD2_CONTROL, tmp);
1743                         break;
1744                 case RADEON_HPD_3:
1745                         WREG32(DC_HPD3_CONTROL, tmp);
1746                         break;
1747                 case RADEON_HPD_4:
1748                         WREG32(DC_HPD4_CONTROL, tmp);
1749                         break;
1750                 case RADEON_HPD_5:
1751                         WREG32(DC_HPD5_CONTROL, tmp);
1752                         break;
1753                 case RADEON_HPD_6:
1754                         WREG32(DC_HPD6_CONTROL, tmp);
1755                         break;
1756                 default:
1757                         break;
1758                 }
1759                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1760                 enabled |= 1 << radeon_connector->hpd.hpd;
1761         }
1762         radeon_irq_kms_enable_hpd(rdev, enabled);
1763 }
1764
1765 /**
1766  * evergreen_hpd_fini - hpd tear down callback.
1767  *
1768  * @rdev: radeon_device pointer
1769  *
1770  * Tear down the hpd pins used by the card (evergreen+).
1771  * Disable the hpd interrupts.
1772  */
1773 void evergreen_hpd_fini(struct radeon_device *rdev)
1774 {
1775         struct drm_device *dev = rdev->ddev;
1776         struct drm_connector *connector;
1777         unsigned disabled = 0;
1778
1779         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1780                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1781                 switch (radeon_connector->hpd.hpd) {
1782                 case RADEON_HPD_1:
1783                         WREG32(DC_HPD1_CONTROL, 0);
1784                         break;
1785                 case RADEON_HPD_2:
1786                         WREG32(DC_HPD2_CONTROL, 0);
1787                         break;
1788                 case RADEON_HPD_3:
1789                         WREG32(DC_HPD3_CONTROL, 0);
1790                         break;
1791                 case RADEON_HPD_4:
1792                         WREG32(DC_HPD4_CONTROL, 0);
1793                         break;
1794                 case RADEON_HPD_5:
1795                         WREG32(DC_HPD5_CONTROL, 0);
1796                         break;
1797                 case RADEON_HPD_6:
1798                         WREG32(DC_HPD6_CONTROL, 0);
1799                         break;
1800                 default:
1801                         break;
1802                 }
1803                 disabled |= 1 << radeon_connector->hpd.hpd;
1804         }
1805         radeon_irq_kms_disable_hpd(rdev, disabled);
1806 }
1807
1808 /* watermark setup */
1809
/* Program the line buffer split for a CRTC and return the number of
 * line buffer entries (in pixels) allocated to it, or 0 if the CRTC
 * is disabled.  @other_mode is the mode on the paired CRTC sharing
 * the same line buffer, or NULL if that CRTC is unused.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The paritioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			/* paired CRTC is active: split the lb in half */
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			/* paired CRTC idle: take the whole lb */
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/5 also need the DMIF buffers programmed; wait (bounded by
	 * usec_timeout) for the hw to ack the new allocation.
	 */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the split setting back to a size in pixels; DCE5 has a
	 * larger line buffer than earlier parts.
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1900
1901 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1902 {
1903         u32 tmp = RREG32(MC_SHARED_CHMAP);
1904
1905         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1906         case 0:
1907         default:
1908                 return 1;
1909         case 1:
1910                 return 2;
1911         case 2:
1912                 return 4;
1913         case 3:
1914                 return 8;
1915         }
1916 }
1917
/* Inputs to the display watermark calculations below; filled in by
 * evergreen_program_watermarks() from the current mode and clocks.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1933
1934 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1935 {
1936         /* Calculate DRAM Bandwidth and the part allocated to display. */
1937         fixed20_12 dram_efficiency; /* 0.7 */
1938         fixed20_12 yclk, dram_channels, bandwidth;
1939         fixed20_12 a;
1940
1941         a.full = dfixed_const(1000);
1942         yclk.full = dfixed_const(wm->yclk);
1943         yclk.full = dfixed_div(yclk, a);
1944         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1945         a.full = dfixed_const(10);
1946         dram_efficiency.full = dfixed_const(7);
1947         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1948         bandwidth.full = dfixed_mul(dram_channels, yclk);
1949         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1950
1951         return dfixed_trunc(bandwidth);
1952 }
1953
1954 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1955 {
1956         /* Calculate DRAM Bandwidth and the part allocated to display. */
1957         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1958         fixed20_12 yclk, dram_channels, bandwidth;
1959         fixed20_12 a;
1960
1961         a.full = dfixed_const(1000);
1962         yclk.full = dfixed_const(wm->yclk);
1963         yclk.full = dfixed_div(yclk, a);
1964         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1965         a.full = dfixed_const(10);
1966         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1967         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1968         bandwidth.full = dfixed_mul(dram_channels, yclk);
1969         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1970
1971         return dfixed_trunc(bandwidth);
1972 }
1973
1974 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1975 {
1976         /* Calculate the display Data return Bandwidth */
1977         fixed20_12 return_efficiency; /* 0.8 */
1978         fixed20_12 sclk, bandwidth;
1979         fixed20_12 a;
1980
1981         a.full = dfixed_const(1000);
1982         sclk.full = dfixed_const(wm->sclk);
1983         sclk.full = dfixed_div(sclk, a);
1984         a.full = dfixed_const(10);
1985         return_efficiency.full = dfixed_const(8);
1986         return_efficiency.full = dfixed_div(return_efficiency, a);
1987         a.full = dfixed_const(32);
1988         bandwidth.full = dfixed_mul(a, sclk);
1989         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1990
1991         return dfixed_trunc(bandwidth);
1992 }
1993
1994 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1995 {
1996         /* Calculate the DMIF Request Bandwidth */
1997         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1998         fixed20_12 disp_clk, bandwidth;
1999         fixed20_12 a;
2000
2001         a.full = dfixed_const(1000);
2002         disp_clk.full = dfixed_const(wm->disp_clk);
2003         disp_clk.full = dfixed_div(disp_clk, a);
2004         a.full = dfixed_const(10);
2005         disp_clk_request_efficiency.full = dfixed_const(8);
2006         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2007         a.full = dfixed_const(32);
2008         bandwidth.full = dfixed_mul(a, disp_clk);
2009         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2010
2011         return dfixed_trunc(bandwidth);
2012 }
2013
2014 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2015 {
2016         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2017         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2018         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2019         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2020
2021         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2022 }
2023
2024 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2025 {
2026         /* Calculate the display mode Average Bandwidth
2027          * DisplayMode should contain the source and destination dimensions,
2028          * timing, etc.
2029          */
2030         fixed20_12 bpp;
2031         fixed20_12 line_time;
2032         fixed20_12 src_width;
2033         fixed20_12 bandwidth;
2034         fixed20_12 a;
2035
2036         a.full = dfixed_const(1000);
2037         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2038         line_time.full = dfixed_div(line_time, a);
2039         bpp.full = dfixed_const(wm->bytes_per_pixel);
2040         src_width.full = dfixed_const(wm->src_width);
2041         bandwidth.full = dfixed_mul(src_width, bpp);
2042         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2043         bandwidth.full = dfixed_div(bandwidth, line_time);
2044
2045         return dfixed_trunc(bandwidth);
2046 }
2047
/* Compute the latency watermark (in ns) for the given display
 * parameters: memory latency plus the time other heads can hold the
 * bus, extended by any extra line-buffer fill time the mode needs.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calcualte the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* scaled or interlaced modes need more source lines per output line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* bandwidth the display clock itself can consume (bytes * MHz) */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fills at the lower of the two rates */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fetch the worst-case number of source lines at that rate */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the fill overruns the active period, add the overrun to latency */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2100
2101 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2102 {
2103         if (evergreen_average_bandwidth(wm) <=
2104             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2105                 return true;
2106         else
2107                 return false;
2108 };
2109
2110 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2111 {
2112         if (evergreen_average_bandwidth(wm) <=
2113             (evergreen_available_bandwidth(wm) / wm->num_heads))
2114                 return true;
2115         else
2116                 return false;
2117 };
2118
2119 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2120 {
2121         u32 lb_partitions = wm->lb_size / wm->src_width;
2122         u32 line_time = wm->active_time + wm->blank_time;
2123         u32 latency_tolerant_lines;
2124         u32 latency_hiding;
2125         fixed20_12 a;
2126
2127         a.full = dfixed_const(1);
2128         if (wm->vsc.full > a.full)
2129                 latency_tolerant_lines = 1;
2130         else {
2131                 if (lb_partitions <= (wm->vtaps + 1))
2132                         latency_tolerant_lines = 1;
2133                 else
2134                         latency_tolerant_lines = 2;
2135         }
2136
2137         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2138
2139         if (evergreen_latency_watermark(wm) <= latency_hiding)
2140                 return true;
2141         else
2142                 return false;
2143 }
2144
2145 static void evergreen_program_watermarks(struct radeon_device *rdev,
2146                                          struct radeon_crtc *radeon_crtc,
2147                                          u32 lb_size, u32 num_heads)
2148 {
2149         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2150         struct evergreen_wm_params wm_low, wm_high;
2151         u32 dram_channels;
2152         u32 pixel_period;
2153         u32 line_time = 0;
2154         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2155         u32 priority_a_mark = 0, priority_b_mark = 0;
2156         u32 priority_a_cnt = PRIORITY_OFF;
2157         u32 priority_b_cnt = PRIORITY_OFF;
2158         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2159         u32 tmp, arb_control3;
2160         fixed20_12 a, b, c;
2161
2162         if (radeon_crtc->base.enabled && num_heads && mode) {
2163                 pixel_period = 1000000 / (u32)mode->clock;
2164                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2165                 priority_a_cnt = 0;
2166                 priority_b_cnt = 0;
2167                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2168
2169                 /* watermark for high clocks */
2170                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2171                         wm_high.yclk =
2172                                 radeon_dpm_get_mclk(rdev, false) * 10;
2173                         wm_high.sclk =
2174                                 radeon_dpm_get_sclk(rdev, false) * 10;
2175                 } else {
2176                         wm_high.yclk = rdev->pm.current_mclk * 10;
2177                         wm_high.sclk = rdev->pm.current_sclk * 10;
2178                 }
2179
2180                 wm_high.disp_clk = mode->clock;
2181                 wm_high.src_width = mode->crtc_hdisplay;
2182                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2183                 wm_high.blank_time = line_time - wm_high.active_time;
2184                 wm_high.interlaced = false;
2185                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2186                         wm_high.interlaced = true;
2187                 wm_high.vsc = radeon_crtc->vsc;
2188                 wm_high.vtaps = 1;
2189                 if (radeon_crtc->rmx_type != RMX_OFF)
2190                         wm_high.vtaps = 2;
2191                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2192                 wm_high.lb_size = lb_size;
2193                 wm_high.dram_channels = dram_channels;
2194                 wm_high.num_heads = num_heads;
2195
2196                 /* watermark for low clocks */
2197                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2198                         wm_low.yclk =
2199                                 radeon_dpm_get_mclk(rdev, true) * 10;
2200                         wm_low.sclk =
2201                                 radeon_dpm_get_sclk(rdev, true) * 10;
2202                 } else {
2203                         wm_low.yclk = rdev->pm.current_mclk * 10;
2204                         wm_low.sclk = rdev->pm.current_sclk * 10;
2205                 }
2206
2207                 wm_low.disp_clk = mode->clock;
2208                 wm_low.src_width = mode->crtc_hdisplay;
2209                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2210                 wm_low.blank_time = line_time - wm_low.active_time;
2211                 wm_low.interlaced = false;
2212                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2213                         wm_low.interlaced = true;
2214                 wm_low.vsc = radeon_crtc->vsc;
2215                 wm_low.vtaps = 1;
2216                 if (radeon_crtc->rmx_type != RMX_OFF)
2217                         wm_low.vtaps = 2;
2218                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2219                 wm_low.lb_size = lb_size;
2220                 wm_low.dram_channels = dram_channels;
2221                 wm_low.num_heads = num_heads;
2222
2223                 /* set for high clocks */
2224                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2225                 /* set for low clocks */
2226                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2227
2228                 /* possibly force display priority to high */
2229                 /* should really do this at mode validation time... */
2230                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2231                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2232                     !evergreen_check_latency_hiding(&wm_high) ||
2233                     (rdev->disp_priority == 2)) {
2234                         DRM_DEBUG_KMS("force priority a to high\n");
2235                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2236                 }
2237                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2238                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2239                     !evergreen_check_latency_hiding(&wm_low) ||
2240                     (rdev->disp_priority == 2)) {
2241                         DRM_DEBUG_KMS("force priority b to high\n");
2242                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2243                 }
2244
2245                 a.full = dfixed_const(1000);
2246                 b.full = dfixed_const(mode->clock);
2247                 b.full = dfixed_div(b, a);
2248                 c.full = dfixed_const(latency_watermark_a);
2249                 c.full = dfixed_mul(c, b);
2250                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2251                 c.full = dfixed_div(c, a);
2252                 a.full = dfixed_const(16);
2253                 c.full = dfixed_div(c, a);
2254                 priority_a_mark = dfixed_trunc(c);
2255                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2256
2257                 a.full = dfixed_const(1000);
2258                 b.full = dfixed_const(mode->clock);
2259                 b.full = dfixed_div(b, a);
2260                 c.full = dfixed_const(latency_watermark_b);
2261                 c.full = dfixed_mul(c, b);
2262                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2263                 c.full = dfixed_div(c, a);
2264                 a.full = dfixed_const(16);
2265                 c.full = dfixed_div(c, a);
2266                 priority_b_mark = dfixed_trunc(c);
2267                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2268         }
2269
2270         /* select wm A */
2271         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2272         tmp = arb_control3;
2273         tmp &= ~LATENCY_WATERMARK_MASK(3);
2274         tmp |= LATENCY_WATERMARK_MASK(1);
2275         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2276         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2277                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2278                 LATENCY_HIGH_WATERMARK(line_time)));
2279         /* select wm B */
2280         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2281         tmp &= ~LATENCY_WATERMARK_MASK(3);
2282         tmp |= LATENCY_WATERMARK_MASK(2);
2283         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2284         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2285                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2286                 LATENCY_HIGH_WATERMARK(line_time)));
2287         /* restore original selection */
2288         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2289
2290         /* write the priority marks */
2291         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2292         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2293
2294         /* save values for DPM */
2295         radeon_crtc->line_time = line_time;
2296         radeon_crtc->wm_high = latency_watermark_a;
2297         radeon_crtc->wm_low = latency_watermark_b;
2298 }
2299
2300 /**
2301  * evergreen_bandwidth_update - update display watermarks callback.
2302  *
2303  * @rdev: radeon_device pointer
2304  *
2305  * Update the display watermarks based on the requested mode(s)
2306  * (evergreen+).
2307  */
2308 void evergreen_bandwidth_update(struct radeon_device *rdev)
2309 {
2310         struct drm_display_mode *mode0 = NULL;
2311         struct drm_display_mode *mode1 = NULL;
2312         u32 num_heads = 0, lb_size;
2313         int i;
2314
2315         radeon_update_display_priority(rdev);
2316
2317         for (i = 0; i < rdev->num_crtc; i++) {
2318                 if (rdev->mode_info.crtcs[i]->base.enabled)
2319                         num_heads++;
2320         }
2321         for (i = 0; i < rdev->num_crtc; i += 2) {
2322                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2323                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2324                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2325                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2326                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2327                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2328         }
2329 }
2330
2331 /**
2332  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2333  *
2334  * @rdev: radeon_device pointer
2335  *
2336  * Wait for the MC (memory controller) to be idle.
2337  * (evergreen+).
2338  * Returns 0 if the MC is idle, -1 if not.
2339  */
2340 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2341 {
2342         unsigned i;
2343         u32 tmp;
2344
2345         for (i = 0; i < rdev->usec_timeout; i++) {
2346                 /* read MC_STATUS */
2347                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2348                 if (!tmp)
2349                         return 0;
2350                 udelay(1);
2351         }
2352         return -1;
2353 }
2354
2355 /*
2356  * GART
2357  */
2358 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2359 {
2360         unsigned i;
2361         u32 tmp;
2362
2363         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2364
2365         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2366         for (i = 0; i < rdev->usec_timeout; i++) {
2367                 /* read MC_STATUS */
2368                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2369                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2370                 if (tmp == 2) {
2371                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2372                         return;
2373                 }
2374                 if (tmp) {
2375                         return;
2376                 }
2377                 udelay(1);
2378         }
2379 }
2380
/* Enable the PCIE GART: pin the page table in VRAM, program the VM L2
 * cache and L1 TLBs, set up the context0 page-table range, and flush
 * the TLB.  Returns 0 on success or a negative error code.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
        u32 tmp;
        int r;

        if (rdev->gart.robj == NULL) {
                dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
                return -EINVAL;
        }
        /* pin the GART page table in VRAM so the MC can reach it */
        r = radeon_gart_table_vram_pin(rdev);
        if (r)
                return r;
        radeon_gart_restore(rdev);
        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        if (rdev->flags & RADEON_IS_IGP) {
                /* fusion/IGP parts use the FUS_ variants of the MD TLB regs */
                WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
                WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
                WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
        } else {
                WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
                WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
                WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
                /* these discrete parts have an extra MD L1 TLB instance */
                if ((rdev->family == CHIP_JUNIPER) ||
                    (rdev->family == CHIP_CYPRESS) ||
                    (rdev->family == CHIP_HEMLOCK) ||
                    (rdev->family == CHIP_BARTS))
                        WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
        }
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        /* point VM context 0 at the GTT range and the page table base */
        WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
        WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
        WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
                                RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
        /* faulting accesses are redirected to the dummy page */
        WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
                        (u32)(rdev->dummy_page.addr >> 12));
        WREG32(VM_CONTEXT1_CNTL, 0);

        evergreen_pcie_gart_tlb_flush(rdev);
        DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
                 (unsigned)(rdev->mc.gtt_size >> 20),
                 (unsigned long long)rdev->gart.table_addr);
        rdev->gart.ready = true;
        return 0;
}
2439
/* Disable the PCIE GART: turn off both VM contexts, leave the L2 cache
 * and L1 TLBs configured but with caching/TLB enables cleared, then
 * unpin the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
        u32 tmp;

        /* Disable all tables */
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control: note ENABLE_L1_TLB is deliberately not set */
        tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        radeon_gart_table_vram_unpin(rdev);
}
2464
/* Tear down the PCIE GART: disable the hardware first, then free the
 * page table and the common GART state.  Order matters: the table must
 * not be freed while the MC can still reference it.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
        evergreen_pcie_gart_disable(rdev);
        radeon_gart_table_vram_free(rdev);
        radeon_gart_fini(rdev);
}
2471
2472
/* Configure the VM L2 cache and L1 TLBs for AGP operation: same TLB
 * setup as the GART path, but both VM contexts are left disabled since
 * AGP does not use GPU page tables.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
        u32 tmp;

        /* Setup L2 cache */
        WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
                                ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
                                EFFECTIVE_L2_QUEUE_SIZE(7));
        WREG32(VM_L2_CNTL2, 0);
        WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
        /* Setup TLB control */
        tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
                SYSTEM_ACCESS_MODE_NOT_IN_SYS |
                SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
                EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
        WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
        WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
        /* no page tables for AGP: both VM contexts stay disabled */
        WREG32(VM_CONTEXT0_CNTL, 0);
        WREG32(VM_CONTEXT1_CNTL, 0);
}
2498
/* Stop display accesses to the framebuffer so the MC can be safely
 * reprogrammed: disable VGA rendering, blank/disable each active crtc,
 * black out the MC, and lock the double-buffered display registers.
 * State needed by evergreen_mc_resume() is recorded in @save.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        u32 crtc_enabled, tmp, frame_count, blackout;
        int i, j;

        if (!ASIC_IS_NODCE(rdev)) {
                /* save VGA state so resume can restore it */
                save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
                save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

                /* disable VGA render */
                WREG32(VGA_RENDER_CONTROL, 0);
        }
        /* blank the display controllers */
        for (i = 0; i < rdev->num_crtc; i++) {
                crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
                if (crtc_enabled) {
                        save->crtc_enabled[i] = true;
                        if (ASIC_IS_DCE6(rdev)) {
                                /* DCE6: blank the crtc via CRTC_BLANK_CONTROL */
                                tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
                                if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
                                        radeon_wait_for_vblank(rdev, i);
                                        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
                                        tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
                                        WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
                                }
                        } else {
                                /* pre-DCE6: stop the crtc's display read requests */
                                tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
                                if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
                                        radeon_wait_for_vblank(rdev, i);
                                        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
                                        tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
                                        WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
                                        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
                                }
                        }
                        /* wait for the next frame */
                        frame_count = radeon_get_vblank_counter(rdev, i);
                        for (j = 0; j < rdev->usec_timeout; j++) {
                                if (radeon_get_vblank_counter(rdev, i) != frame_count)
                                        break;
                                udelay(1);
                        }

                        /* XXX this is a hack to avoid strange behavior with EFI on certain systems */
                        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
                        tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
                        tmp &= ~EVERGREEN_CRTC_MASTER_EN;
                        WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
                        WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
                        /* crtc is left disabled; mark it so resume skips it and
                         * the later modeset brings it back up instead */
                        save->crtc_enabled[i] = false;
                } else {
                        save->crtc_enabled[i] = false;
                }
        }

        radeon_mc_wait_for_idle(rdev);

        blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
        if ((blackout & BLACKOUT_MODE_MASK) != 1) {
                /* Block CPU access */
                WREG32(BIF_FB_EN, 0);
                /* blackout the MC */
                blackout &= ~BLACKOUT_MODE_MASK;
                WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
        }
        /* wait for the MC to settle */
        udelay(100);

        /* lock double buffered regs */
        for (i = 0; i < rdev->num_crtc; i++) {
                if (save->crtc_enabled[i]) {
                        tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
                        if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
                                tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
                                WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
                        }
                        tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
                        if (!(tmp & 1)) {
                                tmp |= 1;
                                WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
                        }
                }
        }
}
2584
/* Undo evergreen_mc_stop(): repoint all crtc scanout bases at the new
 * VRAM location, unlock the double-buffered display registers, lift the
 * MC blackout, unblank the crtcs recorded as enabled in @save, and
 * restore VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
        u32 tmp, frame_count;
        int i, j;

        /* update crtc base addresses */
        for (i = 0; i < rdev->num_crtc; i++) {
                WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
                       upper_32_bits(rdev->mc.vram_start));
                WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
                       upper_32_bits(rdev->mc.vram_start));
                WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
                       (u32)rdev->mc.vram_start);
                WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
                       (u32)rdev->mc.vram_start);
        }

        if (!ASIC_IS_NODCE(rdev)) {
                /* point the VGA memory aperture at the start of VRAM too */
                WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
                WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
        }

        /* unlock regs and wait for update */
        for (i = 0; i < rdev->num_crtc; i++) {
                if (save->crtc_enabled[i]) {
                        /* clear MASTER_UPDATE_MODE so updates latch immediately */
                        tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
                        if ((tmp & 0x3) != 0) {
                                tmp &= ~0x3;
                                WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
                        }
                        tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
                        if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
                                tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
                                WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
                        }
                        tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
                        if (tmp & 1) {
                                tmp &= ~1;
                                WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
                        }
                        /* wait until the pending surface update has latched */
                        for (j = 0; j < rdev->usec_timeout; j++) {
                                tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
                                if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
                                        break;
                                udelay(1);
                        }
                }
        }

        /* unblackout the MC */
        tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
        tmp &= ~BLACKOUT_MODE_MASK;
        WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
        /* allow CPU access */
        WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

        for (i = 0; i < rdev->num_crtc; i++) {
                if (save->crtc_enabled[i]) {
                        if (ASIC_IS_DCE6(rdev)) {
                                /* DCE6: unblank via CRTC_BLANK_CONTROL */
                                tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
                                tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
                                WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
                                WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
                                WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
                        } else {
                                /* pre-DCE6: re-enable display read requests */
                                tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
                                tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
                                WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
                                WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
                                WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
                        }
                        /* wait for the next frame */
                        frame_count = radeon_get_vblank_counter(rdev, i);
                        for (j = 0; j < rdev->usec_timeout; j++) {
                                if (radeon_get_vblank_counter(rdev, i) != frame_count)
                                        break;
                                udelay(1);
                        }
                }
        }
        if (!ASIC_IS_NODCE(rdev)) {
                /* Unlock vga access */
                WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
                mdelay(1);
                WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
        }
}
2672
/* Program the memory controller: stop display access, set up the
 * system/VRAM/AGP apertures and the FB location, then resume display
 * access.  Must run with the displays stopped (evergreen_mc_stop) since
 * it moves the framebuffer aperture underneath them.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
        struct evergreen_mc_save save;
        u32 tmp;
        int i, j;

        /* Initialize HDP */
        for (i = 0, j = 0; i < 32; i++, j += 0x18) {
                WREG32((0x2c14 + j), 0x00000000);
                WREG32((0x2c18 + j), 0x00000000);
                WREG32((0x2c1c + j), 0x00000000);
                WREG32((0x2c20 + j), 0x00000000);
                WREG32((0x2c24 + j), 0x00000000);
        }
        WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

        evergreen_mc_stop(rdev, &save);
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
        }
        /* Lockout access through VGA aperture*/
        WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
        /* Update configuration */
        if (rdev->flags & RADEON_IS_AGP) {
                /* the system aperture must cover both VRAM and the AGP range */
                if (rdev->mc.vram_start < rdev->mc.gtt_start) {
                        /* VRAM before AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.vram_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.gtt_end >> 12);
                } else {
                        /* VRAM after AGP */
                        WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                                rdev->mc.gtt_start >> 12);
                        WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                                rdev->mc.vram_end >> 12);
                }
        } else {
                WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
                        rdev->mc.vram_start >> 12);
                WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
                        rdev->mc.vram_end >> 12);
        }
        WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
        /* llano/ontario only */
        if ((rdev->family == CHIP_PALM) ||
            (rdev->family == CHIP_SUMO) ||
            (rdev->family == CHIP_SUMO2)) {
                /* IGP parts carry the FB offset in MC_FUS_VM_FB_OFFSET */
                tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
                tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
                tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
                WREG32(MC_FUS_VM_FB_OFFSET, tmp);
        }
        /* FB location: end in the high 16 bits, start in the low 16 */
        tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
        tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
        WREG32(MC_VM_FB_LOCATION, tmp);
        WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
        WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
        WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
        if (rdev->flags & RADEON_IS_AGP) {
                WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
                WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
                WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
        } else {
                /* no AGP: park the AGP aperture (BOT > TOP disables it) */
                WREG32(MC_VM_AGP_BASE, 0);
                WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
                WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
        }
        if (evergreen_mc_wait_for_idle(rdev)) {
                dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
        }
        evergreen_mc_resume(rdev, &save);
        /* we need to own VRAM, so turn off the VGA renderer here
         * to stop it overwriting our objects */
        rv515_vga_render_disable(rdev);
}
2749
2750 /*
2751  * CP.
2752  */
/* Schedule an indirect buffer (IB) on the given ring: switch the CP to
 * DX10/11 mode, optionally record the post-IB read pointer (either in
 * rptr_save_reg or via a writeback MEM_WRITE), then emit the
 * INDIRECT_BUFFER packet pointing at the IB.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
        struct radeon_ring *ring = &rdev->ring[ib->ring];
        u32 next_rptr;

        /* set to DX10/11 mode */
        radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
        radeon_ring_write(ring, 1);

        if (ring->rptr_save_reg) {
                /* 3 dwords for this packet + 4 for the IB packet below */
                next_rptr = ring->wptr + 3 + 4;
                radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
                radeon_ring_write(ring, ((ring->rptr_save_reg - 
                                          PACKET3_SET_CONFIG_REG_START) >> 2));
                radeon_ring_write(ring, next_rptr);
        } else if (rdev->wb.enabled) {
                /* 5 dwords for MEM_WRITE + 4 for the IB packet below */
                next_rptr = ring->wptr + 5 + 4;
                radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
                radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
                /* bit 18: write confirm; low byte: upper address bits */
                radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
                radeon_ring_write(ring, next_rptr);
                radeon_ring_write(ring, 0);
        }

        radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
        radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
                          (2 << 0) |
#endif
                          (ib->gpu_addr & 0xFFFFFFFC));
        radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
        radeon_ring_write(ring, ib->length_dw);
}
2786
2787
/* Upload the PFP and ME microcode images into the CP.  The CP must be
 * stopped first; the ucode words are stored big-endian in the firmware
 * blobs and byte-swapped on write.  Returns 0 on success, -EINVAL if
 * the firmware has not been loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
        const __be32 *fw_data;
        int i;

        if (!rdev->me_fw || !rdev->pfp_fw)
                return -EINVAL;

        r700_cp_stop(rdev);
        WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
               BUF_SWAP_32BIT |
#endif
               RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

        /* write the PFP ucode, then reset its write address */
        fw_data = (const __be32 *)rdev->pfp_fw->data;
        WREG32(CP_PFP_UCODE_ADDR, 0);
        for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
                WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
        WREG32(CP_PFP_UCODE_ADDR, 0);

        /* write the ME (PM4 micro engine) ucode */
        fw_data = (const __be32 *)rdev->me_fw->data;
        WREG32(CP_ME_RAM_WADDR, 0);
        for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
                WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

        /* reset read/write addresses so execution starts at word 0 */
        WREG32(CP_PFP_UCODE_ADDR, 0);
        WREG32(CP_ME_RAM_WADDR, 0);
        WREG32(CP_ME_RAM_RADDR, 0);
        return 0;
}
2819
/* Initialize the CP micro engine and emit the initial command stream:
 * an ME_INITIALIZE packet, then the clear-state preamble plus a handful
 * of default register writes.  The dword counts passed to
 * radeon_ring_lock() must exactly match the number of writes that
 * follow.  Returns 0 on success or the ring-lock error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
        struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
        int r, i;
        uint32_t cp_me;

        /* 7 dwords: ME_INITIALIZE header + 6 payload words */
        r = radeon_ring_lock(rdev, ring, 7);
        if (r) {
                DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
                return r;
        }
        radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
        radeon_ring_write(ring, 0x1);
        radeon_ring_write(ring, 0x0);
        radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
        radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
        radeon_ring_write(ring, 0);
        radeon_ring_write(ring, 0);
        radeon_ring_unlock_commit(rdev, ring);

        /* un-halt the micro engine */
        cp_me = 0xff;
        WREG32(CP_ME_CNTL, cp_me);

        /* 19 extra dwords on top of the default state below */
        r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
        if (r) {
                DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
                return r;
        }

        /* setup clear context state */
        radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
        radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

        for (i = 0; i < evergreen_default_size; i++)
                radeon_ring_write(ring, evergreen_default_state[i]);

        radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
        radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

        /* set clear context state */
        radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
        radeon_ring_write(ring, 0);

        /* SQ_VTX_BASE_VTX_LOC */
        radeon_ring_write(ring, 0xc0026f00);
        radeon_ring_write(ring, 0x00000000);
        radeon_ring_write(ring, 0x00000000);
        radeon_ring_write(ring, 0x00000000);

        /* Clear consts */
        radeon_ring_write(ring, 0xc0036f00);
        radeon_ring_write(ring, 0x00000bc4);
        radeon_ring_write(ring, 0xffffffff);
        radeon_ring_write(ring, 0xffffffff);
        radeon_ring_write(ring, 0xffffffff);

        radeon_ring_write(ring, 0xc0026900);
        radeon_ring_write(ring, 0x00000316);
        radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
        radeon_ring_write(ring, 0x00000010); /*  */

        radeon_ring_unlock_commit(rdev, ring);

        return 0;
}
2885
/* Bring up the CP ring buffer: soft-reset the CP and dependent blocks,
 * program the ring buffer size/pointers and writeback addresses, start
 * the CP via evergreen_cp_start(), and run a ring test.  Returns 0 on
 * success or the ring-test error code.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
        struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
        u32 tmp;
        u32 rb_bufsz;
        int r;

        /* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
        WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
                                 SOFT_RESET_PA |
                                 SOFT_RESET_SH |
                                 SOFT_RESET_VGT |
                                 SOFT_RESET_SPI |
                                 SOFT_RESET_SX));
        RREG32(GRBM_SOFT_RESET);
        mdelay(15);
        WREG32(GRBM_SOFT_RESET, 0);
        RREG32(GRBM_SOFT_RESET);

        /* Set ring buffer size (log2 of the size in dwords) */
        rb_bufsz = order_base_2(ring->ring_size / 8);
        tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
        tmp |= BUF_SWAP_32BIT;
#endif
        WREG32(CP_RB_CNTL, tmp);
        WREG32(CP_SEM_WAIT_TIMER, 0x0);
        WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

        /* Set the write pointer delay */
        WREG32(CP_RB_WPTR_DELAY, 0);

        /* Initialize the ring buffer's read and write pointers */
        WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
        WREG32(CP_RB_RPTR_WR, 0);
        ring->wptr = 0;
        WREG32(CP_RB_WPTR, ring->wptr);

        /* set the wb address whether it's enabled or not */
        WREG32(CP_RB_RPTR_ADDR,
               ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
        WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
        WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

        if (rdev->wb.enabled)
                WREG32(SCRATCH_UMSK, 0xff);
        else {
                /* no writeback: disable rptr updates and scratch writes */
                tmp |= RB_NO_UPDATE;
                WREG32(SCRATCH_UMSK, 0);
        }

        mdelay(1);
        /* re-write CNTL without RB_RPTR_WR_ENA to latch the pointers */
        WREG32(CP_RB_CNTL, tmp);

        WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
        WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

        ring->rptr = RREG32(CP_RB_RPTR);

        evergreen_cp_start(rdev);
        ring->ready = true;
        r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
        if (r) {
                ring->ready = false;
                return r;
        }
        return 0;
}
2954
2955 /*
2956  * Core functions
2957  */
2958 static void evergreen_gpu_init(struct radeon_device *rdev)
2959 {
2960         u32 gb_addr_config;
2961         u32 mc_shared_chmap, mc_arb_ramcfg;
2962         u32 sx_debug_1;
2963         u32 smx_dc_ctl0;
2964         u32 sq_config;
2965         u32 sq_lds_resource_mgmt;
2966         u32 sq_gpr_resource_mgmt_1;
2967         u32 sq_gpr_resource_mgmt_2;
2968         u32 sq_gpr_resource_mgmt_3;
2969         u32 sq_thread_resource_mgmt;
2970         u32 sq_thread_resource_mgmt_2;
2971         u32 sq_stack_resource_mgmt_1;
2972         u32 sq_stack_resource_mgmt_2;
2973         u32 sq_stack_resource_mgmt_3;
2974         u32 vgt_cache_invalidation;
2975         u32 hdp_host_path_cntl, tmp;
2976         u32 disabled_rb_mask;
2977         int i, j, num_shader_engines, ps_thread_count;
2978
2979         switch (rdev->family) {
2980         case CHIP_CYPRESS:
2981         case CHIP_HEMLOCK:
2982                 rdev->config.evergreen.num_ses = 2;
2983                 rdev->config.evergreen.max_pipes = 4;
2984                 rdev->config.evergreen.max_tile_pipes = 8;
2985                 rdev->config.evergreen.max_simds = 10;
2986                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2987                 rdev->config.evergreen.max_gprs = 256;
2988                 rdev->config.evergreen.max_threads = 248;
2989                 rdev->config.evergreen.max_gs_threads = 32;
2990                 rdev->config.evergreen.max_stack_entries = 512;
2991                 rdev->config.evergreen.sx_num_of_sets = 4;
2992                 rdev->config.evergreen.sx_max_export_size = 256;
2993                 rdev->config.evergreen.sx_max_export_pos_size = 64;
2994                 rdev->config.evergreen.sx_max_export_smx_size = 192;
2995                 rdev->config.evergreen.max_hw_contexts = 8;
2996                 rdev->config.evergreen.sq_num_cf_insts = 2;
2997
2998                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2999                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3000                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3001                 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3002                 break;
3003         case CHIP_JUNIPER:
3004                 rdev->config.evergreen.num_ses = 1;
3005                 rdev->config.evergreen.max_pipes = 4;
3006                 rdev->config.evergreen.max_tile_pipes = 4;
3007                 rdev->config.evergreen.max_simds = 10;
3008                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3009                 rdev->config.evergreen.max_gprs = 256;
3010                 rdev->config.evergreen.max_threads = 248;
3011                 rdev->config.evergreen.max_gs_threads = 32;
3012                 rdev->config.evergreen.max_stack_entries = 512;
3013                 rdev->config.evergreen.sx_num_of_sets = 4;
3014                 rdev->config.evergreen.sx_max_export_size = 256;
3015                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3016                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3017                 rdev->config.evergreen.max_hw_contexts = 8;
3018                 rdev->config.evergreen.sq_num_cf_insts = 2;
3019
3020                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3021                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3022                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3023                 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3024                 break;
3025         case CHIP_REDWOOD:
3026                 rdev->config.evergreen.num_ses = 1;
3027                 rdev->config.evergreen.max_pipes = 4;
3028                 rdev->config.evergreen.max_tile_pipes = 4;
3029                 rdev->config.evergreen.max_simds = 5;
3030                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3031                 rdev->config.evergreen.max_gprs = 256;
3032                 rdev->config.evergreen.max_threads = 248;
3033                 rdev->config.evergreen.max_gs_threads = 32;
3034                 rdev->config.evergreen.max_stack_entries = 256;
3035                 rdev->config.evergreen.sx_num_of_sets = 4;
3036                 rdev->config.evergreen.sx_max_export_size = 256;
3037                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3038                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3039                 rdev->config.evergreen.max_hw_contexts = 8;
3040                 rdev->config.evergreen.sq_num_cf_insts = 2;
3041
3042                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3043                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3044                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3045                 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3046                 break;
3047         case CHIP_CEDAR:
3048         default:
3049                 rdev->config.evergreen.num_ses = 1;
3050                 rdev->config.evergreen.max_pipes = 2;
3051                 rdev->config.evergreen.max_tile_pipes = 2;
3052                 rdev->config.evergreen.max_simds = 2;
3053                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3054                 rdev->config.evergreen.max_gprs = 256;
3055                 rdev->config.evergreen.max_threads = 192;
3056                 rdev->config.evergreen.max_gs_threads = 16;
3057                 rdev->config.evergreen.max_stack_entries = 256;
3058                 rdev->config.evergreen.sx_num_of_sets = 4;
3059                 rdev->config.evergreen.sx_max_export_size = 128;
3060                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3061                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3062                 rdev->config.evergreen.max_hw_contexts = 4;
3063                 rdev->config.evergreen.sq_num_cf_insts = 1;
3064
3065                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3066                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3067                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3068                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3069                 break;
3070         case CHIP_PALM:
3071                 rdev->config.evergreen.num_ses = 1;
3072                 rdev->config.evergreen.max_pipes = 2;
3073                 rdev->config.evergreen.max_tile_pipes = 2;
3074                 rdev->config.evergreen.max_simds = 2;
3075                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3076                 rdev->config.evergreen.max_gprs = 256;
3077                 rdev->config.evergreen.max_threads = 192;
3078                 rdev->config.evergreen.max_gs_threads = 16;
3079                 rdev->config.evergreen.max_stack_entries = 256;
3080                 rdev->config.evergreen.sx_num_of_sets = 4;
3081                 rdev->config.evergreen.sx_max_export_size = 128;
3082                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3083                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3084                 rdev->config.evergreen.max_hw_contexts = 4;
3085                 rdev->config.evergreen.sq_num_cf_insts = 1;
3086
3087                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3088                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3089                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3090                 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3091                 break;
3092         case CHIP_SUMO:
3093                 rdev->config.evergreen.num_ses = 1;
3094                 rdev->config.evergreen.max_pipes = 4;
3095                 rdev->config.evergreen.max_tile_pipes = 4;
3096                 if (rdev->pdev->device == 0x9648)
3097                         rdev->config.evergreen.max_simds = 3;
3098                 else if ((rdev->pdev->device == 0x9647) ||
3099                          (rdev->pdev->device == 0x964a))
3100                         rdev->config.evergreen.max_simds = 4;
3101                 else
3102                         rdev->config.evergreen.max_simds = 5;
3103                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3104                 rdev->config.evergreen.max_gprs = 256;
3105                 rdev->config.evergreen.max_threads = 248;
3106                 rdev->config.evergreen.max_gs_threads = 32;
3107                 rdev->config.evergreen.max_stack_entries = 256;
3108                 rdev->config.evergreen.sx_num_of_sets = 4;
3109                 rdev->config.evergreen.sx_max_export_size = 256;
3110                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3111                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3112                 rdev->config.evergreen.max_hw_contexts = 8;
3113                 rdev->config.evergreen.sq_num_cf_insts = 2;
3114
3115                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3116                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3117                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3118                 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3119                 break;
3120         case CHIP_SUMO2:
3121                 rdev->config.evergreen.num_ses = 1;
3122                 rdev->config.evergreen.max_pipes = 4;
3123                 rdev->config.evergreen.max_tile_pipes = 4;
3124                 rdev->config.evergreen.max_simds = 2;
3125                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3126                 rdev->config.evergreen.max_gprs = 256;
3127                 rdev->config.evergreen.max_threads = 248;
3128                 rdev->config.evergreen.max_gs_threads = 32;
3129                 rdev->config.evergreen.max_stack_entries = 512;
3130                 rdev->config.evergreen.sx_num_of_sets = 4;
3131                 rdev->config.evergreen.sx_max_export_size = 256;
3132                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3133                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3134                 rdev->config.evergreen.max_hw_contexts = 4;
3135                 rdev->config.evergreen.sq_num_cf_insts = 2;
3136
3137                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3138                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3139                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3140                 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3141                 break;
3142         case CHIP_BARTS:
3143                 rdev->config.evergreen.num_ses = 2;
3144                 rdev->config.evergreen.max_pipes = 4;
3145                 rdev->config.evergreen.max_tile_pipes = 8;
3146                 rdev->config.evergreen.max_simds = 7;
3147                 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3148                 rdev->config.evergreen.max_gprs = 256;
3149                 rdev->config.evergreen.max_threads = 248;
3150                 rdev->config.evergreen.max_gs_threads = 32;
3151                 rdev->config.evergreen.max_stack_entries = 512;
3152                 rdev->config.evergreen.sx_num_of_sets = 4;
3153                 rdev->config.evergreen.sx_max_export_size = 256;
3154                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3155                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3156                 rdev->config.evergreen.max_hw_contexts = 8;
3157                 rdev->config.evergreen.sq_num_cf_insts = 2;
3158
3159                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3160                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3161                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3162                 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3163                 break;
3164         case CHIP_TURKS:
3165                 rdev->config.evergreen.num_ses = 1;
3166                 rdev->config.evergreen.max_pipes = 4;
3167                 rdev->config.evergreen.max_tile_pipes = 4;
3168                 rdev->config.evergreen.max_simds = 6;
3169                 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3170                 rdev->config.evergreen.max_gprs = 256;
3171                 rdev->config.evergreen.max_threads = 248;
3172                 rdev->config.evergreen.max_gs_threads = 32;
3173                 rdev->config.evergreen.max_stack_entries = 256;
3174                 rdev->config.evergreen.sx_num_of_sets = 4;
3175                 rdev->config.evergreen.sx_max_export_size = 256;
3176                 rdev->config.evergreen.sx_max_export_pos_size = 64;
3177                 rdev->config.evergreen.sx_max_export_smx_size = 192;
3178                 rdev->config.evergreen.max_hw_contexts = 8;
3179                 rdev->config.evergreen.sq_num_cf_insts = 2;
3180
3181                 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3182                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3183                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3184                 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3185                 break;
3186         case CHIP_CAICOS:
3187                 rdev->config.evergreen.num_ses = 1;
3188                 rdev->config.evergreen.max_pipes = 2;
3189                 rdev->config.evergreen.max_tile_pipes = 2;
3190                 rdev->config.evergreen.max_simds = 2;
3191                 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3192                 rdev->config.evergreen.max_gprs = 256;
3193                 rdev->config.evergreen.max_threads = 192;
3194                 rdev->config.evergreen.max_gs_threads = 16;
3195                 rdev->config.evergreen.max_stack_entries = 256;
3196                 rdev->config.evergreen.sx_num_of_sets = 4;
3197                 rdev->config.evergreen.sx_max_export_size = 128;
3198                 rdev->config.evergreen.sx_max_export_pos_size = 32;
3199                 rdev->config.evergreen.sx_max_export_smx_size = 96;
3200                 rdev->config.evergreen.max_hw_contexts = 4;
3201                 rdev->config.evergreen.sq_num_cf_insts = 1;
3202
3203                 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3204                 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3205                 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3206                 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3207                 break;
3208         }
3209
3210         /* Initialize HDP */
3211         for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3212                 WREG32((0x2c14 + j), 0x00000000);
3213                 WREG32((0x2c18 + j), 0x00000000);
3214                 WREG32((0x2c1c + j), 0x00000000);
3215                 WREG32((0x2c20 + j), 0x00000000);
3216                 WREG32((0x2c24 + j), 0x00000000);
3217         }
3218
3219         WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3220
3221         evergreen_fix_pci_max_read_req_size(rdev);
3222
3223         mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3224         if ((rdev->family == CHIP_PALM) ||
3225             (rdev->family == CHIP_SUMO) ||
3226             (rdev->family == CHIP_SUMO2))
3227                 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3228         else
3229                 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3230
3231         /* setup tiling info dword.  gb_addr_config is not adequate since it does
3232          * not have bank info, so create a custom tiling dword.
3233          * bits 3:0   num_pipes
3234          * bits 7:4   num_banks
3235          * bits 11:8  group_size
3236          * bits 15:12 row_size
3237          */
3238         rdev->config.evergreen.tile_config = 0;
3239         switch (rdev->config.evergreen.max_tile_pipes) {
3240         case 1:
3241         default:
3242                 rdev->config.evergreen.tile_config |= (0 << 0);
3243                 break;
3244         case 2:
3245                 rdev->config.evergreen.tile_config |= (1 << 0);
3246                 break;
3247         case 4:
3248                 rdev->config.evergreen.tile_config |= (2 << 0);
3249                 break;
3250         case 8:
3251                 rdev->config.evergreen.tile_config |= (3 << 0);
3252                 break;
3253         }
3254         /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3255         if (rdev->flags & RADEON_IS_IGP)
3256                 rdev->config.evergreen.tile_config |= 1 << 4;
3257         else {
3258                 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3259                 case 0: /* four banks */
3260                         rdev->config.evergreen.tile_config |= 0 << 4;
3261                         break;
3262                 case 1: /* eight banks */
3263                         rdev->config.evergreen.tile_config |= 1 << 4;
3264                         break;
3265                 case 2: /* sixteen banks */
3266                 default:
3267                         rdev->config.evergreen.tile_config |= 2 << 4;
3268                         break;
3269                 }
3270         }
3271         rdev->config.evergreen.tile_config |= 0 << 8;
3272         rdev->config.evergreen.tile_config |=
3273                 ((gb_addr_config & 0x30000000) >> 28) << 12;
3274
3275         num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3276
3277         if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3278                 u32 efuse_straps_4;
3279                 u32 efuse_straps_3;
3280
3281                 efuse_straps_4 = RREG32_RCU(0x204);
3282                 efuse_straps_3 = RREG32_RCU(0x203);
3283                 tmp = (((efuse_straps_4 & 0xf) << 4) |
3284                       ((efuse_straps_3 & 0xf0000000) >> 28));
3285         } else {
3286                 tmp = 0;
3287                 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3288                         u32 rb_disable_bitmap;
3289
3290                         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3291                         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3292                         rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3293                         tmp <<= 4;
3294                         tmp |= rb_disable_bitmap;
3295                 }
3296         }
3297         /* enabled rb are just the one not disabled :) */
3298         disabled_rb_mask = tmp;
3299         tmp = 0;
3300         for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3301                 tmp |= (1 << i);
3302         /* if all the backends are disabled, fix it up here */
3303         if ((disabled_rb_mask & tmp) == tmp) {
3304                 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3305                         disabled_rb_mask &= ~(1 << i);
3306         }
3307
3308         WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3309         WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3310
3311         WREG32(GB_ADDR_CONFIG, gb_addr_config);
3312         WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3313         WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3314         WREG32(DMA_TILING_CONFIG, gb_addr_config);
3315         WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3316         WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3317         WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3318
3319         if ((rdev->config.evergreen.max_backends == 1) &&
3320             (rdev->flags & RADEON_IS_IGP)) {
3321                 if ((disabled_rb_mask & 3) == 1) {
3322                         /* RB0 disabled, RB1 enabled */
3323                         tmp = 0x11111111;
3324                 } else {
3325                         /* RB1 disabled, RB0 enabled */
3326                         tmp = 0x00000000;
3327                 }
3328         } else {
3329                 tmp = gb_addr_config & NUM_PIPES_MASK;
3330                 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3331                                                 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3332         }
3333         WREG32(GB_BACKEND_MAP, tmp);
3334
3335         WREG32(CGTS_SYS_TCC_DISABLE, 0);
3336         WREG32(CGTS_TCC_DISABLE, 0);
3337         WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3338         WREG32(CGTS_USER_TCC_DISABLE, 0);
3339
3340         /* set HW defaults for 3D engine */
3341         WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3342                                      ROQ_IB2_START(0x2b)));
3343
3344         WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3345
3346         WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3347                              SYNC_GRADIENT |
3348                              SYNC_WALKER |
3349                              SYNC_ALIGNER));
3350
3351         sx_debug_1 = RREG32(SX_DEBUG_1);
3352         sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3353         WREG32(SX_DEBUG_1, sx_debug_1);
3354
3355
3356         smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3357         smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3358         smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3359         WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3360
3361         if (rdev->family <= CHIP_SUMO2)
3362                 WREG32(SMX_SAR_CTL0, 0x00010000);
3363
3364         WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3365                                         POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3366                                         SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3367
3368         WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3369                                  SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3370                                  SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3371
3372         WREG32(VGT_NUM_INSTANCES, 1);
3373         WREG32(SPI_CONFIG_CNTL, 0);
3374         WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3375         WREG32(CP_PERFMON_CNTL, 0);
3376
3377         WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3378                                   FETCH_FIFO_HIWATER(0x4) |
3379                                   DONE_FIFO_HIWATER(0xe0) |
3380                                   ALU_UPDATE_FIFO_HIWATER(0x8)));
3381
3382         sq_config = RREG32(SQ_CONFIG);
3383         sq_config &= ~(PS_PRIO(3) |
3384                        VS_PRIO(3) |
3385                        GS_PRIO(3) |
3386                        ES_PRIO(3));
3387         sq_config |= (VC_ENABLE |
3388                       EXPORT_SRC_C |
3389                       PS_PRIO(0) |
3390                       VS_PRIO(1) |
3391                       GS_PRIO(2) |
3392                       ES_PRIO(3));
3393
3394         switch (rdev->family) {
3395         case CHIP_CEDAR:
3396         case CHIP_PALM:
3397         case CHIP_SUMO:
3398         case CHIP_SUMO2:
3399         case CHIP_CAICOS:
3400                 /* no vertex cache */
3401                 sq_config &= ~VC_ENABLE;
3402                 break;
3403         default:
3404                 break;
3405         }
3406
3407         sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3408
3409         sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3410         sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3411         sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3412         sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3413         sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3414         sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3415         sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3416
3417         switch (rdev->family) {
3418         case CHIP_CEDAR:
3419         case CHIP_PALM:
3420         case CHIP_SUMO:
3421         case CHIP_SUMO2:
3422                 ps_thread_count = 96;
3423                 break;
3424         default:
3425                 ps_thread_count = 128;
3426                 break;
3427         }
3428
3429         sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3430         sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3431         sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3432         sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3433         sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3434         sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3435
3436         sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3437         sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3438         sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3439         sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3440         sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3441         sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3442
3443         WREG32(SQ_CONFIG, sq_config);
3444         WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3445         WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3446         WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3447         WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3448         WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3449         WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3450         WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3451         WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3452         WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3453         WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3454
3455         WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3456                                           FORCE_EOV_MAX_REZ_CNT(255)));
3457
3458         switch (rdev->family) {
3459         case CHIP_CEDAR:
3460         case CHIP_PALM:
3461         case CHIP_SUMO:
3462         case CHIP_SUMO2:
3463         case CHIP_CAICOS:
3464                 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3465                 break;
3466         default:
3467                 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3468                 break;
3469         }
3470         vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3471         WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3472
3473         WREG32(VGT_GS_VERTEX_REUSE, 16);
3474         WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3475         WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3476
3477         WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3478         WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3479
3480         WREG32(CB_PERF_CTR0_SEL_0, 0);
3481         WREG32(CB_PERF_CTR0_SEL_1, 0);
3482         WREG32(CB_PERF_CTR1_SEL_0, 0);
3483         WREG32(CB_PERF_CTR1_SEL_1, 0);
3484         WREG32(CB_PERF_CTR2_SEL_0, 0);
3485         WREG32(CB_PERF_CTR2_SEL_1, 0);
3486         WREG32(CB_PERF_CTR3_SEL_0, 0);
3487         WREG32(CB_PERF_CTR3_SEL_1, 0);
3488
3489         /* clear render buffer base addresses */
3490         WREG32(CB_COLOR0_BASE, 0);
3491         WREG32(CB_COLOR1_BASE, 0);
3492         WREG32(CB_COLOR2_BASE, 0);
3493         WREG32(CB_COLOR3_BASE, 0);
3494         WREG32(CB_COLOR4_BASE, 0);
3495         WREG32(CB_COLOR5_BASE, 0);
3496         WREG32(CB_COLOR6_BASE, 0);
3497         WREG32(CB_COLOR7_BASE, 0);
3498         WREG32(CB_COLOR8_BASE, 0);
3499         WREG32(CB_COLOR9_BASE, 0);
3500         WREG32(CB_COLOR10_BASE, 0);
3501         WREG32(CB_COLOR11_BASE, 0);
3502
3503         /* set the shader const cache sizes to 0 */
3504         for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3505                 WREG32(i, 0);
3506         for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3507                 WREG32(i, 0);
3508
3509         tmp = RREG32(HDP_MISC_CNTL);
3510         tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3511         WREG32(HDP_MISC_CNTL, tmp);
3512
3513         hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3514         WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3515
3516         WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3517
3518         udelay(50);
3519
3520 }
3521
3522 int evergreen_mc_init(struct radeon_device *rdev)
3523 {
3524         u32 tmp;
3525         int chansize, numchan;
3526
3527         /* Get VRAM informations */
3528         rdev->mc.vram_is_ddr = true;
3529         if ((rdev->family == CHIP_PALM) ||
3530             (rdev->family == CHIP_SUMO) ||
3531             (rdev->family == CHIP_SUMO2))
3532                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3533         else
3534                 tmp = RREG32(MC_ARB_RAMCFG);
3535         if (tmp & CHANSIZE_OVERRIDE) {
3536                 chansize = 16;
3537         } else if (tmp & CHANSIZE_MASK) {
3538                 chansize = 64;
3539         } else {
3540                 chansize = 32;
3541         }
3542         tmp = RREG32(MC_SHARED_CHMAP);
3543         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3544         case 0:
3545         default:
3546                 numchan = 1;
3547                 break;
3548         case 1:
3549                 numchan = 2;
3550                 break;
3551         case 2:
3552                 numchan = 4;
3553                 break;
3554         case 3:
3555                 numchan = 8;
3556                 break;
3557         }
3558         rdev->mc.vram_width = numchan * chansize;
3559         /* Could aper size report 0 ? */
3560         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3561         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3562         /* Setup GPU memory space */
3563         if ((rdev->family == CHIP_PALM) ||
3564             (rdev->family == CHIP_SUMO) ||
3565             (rdev->family == CHIP_SUMO2)) {
3566                 /* size in bytes on fusion */
3567                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3568                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3569         } else {
3570                 /* size in MB on evergreen/cayman/tn */
3571                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3572                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3573         }
3574         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3575         r700_vram_gtt_location(rdev, &rdev->mc);
3576         radeon_update_bandwidth_info(rdev);
3577
3578         return 0;
3579 }
3580
/**
 * evergreen_print_gpu_status_regs - dump GPU engine status registers
 *
 * @rdev: radeon_device pointer
 *
 * Dumps the GRBM/SRBM/CP/DMA status registers to the kernel log
 * via dev_info().  Used as a debugging aid around GPU soft reset.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer have a second DMA engine 0x800 above the first */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3608
3609 bool evergreen_is_display_hung(struct radeon_device *rdev)
3610 {
3611         u32 crtc_hung = 0;
3612         u32 crtc_status[6];
3613         u32 i, j, tmp;
3614
3615         for (i = 0; i < rdev->num_crtc; i++) {
3616                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3617                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3618                         crtc_hung |= (1 << i);
3619                 }
3620         }
3621
3622         for (j = 0; j < 10; j++) {
3623                 for (i = 0; i < rdev->num_crtc; i++) {
3624                         if (crtc_hung & (1 << i)) {
3625                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3626                                 if (tmp != crtc_status[i])
3627                                         crtc_hung &= ~(1 << i);
3628                         }
3629                 }
3630                 if (crtc_hung == 0)
3631                         return false;
3632                 udelay(100);
3633         }
3634
3635         return true;
3636 }
3637
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 *
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM/SRBM/DMA/VM status registers and builds a
 * RADEON_RESET_* bitmask of the blocks currently reporting busy or
 * pending state.  MC busy is deliberately stripped from the result
 * (see the comment at the end).
 * Returns the reset mask (0 if nothing appears hung).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3706
/**
 * evergreen_gpu_soft_reset - soft reset the hung GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* mask of blocks to reset
 *
 * Halts the CP (and DMA ring, if requested), stops the memory
 * controller, pulses the GRBM/SRBM soft reset bits for the requested
 * blocks, then restores the MC state.  A no-op if @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop the MC so VRAM is quiescent during the reset */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM/SRBM reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only done on discrete asics */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* read back, presumably to flush the posted write */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		/* release the reset bits again */
		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		/* read back, presumably to flush the posted write */
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		/* release the reset bits again */
		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3820
3821 int evergreen_asic_reset(struct radeon_device *rdev)
3822 {
3823         u32 reset_mask;
3824
3825         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3826
3827         if (reset_mask)
3828                 r600_set_bios_scratch_engine_hung(rdev, true);
3829
3830         evergreen_gpu_soft_reset(rdev, reset_mask);
3831
3832         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3833
3834         if (!reset_mask)
3835                 r600_set_bios_scratch_engine_hung(rdev, false);
3836
3837         return 0;
3838 }
3839
3840 /**
3841  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3842  *
3843  * @rdev: radeon_device pointer
3844  * @ring: radeon_ring structure holding ring information
3845  *
3846  * Check if the GFX engine is locked up.
3847  * Returns true if the engine appears to be locked up, false if not.
3848  */
3849 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3850 {
3851         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3852
3853         if (!(reset_mask & (RADEON_RESET_GFX |
3854                             RADEON_RESET_COMPUTE |
3855                             RADEON_RESET_CP))) {
3856                 radeon_ring_lockup_update(ring);
3857                 return false;
3858         }
3859         /* force CP activities */
3860         radeon_ring_force_activity(rdev, ring);
3861         return radeon_ring_test_lockup(rdev, ring);
3862 }
3863
3864 /*
3865  * RLC
3866  */
3867 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3868 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3869
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins, unreferences and clears the RLC save/restore buffer, the
 * clear state buffer and the CP table buffer, whichever of them were
 * allocated.  Safe to call on a partially initialized RLC.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
3910
3911 #define CP_ME_TABLE_SIZE    96
3912
/**
 * sumo_rlc_init - allocate and populate the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills the RLC save/restore buffer (from
 * rdev->rlc.reg_list), the clear state buffer (from rdev->rlc.cs_data)
 * and, if rdev->rlc.cp_table_size is set, the CP power-gating table,
 * all in VRAM.  The exact buffer layouts differ per asic family
 * (evergreen/TN vs SI vs CIK), see the branches below.
 * Returns 0 on success; on error, tears down via sumo_rlc_fini() and
 * returns a negative error code.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the save/restore buffer */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = src_ptr[i];
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* consumes two source registers per iteration (note the
			 * extra i++ inside the loop body) */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = data;
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = RLC_SAVE_RESTORE_LIST_END_MARKER;
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI: 256-byte header precedes the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/TN: count registers across all sections,
			 * plus 3 header dwords per section and 2 trailing dwords */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address + size of the CSB that follows */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = upper_32_bits(reg_list_mc_addr);
			dst_ptr[1] = lower_32_bits(reg_list_mc_addr);
			dst_ptr[2] = rdev->rlc.clear_state_size;
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/TN: emit per-section headers (payload GPU
			 * address, register index, flags|byte count) followed by
			 * the register payload blocks */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = data;
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = data;
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = data;
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = data;
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = data;
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = RLC_CLEAR_STATE_END_MARKER;
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* cp table block (CIK power gating) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4125
4126 static void evergreen_rlc_start(struct radeon_device *rdev)
4127 {
4128         u32 mask = RLC_ENABLE;
4129
4130         if (rdev->flags & RADEON_IS_IGP) {
4131                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4132         }
4133
4134         WREG32(RLC_CNTL, mask);
4135 }
4136
4137 int evergreen_rlc_resume(struct radeon_device *rdev)
4138 {
4139         u32 i;
4140         const __be32 *fw_data;
4141
4142         if (!rdev->rlc_fw)
4143                 return -EINVAL;
4144
4145         r600_rlc_stop(rdev);
4146
4147         WREG32(RLC_HB_CNTL, 0);
4148
4149         if (rdev->flags & RADEON_IS_IGP) {
4150                 if (rdev->family == CHIP_ARUBA) {
4151                         u32 always_on_bitmap =
4152                                 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4153                         /* find out the number of active simds */
4154                         u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4155                         tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4156                         tmp = hweight32(~tmp);
4157                         if (tmp == rdev->config.cayman.max_simds_per_se) {
4158                                 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4159                                 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4160                                 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4161                                 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4162                                 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4163                         }
4164                 } else {
4165                         WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4166                         WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4167                 }
4168                 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4169                 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4170         } else {
4171                 WREG32(RLC_HB_BASE, 0);
4172                 WREG32(RLC_HB_RPTR, 0);
4173                 WREG32(RLC_HB_WPTR, 0);
4174                 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4175                 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4176         }
4177         WREG32(RLC_MC_CNTL, 0);
4178         WREG32(RLC_UCODE_CNTL, 0);
4179
4180         fw_data = (const __be32 *)rdev->rlc_fw->data;
4181         if (rdev->family >= CHIP_ARUBA) {
4182                 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4183                         WREG32(RLC_UCODE_ADDR, i);
4184                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4185                 }
4186         } else if (rdev->family >= CHIP_CAYMAN) {
4187                 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4188                         WREG32(RLC_UCODE_ADDR, i);
4189                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4190                 }
4191         } else {
4192                 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4193                         WREG32(RLC_UCODE_ADDR, i);
4194                         WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4195                 }
4196         }
4197         WREG32(RLC_UCODE_ADDR, 0);
4198
4199         evergreen_rlc_start(rdev);
4200
4201         return 0;
4202 }
4203
4204 /* Interrupts */
4205
4206 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4207 {
4208         if (crtc >= rdev->num_crtc)
4209                 return 0;
4210         else
4211                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4212 }
4213
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Clears the CP/DMA/GRBM interrupt enables, the per-crtc vblank and
 * pageflip interrupt masks, the DAC autodetect interrupts, and the
 * hotplug interrupt enables (preserving only the HPD polarity bits).
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has per-ring CP interrupt controls and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* per-crtc interrupt masks; crtcs 2-5 only exist on some parts */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-crtc pageflip (graphics surface) interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE6 */
	/* NOTE(review): the DCE6 guard skips the DACA write while DACB is
	 * always written -- confirm which DAC is absent on DCE6 parts. */
	if (!ASIC_IS_DCE6(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep only the HPD polarity bits; clear the interrupt enables */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4271
/**
 * evergreen_irq_set - program the chip interrupt mask registers
 *
 * @rdev: radeon_device pointer
 *
 * Translates the software interrupt state in rdev->irq into hardware
 * enables: CP ring(s), DMA ring(s), CRTC vblank, HPD hotplug, AFMT/HDMI
 * audio and thermal interrupts.  On Cayman and newer the three CP rings
 * and the second DMA engine get their own control registers.
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* Read-modify-write: start from the current hardware state with the
	 * enable bits cleared, then OR them back in below where requested. */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* Aruba (TN) exposes the thermal interrupt via a different register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		/* pre-Cayman has a single CP ring; RB_INT covers ring buffer ints */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine only exists on Cayman and newer */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank interrupts are needed both for explicit vblank waits and
	 * for pending page flips on each CRTC */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* now commit everything to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	/* only touch the CRTC registers that exist on this asic */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4493
4494 static void evergreen_irq_ack(struct radeon_device *rdev)
4495 {
4496         u32 tmp;
4497
4498         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4499         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4500         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4501         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4502         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4503         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4504         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4505         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4506         if (rdev->num_crtc >= 4) {
4507                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4508                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4509         }
4510         if (rdev->num_crtc >= 6) {
4511                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4512                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4513         }
4514
4515         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4516         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4517         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4518         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4519         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4520         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4521
4522         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4523                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4524         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4525                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4526         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4527                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4528         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4529                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4530         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4531                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4532         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4533                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4534
4535         if (rdev->num_crtc >= 4) {
4536                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4537                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4538                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4539                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4540                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4541                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4542                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4543                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4544                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4545                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4546                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4547                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4548         }
4549
4550         if (rdev->num_crtc >= 6) {
4551                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4552                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4553                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4554                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4555                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4556                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4557                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4558                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4559                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4560                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4561                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4562                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4563         }
4564
4565         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4566                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4567                 tmp |= DC_HPDx_INT_ACK;
4568                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4569         }
4570         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4571                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4572                 tmp |= DC_HPDx_INT_ACK;
4573                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4574         }
4575         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4576                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4577                 tmp |= DC_HPDx_INT_ACK;
4578                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4579         }
4580         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4581                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4582                 tmp |= DC_HPDx_INT_ACK;
4583                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4584         }
4585         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4586                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4587                 tmp |= DC_HPDx_INT_ACK;
4588                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4589         }
4590         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4591                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4592                 tmp |= DC_HPDx_INT_ACK;
4593                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4594         }
4595         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4596                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4597                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4598                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4599         }
4600         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4601                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4602                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4603                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4604         }
4605         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4606                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4607                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4608                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4609         }
4610         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4611                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4612                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4613                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4614         }
4615         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4616                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4617                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4618                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4619         }
4620         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4621                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4622                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4623                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4624         }
4625 }
4626
/**
 * evergreen_irq_disable - disable interrupt sources and clear pending state
 *
 * @rdev: radeon_device pointer
 *
 * Disables the interrupt ring, waits 1 ms for in-flight interrupts to
 * latch, acks anything still pending, then forces all interrupt enables
 * to the disabled state.  Order matters: ack must happen after the
 * delay so late-arriving interrupts are cleared too.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4635
/**
 * evergreen_irq_suspend - quiesce interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables and acks all interrupts, then stops the RLC (the block that
 * feeds the interrupt ring) so nothing fires while suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4641
4642 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4643 {
4644         u32 wptr, tmp;
4645
4646         if (rdev->wb.enabled)
4647                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4648         else
4649                 wptr = RREG32(IH_RB_WPTR);
4650
4651         if (wptr & RB_OVERFLOW) {
4652                 /* When a ring buffer overflow happen start parsing interrupt
4653                  * from the last not overwritten vector (wptr + 16). Hopefully
4654                  * this should allow us to catchup.
4655                  */
4656                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4657                         wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4658                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4659                 tmp = RREG32(IH_RB_CNTL);
4660                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4661                 WREG32(IH_RB_CNTL, tmp);
4662         }
4663         return (wptr & rdev->ih.ptr_mask);
4664 }
4665
4666 int evergreen_irq_process(struct radeon_device *rdev)
4667 {
4668         u32 wptr;
4669         u32 rptr;
4670         u32 src_id, src_data;
4671         u32 ring_index;
4672         bool queue_hotplug = false;
4673         bool queue_hdmi = false;
4674         bool queue_thermal = false;
4675         u32 status, addr;
4676
4677         if (!rdev->ih.enabled || rdev->shutdown)
4678                 return IRQ_NONE;
4679
4680         wptr = evergreen_get_ih_wptr(rdev);
4681
4682 restart_ih:
4683         /* is somebody else already processing irqs? */
4684         if (atomic_xchg(&rdev->ih.lock, 1))
4685                 return IRQ_NONE;
4686
4687         rptr = rdev->ih.rptr;
4688         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4689
4690         /* Order reading of wptr vs. reading of IH ring data */
4691         rmb();
4692
4693         /* display interrupts */
4694         evergreen_irq_ack(rdev);
4695
4696         while (rptr != wptr) {
4697                 /* wptr/rptr are in bytes! */
4698                 ring_index = rptr / 4;
4699                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4700                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4701
4702                 switch (src_id) {
4703                 case 1: /* D1 vblank/vline */
4704                         switch (src_data) {
4705                         case 0: /* D1 vblank */
4706                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4707                                         if (rdev->irq.crtc_vblank_int[0]) {
4708                                                 drm_handle_vblank(rdev->ddev, 0);
4709                                                 rdev->pm.vblank_sync = true;
4710                                                 wake_up(&rdev->irq.vblank_queue);
4711                                         }
4712                                         if (atomic_read(&rdev->irq.pflip[0]))
4713                                                 radeon_crtc_handle_flip(rdev, 0);
4714                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4715                                         DRM_DEBUG("IH: D1 vblank\n");
4716                                 }
4717                                 break;
4718                         case 1: /* D1 vline */
4719                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4720                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4721                                         DRM_DEBUG("IH: D1 vline\n");
4722                                 }
4723                                 break;
4724                         default:
4725                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4726                                 break;
4727                         }
4728                         break;
4729                 case 2: /* D2 vblank/vline */
4730                         switch (src_data) {
4731                         case 0: /* D2 vblank */
4732                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4733                                         if (rdev->irq.crtc_vblank_int[1]) {
4734                                                 drm_handle_vblank(rdev->ddev, 1);
4735                                                 rdev->pm.vblank_sync = true;
4736                                                 wake_up(&rdev->irq.vblank_queue);
4737                                         }
4738                                         if (atomic_read(&rdev->irq.pflip[1]))
4739                                                 radeon_crtc_handle_flip(rdev, 1);
4740                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4741                                         DRM_DEBUG("IH: D2 vblank\n");
4742                                 }
4743                                 break;
4744                         case 1: /* D2 vline */
4745                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4746                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4747                                         DRM_DEBUG("IH: D2 vline\n");
4748                                 }
4749                                 break;
4750                         default:
4751                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4752                                 break;
4753                         }
4754                         break;
4755                 case 3: /* D3 vblank/vline */
4756                         switch (src_data) {
4757                         case 0: /* D3 vblank */
4758                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4759                                         if (rdev->irq.crtc_vblank_int[2]) {
4760                                                 drm_handle_vblank(rdev->ddev, 2);
4761                                                 rdev->pm.vblank_sync = true;
4762                                                 wake_up(&rdev->irq.vblank_queue);
4763                                         }
4764                                         if (atomic_read(&rdev->irq.pflip[2]))
4765                                                 radeon_crtc_handle_flip(rdev, 2);
4766                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4767                                         DRM_DEBUG("IH: D3 vblank\n");
4768                                 }
4769                                 break;
4770                         case 1: /* D3 vline */
4771                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4772                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4773                                         DRM_DEBUG("IH: D3 vline\n");
4774                                 }
4775                                 break;
4776                         default:
4777                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4778                                 break;
4779                         }
4780                         break;
4781                 case 4: /* D4 vblank/vline */
4782                         switch (src_data) {
4783                         case 0: /* D4 vblank */
4784                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4785                                         if (rdev->irq.crtc_vblank_int[3]) {
4786                                                 drm_handle_vblank(rdev->ddev, 3);
4787                                                 rdev->pm.vblank_sync = true;
4788                                                 wake_up(&rdev->irq.vblank_queue);
4789                                         }
4790                                         if (atomic_read(&rdev->irq.pflip[3]))
4791                                                 radeon_crtc_handle_flip(rdev, 3);
4792                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4793                                         DRM_DEBUG("IH: D4 vblank\n");
4794                                 }
4795                                 break;
4796                         case 1: /* D4 vline */
4797                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4798                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4799                                         DRM_DEBUG("IH: D4 vline\n");
4800                                 }
4801                                 break;
4802                         default:
4803                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4804                                 break;
4805                         }
4806                         break;
4807                 case 5: /* D5 vblank/vline */
4808                         switch (src_data) {
4809                         case 0: /* D5 vblank */
4810                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4811                                         if (rdev->irq.crtc_vblank_int[4]) {
4812                                                 drm_handle_vblank(rdev->ddev, 4);
4813                                                 rdev->pm.vblank_sync = true;
4814                                                 wake_up(&rdev->irq.vblank_queue);
4815                                         }
4816                                         if (atomic_read(&rdev->irq.pflip[4]))
4817                                                 radeon_crtc_handle_flip(rdev, 4);
4818                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4819                                         DRM_DEBUG("IH: D5 vblank\n");
4820                                 }
4821                                 break;
4822                         case 1: /* D5 vline */
4823                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4824                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4825                                         DRM_DEBUG("IH: D5 vline\n");
4826                                 }
4827                                 break;
4828                         default:
4829                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4830                                 break;
4831                         }
4832                         break;
4833                 case 6: /* D6 vblank/vline */
4834                         switch (src_data) {
4835                         case 0: /* D6 vblank */
4836                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4837                                         if (rdev->irq.crtc_vblank_int[5]) {
4838                                                 drm_handle_vblank(rdev->ddev, 5);
4839                                                 rdev->pm.vblank_sync = true;
4840                                                 wake_up(&rdev->irq.vblank_queue);
4841                                         }
4842                                         if (atomic_read(&rdev->irq.pflip[5]))
4843                                                 radeon_crtc_handle_flip(rdev, 5);
4844                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4845                                         DRM_DEBUG("IH: D6 vblank\n");
4846                                 }
4847                                 break;
4848                         case 1: /* D6 vline */
4849                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4850                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4851                                         DRM_DEBUG("IH: D6 vline\n");
4852                                 }
4853                                 break;
4854                         default:
4855                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4856                                 break;
4857                         }
4858                         break;
4859                 case 42: /* HPD hotplug */
4860                         switch (src_data) {
4861                         case 0:
4862                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4863                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4864                                         queue_hotplug = true;
4865                                         DRM_DEBUG("IH: HPD1\n");
4866                                 }
4867                                 break;
4868                         case 1:
4869                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4870                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4871                                         queue_hotplug = true;
4872                                         DRM_DEBUG("IH: HPD2\n");
4873                                 }
4874                                 break;
4875                         case 2:
4876                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4877                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4878                                         queue_hotplug = true;
4879                                         DRM_DEBUG("IH: HPD3\n");
4880                                 }
4881                                 break;
4882                         case 3:
4883                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4884                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4885                                         queue_hotplug = true;
4886                                         DRM_DEBUG("IH: HPD4\n");
4887                                 }
4888                                 break;
4889                         case 4:
4890                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4891                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4892                                         queue_hotplug = true;
4893                                         DRM_DEBUG("IH: HPD5\n");
4894                                 }
4895                                 break;
4896                         case 5:
4897                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4898                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4899                                         queue_hotplug = true;
4900                                         DRM_DEBUG("IH: HPD6\n");
4901                                 }
4902                                 break;
4903                         default:
4904                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4905                                 break;
4906                         }
4907                         break;
4908                 case 44: /* hdmi */
4909                         switch (src_data) {
4910                         case 0:
4911                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4912                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4913                                         queue_hdmi = true;
4914                                         DRM_DEBUG("IH: HDMI0\n");
4915                                 }
4916                                 break;
4917                         case 1:
4918                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4919                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4920                                         queue_hdmi = true;
4921                                         DRM_DEBUG("IH: HDMI1\n");
4922                                 }
4923                                 break;
4924                         case 2:
4925                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4926                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4927                                         queue_hdmi = true;
4928                                         DRM_DEBUG("IH: HDMI2\n");
4929                                 }
4930                                 break;
4931                         case 3:
4932                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4933                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4934                                         queue_hdmi = true;
4935                                         DRM_DEBUG("IH: HDMI3\n");
4936                                 }
4937                                 break;
4938                         case 4:
4939                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4940                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4941                                         queue_hdmi = true;
4942                                         DRM_DEBUG("IH: HDMI4\n");
4943                                 }
4944                                 break;
4945                         case 5:
4946                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4947                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4948                                         queue_hdmi = true;
4949                                         DRM_DEBUG("IH: HDMI5\n");
4950                                 }
4951                                 break;
4952                         default:
4953                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4954                                 break;
4955                         }
4956                 case 124: /* UVD */
4957                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4958                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4959                         break;
4960                 case 146:
4961                 case 147:
4962                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
4963                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
4964                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4965                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4966                                 addr);
4967                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4968                                 status);
4969                         cayman_vm_decode_fault(rdev, status, addr);
4970                         /* reset addr and status */
4971                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4972                         break;
4973                 case 176: /* CP_INT in ring buffer */
4974                 case 177: /* CP_INT in IB1 */
4975                 case 178: /* CP_INT in IB2 */
4976                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4977                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4978                         break;
4979                 case 181: /* CP EOP event */
4980                         DRM_DEBUG("IH: CP EOP\n");
4981                         if (rdev->family >= CHIP_CAYMAN) {
4982                                 switch (src_data) {
4983                                 case 0:
4984                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4985                                         break;
4986                                 case 1:
4987                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4988                                         break;
4989                                 case 2:
4990                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4991                                         break;
4992                                 }
4993                         } else
4994                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4995                         break;
4996                 case 224: /* DMA trap event */
4997                         DRM_DEBUG("IH: DMA trap\n");
4998                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4999                         break;
5000                 case 230: /* thermal low to high */
5001                         DRM_DEBUG("IH: thermal low to high\n");
5002                         rdev->pm.dpm.thermal.high_to_low = false;
5003                         queue_thermal = true;
5004                         break;
5005                 case 231: /* thermal high to low */
5006                         DRM_DEBUG("IH: thermal high to low\n");
5007                         rdev->pm.dpm.thermal.high_to_low = true;
5008                         queue_thermal = true;
5009                         break;
5010                 case 233: /* GUI IDLE */
5011                         DRM_DEBUG("IH: GUI idle\n");
5012                         break;
5013                 case 244: /* DMA trap event */
5014                         if (rdev->family >= CHIP_CAYMAN) {
5015                                 DRM_DEBUG("IH: DMA1 trap\n");
5016                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5017                         }
5018                         break;
5019                 default:
5020                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5021                         break;
5022                 }
5023
5024                 /* wptr/rptr are in bytes! */
5025                 rptr += 16;
5026                 rptr &= rdev->ih.ptr_mask;
5027         }
5028         if (queue_hotplug)
5029                 schedule_work(&rdev->hotplug_work);
5030         if (queue_hdmi)
5031                 schedule_work(&rdev->audio_work);
5032         if (queue_thermal && rdev->pm.dpm_enabled)
5033                 schedule_work(&rdev->pm.dpm.thermal.work);
5034         rdev->ih.rptr = rptr;
5035         WREG32(IH_RB_RPTR, rdev->ih.rptr);
5036         atomic_set(&rdev->ih.lock, 0);
5037
5038         /* make sure wptr hasn't changed while processing */
5039         wptr = evergreen_get_ih_wptr(rdev);
5040         if (wptr != rptr)
5041                 goto restart_ih;
5042
5043         return IRQ_HANDLED;
5044 }
5045
/**
 * evergreen_startup - program the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Programs the memory controller, loads microcode, enables the GART,
 * sets up interrupts and brings up the CP, DMA and (optionally) UVD
 * rings.  Called both at driver init and at resume time.  The order of
 * operations below is significant.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* Load microcode; DCE5 (NI-class) parts additionally require MC
	 * ucode.  Firmware may already be cached from a previous init, in
	 * which case the fetch is skipped.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* AGP parts use the AGP aperture; everything else uses the GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP/fusion parts only) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: a failure here only disables the UVD ring
	 * (ring_size = 0 below) rather than failing the whole startup.
	 */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD resume failed above; skip its ring */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5201
5202 int evergreen_resume(struct radeon_device *rdev)
5203 {
5204         int r;
5205
5206         /* reset the asic, the gfx blocks are often in a bad state
5207          * after the driver is unloaded or after a resume
5208          */
5209         if (radeon_asic_reset(rdev))
5210                 dev_warn(rdev->dev, "GPU reset failed !\n");
5211         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5212          * posting will perform necessary task to bring back GPU into good
5213          * shape.
5214          */
5215         /* post card */
5216         atom_asic_init(rdev->mode_info.atom_context);
5217
5218         /* init golden registers */
5219         evergreen_init_golden_registers(rdev);
5220
5221         rdev->accel_working = true;
5222         r = evergreen_startup(rdev);
5223         if (r) {
5224                 DRM_ERROR("evergreen startup failed on resume\n");
5225                 rdev->accel_working = false;
5226                 return r;
5227         }
5228
5229         return r;
5230
5231 }
5232
/**
 * evergreen_suspend - disable asic functionality ahead of suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops audio, UVD, the CP and DMA rings, then disables interrupts,
 * writeback and the GART.  The teardown order is the reverse of the
 * bring-up order in evergreen_startup().
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5246
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call the asic-specific functions. This
 * should also allow us to remove a bunch of callback functions
 * like vram_info.
 */
/**
 * evergreen_init - asic-specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * One-time software and hardware setup: reads and validates the BIOS
 * (must be an ATOMBIOS), resets and posts the card if necessary, then
 * initializes clocks, fences, memory management, the rings and the IH
 * ring before calling the common startup path.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; on failure fall back to non-AGP operation */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional; only set up its ring if init succeeded */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is not fatal: continue without
		 * acceleration after tearing down what was set up
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5360
/**
 * evergreen_fini - asic-specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tears down everything set up by evergreen_init()/evergreen_startup():
 * rings, interrupts, rlc, writeback, GART, UVD, memory management and
 * the atombios state.  Frees the cached BIOS image last.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5384
/**
 * evergreen_pcie_gen2_enable - enable pcie gen2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Attempts to switch the PCIE link to gen2 speeds when the module
 * parameter allows it, the part is a discrete PCIE GPU (not IGP/X2)
 * and the upstream bus supports at least 5.0 GT/s.  Can be disabled
 * with radeon.pcie_gen2=0.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* only attempt the switch if the bus can actually run gen2+ */
	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* only proceed if the other side of the link has advertised or
	 * used gen2; otherwise just configure upconfigure behavior
	 */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse the failed-speed-change counter clear bit */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5447
/**
 * evergreen_program_aspm - program pcie ASPM (L0s/L1) settings
 *
 * @rdev: radeon_device pointer
 *
 * Programs the PIF PHY and PCIE link-control registers for Active
 * State Power Management.  L0s is disabled on the families listed in
 * the switch below; L1 inactivity timers and PLL power-down-in-L1
 * behavior are programmed with family-specific values (BARTS+ parts
 * get additional ramp-up/exit-time programming).  Can be disabled
 * with radeon.aspm=0 and is skipped on non-PCIE parts.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing mode differs between fusion and discrete setups */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	/* L0s inactivity timer (family-specific value) */
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timer (family-specific value) */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* allow the PLLs to power down while in L1 */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally program the PLL ramp-up time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}