Merge branch 'drm-core-next' of ../linux-2.6 into drm-next
[cascardo/linux.git] / drivers/gpu/drm/radeon/radeon_asic.h
/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
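
/*
 * The radeon_legacy_* helpers drive engine/memory clocks directly through
 * the PLL/clock registers (pre-AtomBIOS, COMBIOS-era boards), while the
 * radeon_atom_* helpers go through AtomBIOS command tables.  Each per-ASIC
 * struct below selects one set of helpers or the other.
 */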

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);

static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = NULL,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
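
/*
 * Note: these vtables are not called directly from this header.  At init
 * time the driver points rdev->asic at the struct matching rdev->family,
 * and the rest of the code dispatches through wrapper macros in radeon.h,
 * roughly of the form (sketch only, see radeon.h for the real list):
 *
 *	#define radeon_init(rdev)	(rdev)->asic->init((rdev))
 *	#define radeon_gpu_reset(rdev)	(rdev)->asic->gpu_reset((rdev))
 *
 * A NULL entry (e.g. .copy_dma above) means that operation is simply not
 * wired up for the family, and core code is expected to check for it or
 * never use it on those chips.
 */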


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			uint64_t src_offset,
			uint64_t dst_offset,
			unsigned num_pages,
			struct radeon_fence *fence);
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = NULL,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
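
/*
 * As in this struct and the ones that follow, entries whose hardware block
 * is unchanged from an earlier generation reuse that generation's helper
 * (here the r100 CP/IRQ/blit paths); only the blocks that actually differ
 * on r300-class parts (GART, fence emission, CS parsing, PCIe lane control)
 * get their own r300/rv370 implementations.
 */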

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
	.fini = &rs400_fini,
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = NULL,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.fini = &rs600_fini,
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
};
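
/*
 * rs600 uses the AVIVO display block, which is why it supplies its own
 * irq_set/irq_process/get_vblank_counter and bandwidth_update helpers
 * instead of the r100 ones; the rs690, rv515 and r520 tables below reuse
 * those same rs600 helpers.
 */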


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
	.fini = &rs690_fini,
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);

static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};
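
/*
 * Note that .copy_dma and .copy both point at r600_copy_blit: only the
 * blit path is wired up for buffer moves in this table, so the DMA slot is
 * filled with the same function rather than left NULL (the same holds for
 * the rv770 table below).
 */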

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};

#endif