drm/nouveau/gr: convert user classes to new-style nvkm_object
[cascardo/linux.git] / drivers / gpu / drm / nouveau / nvkm / engine / gr / nv04.c
1 /*
2  * Copyright 2007 Stephane Marchesin
3  * All Rights Reserved.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the next
13  * paragr) shall be included in all copies or substantial portions of the
14  * Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19  * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22  * DEALINGS IN THE SOFTWARE.
23  */
24 #include "priv.h"
25 #include "regs.h"
26
27 #include <core/client.h>
28 #include <engine/fifo.h>
29 #include <engine/fifo/chan.h>
30 #include <subdev/instmem.h>
31 #include <subdev/timer.h>
32
33 static u32
34 nv04_gr_ctx_regs[] = {
35         0x0040053c,
36         0x00400544,
37         0x00400540,
38         0x00400548,
39         NV04_PGRAPH_CTX_SWITCH1,
40         NV04_PGRAPH_CTX_SWITCH2,
41         NV04_PGRAPH_CTX_SWITCH3,
42         NV04_PGRAPH_CTX_SWITCH4,
43         NV04_PGRAPH_CTX_CACHE1,
44         NV04_PGRAPH_CTX_CACHE2,
45         NV04_PGRAPH_CTX_CACHE3,
46         NV04_PGRAPH_CTX_CACHE4,
47         0x00400184,
48         0x004001a4,
49         0x004001c4,
50         0x004001e4,
51         0x00400188,
52         0x004001a8,
53         0x004001c8,
54         0x004001e8,
55         0x0040018c,
56         0x004001ac,
57         0x004001cc,
58         0x004001ec,
59         0x00400190,
60         0x004001b0,
61         0x004001d0,
62         0x004001f0,
63         0x00400194,
64         0x004001b4,
65         0x004001d4,
66         0x004001f4,
67         0x00400198,
68         0x004001b8,
69         0x004001d8,
70         0x004001f8,
71         0x0040019c,
72         0x004001bc,
73         0x004001dc,
74         0x004001fc,
75         0x00400174,
76         NV04_PGRAPH_DMA_START_0,
77         NV04_PGRAPH_DMA_START_1,
78         NV04_PGRAPH_DMA_LENGTH,
79         NV04_PGRAPH_DMA_MISC,
80         NV04_PGRAPH_DMA_PITCH,
81         NV04_PGRAPH_BOFFSET0,
82         NV04_PGRAPH_BBASE0,
83         NV04_PGRAPH_BLIMIT0,
84         NV04_PGRAPH_BOFFSET1,
85         NV04_PGRAPH_BBASE1,
86         NV04_PGRAPH_BLIMIT1,
87         NV04_PGRAPH_BOFFSET2,
88         NV04_PGRAPH_BBASE2,
89         NV04_PGRAPH_BLIMIT2,
90         NV04_PGRAPH_BOFFSET3,
91         NV04_PGRAPH_BBASE3,
92         NV04_PGRAPH_BLIMIT3,
93         NV04_PGRAPH_BOFFSET4,
94         NV04_PGRAPH_BBASE4,
95         NV04_PGRAPH_BLIMIT4,
96         NV04_PGRAPH_BOFFSET5,
97         NV04_PGRAPH_BBASE5,
98         NV04_PGRAPH_BLIMIT5,
99         NV04_PGRAPH_BPITCH0,
100         NV04_PGRAPH_BPITCH1,
101         NV04_PGRAPH_BPITCH2,
102         NV04_PGRAPH_BPITCH3,
103         NV04_PGRAPH_BPITCH4,
104         NV04_PGRAPH_SURFACE,
105         NV04_PGRAPH_STATE,
106         NV04_PGRAPH_BSWIZZLE2,
107         NV04_PGRAPH_BSWIZZLE5,
108         NV04_PGRAPH_BPIXEL,
109         NV04_PGRAPH_NOTIFY,
110         NV04_PGRAPH_PATT_COLOR0,
111         NV04_PGRAPH_PATT_COLOR1,
112         NV04_PGRAPH_PATT_COLORRAM+0x00,
113         NV04_PGRAPH_PATT_COLORRAM+0x04,
114         NV04_PGRAPH_PATT_COLORRAM+0x08,
115         NV04_PGRAPH_PATT_COLORRAM+0x0c,
116         NV04_PGRAPH_PATT_COLORRAM+0x10,
117         NV04_PGRAPH_PATT_COLORRAM+0x14,
118         NV04_PGRAPH_PATT_COLORRAM+0x18,
119         NV04_PGRAPH_PATT_COLORRAM+0x1c,
120         NV04_PGRAPH_PATT_COLORRAM+0x20,
121         NV04_PGRAPH_PATT_COLORRAM+0x24,
122         NV04_PGRAPH_PATT_COLORRAM+0x28,
123         NV04_PGRAPH_PATT_COLORRAM+0x2c,
124         NV04_PGRAPH_PATT_COLORRAM+0x30,
125         NV04_PGRAPH_PATT_COLORRAM+0x34,
126         NV04_PGRAPH_PATT_COLORRAM+0x38,
127         NV04_PGRAPH_PATT_COLORRAM+0x3c,
128         NV04_PGRAPH_PATT_COLORRAM+0x40,
129         NV04_PGRAPH_PATT_COLORRAM+0x44,
130         NV04_PGRAPH_PATT_COLORRAM+0x48,
131         NV04_PGRAPH_PATT_COLORRAM+0x4c,
132         NV04_PGRAPH_PATT_COLORRAM+0x50,
133         NV04_PGRAPH_PATT_COLORRAM+0x54,
134         NV04_PGRAPH_PATT_COLORRAM+0x58,
135         NV04_PGRAPH_PATT_COLORRAM+0x5c,
136         NV04_PGRAPH_PATT_COLORRAM+0x60,
137         NV04_PGRAPH_PATT_COLORRAM+0x64,
138         NV04_PGRAPH_PATT_COLORRAM+0x68,
139         NV04_PGRAPH_PATT_COLORRAM+0x6c,
140         NV04_PGRAPH_PATT_COLORRAM+0x70,
141         NV04_PGRAPH_PATT_COLORRAM+0x74,
142         NV04_PGRAPH_PATT_COLORRAM+0x78,
143         NV04_PGRAPH_PATT_COLORRAM+0x7c,
144         NV04_PGRAPH_PATT_COLORRAM+0x80,
145         NV04_PGRAPH_PATT_COLORRAM+0x84,
146         NV04_PGRAPH_PATT_COLORRAM+0x88,
147         NV04_PGRAPH_PATT_COLORRAM+0x8c,
148         NV04_PGRAPH_PATT_COLORRAM+0x90,
149         NV04_PGRAPH_PATT_COLORRAM+0x94,
150         NV04_PGRAPH_PATT_COLORRAM+0x98,
151         NV04_PGRAPH_PATT_COLORRAM+0x9c,
152         NV04_PGRAPH_PATT_COLORRAM+0xa0,
153         NV04_PGRAPH_PATT_COLORRAM+0xa4,
154         NV04_PGRAPH_PATT_COLORRAM+0xa8,
155         NV04_PGRAPH_PATT_COLORRAM+0xac,
156         NV04_PGRAPH_PATT_COLORRAM+0xb0,
157         NV04_PGRAPH_PATT_COLORRAM+0xb4,
158         NV04_PGRAPH_PATT_COLORRAM+0xb8,
159         NV04_PGRAPH_PATT_COLORRAM+0xbc,
160         NV04_PGRAPH_PATT_COLORRAM+0xc0,
161         NV04_PGRAPH_PATT_COLORRAM+0xc4,
162         NV04_PGRAPH_PATT_COLORRAM+0xc8,
163         NV04_PGRAPH_PATT_COLORRAM+0xcc,
164         NV04_PGRAPH_PATT_COLORRAM+0xd0,
165         NV04_PGRAPH_PATT_COLORRAM+0xd4,
166         NV04_PGRAPH_PATT_COLORRAM+0xd8,
167         NV04_PGRAPH_PATT_COLORRAM+0xdc,
168         NV04_PGRAPH_PATT_COLORRAM+0xe0,
169         NV04_PGRAPH_PATT_COLORRAM+0xe4,
170         NV04_PGRAPH_PATT_COLORRAM+0xe8,
171         NV04_PGRAPH_PATT_COLORRAM+0xec,
172         NV04_PGRAPH_PATT_COLORRAM+0xf0,
173         NV04_PGRAPH_PATT_COLORRAM+0xf4,
174         NV04_PGRAPH_PATT_COLORRAM+0xf8,
175         NV04_PGRAPH_PATT_COLORRAM+0xfc,
176         NV04_PGRAPH_PATTERN,
177         0x0040080c,
178         NV04_PGRAPH_PATTERN_SHAPE,
179         0x00400600,
180         NV04_PGRAPH_ROP3,
181         NV04_PGRAPH_CHROMA,
182         NV04_PGRAPH_BETA_AND,
183         NV04_PGRAPH_BETA_PREMULT,
184         NV04_PGRAPH_CONTROL0,
185         NV04_PGRAPH_CONTROL1,
186         NV04_PGRAPH_CONTROL2,
187         NV04_PGRAPH_BLEND,
188         NV04_PGRAPH_STORED_FMT,
189         NV04_PGRAPH_SOURCE_COLOR,
190         0x00400560,
191         0x00400568,
192         0x00400564,
193         0x0040056c,
194         0x00400400,
195         0x00400480,
196         0x00400404,
197         0x00400484,
198         0x00400408,
199         0x00400488,
200         0x0040040c,
201         0x0040048c,
202         0x00400410,
203         0x00400490,
204         0x00400414,
205         0x00400494,
206         0x00400418,
207         0x00400498,
208         0x0040041c,
209         0x0040049c,
210         0x00400420,
211         0x004004a0,
212         0x00400424,
213         0x004004a4,
214         0x00400428,
215         0x004004a8,
216         0x0040042c,
217         0x004004ac,
218         0x00400430,
219         0x004004b0,
220         0x00400434,
221         0x004004b4,
222         0x00400438,
223         0x004004b8,
224         0x0040043c,
225         0x004004bc,
226         0x00400440,
227         0x004004c0,
228         0x00400444,
229         0x004004c4,
230         0x00400448,
231         0x004004c8,
232         0x0040044c,
233         0x004004cc,
234         0x00400450,
235         0x004004d0,
236         0x00400454,
237         0x004004d4,
238         0x00400458,
239         0x004004d8,
240         0x0040045c,
241         0x004004dc,
242         0x00400460,
243         0x004004e0,
244         0x00400464,
245         0x004004e4,
246         0x00400468,
247         0x004004e8,
248         0x0040046c,
249         0x004004ec,
250         0x00400470,
251         0x004004f0,
252         0x00400474,
253         0x004004f4,
254         0x00400478,
255         0x004004f8,
256         0x0040047c,
257         0x004004fc,
258         0x00400534,
259         0x00400538,
260         0x00400514,
261         0x00400518,
262         0x0040051c,
263         0x00400520,
264         0x00400524,
265         0x00400528,
266         0x0040052c,
267         0x00400530,
268         0x00400d00,
269         0x00400d40,
270         0x00400d80,
271         0x00400d04,
272         0x00400d44,
273         0x00400d84,
274         0x00400d08,
275         0x00400d48,
276         0x00400d88,
277         0x00400d0c,
278         0x00400d4c,
279         0x00400d8c,
280         0x00400d10,
281         0x00400d50,
282         0x00400d90,
283         0x00400d14,
284         0x00400d54,
285         0x00400d94,
286         0x00400d18,
287         0x00400d58,
288         0x00400d98,
289         0x00400d1c,
290         0x00400d5c,
291         0x00400d9c,
292         0x00400d20,
293         0x00400d60,
294         0x00400da0,
295         0x00400d24,
296         0x00400d64,
297         0x00400da4,
298         0x00400d28,
299         0x00400d68,
300         0x00400da8,
301         0x00400d2c,
302         0x00400d6c,
303         0x00400dac,
304         0x00400d30,
305         0x00400d70,
306         0x00400db0,
307         0x00400d34,
308         0x00400d74,
309         0x00400db4,
310         0x00400d38,
311         0x00400d78,
312         0x00400db8,
313         0x00400d3c,
314         0x00400d7c,
315         0x00400dbc,
316         0x00400590,
317         0x00400594,
318         0x00400598,
319         0x0040059c,
320         0x004005a8,
321         0x004005ac,
322         0x004005b0,
323         0x004005b4,
324         0x004005c0,
325         0x004005c4,
326         0x004005c8,
327         0x004005cc,
328         0x004005d0,
329         0x004005d4,
330         0x004005d8,
331         0x004005dc,
332         0x004005e0,
333         NV04_PGRAPH_PASSTHRU_0,
334         NV04_PGRAPH_PASSTHRU_1,
335         NV04_PGRAPH_PASSTHRU_2,
336         NV04_PGRAPH_DVD_COLORFMT,
337         NV04_PGRAPH_SCALED_FORMAT,
338         NV04_PGRAPH_MISC24_0,
339         NV04_PGRAPH_MISC24_1,
340         NV04_PGRAPH_MISC24_2,
341         0x00400500,
342         0x00400504,
343         NV04_PGRAPH_VALID1,
344         NV04_PGRAPH_VALID2,
345         NV04_PGRAPH_DEBUG_3
346 };
347
348 #define nv04_gr(p) container_of((p), struct nv04_gr, base)
349
350 struct nv04_gr {
351         struct nvkm_gr base;
352         struct nv04_gr_chan *chan[16];
353         spinlock_t lock;
354 };
355
356 #define nv04_gr_chan(p) container_of((p), struct nv04_gr_chan, object)
357
358 struct nv04_gr_chan {
359         struct nvkm_object object;
360         struct nv04_gr *gr;
361         int chid;
362         u32 nv04[ARRAY_SIZE(nv04_gr_ctx_regs)];
363 };
364
365 /*******************************************************************************
366  * Graphics object classes
367  ******************************************************************************/
368
369 /*
370  * Software methods, why they are needed, and how they all work:
371  *
372  * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
373  * 2d engine settings are kept inside the grobjs themselves. The grobjs are
374  * 3 words long on both. grobj format on NV04 is:
375  *
376  * word 0:
377  *  - bits 0-7: class
378  *  - bit 12: color key active
379  *  - bit 13: clip rect active
380  *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
381  *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
382  *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
383  *            NV03_CONTEXT_SURFACE_DST].
384  *  - bits 15-17: 2d operation [aka patch config]
385  *  - bit 24: patch valid [enables rendering using this object]
386  *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
387  * word 1:
388  *  - bits 0-1: mono format
389  *  - bits 8-13: color format
390  *  - bits 16-31: DMA_NOTIFY instance
391  * word 2:
392  *  - bits 0-15: DMA_A instance
393  *  - bits 16-31: DMA_B instance
394  *
395  * On NV05 it's:
396  *
397  * word 0:
398  *  - bits 0-7: class
399  *  - bit 12: color key active
400  *  - bit 13: clip rect active
401  *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
402  *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
403  *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
404  *            NV03_CONTEXT_SURFACE_DST].
405  *  - bits 15-17: 2d operation [aka patch config]
406  *  - bits 20-22: dither mode
407  *  - bit 24: patch valid [enables rendering using this object]
408  *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
409  *  - bit 26: surface_src/surface_zeta valid
410  *  - bit 27: pattern valid
411  *  - bit 28: rop valid
412  *  - bit 29: beta1 valid
413  *  - bit 30: beta4 valid
414  * word 1:
415  *  - bits 0-1: mono format
416  *  - bits 8-13: color format
417  *  - bits 16-31: DMA_NOTIFY instance
418  * word 2:
419  *  - bits 0-15: DMA_A instance
420  *  - bits 16-31: DMA_B instance
421  *
422  * NV05 will set/unset the relevant valid bits when you poke the relevant
423  * object-binding methods with object of the proper type, or with the NULL
424  * type. It'll only allow rendering using the grobj if all needed objects
425  * are bound. The needed set of objects depends on selected operation: for
426  * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
427  *
428  * NV04 doesn't have these methods implemented at all, and doesn't have the
429  * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
430  * is set. So we have to emulate them in software, internally keeping the
431  * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
432  * but the last word isn't actually used for anything, we abuse it for this
433  * purpose.
434  *
435  * Actually, NV05 can optionally check bit 24 too, but we disable this since
436  * there's no use for it.
437  *
438  * For unknown reasons, NV04 implements surf3d binding in hardware as an
439  * exception. Also for unknown reasons, NV04 doesn't implement the clipping
440  * methods on the surf3d object, so we have to emulate them too.
441  */
442
443 static void
444 nv04_gr_set_ctx1(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
445 {
446         int subc = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
447         u32 tmp;
448
449         tmp  = nvkm_rd32(device, 0x700000 + inst);
450         tmp &= ~mask;
451         tmp |= value;
452         nvkm_wr32(device, 0x700000 + inst, tmp);
453
454         nvkm_wr32(device, NV04_PGRAPH_CTX_SWITCH1, tmp);
455         nvkm_wr32(device, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp);
456 }
457
458 static void
459 nv04_gr_set_ctx_val(struct nvkm_device *device, u32 inst, u32 mask, u32 value)
460 {
461         int class, op, valid = 1;
462         u32 tmp, ctx1;
463
464         ctx1 = nvkm_rd32(device, 0x700000 + inst);
465         class = ctx1 & 0xff;
466         op = (ctx1 >> 15) & 7;
467
468         tmp = nvkm_rd32(device, 0x70000c + inst);
469         tmp &= ~mask;
470         tmp |= value;
471         nvkm_wr32(device, 0x70000c + inst, tmp);
472
473         /* check for valid surf2d/surf_dst/surf_color */
474         if (!(tmp & 0x02000000))
475                 valid = 0;
476         /* check for valid surf_src/surf_zeta */
477         if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
478                 valid = 0;
479
480         switch (op) {
481         /* SRCCOPY_AND, SRCCOPY: no extra objects required */
482         case 0:
483         case 3:
484                 break;
485         /* ROP_AND: requires pattern and rop */
486         case 1:
487                 if (!(tmp & 0x18000000))
488                         valid = 0;
489                 break;
490         /* BLEND_AND: requires beta1 */
491         case 2:
492                 if (!(tmp & 0x20000000))
493                         valid = 0;
494                 break;
495         /* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
496         case 4:
497         case 5:
498                 if (!(tmp & 0x40000000))
499                         valid = 0;
500                 break;
501         }
502
503         nv04_gr_set_ctx1(device, inst, 0x01000000, valid << 24);
504 }
505
506 static bool
507 nv04_gr_mthd_set_operation(struct nvkm_device *device, u32 inst, u32 data)
508 {
509         u8 class = nvkm_rd32(device, 0x700000) & 0x000000ff;
510         if (data > 5)
511                 return false;
512         /* Old versions of the objects only accept first three operations. */
513         if (data > 2 && class < 0x40)
514                 return false;
515         nv04_gr_set_ctx1(device, inst, 0x00038000, data << 15);
516         /* changing operation changes set of objects needed for validation */
517         nv04_gr_set_ctx_val(device, inst, 0, 0);
518         return true;
519 }
520
521 static bool
522 nv04_gr_mthd_surf3d_clip_h(struct nvkm_device *device, u32 inst, u32 data)
523 {
524         u32 min = data & 0xffff, max;
525         u32 w = data >> 16;
526         if (min & 0x8000)
527                 /* too large */
528                 return false;
529         if (w & 0x8000)
530                 /* yes, it accepts negative for some reason. */
531                 w |= 0xffff0000;
532         max = min + w;
533         max &= 0x3ffff;
534         nvkm_wr32(device, 0x40053c, min);
535         nvkm_wr32(device, 0x400544, max);
536         return true;
537 }
538
539 static bool
540 nv04_gr_mthd_surf3d_clip_v(struct nvkm_device *device, u32 inst, u32 data)
541 {
542         u32 min = data & 0xffff, max;
543         u32 w = data >> 16;
544         if (min & 0x8000)
545                 /* too large */
546                 return false;
547         if (w & 0x8000)
548                 /* yes, it accepts negative for some reason. */
549                 w |= 0xffff0000;
550         max = min + w;
551         max &= 0x3ffff;
552         nvkm_wr32(device, 0x400540, min);
553         nvkm_wr32(device, 0x400548, max);
554         return true;
555 }
556
557 static u8
558 nv04_gr_mthd_bind_class(struct nvkm_device *device, u32 inst)
559 {
560         return nvkm_rd32(device, 0x700000 + (inst << 4));
561 }
562
563 static bool
564 nv04_gr_mthd_bind_surf2d(struct nvkm_device *device, u32 inst, u32 data)
565 {
566         switch (nv04_gr_mthd_bind_class(device, data)) {
567         case 0x30:
568                 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
569                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
570                 return true;
571         case 0x42:
572                 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
573                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
574                 return true;
575         }
576         return false;
577 }
578
579 static bool
580 nv04_gr_mthd_bind_surf2d_swzsurf(struct nvkm_device *device, u32 inst, u32 data)
581 {
582         switch (nv04_gr_mthd_bind_class(device, data)) {
583         case 0x30:
584                 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
585                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
586                 return true;
587         case 0x42:
588                 nv04_gr_set_ctx1(device, inst, 0x00004000, 0);
589                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
590                 return true;
591         case 0x52:
592                 nv04_gr_set_ctx1(device, inst, 0x00004000, 0x00004000);
593                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
594                 return true;
595         }
596         return false;
597 }
598
599 static bool
600 nv01_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data)
601 {
602         switch (nv04_gr_mthd_bind_class(device, data)) {
603         case 0x30:
604                 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0);
605                 return true;
606         case 0x18:
607                 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000);
608                 return true;
609         }
610         return false;
611 }
612
613 static bool
614 nv04_gr_mthd_bind_patt(struct nvkm_device *device, u32 inst, u32 data)
615 {
616         switch (nv04_gr_mthd_bind_class(device, data)) {
617         case 0x30:
618                 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0);
619                 return true;
620         case 0x44:
621                 nv04_gr_set_ctx_val(device, inst, 0x08000000, 0x08000000);
622                 return true;
623         }
624         return false;
625 }
626
627 static bool
628 nv04_gr_mthd_bind_rop(struct nvkm_device *device, u32 inst, u32 data)
629 {
630         switch (nv04_gr_mthd_bind_class(device, data)) {
631         case 0x30:
632                 nv04_gr_set_ctx_val(device, inst, 0x10000000, 0);
633                 return true;
634         case 0x43:
635                 nv04_gr_set_ctx_val(device, inst, 0x10000000, 0x10000000);
636                 return true;
637         }
638         return false;
639 }
640
641 static bool
642 nv04_gr_mthd_bind_beta1(struct nvkm_device *device, u32 inst, u32 data)
643 {
644         switch (nv04_gr_mthd_bind_class(device, data)) {
645         case 0x30:
646                 nv04_gr_set_ctx_val(device, inst, 0x20000000, 0);
647                 return true;
648         case 0x12:
649                 nv04_gr_set_ctx_val(device, inst, 0x20000000, 0x20000000);
650                 return true;
651         }
652         return false;
653 }
654
655 static bool
656 nv04_gr_mthd_bind_beta4(struct nvkm_device *device, u32 inst, u32 data)
657 {
658         switch (nv04_gr_mthd_bind_class(device, data)) {
659         case 0x30:
660                 nv04_gr_set_ctx_val(device, inst, 0x40000000, 0);
661                 return true;
662         case 0x72:
663                 nv04_gr_set_ctx_val(device, inst, 0x40000000, 0x40000000);
664                 return true;
665         }
666         return false;
667 }
668
669 static bool
670 nv04_gr_mthd_bind_surf_dst(struct nvkm_device *device, u32 inst, u32 data)
671 {
672         switch (nv04_gr_mthd_bind_class(device, data)) {
673         case 0x30:
674                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
675                 return true;
676         case 0x58:
677                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
678                 return true;
679         }
680         return false;
681 }
682
683 static bool
684 nv04_gr_mthd_bind_surf_src(struct nvkm_device *device, u32 inst, u32 data)
685 {
686         switch (nv04_gr_mthd_bind_class(device, data)) {
687         case 0x30:
688                 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0);
689                 return true;
690         case 0x59:
691                 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000);
692                 return true;
693         }
694         return false;
695 }
696
697 static bool
698 nv04_gr_mthd_bind_surf_color(struct nvkm_device *device, u32 inst, u32 data)
699 {
700         switch (nv04_gr_mthd_bind_class(device, data)) {
701         case 0x30:
702                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0);
703                 return true;
704         case 0x5a:
705                 nv04_gr_set_ctx_val(device, inst, 0x02000000, 0x02000000);
706                 return true;
707         }
708         return false;
709 }
710
711 static bool
712 nv04_gr_mthd_bind_surf_zeta(struct nvkm_device *device, u32 inst, u32 data)
713 {
714         switch (nv04_gr_mthd_bind_class(device, data)) {
715         case 0x30:
716                 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0);
717                 return true;
718         case 0x5b:
719                 nv04_gr_set_ctx_val(device, inst, 0x04000000, 0x04000000);
720                 return true;
721         }
722         return false;
723 }
724
725 static bool
726 nv01_gr_mthd_bind_clip(struct nvkm_device *device, u32 inst, u32 data)
727 {
728         switch (nv04_gr_mthd_bind_class(device, data)) {
729         case 0x30:
730                 nv04_gr_set_ctx1(device, inst, 0x2000, 0);
731                 return true;
732         case 0x19:
733                 nv04_gr_set_ctx1(device, inst, 0x2000, 0x2000);
734                 return true;
735         }
736         return false;
737 }
738
739 static bool
740 nv01_gr_mthd_bind_chroma(struct nvkm_device *device, u32 inst, u32 data)
741 {
742         switch (nv04_gr_mthd_bind_class(device, data)) {
743         case 0x30:
744                 nv04_gr_set_ctx1(device, inst, 0x1000, 0);
745                 return true;
746         /* Yes, for some reason even the old versions of objects
747          * accept 0x57 and not 0x17. Consistency be damned.
748          */
749         case 0x57:
750                 nv04_gr_set_ctx1(device, inst, 0x1000, 0x1000);
751                 return true;
752         }
753         return false;
754 }
755
756 static bool
757 nv03_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
758 {
759         bool (*func)(struct nvkm_device *, u32, u32);
760         switch (mthd) {
761         case 0x0184: func = nv01_gr_mthd_bind_patt; break;
762         case 0x0188: func = nv04_gr_mthd_bind_rop; break;
763         case 0x018c: func = nv04_gr_mthd_bind_beta1; break;
764         case 0x0190: func = nv04_gr_mthd_bind_surf_dst; break;
765         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
766         default:
767                 return false;
768         }
769         return func(device, inst, data);
770 }
771
772 static bool
773 nv04_gr_mthd_gdi(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
774 {
775         bool (*func)(struct nvkm_device *, u32, u32);
776         switch (mthd) {
777         case 0x0188: func = nv04_gr_mthd_bind_patt; break;
778         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
779         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
780         case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
781         case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
782         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
783         default:
784                 return false;
785         }
786         return func(device, inst, data);
787 }
788
789 static bool
790 nv01_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
791 {
792         bool (*func)(struct nvkm_device *, u32, u32);
793         switch (mthd) {
794         case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
795         case 0x0188: func = nv01_gr_mthd_bind_clip; break;
796         case 0x018c: func = nv01_gr_mthd_bind_patt; break;
797         case 0x0190: func = nv04_gr_mthd_bind_rop; break;
798         case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
799         case 0x0198: func = nv04_gr_mthd_bind_surf_dst; break;
800         case 0x019c: func = nv04_gr_mthd_bind_surf_src; break;
801         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
802         default:
803                 return false;
804         }
805         return func(device, inst, data);
806 }
807
808 static bool
809 nv04_gr_mthd_blit(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
810 {
811         bool (*func)(struct nvkm_device *, u32, u32);
812         switch (mthd) {
813         case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
814         case 0x0188: func = nv01_gr_mthd_bind_clip; break;
815         case 0x018c: func = nv04_gr_mthd_bind_patt; break;
816         case 0x0190: func = nv04_gr_mthd_bind_rop; break;
817         case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
818         case 0x0198: func = nv04_gr_mthd_bind_beta4; break;
819         case 0x019c: func = nv04_gr_mthd_bind_surf2d; break;
820         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
821         default:
822                 return false;
823         }
824         return func(device, inst, data);
825 }
826
827 static bool
828 nv04_gr_mthd_iifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
829 {
830         bool (*func)(struct nvkm_device *, u32, u32);
831         switch (mthd) {
832         case 0x0188: func = nv01_gr_mthd_bind_chroma; break;
833         case 0x018c: func = nv01_gr_mthd_bind_clip; break;
834         case 0x0190: func = nv04_gr_mthd_bind_patt; break;
835         case 0x0194: func = nv04_gr_mthd_bind_rop; break;
836         case 0x0198: func = nv04_gr_mthd_bind_beta1; break;
837         case 0x019c: func = nv04_gr_mthd_bind_beta4; break;
838         case 0x01a0: func = nv04_gr_mthd_bind_surf2d_swzsurf; break;
839         case 0x03e4: func = nv04_gr_mthd_set_operation; break;
840         default:
841                 return false;
842         }
843         return func(device, inst, data);
844 }
845
846 static bool
847 nv01_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
848 {
849         bool (*func)(struct nvkm_device *, u32, u32);
850         switch (mthd) {
851         case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
852         case 0x0188: func = nv01_gr_mthd_bind_clip; break;
853         case 0x018c: func = nv01_gr_mthd_bind_patt; break;
854         case 0x0190: func = nv04_gr_mthd_bind_rop; break;
855         case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
856         case 0x0198: func = nv04_gr_mthd_bind_surf_dst; break;
857         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
858         default:
859                 return false;
860         }
861         return func(device, inst, data);
862 }
863
864 static bool
865 nv04_gr_mthd_ifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
866 {
867         bool (*func)(struct nvkm_device *, u32, u32);
868         switch (mthd) {
869         case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
870         case 0x0188: func = nv01_gr_mthd_bind_clip; break;
871         case 0x018c: func = nv04_gr_mthd_bind_patt; break;
872         case 0x0190: func = nv04_gr_mthd_bind_rop; break;
873         case 0x0194: func = nv04_gr_mthd_bind_beta1; break;
874         case 0x0198: func = nv04_gr_mthd_bind_beta4; break;
875         case 0x019c: func = nv04_gr_mthd_bind_surf2d; break;
876         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
877         default:
878                 return false;
879         }
880         return func(device, inst, data);
881 }
882
883 static bool
884 nv03_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
885 {
886         bool (*func)(struct nvkm_device *, u32, u32);
887         switch (mthd) {
888         case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
889         case 0x0188: func = nv01_gr_mthd_bind_patt; break;
890         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
891         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
892         case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
893         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
894         default:
895                 return false;
896         }
897         return func(device, inst, data);
898 }
899
900 static bool
901 nv04_gr_mthd_sifc(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
902 {
903         bool (*func)(struct nvkm_device *, u32, u32);
904         switch (mthd) {
905         case 0x0184: func = nv01_gr_mthd_bind_chroma; break;
906         case 0x0188: func = nv04_gr_mthd_bind_patt; break;
907         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
908         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
909         case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
910         case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
911         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
912         default:
913                 return false;
914         }
915         return func(device, inst, data);
916 }
917
918 static bool
919 nv03_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
920 {
921         bool (*func)(struct nvkm_device *, u32, u32);
922         switch (mthd) {
923         case 0x0188: func = nv01_gr_mthd_bind_patt; break;
924         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
925         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
926         case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
927         case 0x0304: func = nv04_gr_mthd_set_operation; break;
928         default:
929                 return false;
930         }
931         return func(device, inst, data);
932 }
933
934 static bool
935 nv04_gr_mthd_sifm(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
936 {
937         bool (*func)(struct nvkm_device *, u32, u32);
938         switch (mthd) {
939         case 0x0188: func = nv04_gr_mthd_bind_patt; break;
940         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
941         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
942         case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
943         case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
944         case 0x0304: func = nv04_gr_mthd_set_operation; break;
945         default:
946                 return false;
947         }
948         return func(device, inst, data);
949 }
950
951 static bool
952 nv04_gr_mthd_surf3d(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
953 {
954         bool (*func)(struct nvkm_device *, u32, u32);
955         switch (mthd) {
956         case 0x02f8: func = nv04_gr_mthd_surf3d_clip_h; break;
957         case 0x02fc: func = nv04_gr_mthd_surf3d_clip_v; break;
958         default:
959                 return false;
960         }
961         return func(device, inst, data);
962 }
963
964 static bool
965 nv03_gr_mthd_ttri(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
966 {
967         bool (*func)(struct nvkm_device *, u32, u32);
968         switch (mthd) {
969         case 0x0188: func = nv01_gr_mthd_bind_clip; break;
970         case 0x018c: func = nv04_gr_mthd_bind_surf_color; break;
971         case 0x0190: func = nv04_gr_mthd_bind_surf_zeta; break;
972         default:
973                 return false;
974         }
975         return func(device, inst, data);
976 }
977
978 static bool
979 nv01_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
980 {
981         bool (*func)(struct nvkm_device *, u32, u32);
982         switch (mthd) {
983         case 0x0184: func = nv01_gr_mthd_bind_clip; break;
984         case 0x0188: func = nv01_gr_mthd_bind_patt; break;
985         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
986         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
987         case 0x0194: func = nv04_gr_mthd_bind_surf_dst; break;
988         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
989         default:
990                 return false;
991         }
992         return func(device, inst, data);
993 }
994
995 static bool
996 nv04_gr_mthd_prim(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
997 {
998         bool (*func)(struct nvkm_device *, u32, u32);
999         switch (mthd) {
1000         case 0x0184: func = nv01_gr_mthd_bind_clip; break;
1001         case 0x0188: func = nv04_gr_mthd_bind_patt; break;
1002         case 0x018c: func = nv04_gr_mthd_bind_rop; break;
1003         case 0x0190: func = nv04_gr_mthd_bind_beta1; break;
1004         case 0x0194: func = nv04_gr_mthd_bind_beta4; break;
1005         case 0x0198: func = nv04_gr_mthd_bind_surf2d; break;
1006         case 0x02fc: func = nv04_gr_mthd_set_operation; break;
1007         default:
1008                 return false;
1009         }
1010         return func(device, inst, data);
1011 }
1012
1013 static bool
1014 nv04_gr_mthd(struct nvkm_device *device, u32 inst, u32 mthd, u32 data)
1015 {
1016         bool (*func)(struct nvkm_device *, u32, u32, u32);
1017         switch (nvkm_rd32(device, 0x700000 + inst) & 0x000000ff) {
1018         case 0x1c ... 0x1e:
1019                    func = nv01_gr_mthd_prim; break;
1020         case 0x1f: func = nv01_gr_mthd_blit; break;
1021         case 0x21: func = nv01_gr_mthd_ifc; break;
1022         case 0x36: func = nv03_gr_mthd_sifc; break;
1023         case 0x37: func = nv03_gr_mthd_sifm; break;
1024         case 0x48: func = nv03_gr_mthd_ttri; break;
1025         case 0x4a: func = nv04_gr_mthd_gdi; break;
1026         case 0x4b: func = nv03_gr_mthd_gdi; break;
1027         case 0x53: func = nv04_gr_mthd_surf3d; break;
1028         case 0x5c ... 0x5e:
1029                    func = nv04_gr_mthd_prim; break;
1030         case 0x5f: func = nv04_gr_mthd_blit; break;
1031         case 0x60: func = nv04_gr_mthd_iifc; break;
1032         case 0x61: func = nv04_gr_mthd_ifc; break;
1033         case 0x76: func = nv04_gr_mthd_sifc; break;
1034         case 0x77: func = nv04_gr_mthd_sifm; break;
1035         default:
1036                 return false;
1037         }
1038         return func(device, inst, mthd, data);
1039 }
1040
1041 static int
1042 nv04_gr_object_bind(struct nvkm_object *object, struct nvkm_gpuobj *parent,
1043                     int align, struct nvkm_gpuobj **pgpuobj)
1044 {
1045         int ret = nvkm_gpuobj_new(object->engine->subdev.device, 16, align,
1046                                   false, parent, pgpuobj);
1047         if (ret == 0) {
1048                 nvkm_kmap(*pgpuobj);
1049                 nvkm_wo32(*pgpuobj, 0x00, object->oclass_name);
1050                 nvkm_wo32(*pgpuobj, 0x04, 0x00000000);
1051                 nvkm_wo32(*pgpuobj, 0x08, 0x00000000);
1052 #ifdef __BIG_ENDIAN
1053                 nvkm_mo32(*pgpuobj, 0x08, 0x00080000, 0x00080000);
1054 #endif
1055                 nvkm_wo32(*pgpuobj, 0x0c, 0x00000000);
1056                 nvkm_done(*pgpuobj);
1057         }
1058         return ret;
1059 }
1060
1061 const struct nvkm_object_func
1062 nv04_gr_object = {
1063         .bind = nv04_gr_object_bind,
1064 };
1065
1066 /*******************************************************************************
1067  * PGRAPH context
1068  ******************************************************************************/
1069
1070 static struct nv04_gr_chan *
1071 nv04_gr_channel(struct nv04_gr *gr)
1072 {
1073         struct nvkm_device *device = gr->base.engine.subdev.device;
1074         struct nv04_gr_chan *chan = NULL;
1075         if (nvkm_rd32(device, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
1076                 int chid = nvkm_rd32(device, NV04_PGRAPH_CTX_USER) >> 24;
1077                 if (chid < ARRAY_SIZE(gr->chan))
1078                         chan = gr->chan[chid];
1079         }
1080         return chan;
1081 }
1082
1083 static int
1084 nv04_gr_load_context(struct nv04_gr_chan *chan, int chid)
1085 {
1086         struct nvkm_device *device = chan->gr->base.engine.subdev.device;
1087         int i;
1088
1089         for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
1090                 nvkm_wr32(device, nv04_gr_ctx_regs[i], chan->nv04[i]);
1091
1092         nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
1093         nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
1094         nvkm_mask(device, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
1095         return 0;
1096 }
1097
1098 static int
1099 nv04_gr_unload_context(struct nv04_gr_chan *chan)
1100 {
1101         struct nvkm_device *device = chan->gr->base.engine.subdev.device;
1102         int i;
1103
1104         for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++)
1105                 chan->nv04[i] = nvkm_rd32(device, nv04_gr_ctx_regs[i]);
1106
1107         nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
1108         nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
1109         return 0;
1110 }
1111
1112 static void
1113 nv04_gr_context_switch(struct nv04_gr *gr)
1114 {
1115         struct nvkm_device *device = gr->base.engine.subdev.device;
1116         struct nv04_gr_chan *prev = NULL;
1117         struct nv04_gr_chan *next = NULL;
1118         int chid;
1119
1120         nv04_gr_idle(&gr->base);
1121
1122         /* If previous context is valid, we need to save it */
1123         prev = nv04_gr_channel(gr);
1124         if (prev)
1125                 nv04_gr_unload_context(prev);
1126
1127         /* load context for next channel */
1128         chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
1129         next = gr->chan[chid];
1130         if (next)
1131                 nv04_gr_load_context(next, chid);
1132 }
1133
1134 static u32 *ctx_reg(struct nv04_gr_chan *chan, u32 reg)
1135 {
1136         int i;
1137
1138         for (i = 0; i < ARRAY_SIZE(nv04_gr_ctx_regs); i++) {
1139                 if (nv04_gr_ctx_regs[i] == reg)
1140                         return &chan->nv04[i];
1141         }
1142
1143         return NULL;
1144 }
1145
1146 static void *
1147 nv04_gr_chan_dtor(struct nvkm_object *object)
1148 {
1149         struct nv04_gr_chan *chan = nv04_gr_chan(object);
1150         struct nv04_gr *gr = chan->gr;
1151         unsigned long flags;
1152
1153         spin_lock_irqsave(&gr->lock, flags);
1154         gr->chan[chan->chid] = NULL;
1155         spin_unlock_irqrestore(&gr->lock, flags);
1156         return chan;
1157 }
1158
1159 static int
1160 nv04_gr_chan_fini(struct nvkm_object *object, bool suspend)
1161 {
1162         struct nv04_gr_chan *chan = nv04_gr_chan(object);
1163         struct nv04_gr *gr = chan->gr;
1164         struct nvkm_device *device = gr->base.engine.subdev.device;
1165         unsigned long flags;
1166
1167         spin_lock_irqsave(&gr->lock, flags);
1168         nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
1169         if (nv04_gr_channel(gr) == chan)
1170                 nv04_gr_unload_context(chan);
1171         nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
1172         spin_unlock_irqrestore(&gr->lock, flags);
1173         return 0;
1174 }
1175
1176 static const struct nvkm_object_func
1177 nv04_gr_chan = {
1178         .dtor = nv04_gr_chan_dtor,
1179         .fini = nv04_gr_chan_fini,
1180 };
1181
1182 static int
1183 nv04_gr_chan_new(struct nvkm_gr *base, struct nvkm_fifo_chan *fifoch,
1184                  const struct nvkm_oclass *oclass, struct nvkm_object **pobject)
1185 {
1186         struct nv04_gr *gr = nv04_gr(base);
1187         struct nv04_gr_chan *chan;
1188         unsigned long flags;
1189
1190         if (!(chan = kzalloc(sizeof(*chan), GFP_KERNEL)))
1191                 return -ENOMEM;
1192         nvkm_object_ctor(&nv04_gr_chan, oclass, &chan->object);
1193         chan->gr = gr;
1194         chan->chid = fifoch->chid;
1195         *pobject = &chan->object;
1196
1197         *ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;
1198
1199         spin_lock_irqsave(&gr->lock, flags);
1200         gr->chan[chan->chid] = chan;
1201         spin_unlock_irqrestore(&gr->lock, flags);
1202         return 0;
1203 }
1204
1205 /*******************************************************************************
1206  * PGRAPH engine/subdev functions
1207  ******************************************************************************/
1208
1209 bool
1210 nv04_gr_idle(struct nvkm_gr *gr)
1211 {
1212         struct nvkm_subdev *subdev = &gr->engine.subdev;
1213         struct nvkm_device *device = subdev->device;
1214         u32 mask = 0xffffffff;
1215
1216         if (device->card_type == NV_40)
1217                 mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;
1218
1219         if (nvkm_msec(device, 2000,
1220                 if (!(nvkm_rd32(device, NV04_PGRAPH_STATUS) & mask))
1221                         break;
1222         ) < 0) {
1223                 nvkm_error(subdev, "idle timed out with status %08x\n",
1224                            nvkm_rd32(device, NV04_PGRAPH_STATUS));
1225                 return false;
1226         }
1227
1228         return true;
1229 }
1230
1231 static const struct nvkm_bitfield
1232 nv04_gr_intr_name[] = {
1233         { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
1234         {}
1235 };
1236
1237 static const struct nvkm_bitfield
1238 nv04_gr_nstatus[] = {
1239         { NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
1240         { NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
1241         { NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
1242         { NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
1243         {}
1244 };
1245
1246 const struct nvkm_bitfield
1247 nv04_gr_nsource[] = {
1248         { NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
1249         { NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
1250         { NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
1251         { NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
1252         { NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
1253         { NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
1254         { NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
1255         { NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
1256         { NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
1257         { NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
1258         { NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
1259         { NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
1260         { NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
1261         { NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
1262         { NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
1263         { NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
1264         { NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
1265         { NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
1266         { NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
1267         {}
1268 };
1269
1270 static void
1271 nv04_gr_intr(struct nvkm_subdev *subdev)
1272 {
1273         struct nv04_gr *gr = (void *)subdev;
1274         struct nv04_gr_chan *chan = NULL;
1275         struct nvkm_device *device = gr->base.engine.subdev.device;
1276         u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);
1277         u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);
1278         u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);
1279         u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);
1280         u32 chid = (addr & 0x0f000000) >> 24;
1281         u32 subc = (addr & 0x0000e000) >> 13;
1282         u32 mthd = (addr & 0x00001ffc);
1283         u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);
1284         u32 class = nvkm_rd32(device, 0x400180 + subc * 4) & 0xff;
1285         u32 inst = (nvkm_rd32(device, 0x40016c) & 0xffff) << 4;
1286         u32 show = stat;
1287         char msg[128], src[128], sta[128];
1288         unsigned long flags;
1289
1290         spin_lock_irqsave(&gr->lock, flags);
1291         chan = gr->chan[chid];
1292
1293         if (stat & NV_PGRAPH_INTR_NOTIFY) {
1294                 if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
1295                         if (!nv04_gr_mthd(device, inst, mthd, data))
1296                                 show &= ~NV_PGRAPH_INTR_NOTIFY;
1297                 }
1298         }
1299
1300         if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
1301                 nvkm_wr32(device, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
1302                 stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1303                 show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1304                 nv04_gr_context_switch(gr);
1305         }
1306
1307         nvkm_wr32(device, NV03_PGRAPH_INTR, stat);
1308         nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001);
1309
1310         if (show) {
1311                 nvkm_snprintbf(msg, sizeof(msg), nv04_gr_intr_name, show);
1312                 nvkm_snprintbf(src, sizeof(src), nv04_gr_nsource, nsource);
1313                 nvkm_snprintbf(sta, sizeof(sta), nv04_gr_nstatus, nstatus);
1314                 nvkm_error(subdev, "intr %08x [%s] nsource %08x [%s] "
1315                                    "nstatus %08x [%s] ch %d [%s] subc %d "
1316                                    "class %04x mthd %04x data %08x\n",
1317                            show, msg, nsource, src, nstatus, sta, chid,
1318                            chan ? chan->object.client->name : "unknown",
1319                            subc, class, mthd, data);
1320         }
1321
1322         spin_unlock_irqrestore(&gr->lock, flags);
1323 }
1324
1325 static const struct nvkm_gr_func
1326 nv04_gr = {
1327         .chan_new = nv04_gr_chan_new,
1328         .sclass = {
1329                 { -1, -1, 0x0012, &nv04_gr_object }, /* beta1 */
1330                 { -1, -1, 0x0017, &nv04_gr_object }, /* chroma */
1331                 { -1, -1, 0x0018, &nv04_gr_object }, /* pattern (nv01) */
1332                 { -1, -1, 0x0019, &nv04_gr_object }, /* clip */
1333                 { -1, -1, 0x001c, &nv04_gr_object }, /* line */
1334                 { -1, -1, 0x001d, &nv04_gr_object }, /* tri */
1335                 { -1, -1, 0x001e, &nv04_gr_object }, /* rect */
1336                 { -1, -1, 0x001f, &nv04_gr_object },
1337                 { -1, -1, 0x0021, &nv04_gr_object },
1338                 { -1, -1, 0x0030, &nv04_gr_object }, /* null */
1339                 { -1, -1, 0x0036, &nv04_gr_object },
1340                 { -1, -1, 0x0037, &nv04_gr_object },
1341                 { -1, -1, 0x0038, &nv04_gr_object }, /* dvd subpicture */
1342                 { -1, -1, 0x0039, &nv04_gr_object }, /* m2mf */
1343                 { -1, -1, 0x0042, &nv04_gr_object }, /* surf2d */
1344                 { -1, -1, 0x0043, &nv04_gr_object }, /* rop */
1345                 { -1, -1, 0x0044, &nv04_gr_object }, /* pattern */
1346                 { -1, -1, 0x0048, &nv04_gr_object },
1347                 { -1, -1, 0x004a, &nv04_gr_object },
1348                 { -1, -1, 0x004b, &nv04_gr_object },
1349                 { -1, -1, 0x0052, &nv04_gr_object }, /* swzsurf */
1350                 { -1, -1, 0x0053, &nv04_gr_object },
1351                 { -1, -1, 0x0054, &nv04_gr_object }, /* ttri */
1352                 { -1, -1, 0x0055, &nv04_gr_object }, /* mtri */
1353                 { -1, -1, 0x0057, &nv04_gr_object }, /* chroma */
1354                 { -1, -1, 0x0058, &nv04_gr_object }, /* surf_dst */
1355                 { -1, -1, 0x0059, &nv04_gr_object }, /* surf_src */
1356                 { -1, -1, 0x005a, &nv04_gr_object }, /* surf_color */
1357                 { -1, -1, 0x005b, &nv04_gr_object }, /* surf_zeta */
1358                 { -1, -1, 0x005c, &nv04_gr_object }, /* line */
1359                 { -1, -1, 0x005d, &nv04_gr_object }, /* tri */
1360                 { -1, -1, 0x005e, &nv04_gr_object }, /* rect */
1361                 { -1, -1, 0x005f, &nv04_gr_object },
1362                 { -1, -1, 0x0060, &nv04_gr_object },
1363                 { -1, -1, 0x0061, &nv04_gr_object },
1364                 { -1, -1, 0x0064, &nv04_gr_object }, /* iifc (nv05) */
1365                 { -1, -1, 0x0065, &nv04_gr_object }, /* ifc (nv05) */
1366                 { -1, -1, 0x0066, &nv04_gr_object }, /* sifc (nv05) */
1367                 { -1, -1, 0x0072, &nv04_gr_object }, /* beta4 */
1368                 { -1, -1, 0x0076, &nv04_gr_object },
1369                 { -1, -1, 0x0077, &nv04_gr_object },
1370                 {}
1371         }
1372 };
1373
1374 static int
1375 nv04_gr_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
1376              struct nvkm_oclass *oclass, void *data, u32 size,
1377              struct nvkm_object **pobject)
1378 {
1379         struct nv04_gr *gr;
1380         int ret;
1381
1382         ret = nvkm_gr_create(parent, engine, oclass, true, &gr);
1383         *pobject = nv_object(gr);
1384         if (ret)
1385                 return ret;
1386
1387         gr->base.func = &nv04_gr;
1388         nv_subdev(gr)->unit = 0x00001000;
1389         nv_subdev(gr)->intr = nv04_gr_intr;
1390         spin_lock_init(&gr->lock);
1391         return 0;
1392 }
1393
1394 static int
1395 nv04_gr_init(struct nvkm_object *object)
1396 {
1397         struct nvkm_engine *engine = nv_engine(object);
1398         struct nv04_gr *gr = (void *)engine;
1399         struct nvkm_device *device = gr->base.engine.subdev.device;
1400         int ret;
1401
1402         ret = nvkm_gr_init(&gr->base);
1403         if (ret)
1404                 return ret;
1405
1406         /* Enable PGRAPH interrupts */
1407         nvkm_wr32(device, NV03_PGRAPH_INTR, 0xFFFFFFFF);
1408         nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);
1409
1410         nvkm_wr32(device, NV04_PGRAPH_VALID1, 0);
1411         nvkm_wr32(device, NV04_PGRAPH_VALID2, 0);
1412         /*nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x000001FF);
1413         nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
1414         nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x1231c000);
1415         /*1231C000 blob, 001 haiku*/
1416         /*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
1417         nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x72111100);
1418         /*0x72111100 blob , 01 haiku*/
1419         /*nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
1420         nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
1421         /*haiku same*/
1422
1423         /*nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
1424         nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
1425         /*haiku and blob 10d4*/
1426
1427         nvkm_wr32(device, NV04_PGRAPH_STATE        , 0xFFFFFFFF);
1428         nvkm_wr32(device, NV04_PGRAPH_CTX_CONTROL  , 0x10000100);
1429         nvkm_mask(device, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
1430
1431         /* These don't belong here, they're part of a per-channel context */
1432         nvkm_wr32(device, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
1433         nvkm_wr32(device, NV04_PGRAPH_BETA_AND     , 0xFFFFFFFF);
1434         return 0;
1435 }
1436
1437 struct nvkm_oclass
1438 nv04_gr_oclass = {
1439         .handle = NV_ENGINE(GR, 0x04),
1440         .ofuncs = &(struct nvkm_ofuncs) {
1441                 .ctor = nv04_gr_ctor,
1442                 .dtor = _nvkm_gr_dtor,
1443                 .init = nv04_gr_init,
1444                 .fini = _nvkm_gr_fini,
1445         },
1446 };