drm/nouveau/fifo: split user classes out from engine implementations
drivers/gpu/drm/nouveau/nvkm/engine/gr/nv10.c
1 /*
2  * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
3  * All Rights Reserved.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the next
13  * paragraph) shall be included in all copies or substantial portions of the
14  * Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
19  * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22  * DEALINGS IN THE SOFTWARE.
23  */
24 #include <engine/gr.h>
25 #include "regs.h"
26
27 #include <core/client.h>
28 #include <engine/fifo.h>
29 #include <engine/fifo/chan.h>
30 #include <subdev/fb.h>
31
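/*
 * Per-channel snapshot of the PGRAPH 3D pipe.  Each array mirrors a block of
 * 32-bit words read back through NV10_PGRAPH_PIPE_ADDRESS/_DATA and is named
 * after the pipe address it starts at (e.g. pipe_0x0200 covers 0x0c0 bytes
 * beginning at pipe address 0x0200).
 */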
32 struct pipe_state {
33         u32 pipe_0x0000[0x040/4];
34         u32 pipe_0x0040[0x010/4];
35         u32 pipe_0x0200[0x0c0/4];
36         u32 pipe_0x4400[0x080/4];
37         u32 pipe_0x6400[0x3b0/4];
38         u32 pipe_0x6800[0x2f0/4];
39         u32 pipe_0x6c00[0x030/4];
40         u32 pipe_0x7000[0x130/4];
41         u32 pipe_0x7400[0x0c0/4];
42         u32 pipe_0x7800[0x0c0/4];
43 };
44
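/*
 * PGRAPH MMIO registers that make up the software-managed NV10 graphics
 * context.  They are read into nv10_gr_chan->nv10[] when a channel is
 * unloaded and written back in the same order when it is loaded again.
 */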
45 static int nv10_gr_ctx_regs[] = {
46         NV10_PGRAPH_CTX_SWITCH(0),
47         NV10_PGRAPH_CTX_SWITCH(1),
48         NV10_PGRAPH_CTX_SWITCH(2),
49         NV10_PGRAPH_CTX_SWITCH(3),
50         NV10_PGRAPH_CTX_SWITCH(4),
51         NV10_PGRAPH_CTX_CACHE(0, 0),
52         NV10_PGRAPH_CTX_CACHE(0, 1),
53         NV10_PGRAPH_CTX_CACHE(0, 2),
54         NV10_PGRAPH_CTX_CACHE(0, 3),
55         NV10_PGRAPH_CTX_CACHE(0, 4),
56         NV10_PGRAPH_CTX_CACHE(1, 0),
57         NV10_PGRAPH_CTX_CACHE(1, 1),
58         NV10_PGRAPH_CTX_CACHE(1, 2),
59         NV10_PGRAPH_CTX_CACHE(1, 3),
60         NV10_PGRAPH_CTX_CACHE(1, 4),
61         NV10_PGRAPH_CTX_CACHE(2, 0),
62         NV10_PGRAPH_CTX_CACHE(2, 1),
63         NV10_PGRAPH_CTX_CACHE(2, 2),
64         NV10_PGRAPH_CTX_CACHE(2, 3),
65         NV10_PGRAPH_CTX_CACHE(2, 4),
66         NV10_PGRAPH_CTX_CACHE(3, 0),
67         NV10_PGRAPH_CTX_CACHE(3, 1),
68         NV10_PGRAPH_CTX_CACHE(3, 2),
69         NV10_PGRAPH_CTX_CACHE(3, 3),
70         NV10_PGRAPH_CTX_CACHE(3, 4),
71         NV10_PGRAPH_CTX_CACHE(4, 0),
72         NV10_PGRAPH_CTX_CACHE(4, 1),
73         NV10_PGRAPH_CTX_CACHE(4, 2),
74         NV10_PGRAPH_CTX_CACHE(4, 3),
75         NV10_PGRAPH_CTX_CACHE(4, 4),
76         NV10_PGRAPH_CTX_CACHE(5, 0),
77         NV10_PGRAPH_CTX_CACHE(5, 1),
78         NV10_PGRAPH_CTX_CACHE(5, 2),
79         NV10_PGRAPH_CTX_CACHE(5, 3),
80         NV10_PGRAPH_CTX_CACHE(5, 4),
81         NV10_PGRAPH_CTX_CACHE(6, 0),
82         NV10_PGRAPH_CTX_CACHE(6, 1),
83         NV10_PGRAPH_CTX_CACHE(6, 2),
84         NV10_PGRAPH_CTX_CACHE(6, 3),
85         NV10_PGRAPH_CTX_CACHE(6, 4),
86         NV10_PGRAPH_CTX_CACHE(7, 0),
87         NV10_PGRAPH_CTX_CACHE(7, 1),
88         NV10_PGRAPH_CTX_CACHE(7, 2),
89         NV10_PGRAPH_CTX_CACHE(7, 3),
90         NV10_PGRAPH_CTX_CACHE(7, 4),
91         NV10_PGRAPH_CTX_USER,
92         NV04_PGRAPH_DMA_START_0,
93         NV04_PGRAPH_DMA_START_1,
94         NV04_PGRAPH_DMA_LENGTH,
95         NV04_PGRAPH_DMA_MISC,
96         NV10_PGRAPH_DMA_PITCH,
97         NV04_PGRAPH_BOFFSET0,
98         NV04_PGRAPH_BBASE0,
99         NV04_PGRAPH_BLIMIT0,
100         NV04_PGRAPH_BOFFSET1,
101         NV04_PGRAPH_BBASE1,
102         NV04_PGRAPH_BLIMIT1,
103         NV04_PGRAPH_BOFFSET2,
104         NV04_PGRAPH_BBASE2,
105         NV04_PGRAPH_BLIMIT2,
106         NV04_PGRAPH_BOFFSET3,
107         NV04_PGRAPH_BBASE3,
108         NV04_PGRAPH_BLIMIT3,
109         NV04_PGRAPH_BOFFSET4,
110         NV04_PGRAPH_BBASE4,
111         NV04_PGRAPH_BLIMIT4,
112         NV04_PGRAPH_BOFFSET5,
113         NV04_PGRAPH_BBASE5,
114         NV04_PGRAPH_BLIMIT5,
115         NV04_PGRAPH_BPITCH0,
116         NV04_PGRAPH_BPITCH1,
117         NV04_PGRAPH_BPITCH2,
118         NV04_PGRAPH_BPITCH3,
119         NV04_PGRAPH_BPITCH4,
120         NV10_PGRAPH_SURFACE,
121         NV10_PGRAPH_STATE,
122         NV04_PGRAPH_BSWIZZLE2,
123         NV04_PGRAPH_BSWIZZLE5,
124         NV04_PGRAPH_BPIXEL,
125         NV10_PGRAPH_NOTIFY,
126         NV04_PGRAPH_PATT_COLOR0,
127         NV04_PGRAPH_PATT_COLOR1,
128         NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
129         0x00400904,
130         0x00400908,
131         0x0040090c,
132         0x00400910,
133         0x00400914,
134         0x00400918,
135         0x0040091c,
136         0x00400920,
137         0x00400924,
138         0x00400928,
139         0x0040092c,
140         0x00400930,
141         0x00400934,
142         0x00400938,
143         0x0040093c,
144         0x00400940,
145         0x00400944,
146         0x00400948,
147         0x0040094c,
148         0x00400950,
149         0x00400954,
150         0x00400958,
151         0x0040095c,
152         0x00400960,
153         0x00400964,
154         0x00400968,
155         0x0040096c,
156         0x00400970,
157         0x00400974,
158         0x00400978,
159         0x0040097c,
160         0x00400980,
161         0x00400984,
162         0x00400988,
163         0x0040098c,
164         0x00400990,
165         0x00400994,
166         0x00400998,
167         0x0040099c,
168         0x004009a0,
169         0x004009a4,
170         0x004009a8,
171         0x004009ac,
172         0x004009b0,
173         0x004009b4,
174         0x004009b8,
175         0x004009bc,
176         0x004009c0,
177         0x004009c4,
178         0x004009c8,
179         0x004009cc,
180         0x004009d0,
181         0x004009d4,
182         0x004009d8,
183         0x004009dc,
184         0x004009e0,
185         0x004009e4,
186         0x004009e8,
187         0x004009ec,
188         0x004009f0,
189         0x004009f4,
190         0x004009f8,
191         0x004009fc,
192         NV04_PGRAPH_PATTERN,    /* 2 values from 0x400808 to 0x40080c */
193         0x0040080c,
194         NV04_PGRAPH_PATTERN_SHAPE,
195         NV03_PGRAPH_MONO_COLOR0,
196         NV04_PGRAPH_ROP3,
197         NV04_PGRAPH_CHROMA,
198         NV04_PGRAPH_BETA_AND,
199         NV04_PGRAPH_BETA_PREMULT,
200         0x00400e70,
201         0x00400e74,
202         0x00400e78,
203         0x00400e7c,
204         0x00400e80,
205         0x00400e84,
206         0x00400e88,
207         0x00400e8c,
208         0x00400ea0,
209         0x00400ea4,
210         0x00400ea8,
211         0x00400e90,
212         0x00400e94,
213         0x00400e98,
214         0x00400e9c,
215         NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
216         NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
217         0x00400f04,
218         0x00400f24,
219         0x00400f08,
220         0x00400f28,
221         0x00400f0c,
222         0x00400f2c,
223         0x00400f10,
224         0x00400f30,
225         0x00400f14,
226         0x00400f34,
227         0x00400f18,
228         0x00400f38,
229         0x00400f1c,
230         0x00400f3c,
231         NV10_PGRAPH_XFMODE0,
232         NV10_PGRAPH_XFMODE1,
233         NV10_PGRAPH_GLOBALSTATE0,
234         NV10_PGRAPH_GLOBALSTATE1,
235         NV04_PGRAPH_STORED_FMT,
236         NV04_PGRAPH_SOURCE_COLOR,
237         NV03_PGRAPH_ABS_X_RAM,  /* 32 values from 0x400400 to 0x40047c */
238         NV03_PGRAPH_ABS_Y_RAM,  /* 32 values from 0x400480 to 0x4004fc */
239         0x00400404,
240         0x00400484,
241         0x00400408,
242         0x00400488,
243         0x0040040c,
244         0x0040048c,
245         0x00400410,
246         0x00400490,
247         0x00400414,
248         0x00400494,
249         0x00400418,
250         0x00400498,
251         0x0040041c,
252         0x0040049c,
253         0x00400420,
254         0x004004a0,
255         0x00400424,
256         0x004004a4,
257         0x00400428,
258         0x004004a8,
259         0x0040042c,
260         0x004004ac,
261         0x00400430,
262         0x004004b0,
263         0x00400434,
264         0x004004b4,
265         0x00400438,
266         0x004004b8,
267         0x0040043c,
268         0x004004bc,
269         0x00400440,
270         0x004004c0,
271         0x00400444,
272         0x004004c4,
273         0x00400448,
274         0x004004c8,
275         0x0040044c,
276         0x004004cc,
277         0x00400450,
278         0x004004d0,
279         0x00400454,
280         0x004004d4,
281         0x00400458,
282         0x004004d8,
283         0x0040045c,
284         0x004004dc,
285         0x00400460,
286         0x004004e0,
287         0x00400464,
288         0x004004e4,
289         0x00400468,
290         0x004004e8,
291         0x0040046c,
292         0x004004ec,
293         0x00400470,
294         0x004004f0,
295         0x00400474,
296         0x004004f4,
297         0x00400478,
298         0x004004f8,
299         0x0040047c,
300         0x004004fc,
301         NV03_PGRAPH_ABS_UCLIP_XMIN,
302         NV03_PGRAPH_ABS_UCLIP_XMAX,
303         NV03_PGRAPH_ABS_UCLIP_YMIN,
304         NV03_PGRAPH_ABS_UCLIP_YMAX,
305         0x00400550,
306         0x00400558,
307         0x00400554,
308         0x0040055c,
309         NV03_PGRAPH_ABS_UCLIPA_XMIN,
310         NV03_PGRAPH_ABS_UCLIPA_XMAX,
311         NV03_PGRAPH_ABS_UCLIPA_YMIN,
312         NV03_PGRAPH_ABS_UCLIPA_YMAX,
313         NV03_PGRAPH_ABS_ICLIP_XMAX,
314         NV03_PGRAPH_ABS_ICLIP_YMAX,
315         NV03_PGRAPH_XY_LOGIC_MISC0,
316         NV03_PGRAPH_XY_LOGIC_MISC1,
317         NV03_PGRAPH_XY_LOGIC_MISC2,
318         NV03_PGRAPH_XY_LOGIC_MISC3,
319         NV03_PGRAPH_CLIPX_0,
320         NV03_PGRAPH_CLIPX_1,
321         NV03_PGRAPH_CLIPY_0,
322         NV03_PGRAPH_CLIPY_1,
323         NV10_PGRAPH_COMBINER0_IN_ALPHA,
324         NV10_PGRAPH_COMBINER1_IN_ALPHA,
325         NV10_PGRAPH_COMBINER0_IN_RGB,
326         NV10_PGRAPH_COMBINER1_IN_RGB,
327         NV10_PGRAPH_COMBINER_COLOR0,
328         NV10_PGRAPH_COMBINER_COLOR1,
329         NV10_PGRAPH_COMBINER0_OUT_ALPHA,
330         NV10_PGRAPH_COMBINER1_OUT_ALPHA,
331         NV10_PGRAPH_COMBINER0_OUT_RGB,
332         NV10_PGRAPH_COMBINER1_OUT_RGB,
333         NV10_PGRAPH_COMBINER_FINAL0,
334         NV10_PGRAPH_COMBINER_FINAL1,
335         0x00400e00,
336         0x00400e04,
337         0x00400e08,
338         0x00400e0c,
339         0x00400e10,
340         0x00400e14,
341         0x00400e18,
342         0x00400e1c,
343         0x00400e20,
344         0x00400e24,
345         0x00400e28,
346         0x00400e2c,
347         0x00400e30,
348         0x00400e34,
349         0x00400e38,
350         0x00400e3c,
351         NV04_PGRAPH_PASSTHRU_0,
352         NV04_PGRAPH_PASSTHRU_1,
353         NV04_PGRAPH_PASSTHRU_2,
354         NV10_PGRAPH_DIMX_TEXTURE,
355         NV10_PGRAPH_WDIMX_TEXTURE,
356         NV10_PGRAPH_DVD_COLORFMT,
357         NV10_PGRAPH_SCALED_FORMAT,
358         NV04_PGRAPH_MISC24_0,
359         NV04_PGRAPH_MISC24_1,
360         NV04_PGRAPH_MISC24_2,
361         NV03_PGRAPH_X_MISC,
362         NV03_PGRAPH_Y_MISC,
363         NV04_PGRAPH_VALID1,
364         NV04_PGRAPH_VALID2,
365 };
366
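/*
 * Additional context registers that only exist on NV11/NV17-class hardware
 * (card_type >= NV_11 && chipset >= 0x17); kept in nv10_gr_chan->nv17[].
 */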
367 static int nv17_gr_ctx_regs[] = {
368         NV10_PGRAPH_DEBUG_4,
369         0x004006b0,
370         0x00400eac,
371         0x00400eb0,
372         0x00400eb4,
373         0x00400eb8,
374         0x00400ebc,
375         0x00400ec0,
376         0x00400ec4,
377         0x00400ec8,
378         0x00400ecc,
379         0x00400ed0,
380         0x00400ed4,
381         0x00400ed8,
382         0x00400edc,
383         0x00400ee0,
384         0x00400a00,
385         0x00400a04,
386 };
387
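/*
 * Engine instance and per-channel context.  gr->chan[] tracks the software
 * context of every FIFO channel, indexed by channel id, and is protected by
 * gr->lock (taken from the interrupt handler as well as the context
 * ctor/dtor/fini paths); each nv10_gr_chan holds the register and pipe-state
 * copies restored when that channel becomes active.
 */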
388 struct nv10_gr {
389         struct nvkm_gr base;
390         struct nv10_gr_chan *chan[32];
391         spinlock_t lock;
392 };
393
394 struct nv10_gr_chan {
395         struct nvkm_object base;
396         int chid;
397         int nv10[ARRAY_SIZE(nv10_gr_ctx_regs)];
398         int nv17[ARRAY_SIZE(nv17_gr_ctx_regs)];
399         struct pipe_state pipe_state;
400         u32 lma_window[4];
401 };
402
403
404 static inline struct nv10_gr *
405 nv10_gr(struct nv10_gr_chan *chan)
406 {
407         return (void *)nv_object(chan)->engine;
408 }
409
410 /*******************************************************************************
411  * Graphics object classes
412  ******************************************************************************/
413
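/*
 * PIPE_SAVE/PIPE_RESTORE stream an entire state block through the PIPE_DATA
 * port (which steps through consecutive pipe addresses) after programming
 * PIPE_ADDRESS once with the block's base address.  Note the first argument
 * is ignored; the macros rely on a 'device' variable being in scope at the
 * call site.
 */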
414 #define PIPE_SAVE(gr, state, addr)                                      \
415         do {                                                            \
416                 int __i;                                                \
417                 nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr);              \
418                 for (__i = 0; __i < ARRAY_SIZE(state); __i++)           \
419                         state[__i] = nvkm_rd32(device, NV10_PGRAPH_PIPE_DATA); \
420         } while (0)
421
422 #define PIPE_RESTORE(gr, state, addr)                                   \
423         do {                                                            \
424                 int __i;                                                \
425                 nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, addr);              \
426                 for (__i = 0; __i < ARRAY_SIZE(state); __i++)           \
427                         nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, state[__i]); \
428         } while (0)
429
430 static struct nvkm_oclass
431 nv10_gr_sclass[] = {
432         { 0x0012, &nv04_gr_ofuncs }, /* beta1 */
433         { 0x0019, &nv04_gr_ofuncs }, /* clip */
434         { 0x0030, &nv04_gr_ofuncs }, /* null */
435         { 0x0039, &nv04_gr_ofuncs }, /* m2mf */
436         { 0x0043, &nv04_gr_ofuncs }, /* rop */
437         { 0x0044, &nv04_gr_ofuncs }, /* pattern */
438         { 0x004a, &nv04_gr_ofuncs }, /* gdi */
439         { 0x0052, &nv04_gr_ofuncs }, /* swzsurf */
440         { 0x005f, &nv04_gr_ofuncs }, /* blit */
441         { 0x0062, &nv04_gr_ofuncs }, /* surf2d */
442         { 0x0072, &nv04_gr_ofuncs }, /* beta4 */
443         { 0x0089, &nv04_gr_ofuncs }, /* sifm */
444         { 0x008a, &nv04_gr_ofuncs }, /* ifc */
445         { 0x009f, &nv04_gr_ofuncs }, /* blit */
446         { 0x0093, &nv04_gr_ofuncs }, /* surf3d */
447         { 0x0094, &nv04_gr_ofuncs }, /* ttri */
448         { 0x0095, &nv04_gr_ofuncs }, /* mtri */
449         { 0x0056, &nv04_gr_ofuncs }, /* celcius */
450         {},
451 };
452
453 static struct nvkm_oclass
454 nv15_gr_sclass[] = {
455         { 0x0012, &nv04_gr_ofuncs }, /* beta1 */
456         { 0x0019, &nv04_gr_ofuncs }, /* clip */
457         { 0x0030, &nv04_gr_ofuncs }, /* null */
458         { 0x0039, &nv04_gr_ofuncs }, /* m2mf */
459         { 0x0043, &nv04_gr_ofuncs }, /* rop */
460         { 0x0044, &nv04_gr_ofuncs }, /* pattern */
461         { 0x004a, &nv04_gr_ofuncs }, /* gdi */
462         { 0x0052, &nv04_gr_ofuncs }, /* swzsurf */
463         { 0x005f, &nv04_gr_ofuncs }, /* blit */
464         { 0x0062, &nv04_gr_ofuncs }, /* surf2d */
465         { 0x0072, &nv04_gr_ofuncs }, /* beta4 */
466         { 0x0089, &nv04_gr_ofuncs }, /* sifm */
467         { 0x008a, &nv04_gr_ofuncs }, /* ifc */
468         { 0x009f, &nv04_gr_ofuncs }, /* blit */
469         { 0x0093, &nv04_gr_ofuncs }, /* surf3d */
470         { 0x0094, &nv04_gr_ofuncs }, /* ttri */
471         { 0x0095, &nv04_gr_ofuncs }, /* mtri */
472         { 0x0096, &nv04_gr_ofuncs }, /* celcius */
473         {},
474 };
475
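/*
 * Software handler for the celsius LMA_WINDOW methods (0x1638..0x1644).  The
 * four parameters are accumulated in chan->lma_window[]; once the last one
 * (0x1644) arrives, PGRAPH is idled and the window is written to pipe
 * address 0x6790, with XFMODE and the surrounding pipe ranges saved and
 * restored around the update.
 */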
476 static void
477 nv17_gr_mthd_lma_window(struct nv10_gr_chan *chan, u32 mthd, u32 data)
478 {
479         struct nvkm_device *device = chan->base.engine->subdev.device;
480         struct nvkm_gr *gr = nvkm_gr(chan);
481         struct pipe_state *pipe = &chan->pipe_state;
482         u32 pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
483         u32 xfmode0, xfmode1;
484         int i;
485
486         chan->lma_window[(mthd - 0x1638) / 4] = data;
487
488         if (mthd != 0x1644)
489                 return;
490
491         nv04_gr_idle(gr);
492
493         PIPE_SAVE(device, pipe_0x0040, 0x0040);
494         PIPE_SAVE(device, pipe->pipe_0x0200, 0x0200);
495
496         PIPE_RESTORE(device, chan->lma_window, 0x6790);
497
498         nv04_gr_idle(gr);
499
500         xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0);
501         xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1);
502
503         PIPE_SAVE(device, pipe->pipe_0x4400, 0x4400);
504         PIPE_SAVE(device, pipe_0x64c0, 0x64c0);
505         PIPE_SAVE(device, pipe_0x6ab0, 0x6ab0);
506         PIPE_SAVE(device, pipe_0x6a80, 0x6a80);
507
508         nv04_gr_idle(gr);
509
510         nvkm_wr32(device, NV10_PGRAPH_XFMODE0, 0x10000000);
511         nvkm_wr32(device, NV10_PGRAPH_XFMODE1, 0x00000000);
512         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
513         for (i = 0; i < 4; i++)
514                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
515         for (i = 0; i < 4; i++)
516                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);
517
518         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
519         for (i = 0; i < 3; i++)
520                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
521
522         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
523         for (i = 0; i < 3; i++)
524                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);
525
526         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
527         nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000008);
528
529         PIPE_RESTORE(device, pipe->pipe_0x0200, 0x0200);
530
531         nv04_gr_idle(gr);
532
533         PIPE_RESTORE(device, pipe_0x0040, 0x0040);
534
535         nvkm_wr32(device, NV10_PGRAPH_XFMODE0, xfmode0);
536         nvkm_wr32(device, NV10_PGRAPH_XFMODE1, xfmode1);
537
538         PIPE_RESTORE(device, pipe_0x64c0, 0x64c0);
539         PIPE_RESTORE(device, pipe_0x6ab0, 0x6ab0);
540         PIPE_RESTORE(device, pipe_0x6a80, 0x6a80);
541         PIPE_RESTORE(device, pipe->pipe_0x4400, 0x4400);
542
543         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
544         nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);
545
546         nv04_gr_idle(gr);
547 }
548
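/*
 * Software handler for the celsius LMA_ENABLE method (0x1658): waits for
 * PGRAPH to idle, then sets the enable bits in NV10_PGRAPH_DEBUG_4 and
 * 0x4006b0.
 */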
549 static void
550 nv17_gr_mthd_lma_enable(struct nv10_gr_chan *chan, u32 mthd, u32 data)
551 {
552         struct nvkm_device *device = chan->base.engine->subdev.device;
553         struct nvkm_gr *gr = nvkm_gr(chan);
554
555         nv04_gr_idle(gr);
556
557         nvkm_mask(device, NV10_PGRAPH_DEBUG_4, 0x00000100, 0x00000100);
558         nvkm_mask(device, 0x4006b0, 0x08000000, 0x08000000);
559 }
560
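/*
 * Dispatch software methods of the NV17 celsius class (0x0099).  Returns
 * false for anything not handled here, which leaves the ILLEGAL_MTHD error
 * visible to the interrupt handler.
 */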
561 static bool
562 nv17_gr_mthd_celcius(struct nv10_gr_chan *chan, u32 mthd, u32 data)
563 {
564         void (*func)(struct nv10_gr_chan *, u32, u32);
565         switch (mthd) {
566         case 0x1638 ... 0x1644:
567                      func = nv17_gr_mthd_lma_window; break;
568         case 0x1658: func = nv17_gr_mthd_lma_enable; break;
569         default:
570                 return false;
571         }
572         func(chan, mthd, data);
573         return true;
574 }
575
576 static bool
577 nv10_gr_mthd(struct nv10_gr_chan *chan, u8 class, u32 mthd, u32 data)
578 {
579         bool (*func)(struct nv10_gr_chan *, u32, u32);
580         switch (class) {
581         case 0x99: func = nv17_gr_mthd_celcius; break;
582         default:
583                 return false;
584         }
585         return func(chan, mthd, data);
586 }
587
588 static struct nvkm_oclass
589 nv17_gr_sclass[] = {
590         { 0x0012, &nv04_gr_ofuncs }, /* beta1 */
591         { 0x0019, &nv04_gr_ofuncs }, /* clip */
592         { 0x0030, &nv04_gr_ofuncs }, /* null */
593         { 0x0039, &nv04_gr_ofuncs }, /* m2mf */
594         { 0x0043, &nv04_gr_ofuncs }, /* rop */
595         { 0x0044, &nv04_gr_ofuncs }, /* pattern */
596         { 0x004a, &nv04_gr_ofuncs }, /* gdi */
597         { 0x0052, &nv04_gr_ofuncs }, /* swzsurf */
598         { 0x005f, &nv04_gr_ofuncs }, /* blit */
599         { 0x0062, &nv04_gr_ofuncs }, /* surf2d */
600         { 0x0072, &nv04_gr_ofuncs }, /* beta4 */
601         { 0x0089, &nv04_gr_ofuncs }, /* sifm */
602         { 0x008a, &nv04_gr_ofuncs }, /* ifc */
603         { 0x009f, &nv04_gr_ofuncs }, /* blit */
604         { 0x0093, &nv04_gr_ofuncs }, /* surf3d */
605         { 0x0094, &nv04_gr_ofuncs }, /* ttri */
606         { 0x0095, &nv04_gr_ofuncs }, /* mtri */
607         { 0x0099, &nv04_gr_ofuncs }, /* celcius */
608         {},
609 };
610
611 /*******************************************************************************
612  * PGRAPH context
613  ******************************************************************************/
614
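/*
 * Return the channel whose context is currently resident in PGRAPH: bit 16
 * of 0x400144 indicates a valid context, and bits 31:24 of 0x400148 give the
 * channel id used to index gr->chan[].
 */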
615 static struct nv10_gr_chan *
616 nv10_gr_channel(struct nv10_gr *gr)
617 {
618         struct nvkm_device *device = gr->base.engine.subdev.device;
619         struct nv10_gr_chan *chan = NULL;
620         if (nvkm_rd32(device, 0x400144) & 0x00010000) {
621                 int chid = nvkm_rd32(device, 0x400148) >> 24;
622                 if (chid < ARRAY_SIZE(gr->chan))
623                         chan = gr->chan[chid];
624         }
625         return chan;
626 }
627
628 static void
629 nv10_gr_save_pipe(struct nv10_gr_chan *chan)
630 {
631         struct nv10_gr *gr = nv10_gr(chan);
632         struct pipe_state *pipe = &chan->pipe_state;
633         struct nvkm_device *device = gr->base.engine.subdev.device;
634
635         PIPE_SAVE(gr, pipe->pipe_0x4400, 0x4400);
636         PIPE_SAVE(gr, pipe->pipe_0x0200, 0x0200);
637         PIPE_SAVE(gr, pipe->pipe_0x6400, 0x6400);
638         PIPE_SAVE(gr, pipe->pipe_0x6800, 0x6800);
639         PIPE_SAVE(gr, pipe->pipe_0x6c00, 0x6c00);
640         PIPE_SAVE(gr, pipe->pipe_0x7000, 0x7000);
641         PIPE_SAVE(gr, pipe->pipe_0x7400, 0x7400);
642         PIPE_SAVE(gr, pipe->pipe_0x7800, 0x7800);
643         PIPE_SAVE(gr, pipe->pipe_0x0040, 0x0040);
644         PIPE_SAVE(gr, pipe->pipe_0x0000, 0x0000);
645 }
646
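/*
 * Reload the pipe state saved by nv10_gr_save_pipe().  XFMODE is parked in a
 * known state and a handful of pipe locations (0x64c0, 0x6ab0, 0x6a80,
 * 0x0040) are primed with constants before the 0x0200 block is restored;
 * only then are the remaining ranges and the original XFMODE written back.
 */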
647 static void
648 nv10_gr_load_pipe(struct nv10_gr_chan *chan)
649 {
650         struct nv10_gr *gr = nv10_gr(chan);
651         struct pipe_state *pipe = &chan->pipe_state;
652         struct nvkm_device *device = gr->base.engine.subdev.device;
653         u32 xfmode0, xfmode1;
654         int i;
655
656         nv04_gr_idle(gr);
657         /* XXX check haiku comments */
658         xfmode0 = nvkm_rd32(device, NV10_PGRAPH_XFMODE0);
659         xfmode1 = nvkm_rd32(device, NV10_PGRAPH_XFMODE1);
660         nvkm_wr32(device, NV10_PGRAPH_XFMODE0, 0x10000000);
661         nvkm_wr32(device, NV10_PGRAPH_XFMODE1, 0x00000000);
662         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
663         for (i = 0; i < 4; i++)
664                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
665         for (i = 0; i < 4; i++)
666                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);
667
668         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
669         for (i = 0; i < 3; i++)
670                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
671
672         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
673         for (i = 0; i < 3; i++)
674                 nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000000);
675
676         nvkm_wr32(device, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
677         nvkm_wr32(device, NV10_PGRAPH_PIPE_DATA, 0x00000008);
678
679
680         PIPE_RESTORE(gr, pipe->pipe_0x0200, 0x0200);
681         nv04_gr_idle(gr);
682
683         /* restore XFMODE */
684         nvkm_wr32(device, NV10_PGRAPH_XFMODE0, xfmode0);
685         nvkm_wr32(device, NV10_PGRAPH_XFMODE1, xfmode1);
686         PIPE_RESTORE(gr, pipe->pipe_0x6400, 0x6400);
687         PIPE_RESTORE(gr, pipe->pipe_0x6800, 0x6800);
688         PIPE_RESTORE(gr, pipe->pipe_0x6c00, 0x6c00);
689         PIPE_RESTORE(gr, pipe->pipe_0x7000, 0x7000);
690         PIPE_RESTORE(gr, pipe->pipe_0x7400, 0x7400);
691         PIPE_RESTORE(gr, pipe->pipe_0x7800, 0x7800);
692         PIPE_RESTORE(gr, pipe->pipe_0x4400, 0x4400);
693         PIPE_RESTORE(gr, pipe->pipe_0x0000, 0x0000);
694         PIPE_RESTORE(gr, pipe->pipe_0x0040, 0x0040);
695         nv04_gr_idle(gr);
696 }
697
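/*
 * Fill chan->pipe_state with default values so a brand new channel loads a
 * sane 3D pipe.  The PIPE_INIT and NV_WRITE_PIPE_INIT macros simply walk
 * each destination array and verify that every block was filled completely.
 */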
698 static void
699 nv10_gr_create_pipe(struct nv10_gr_chan *chan)
700 {
701         struct nv10_gr *gr = nv10_gr(chan);
702         struct nvkm_subdev *subdev = &gr->base.engine.subdev;
703         struct pipe_state *pipe_state = &chan->pipe_state;
704         u32 *pipe_state_addr;
705         int i;
706 #define PIPE_INIT(addr) \
707         do { \
708                 pipe_state_addr = pipe_state->pipe_##addr; \
709         } while (0)
710 #define PIPE_INIT_END(addr) \
711         do { \
712                 u32 *__end_addr = pipe_state->pipe_##addr + \
713                                 ARRAY_SIZE(pipe_state->pipe_##addr); \
714                 if (pipe_state_addr != __end_addr) \
715                         nvkm_error(subdev, "incomplete pipe init for 0x%x :  %p/%p\n", \
716                                 addr, pipe_state_addr, __end_addr); \
717         } while (0)
718 #define NV_WRITE_PIPE_INIT(value) *(pipe_state_addr++) = value
719
720         PIPE_INIT(0x0200);
721         for (i = 0; i < 48; i++)
722                 NV_WRITE_PIPE_INIT(0x00000000);
723         PIPE_INIT_END(0x0200);
724
725         PIPE_INIT(0x6400);
726         for (i = 0; i < 211; i++)
727                 NV_WRITE_PIPE_INIT(0x00000000);
728         NV_WRITE_PIPE_INIT(0x3f800000);
729         NV_WRITE_PIPE_INIT(0x40000000);
730         NV_WRITE_PIPE_INIT(0x40000000);
731         NV_WRITE_PIPE_INIT(0x40000000);
732         NV_WRITE_PIPE_INIT(0x40000000);
733         NV_WRITE_PIPE_INIT(0x00000000);
734         NV_WRITE_PIPE_INIT(0x00000000);
735         NV_WRITE_PIPE_INIT(0x3f800000);
736         NV_WRITE_PIPE_INIT(0x00000000);
737         NV_WRITE_PIPE_INIT(0x3f000000);
738         NV_WRITE_PIPE_INIT(0x3f000000);
739         NV_WRITE_PIPE_INIT(0x00000000);
740         NV_WRITE_PIPE_INIT(0x00000000);
741         NV_WRITE_PIPE_INIT(0x00000000);
742         NV_WRITE_PIPE_INIT(0x00000000);
743         NV_WRITE_PIPE_INIT(0x3f800000);
744         NV_WRITE_PIPE_INIT(0x00000000);
745         NV_WRITE_PIPE_INIT(0x00000000);
746         NV_WRITE_PIPE_INIT(0x00000000);
747         NV_WRITE_PIPE_INIT(0x00000000);
748         NV_WRITE_PIPE_INIT(0x00000000);
749         NV_WRITE_PIPE_INIT(0x3f800000);
750         NV_WRITE_PIPE_INIT(0x3f800000);
751         NV_WRITE_PIPE_INIT(0x3f800000);
752         NV_WRITE_PIPE_INIT(0x3f800000);
753         PIPE_INIT_END(0x6400);
754
755         PIPE_INIT(0x6800);
756         for (i = 0; i < 162; i++)
757                 NV_WRITE_PIPE_INIT(0x00000000);
758         NV_WRITE_PIPE_INIT(0x3f800000);
759         for (i = 0; i < 25; i++)
760                 NV_WRITE_PIPE_INIT(0x00000000);
761         PIPE_INIT_END(0x6800);
762
763         PIPE_INIT(0x6c00);
764         NV_WRITE_PIPE_INIT(0x00000000);
765         NV_WRITE_PIPE_INIT(0x00000000);
766         NV_WRITE_PIPE_INIT(0x00000000);
767         NV_WRITE_PIPE_INIT(0x00000000);
768         NV_WRITE_PIPE_INIT(0xbf800000);
769         NV_WRITE_PIPE_INIT(0x00000000);
770         NV_WRITE_PIPE_INIT(0x00000000);
771         NV_WRITE_PIPE_INIT(0x00000000);
772         NV_WRITE_PIPE_INIT(0x00000000);
773         NV_WRITE_PIPE_INIT(0x00000000);
774         NV_WRITE_PIPE_INIT(0x00000000);
775         NV_WRITE_PIPE_INIT(0x00000000);
776         PIPE_INIT_END(0x6c00);
777
778         PIPE_INIT(0x7000);
779         NV_WRITE_PIPE_INIT(0x00000000);
780         NV_WRITE_PIPE_INIT(0x00000000);
781         NV_WRITE_PIPE_INIT(0x00000000);
782         NV_WRITE_PIPE_INIT(0x00000000);
783         NV_WRITE_PIPE_INIT(0x00000000);
784         NV_WRITE_PIPE_INIT(0x00000000);
785         NV_WRITE_PIPE_INIT(0x00000000);
786         NV_WRITE_PIPE_INIT(0x00000000);
787         NV_WRITE_PIPE_INIT(0x00000000);
788         NV_WRITE_PIPE_INIT(0x00000000);
789         NV_WRITE_PIPE_INIT(0x00000000);
790         NV_WRITE_PIPE_INIT(0x00000000);
791         NV_WRITE_PIPE_INIT(0x7149f2ca);
792         NV_WRITE_PIPE_INIT(0x00000000);
793         NV_WRITE_PIPE_INIT(0x00000000);
794         NV_WRITE_PIPE_INIT(0x00000000);
795         NV_WRITE_PIPE_INIT(0x7149f2ca);
796         NV_WRITE_PIPE_INIT(0x00000000);
797         NV_WRITE_PIPE_INIT(0x00000000);
798         NV_WRITE_PIPE_INIT(0x00000000);
799         NV_WRITE_PIPE_INIT(0x7149f2ca);
800         NV_WRITE_PIPE_INIT(0x00000000);
801         NV_WRITE_PIPE_INIT(0x00000000);
802         NV_WRITE_PIPE_INIT(0x00000000);
803         NV_WRITE_PIPE_INIT(0x7149f2ca);
804         NV_WRITE_PIPE_INIT(0x00000000);
805         NV_WRITE_PIPE_INIT(0x00000000);
806         NV_WRITE_PIPE_INIT(0x00000000);
807         NV_WRITE_PIPE_INIT(0x7149f2ca);
808         NV_WRITE_PIPE_INIT(0x00000000);
809         NV_WRITE_PIPE_INIT(0x00000000);
810         NV_WRITE_PIPE_INIT(0x00000000);
811         NV_WRITE_PIPE_INIT(0x7149f2ca);
812         NV_WRITE_PIPE_INIT(0x00000000);
813         NV_WRITE_PIPE_INIT(0x00000000);
814         NV_WRITE_PIPE_INIT(0x00000000);
815         NV_WRITE_PIPE_INIT(0x7149f2ca);
816         NV_WRITE_PIPE_INIT(0x00000000);
817         NV_WRITE_PIPE_INIT(0x00000000);
818         NV_WRITE_PIPE_INIT(0x00000000);
819         NV_WRITE_PIPE_INIT(0x7149f2ca);
820         for (i = 0; i < 35; i++)
821                 NV_WRITE_PIPE_INIT(0x00000000);
822         PIPE_INIT_END(0x7000);
823
824         PIPE_INIT(0x7400);
825         for (i = 0; i < 48; i++)
826                 NV_WRITE_PIPE_INIT(0x00000000);
827         PIPE_INIT_END(0x7400);
828
829         PIPE_INIT(0x7800);
830         for (i = 0; i < 48; i++)
831                 NV_WRITE_PIPE_INIT(0x00000000);
832         PIPE_INIT_END(0x7800);
833
834         PIPE_INIT(0x4400);
835         for (i = 0; i < 32; i++)
836                 NV_WRITE_PIPE_INIT(0x00000000);
837         PIPE_INIT_END(0x4400);
838
839         PIPE_INIT(0x0000);
840         for (i = 0; i < 16; i++)
841                 NV_WRITE_PIPE_INIT(0x00000000);
842         PIPE_INIT_END(0x0000);
843
844         PIPE_INIT(0x0040);
845         for (i = 0; i < 4; i++)
846                 NV_WRITE_PIPE_INIT(0x00000000);
847         PIPE_INIT_END(0x0040);
848
849 #undef PIPE_INIT
850 #undef PIPE_INIT_END
851 #undef NV_WRITE_PIPE_INIT
852 }
853
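/*
 * Map an MMIO register address to its index in nv10_gr_ctx_regs[] (and thus
 * its slot in chan->nv10[]).  Returns -1 and logs an error if the register
 * is not part of the saved context; nv17_gr_ctx_regs_find_offset() below
 * does the same for the NV17-only list.
 */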
854 static int
855 nv10_gr_ctx_regs_find_offset(struct nv10_gr *gr, int reg)
856 {
857         struct nvkm_subdev *subdev = &gr->base.engine.subdev;
858         int i;
859         for (i = 0; i < ARRAY_SIZE(nv10_gr_ctx_regs); i++) {
860                 if (nv10_gr_ctx_regs[i] == reg)
861                         return i;
862         }
863         nvkm_error(subdev, "unknown offset nv10_ctx_regs %d\n", reg);
864         return -1;
865 }
866
867 static int
868 nv17_gr_ctx_regs_find_offset(struct nv10_gr *gr, int reg)
869 {
870         struct nvkm_subdev *subdev = &gr->base.engine.subdev;
871         int i;
872         for (i = 0; i < ARRAY_SIZE(nv17_gr_ctx_regs); i++) {
873                 if (nv17_gr_ctx_regs[i] == reg)
874                         return i;
875         }
876         nvkm_error(subdev, "unknown offset nv17_ctx_regs %d\n", reg);
877         return -1;
878 }
879
880 static void
881 nv10_gr_load_dma_vtxbuf(struct nv10_gr_chan *chan, int chid, u32 inst)
882 {
883         struct nv10_gr *gr = nv10_gr(chan);
884         struct nvkm_device *device = gr->base.engine.subdev.device;
885         u32 st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
886         u32 ctx_user, ctx_switch[5];
887         int i, subchan = -1;
888
889         /* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
890          * that cannot be restored via MMIO. Do it through the FIFO
891          * instead.
892          */
893
894         /* Look for a celsius object */
895         for (i = 0; i < 8; i++) {
896                 int class = nvkm_rd32(device, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;
897
898                 if (class == 0x56 || class == 0x96 || class == 0x99) {
899                         subchan = i;
900                         break;
901                 }
902         }
903
904         if (subchan < 0 || !inst)
905                 return;
906
907         /* Save the current ctx object */
908         ctx_user = nvkm_rd32(device, NV10_PGRAPH_CTX_USER);
909         for (i = 0; i < 5; i++)
910                 ctx_switch[i] = nvkm_rd32(device, NV10_PGRAPH_CTX_SWITCH(i));
911
912         /* Save the FIFO state */
913         st2 = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_ST2);
914         st2_dl = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_ST2_DL);
915         st2_dh = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_ST2_DH);
916         fifo_ptr = nvkm_rd32(device, NV10_PGRAPH_FFINTFC_FIFO_PTR);
917
918         for (i = 0; i < ARRAY_SIZE(fifo); i++)
919                 fifo[i] = nvkm_rd32(device, 0x4007a0 + 4 * i);
920
921         /* Switch to the celsius subchannel */
922         for (i = 0; i < 5; i++)
923                 nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(i),
924                         nvkm_rd32(device, NV10_PGRAPH_CTX_CACHE(subchan, i)));
925         nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);
926
927         /* Inject NV10TCL_DMA_VTXBUF */
928         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
929         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2,
930                 0x2c000000 | chid << 20 | subchan << 16 | 0x18c);
931         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
932         nvkm_mask(device, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
933         nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
934         nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
935
936         /* Restore the FIFO state */
937         for (i = 0; i < ARRAY_SIZE(fifo); i++)
938                 nvkm_wr32(device, 0x4007a0 + 4 * i, fifo[i]);
939
940         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
941         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2, st2);
942         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
943         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);
944
945         /* Restore the current ctx object */
946         for (i = 0; i < 5; i++)
947                 nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
948         nvkm_wr32(device, NV10_PGRAPH_CTX_USER, ctx_user);
949 }
950
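/*
 * Write the channel's saved register copies (nv10[], plus nv17[] on
 * NV11/NV17+) back into PGRAPH, reload the pipe and the DMA_VTXBUF object,
 * then point CTX_USER at the channel.  nv10_gr_unload_context() below does
 * the inverse and parks CTX_USER on the invalid channel id 0x1f.
 */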
951 static int
952 nv10_gr_load_context(struct nv10_gr_chan *chan, int chid)
953 {
954         struct nv10_gr *gr = nv10_gr(chan);
955         struct nvkm_device *device = gr->base.engine.subdev.device;
956         u32 inst;
957         int i;
958
959         for (i = 0; i < ARRAY_SIZE(nv10_gr_ctx_regs); i++)
960                 nvkm_wr32(device, nv10_gr_ctx_regs[i], chan->nv10[i]);
961
962         if (nv_device(gr)->card_type >= NV_11 &&
963             nv_device(gr)->chipset >= 0x17) {
964                 for (i = 0; i < ARRAY_SIZE(nv17_gr_ctx_regs); i++)
965                         nvkm_wr32(device, nv17_gr_ctx_regs[i], chan->nv17[i]);
966         }
967
968         nv10_gr_load_pipe(chan);
969
970         inst = nvkm_rd32(device, NV10_PGRAPH_GLOBALSTATE1) & 0xffff;
971         nv10_gr_load_dma_vtxbuf(chan, chid, inst);
972
973         nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
974         nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, chid << 24);
975         nvkm_mask(device, NV10_PGRAPH_FFINTFC_ST2, 0x30000000, 0x00000000);
976         return 0;
977 }
978
979 static int
980 nv10_gr_unload_context(struct nv10_gr_chan *chan)
981 {
982         struct nv10_gr *gr = nv10_gr(chan);
983         struct nvkm_device *device = gr->base.engine.subdev.device;
984         int i;
985
986         for (i = 0; i < ARRAY_SIZE(nv10_gr_ctx_regs); i++)
987                 chan->nv10[i] = nvkm_rd32(device, nv10_gr_ctx_regs[i]);
988
989         if (nv_device(gr)->card_type >= NV_11 &&
990             nv_device(gr)->chipset >= 0x17) {
991                 for (i = 0; i < ARRAY_SIZE(nv17_gr_ctx_regs); i++)
992                         chan->nv17[i] = nvkm_rd32(device, nv17_gr_ctx_regs[i]);
993         }
994
995         nv10_gr_save_pipe(chan);
996
997         nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
998         nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, 0x1f000000);
999         return 0;
1000 }
1001
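/*
 * Called from the interrupt handler on a CONTEXT_SWITCH interrupt: save the
 * currently loaded channel, if any, then load the channel that triggered the
 * switch (taken from bits 24:20 of NV04_PGRAPH_TRAPPED_ADDR).
 */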
1002 static void
1003 nv10_gr_context_switch(struct nv10_gr *gr)
1004 {
1005         struct nvkm_device *device = gr->base.engine.subdev.device;
1006         struct nv10_gr_chan *prev = NULL;
1007         struct nv10_gr_chan *next = NULL;
1008         int chid;
1009
1010         nv04_gr_idle(gr);
1011
1012         /* If previous context is valid, we need to save it */
1013         prev = nv10_gr_channel(gr);
1014         if (prev)
1015                 nv10_gr_unload_context(prev);
1016
1017         /* load context for next channel */
1018         chid = (nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
1019         next = gr->chan[chid];
1020         if (next)
1021                 nv10_gr_load_context(next, chid);
1022 }
1023
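/*
 * Seed an initial value into the software context arrays.  Note that a
 * lookup result of 0 is skipped by the '> 0' test just like the -1 "not
 * found" case; this is harmless here because index 0
 * (NV10_PGRAPH_CTX_SWITCH(0)) is never written through these macros.
 */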
1024 #define NV_WRITE_CTX(reg, val) do { \
1025         int offset = nv10_gr_ctx_regs_find_offset(gr, reg); \
1026         if (offset > 0) \
1027                 chan->nv10[offset] = val; \
1028         } while (0)
1029
1030 #define NV17_WRITE_CTX(reg, val) do { \
1031         int offset = nv17_gr_ctx_regs_find_offset(gr, reg); \
1032         if (offset > 0) \
1033                 chan->nv17[offset] = val; \
1034         } while (0)
1035
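/*
 * Allocate and register the per-channel context object under the FIFO
 * channel id.  If a context already exists for that channel, the existing
 * object is reused (its refcount is bumped) and the new allocation is
 * destroyed.
 */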
1036 static int
1037 nv10_gr_context_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
1038                      struct nvkm_oclass *oclass, void *data, u32 size,
1039                      struct nvkm_object **pobject)
1040 {
1041         struct nvkm_fifo_chan *fifo = (void *)parent;
1042         struct nv10_gr *gr = (void *)engine;
1043         struct nv10_gr_chan *chan;
1044         struct nvkm_device *device = gr->base.engine.subdev.device;
1045         unsigned long flags;
1046         int ret;
1047
1048         ret = nvkm_object_create(parent, engine, oclass, 0, &chan);
1049         *pobject = nv_object(chan);
1050         if (ret)
1051                 return ret;
1052
1053         spin_lock_irqsave(&gr->lock, flags);
1054         if (gr->chan[fifo->chid]) {
1055                 *pobject = nv_object(gr->chan[fifo->chid]);
1056                 atomic_inc(&(*pobject)->refcount);
1057                 spin_unlock_irqrestore(&gr->lock, flags);
1058                 nvkm_object_destroy(&chan->base);
1059                 return 1;
1060         }
1061
1062         NV_WRITE_CTX(0x00400e88, 0x08000000);
1063         NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
1064         NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
1065         NV_WRITE_CTX(0x00400e10, 0x00001000);
1066         NV_WRITE_CTX(0x00400e14, 0x00001000);
1067         NV_WRITE_CTX(0x00400e30, 0x00080008);
1068         NV_WRITE_CTX(0x00400e34, 0x00080008);
1069         if (nv_device(gr)->card_type >= NV_11 &&
1070             nv_device(gr)->chipset >= 0x17) {
1071                 /* is it really needed ??? */
1072                 NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
1073                                         nvkm_rd32(device, NV10_PGRAPH_DEBUG_4));
1074                 NV17_WRITE_CTX(0x004006b0, nvkm_rd32(device, 0x004006b0));
1075                 NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
1076                 NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
1077                 NV17_WRITE_CTX(0x00400ec0, 0x00000080);
1078                 NV17_WRITE_CTX(0x00400ed0, 0x00000080);
1079         }
1080         NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->chid << 24);
1081
1082         nv10_gr_create_pipe(chan);
1083
1084         gr->chan[fifo->chid] = chan;
1085         chan->chid = fifo->chid;
1086         spin_unlock_irqrestore(&gr->lock, flags);
1087         return 0;
1088 }
1089
1090 static void
1091 nv10_gr_context_dtor(struct nvkm_object *object)
1092 {
1093         struct nv10_gr *gr = (void *)object->engine;
1094         struct nv10_gr_chan *chan = (void *)object;
1095         unsigned long flags;
1096
1097         spin_lock_irqsave(&gr->lock, flags);
1098         gr->chan[chan->chid] = NULL;
1099         spin_unlock_irqrestore(&gr->lock, flags);
1100
1101         nvkm_object_destroy(&chan->base);
1102 }
1103
1104 static int
1105 nv10_gr_context_fini(struct nvkm_object *object, bool suspend)
1106 {
1107         struct nv10_gr *gr = (void *)object->engine;
1108         struct nv10_gr_chan *chan = (void *)object;
1109         struct nvkm_device *device = gr->base.engine.subdev.device;
1110         unsigned long flags;
1111
1112         spin_lock_irqsave(&gr->lock, flags);
1113         nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
1114         if (nv10_gr_channel(gr) == chan)
1115                 nv10_gr_unload_context(chan);
1116         nvkm_mask(device, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
1117         spin_unlock_irqrestore(&gr->lock, flags);
1118
1119         return _nvkm_object_fini(&chan->base, suspend);
1120 }
1121
1122 static struct nvkm_oclass
1123 nv10_gr_cclass = {
1124         .handle = NV_ENGCTX(GR, 0x10),
1125         .ofuncs = &(struct nvkm_ofuncs) {
1126                 .ctor = nv10_gr_context_ctor,
1127                 .dtor = nv10_gr_context_dtor,
1128                 .init = _nvkm_object_init,
1129                 .fini = nv10_gr_context_fini,
1130         },
1131 };
1132
1133 /*******************************************************************************
1134  * PGRAPH engine/subdev functions
1135  ******************************************************************************/
1136
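/*
 * Program one tiling region into PGRAPH.  PFIFO is paused and PGRAPH idled
 * so the limit/pitch/address registers can be updated safely.
 */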
1137 static void
1138 nv10_gr_tile_prog(struct nvkm_engine *engine, int i)
1139 {
1140         struct nv10_gr *gr = (void *)engine;
1141         struct nvkm_device *device = gr->base.engine.subdev.device;
1142         struct nvkm_fifo *fifo = device->fifo;
1143         struct nvkm_fb_tile *tile = &device->fb->tile.region[i];
1144         unsigned long flags;
1145
1146         fifo->pause(fifo, &flags);
1147         nv04_gr_idle(gr);
1148
1149         nvkm_wr32(device, NV10_PGRAPH_TLIMIT(i), tile->limit);
1150         nvkm_wr32(device, NV10_PGRAPH_TSIZE(i), tile->pitch);
1151         nvkm_wr32(device, NV10_PGRAPH_TILE(i), tile->addr);
1152
1153         fifo->start(fifo, &flags);
1154 }
1155
1156 const struct nvkm_bitfield nv10_gr_intr_name[] = {
1157         { NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
1158         { NV_PGRAPH_INTR_ERROR,  "ERROR"  },
1159         {}
1160 };
1161
1162 const struct nvkm_bitfield nv10_gr_nstatus[] = {
1163         { NV10_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
1164         { NV10_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
1165         { NV10_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
1166         { NV10_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
1167         {}
1168 };
1169
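/*
 * Interrupt handler: decode the trapped channel/subchannel/method/data, give
 * the software-method dispatcher a chance to silence ILLEGAL_MTHD errors,
 * perform a context switch when requested, and log whatever remains.
 */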
1170 static void
1171 nv10_gr_intr(struct nvkm_subdev *subdev)
1172 {
1173         struct nv10_gr *gr = (void *)subdev;
1174         struct nv10_gr_chan *chan = NULL;
1175         struct nvkm_device *device = gr->base.engine.subdev.device;
1176         u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);
1177         u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);
1178         u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);
1179         u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);
1180         u32 chid = (addr & 0x01f00000) >> 20;
1181         u32 subc = (addr & 0x00070000) >> 16;
1182         u32 mthd = (addr & 0x00001ffc);
1183         u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);
1184         u32 class = nvkm_rd32(device, 0x400160 + subc * 4) & 0xfff;
1185         u32 show = stat;
1186         char msg[128], src[128], sta[128];
1187         unsigned long flags;
1188
1189         spin_lock_irqsave(&gr->lock, flags);
1190         chan = gr->chan[chid];
1191
1192         if (stat & NV_PGRAPH_INTR_ERROR) {
1193                 if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
1194                         if (!nv10_gr_mthd(chan, class, mthd, data))
1195                                 show &= ~NV_PGRAPH_INTR_ERROR;
1196                 }
1197         }
1198
1199         if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
1200                 nvkm_wr32(device, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
1201                 stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1202                 show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
1203                 nv10_gr_context_switch(gr);
1204         }
1205
1206         nvkm_wr32(device, NV03_PGRAPH_INTR, stat);
1207         nvkm_wr32(device, NV04_PGRAPH_FIFO, 0x00000001);
1208
1209         if (show) {
1210                 nvkm_snprintbf(msg, sizeof(msg), nv10_gr_intr_name, show);
1211                 nvkm_snprintbf(src, sizeof(src), nv04_gr_nsource, nsource);
1212                 nvkm_snprintbf(sta, sizeof(sta), nv10_gr_nstatus, nstatus);
1213                 nvkm_error(subdev, "intr %08x [%s] nsource %08x [%s] "
1214                                    "nstatus %08x [%s] ch %d [%s] subc %d "
1215                                    "class %04x mthd %04x data %08x\n",
1216                            show, msg, nsource, src, nstatus, sta, chid,
1217                            nvkm_client_name(chan), subc, class, mthd, data);
1218         }
1219
1220         spin_unlock_irqrestore(&gr->lock, flags);
1221 }
1222
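/*
 * Engine constructor: the object class list is chosen by chipset, the only
 * difference being which celsius 3D class is exposed (0x0056 on NV10,
 * 0x0096 on NV11/NV15, 0x0099 on NV17 and up).
 */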
1223 static int
1224 nv10_gr_ctor(struct nvkm_object *parent, struct nvkm_object *engine,
1225              struct nvkm_oclass *oclass, void *data, u32 size,
1226              struct nvkm_object **pobject)
1227 {
1228         struct nv10_gr *gr;
1229         int ret;
1230
1231         ret = nvkm_gr_create(parent, engine, oclass, true, &gr);
1232         *pobject = nv_object(gr);
1233         if (ret)
1234                 return ret;
1235
1236         nv_subdev(gr)->unit = 0x00001000;
1237         nv_subdev(gr)->intr = nv10_gr_intr;
1238         nv_engine(gr)->cclass = &nv10_gr_cclass;
1239
1240         if (nv_device(gr)->chipset <= 0x10)
1241                 nv_engine(gr)->sclass = nv10_gr_sclass;
1242         else
1243         if (nv_device(gr)->chipset <  0x17 ||
1244             nv_device(gr)->card_type < NV_11)
1245                 nv_engine(gr)->sclass = nv15_gr_sclass;
1246         else
1247                 nv_engine(gr)->sclass = nv17_gr_sclass;
1248
1249         nv_engine(gr)->tile_prog = nv10_gr_tile_prog;
1250         spin_lock_init(&gr->lock);
1251         return 0;
1252 }
1253
1254 static void
1255 nv10_gr_dtor(struct nvkm_object *object)
1256 {
1257         struct nv10_gr *gr = (void *)object;
1258         nvkm_gr_destroy(&gr->base);
1259 }
1260
1261 static int
1262 nv10_gr_init(struct nvkm_object *object)
1263 {
1264         struct nvkm_engine *engine = nv_engine(object);
1265         struct nv10_gr *gr = (void *)engine;
1266         struct nvkm_device *device = gr->base.engine.subdev.device;
1267         struct nvkm_fb *fb = device->fb;
1268         int ret, i;
1269
1270         ret = nvkm_gr_init(&gr->base);
1271         if (ret)
1272                 return ret;
1273
1274         nvkm_wr32(device, NV03_PGRAPH_INTR   , 0xFFFFFFFF);
1275         nvkm_wr32(device, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);
1276
1277         nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
1278         nvkm_wr32(device, NV04_PGRAPH_DEBUG_0, 0x00000000);
1279         nvkm_wr32(device, NV04_PGRAPH_DEBUG_1, 0x00118700);
1280         /* nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
1281         nvkm_wr32(device, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
1282         nvkm_wr32(device, NV04_PGRAPH_DEBUG_3, 0x55DE0830 | (1 << 29) | (1 << 31));
1283
1284         if (nv_device(gr)->card_type >= NV_11 &&
1285             nv_device(gr)->chipset >= 0x17) {
1286                 nvkm_wr32(device, NV10_PGRAPH_DEBUG_4, 0x1f000000);
1287                 nvkm_wr32(device, 0x400a10, 0x03ff3fb6);
1288                 nvkm_wr32(device, 0x400838, 0x002f8684);
1289                 nvkm_wr32(device, 0x40083c, 0x00115f3f);
1290                 nvkm_wr32(device, 0x4006b0, 0x40000020);
1291         } else {
1292                 nvkm_wr32(device, NV10_PGRAPH_DEBUG_4, 0x00000000);
1293         }
1294
1295         /* Turn all the tiling regions off. */
1296         for (i = 0; i < fb->tile.regions; i++)
1297                 engine->tile_prog(engine, i);
1298
1299         nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
1300         nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
1301         nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
1302         nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
1303         nvkm_wr32(device, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
1304         nvkm_wr32(device, NV10_PGRAPH_STATE, 0xFFFFFFFF);
1305
1306         nvkm_mask(device, NV10_PGRAPH_CTX_USER, 0xff000000, 0x1f000000);
1307         nvkm_wr32(device, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
1308         nvkm_wr32(device, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);
1309         return 0;
1310 }
1311
1312 static int
1313 nv10_gr_fini(struct nvkm_object *object, bool suspend)
1314 {
1315         struct nv10_gr *gr = (void *)object;
1316         return nvkm_gr_fini(&gr->base, suspend);
1317 }
1318
1319 struct nvkm_oclass
1320 nv10_gr_oclass = {
1321         .handle = NV_ENGINE(GR, 0x10),
1322         .ofuncs = &(struct nvkm_ofuncs) {
1323                 .ctor = nv10_gr_ctor,
1324                 .dtor = nv10_gr_dtor,
1325                 .init = nv10_gr_init,
1326                 .fini = nv10_gr_fini,
1327         },
1328 };