drm/nouveau: unpin pushbuffer bo before destroying it
drivers/gpu/drm/nouveau/nv50_display.c
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#include "nouveau_drm.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nv50_display.h"

#include <core/client.h>
#include <core/gpuobj.h>
#include <core/class.h>

#include <subdev/timer.h>
#include <subdev/bar.h>
#include <subdev/fb.h>

#define EVO_DMA_NR 9

#define EVO_MASTER	(0x00)
#define EVO_FLIP(c)	(0x01 + (c))
#define EVO_OVLY(c)	(0x05 + (c))
#define EVO_OIMM(c)	(0x09 + (c))
#define EVO_CURS(c)	(0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o)		((c) * 0x0100 + (o))
#define EVO_MAST_NTFY		EVO_SYNC( 0, 0x00)
#define EVO_FLIP_SEM0(c)	EVO_SYNC((c), 0x00)
#define EVO_FLIP_SEM1(c)	EVO_SYNC((c), 0x10)

#define EVO_CORE_HANDLE      (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) | \
			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))

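/* The handles above are client-chosen "cookie" values: the low bits of each
 * channel and pushbuffer handle encode the channel class type and the head
 * index, so every object name stays unique per head.
 */
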
/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nouveau_object *user;
	u32 handle;
};

static int
nv50_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nv50_chan *chan)
{
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
	const u32 handle = EVO_CHAN_HANDLE(bclass, head);
	int ret;

	ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
				 oclass, data, size, &chan->user);
	if (ret)
		return ret;

	chan->handle = handle;
	return 0;
}

static void
nv50_chan_destroy(struct nouveau_object *core, struct nv50_chan *chan)
{
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	if (chan->handle)
		nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
}

/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nouveau_object *core, struct nv50_pioc *pioc)
{
	nv50_chan_destroy(core, &pioc->base);
}

static int
nv50_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nv50_pioc *pioc)
{
	return nv50_chan_create(core, bclass, head, data, size, &pioc->base);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;
};

static void
nv50_dmac_destroy(struct nouveau_object *core, struct nv50_dmac *dmac)
{
	if (dmac->ptr) {
		struct pci_dev *pdev = nv_device(core)->pdev;
		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}

	nv50_chan_destroy(core, &dmac->base);
}

static int
nv50_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE |
						 NV50_DMA_CONF0_PART_256,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB16,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE | 0x70 |
						 NV50_DMA_CONF0_PART_256,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE | 0x7a |
						 NV50_DMA_CONF0_PART_256,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}

static int
nvc0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB16,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}

static int
nvd0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVD0_DMA_CONF0_ENABLE |
						 NVD0_DMA_CONF0_PAGE_LP,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVD0_DMA_CONF0_ENABLE | 0xfe |
						 NVD0_DMA_CONF0_PAGE_LP,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}

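/* A DMA EVO channel needs a pushbuffer the display engine can fetch from:
 * it is allocated from PCI-coherent system memory and exposed to the core
 * through an NV_DMA_FROM_MEMORY object, followed by DMA objects for the
 * shared sync buffer, all of VRAM, and the per-generation framebuffer
 * format objects created by the helpers above.
 */
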
static int
nv50_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	u32 pushbuf = *(u32 *)data;
	int ret;

	dmac->ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE,
					 &dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
				 NV_DMA_FROM_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_PCI_US |
						 NV_DMA_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nv50_chan_create(core, bclass, head, data, size, &dmac->base);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	if (nv_device(core)->card_type < NV_C0)
		ret = nv50_dmac_create_fbdma(core, dmac->base.handle);
	else
	if (nv_device(core)->card_type < NV_D0)
		ret = nvc0_dmac_create_fbdma(core, dmac->base.handle);
	else
		ret = nvd0_dmac_create_fbdma(core, dmac->base.handle);
	return ret;
}

struct nv50_mast {
	struct nv50_dmac base;
};

struct nv50_curs {
	struct nv50_pioc base;
};

struct nv50_sync {
	struct nv50_dmac base;
	struct {
		u32 offset;
		u16 value;
	} sem;
};

struct nv50_ovly {
	struct nv50_dmac base;
};

struct nv50_oimm {
	struct nv50_pioc base;
};

struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv_mclass(nv50_chan(c)->user)

struct nv50_disp {
	struct nouveau_object *core;
	struct nv50_mast mast;

	u32 modeset;

	struct nouveau_bo *sync;
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
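/* evo_wait() reserves space for 'nr' dwords in a channel's pushbuffer and
 * returns a pointer to write methods into; when the buffer is nearly full it
 * emits a jump back to the start (0x20000000), resets the PUT offset at
 * 0x0000 and waits for the engine's read pointer at 0x0004 to wrap to zero.
 * evo_kick() then publishes the new PUT offset so the core starts fetching
 * what was just written.
 */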
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;

	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nv_wo32(dmac->base.user, 0x0000, 0x00000000);
		if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
			NV_ERROR(dmac->base.user, "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

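/* Typical usage pattern, repeated throughout this file: reserve space, emit
 * a method header (size in the high bits, method offset in the low bits)
 * followed by its data words, then kick the pushbuffer, e.g.
 *
 *	push = evo_wait(mast, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, mast);
 *	}
 */
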
static bool
evo_sync_wait(void *data)
{
	return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
}

static int
evo_sync(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(device, evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}

/******************************************************************************
 * Page flipping channel
 *****************************************************************************/
struct nouveau_bo *
nv50_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nv50_disp(dev)->sync;
}

void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;

	push = evo_wait(sync, 8);
	if (push) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, sync);
	}
}

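/* Each head owns two 16-byte semaphores in the shared sync buffer; the
 * active one is toggled with sem.offset ^= 0x10 after every flip.  When a
 * rendering channel is supplied it releases 0xf00d0000|value on the current
 * semaphore and then acquires 0x74b1e000 on the other, and the flip queued
 * below (method 0x0088) is handed the same offset/values, so rendering and
 * scanout hand buffers off to each other without CPU involvement.
 */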
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;
	int ret;

	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		if (nv_mclass(chan->object) < NVC0_CHANNEL_IND_CLASS) {
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
			OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
			OUT_RING  (chan, sync->sem.offset);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
			OUT_RING  (chan, sync->sem.offset ^ 0x10);
			OUT_RING  (chan, 0x74b1e000);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
			if (nv_mclass(chan->object) < NV84_CHANNEL_DMA_CLASS)
				OUT_RING  (chan, NvSema);
			else
				OUT_RING  (chan, chan->vram);
		} else {
			u64 offset = nvc0_fence_crtc(chan, nv_crtc->index);
			offset += sync->sem.offset;

			BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset));
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			OUT_RING  (chan, 0x1002);
			BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
			OUT_RING  (chan, 0x74b1e000);
			OUT_RING  (chan, 0x1001);
		}

		FIRE_RING (chan);
	} else {
		nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
				0xf00d0000 | sync->sem.value);
		evo_sync(crtc->dev);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->sem.offset);
	evo_data(push, 0xf00d0000 | sync->sem.value);
	evo_data(push, 0x74b1e000);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	sync->sem.offset ^= 0x10;
	sync->sem.value++;
	return 0;
}

BS
588/******************************************************************************
589 * CRTC
590 *****************************************************************************/
591static int
e225f446 592nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
438d99e3 593{
e225f446 594 struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
de691855
BS
595 struct nouveau_connector *nv_connector;
596 struct drm_connector *connector;
597 u32 *push, mode = 0x00;
438d99e3 598
488ff207 599 nv_connector = nouveau_crtc_connector_get(nv_crtc);
de691855
BS
600 connector = &nv_connector->base;
601 if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
602 if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
603 mode = DITHERING_MODE_DYNAMIC2X2;
604 } else {
605 mode = nv_connector->dithering_mode;
606 }
607
608 if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
609 if (connector->display_info.bpc >= 8)
610 mode |= DITHERING_DEPTH_8BPC;
611 } else {
612 mode |= nv_connector->dithering_depth;
438d99e3
BS
613 }
614
de8268c5 615 push = evo_wait(mast, 4);
438d99e3 616 if (push) {
e225f446 617 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
de8268c5
BS
618 evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
619 evo_data(push, mode);
620 } else
e225f446 621 if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
de8268c5
BS
622 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
623 evo_data(push, mode);
624 } else {
625 evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
626 evo_data(push, mode);
627 }
628
438d99e3
BS
629 if (update) {
630 evo_mthd(push, 0x0080, 1);
631 evo_data(push, 0x00000000);
632 }
de8268c5 633 evo_kick(push, mast);
438d99e3
BS
634 }
635
636 return 0;
637}
638
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}

static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}

static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_dma);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_dma);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}

static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		}
		evo_kick(push, mast);
	}
}

static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}

static void
nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);

	if (show)
		nv50_crtc_cursor_show(nv_crtc);
	else
		nv50_crtc_cursor_hide(nv_crtc);

	if (update) {
		u32 *push = evo_wait(mast, 2);
		if (push) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}
}

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 2);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM_LP);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

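/* Pin the incoming framebuffer into VRAM before it is scanned out, and drop
 * the pin on the outgoing one; every pin must be balanced by an unpin or the
 * buffer can never be evicted again.
 */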
static int
nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	if (old_fb) {
		nvfb = nouveau_framebuffer(old_fb);
		nouveau_bo_unpin(nvfb->nvbo);
	}

	return 0;
}

static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 *push;
	int ret;

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);
	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
	return 0;
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

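/* The gamma LUT lives in its own VRAM buffer; pre-NVD0 cores use 8-byte
 * R/G/B entries, while NVD0 and later use a 32-byte stride with the values
 * biased by 0x6000.
 */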
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}

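/* The cursor image must be 64x64; it is copied one 32-bit texel at a time
 * from the userspace-supplied GEM object into the small VRAM buffer that was
 * pinned when the CRTC was created.
 */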
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}

static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = max(start + size, (u32)256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	nv50_dmac_destroy(disp->core, &head->ovly.base);
	nv50_pioc_destroy(disp->core, &head->oimm.base);
	nv50_dmac_destroy(disp->core, &head->sync.base);
	nv50_pioc_destroy(disp->core, &head->curs.base);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};

static void
nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}

static void
nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}

static int
nv50_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
{
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nv50_crtc_set_dither;
	head->base.set_scale = nv50_crtc_set_scale;
	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
	head->base.color_vibrance = 50;
	head->base.vibrant_hue = 0;
	head->base.cursor.set_offset = nv50_cursor_set_offset;
	head->base.cursor.set_pos = nv50_cursor_set_pos;
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nv50_crtc_func);
	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	nv50_crtc_lut_load(crtc);

	/* allocate cursor resources */
	ret = nv50_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
			      &(struct nv50_display_curs_class) {
					.head = index,
			      }, sizeof(struct nv50_display_curs_class),
			      &head->curs.base);
	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
	}

	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nv50_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
			      &(struct nv50_display_sync_class) {
					.pushbuf = EVO_PUSH_HANDLE(SYNC, index),
					.head = index,
			      }, sizeof(struct nv50_display_sync_class),
			      disp->sync->bo.offset, &head->sync.base);
	if (ret)
		goto out;

	head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);

	/* allocate overlay resources */
	ret = nv50_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
			      &(struct nv50_display_oimm_class) {
					.head = index,
			      }, sizeof(struct nv50_display_oimm_class),
			      &head->oimm.base);
	if (ret)
		goto out;

	ret = nv50_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
			      &(struct nv50_display_ovly_class) {
					.pushbuf = EVO_PUSH_HANDLE(OVLY, index),
					.head = index,
			      }, sizeof(struct nv50_display_ovly_class),
			      disp->sync->bo.offset, &head->ovly.base);
	if (ret)
		goto out;

out:
	if (ret)
		nv50_crtc_destroy(crtc);
	return ret;
}

/******************************************************************************
 * DAC
 *****************************************************************************/
static void
nv50_dac_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	dpms_ctrl = 0x00000000;
	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000001;
	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
		dpms_ctrl |= 0x00000004;

	nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
}

static bool
nv50_dac_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}

static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}

			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

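/* Analog load detection: the core method briefly drives the DAC and reports
 * which lines see a load.  A result of 7 appears to mean all three R/G/B
 * lines are terminated, i.e. a monitor is attached.
 */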
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	int ret, or = nouveau_encoder(encoder)->or;
	u32 load = 0;

	ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
	if (ret || load != 7)
		return connector_status_disconnected;

	return connector_status_connected;
}

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};

static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nv50_dac_func, DRM_MODE_ENCODER_DAC);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
			    nv_connector->base.eld,
			    nv_connector->base.eld[2] * 4);
}

static void
nv50_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
			    NV84_DISP_SOR_HDMI_PWR_STATE_ON |
			    (max_ac_packet << 16) | rekey);

	nv50_audio_mode_set(encoder, mode);
}

static void
nv50_hdmi_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;

	nv50_audio_disconnect(encoder);

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
}

/******************************************************************************
 * SOR
 *****************************************************************************/
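/* One physical OR can back several encoder entries; before powering it down
 * we check that no TMDS partner sharing the same OR is still switched on.
 */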
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_encoder *partner;
	int or = nv_encoder->or;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
		nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, disp->core);
}

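/*
 * When a scaling mode other than "none" is in effect, substitute the
 * connector's native mode for the requested one (preserving the original
 * mode object id) so the panel is always driven at its native timings.
 */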
static bool
nv50_sor_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

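/*
 * Detach the SOR from its head.  The method offset differs between the
 * original EVO core layout (0x0600 + or * 0x40) and the NVD0-style layout
 * (0x0200 + or * 0x20); the trailing 0x0080 method appears to be the
 * core-channel update that makes the change take effect.
 */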
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0600 + (or * 0x40), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0200 + (or * 0x20), 1);
				evo_data(push, 0x00000000);
			}

			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}

		nv50_hdmi_disconnect(encoder);
	}

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;
}

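/*
 * Prepare simply disconnects the output; for DP the EVO channel is synced as
 * well, so the disconnect has actually been processed by the hardware before
 * the rest of the mode set continues.
 */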
static void
nv50_sor_prepare(struct drm_encoder *encoder)
{
	nv50_sor_disconnect(encoder);
	if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
		evo_sync(encoder->dev);
}

static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}

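/*
 * Program the SOR for the new mode.  'proto' selects the output protocol
 * (TMDS single/dual link, LVDS, or DP) and 'depth' the DP pixel depth.  The
 * DP data rate is pixel clock times bytes per pixel; as an illustrative
 * example (assuming a 148500 kHz pixel clock at 8 bpc):
 * 148500 * 24 / 8 = 445500.
 */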
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 *push, lvds = 0;
	u8 owner = 1 << nv_crtc->index;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(encoder, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds |= 0x0100;
			}

			if (lvds & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds |= 0x0200;
		}

		nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
		break;
	case DCB_OUTPUT_DP:
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(nv50_mast(dev), 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_CLASS) {
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x040), 1);
			evo_data(push, (depth << 16) | (proto << 8) | owner);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 1);
			evo_data(push, owner | (proto << 8));
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_prepare,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

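/*
 * Create an encoder for a SOR-driven DCB entry.  The OR index is recovered
 * from the DCB 'or' bitmask via ffs(), and every SOR output (TMDS, LVDS, DP)
 * is registered with the DRM core as a TMDS encoder.
 */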
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct drm_device *dev = connector->dev;
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(dev, encoder, &nv50_sor_func, DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Init
 *****************************************************************************/
void
nv50_display_fini(struct drm_device *dev)
{
}

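/*
 * Bring the display up: write the NvEvoSync handle via method 0x0088 on the
 * core EVO channel, kick the push buffer, and wait for it to complete.
 * -EBUSY is returned if no pushbuffer space could be obtained.
 */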
int
nv50_display_init(struct drm_device *dev)
{
	u32 *push = evo_wait(nv50_mast(dev), 32);
	if (push) {
		evo_mthd(push, 0x0088, 1);
		evo_data(push, NvEvoSync);
		evo_kick(push, nv50_mast(dev));
		return evo_sync(dev);
	}

	return -EBUSY;
}

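/*
 * Teardown mirrors creation: destroy the master EVO channel first, then
 * unmap and release the shared sync buffer object, and finally free the
 * nv50_disp structure itself.
 */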
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(disp->core, &disp->mast.base);

	nouveau_bo_unmap(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}

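/*
 * Probe for a supported display class, newest first, then build the software
 * state: a shared sync buffer, the master EVO channel, one CRTC per hardware
 * head, and encoders/connectors taken from the VBIOS DCB table.
 */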
int
nv50_display_create(struct drm_device *dev)
{
	static const u16 oclass[] = {
		NVE0_DISP_CLASS,
		NVD0_DISP_CLASS,
		NVA3_DISP_CLASS,
		NV94_DISP_CLASS,
		NVA0_DISP_CLASS,
		NV84_DISP_CLASS,
		NV50_DISP_CLASS,
	};
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(disp->sync);
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* attempt to allocate a supported evo display class */
	ret = -ENODEV;
	for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
		ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
					 0xd1500000, oclass[i], NULL, 0,
					 &disp->core);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
			       &(struct nv50_display_mast_class) {
					.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
			       }, sizeof(struct nv50_display_mast_class),
			       disp->sync->bo.offset, &disp->mast.base);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (nv_mclass(disp->core) >= NVD0_DISP_CLASS)
		crtcs = nv_rd32(device, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, disp->core, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case DCB_OUTPUT_TMDS:
		case DCB_OUTPUT_LVDS:
		case DCB_OUTPUT_DP:
			nv50_sor_create(connector, dcbe);
			break;
		case DCB_OUTPUT_ANALOG:
			nv50_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}