drm/nvd0/disp: implement sor support for older display classes
drivers/gpu/drm/nouveau/nvd0_display.c
1/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25#include <linux/dma-mapping.h>
26
27#include <drm/drmP.h>
28#include <drm/drm_crtc_helper.h>
29
30#include "nouveau_drm.h"
31#include "nouveau_dma.h"
32#include "nouveau_gem.h"
33#include "nouveau_connector.h"
34#include "nouveau_encoder.h"
35#include "nouveau_crtc.h"
36#include "nouveau_fence.h"
37#include "nv50_display.h"
38
39#include <core/client.h>
40#include <core/gpuobj.h>
41#include <core/class.h>
42
43#include <subdev/timer.h>
44#include <subdev/bar.h>
45#include <subdev/fb.h>
46
47#define EVO_DMA_NR 9
48
49#define EVO_MASTER (0x00)
50#define EVO_FLIP(c) (0x01 + (c))
51#define EVO_OVLY(c) (0x05 + (c))
52#define EVO_OIMM(c) (0x09 + (c))
53#define EVO_CURS(c) (0x0d + (c))
54
55/* offsets in shared sync bo of various structures */
56#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
57#define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
58#define EVO_FLIP_SEM0(c) EVO_SYNC((c), 0x00)
59#define EVO_FLIP_SEM1(c) EVO_SYNC((c), 0x10)
60
61#define EVO_CORE_HANDLE (0xd1500000)
62#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
63#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
64#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) | \
65 (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
66
67/******************************************************************************
68 * EVO channel
69 *****************************************************************************/
70
71struct nvd0_chan {
72 struct nouveau_object *user;
73 u32 handle;
74};
75
76static int
77nvd0_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
78 void *data, u32 size, struct nvd0_chan *chan)
79{
80 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
81 const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
82 const u32 handle = EVO_CHAN_HANDLE(bclass, head);
83 int ret;
84
85 ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
86 oclass, data, size, &chan->user);
87 if (ret)
88 return ret;
89
90 chan->handle = handle;
91 return 0;
92}
93
94static void
95nvd0_chan_destroy(struct nouveau_object *core, struct nvd0_chan *chan)
96{
97 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
98 if (chan->handle)
99 nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
100}
101
102/******************************************************************************
103 * PIO EVO channel
104 *****************************************************************************/
105
106struct nvd0_pioc {
107 struct nvd0_chan base;
108};
109
110static void
111nvd0_pioc_destroy(struct nouveau_object *core, struct nvd0_pioc *pioc)
112{
113 nvd0_chan_destroy(core, &pioc->base);
114}
115
116static int
117nvd0_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
118 void *data, u32 size, struct nvd0_pioc *pioc)
119{
120 return nvd0_chan_create(core, bclass, head, data, size, &pioc->base);
121}
122
123/******************************************************************************
124 * DMA EVO channel
125 *****************************************************************************/
126
127struct nvd0_dmac {
128 struct nvd0_chan base;
129 dma_addr_t handle;
130 u32 *ptr;
131};
132
133static void
134nvd0_dmac_destroy(struct nouveau_object *core, struct nvd0_dmac *dmac)
135{
136 if (dmac->ptr) {
137 struct pci_dev *pdev = nv_device(core)->pdev;
138 pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
139 }
140
141 nvd0_chan_destroy(core, &dmac->base);
142}
143
144static int
145nvd0_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
146 void *data, u32 size, u64 syncbuf,
147 struct nvd0_dmac *dmac)
148{
149 struct nouveau_fb *pfb = nouveau_fb(core);
150 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
151 struct nouveau_object *object;
152 u32 pushbuf = *(u32 *)data;
153 dma_addr_t handle;
154 void *ptr;
155 int ret;
156
157 ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE, &handle);
158 if (!ptr)
159 return -ENOMEM;
160
161 ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
162 NV_DMA_FROM_MEMORY_CLASS,
163 &(struct nv_dma_class) {
164 .flags = NV_DMA_TARGET_PCI_US |
165 NV_DMA_ACCESS_RD,
166 .start = handle + 0x0000,
167 .limit = handle + 0x0fff,
168 }, sizeof(struct nv_dma_class), &object);
169 if (ret)
170 return ret;
171
172 ret = nvd0_chan_create(core, bclass, head, data, size, &dmac->base);
173 if (ret)
174 return ret;
175
176 dmac->handle = handle;
177 dmac->ptr = ptr;
178
179 ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
180 NV_DMA_IN_MEMORY_CLASS,
181 &(struct nv_dma_class) {
182 .flags = NV_DMA_TARGET_VRAM |
183 NV_DMA_ACCESS_RDWR,
184 .start = syncbuf + 0x0000,
185 .limit = syncbuf + 0x0fff,
186 }, sizeof(struct nv_dma_class), &object);
187 if (ret)
188 goto out;
189
190 ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
191 NV_DMA_IN_MEMORY_CLASS,
192 &(struct nv_dma_class) {
193 .flags = NV_DMA_TARGET_VRAM |
194 NV_DMA_ACCESS_RDWR,
195 .start = 0,
196 .limit = pfb->ram.size - 1,
197 }, sizeof(struct nv_dma_class), &object);
198 if (ret)
199 goto out;
200
201 ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM_LP,
202 NV_DMA_IN_MEMORY_CLASS,
203 &(struct nv_dma_class) {
204 .flags = NV_DMA_TARGET_VRAM |
205 NV_DMA_ACCESS_RDWR,
206 .start = 0,
207 .limit = pfb->ram.size - 1,
208 .conf0 = NVD0_DMA_CONF0_ENABLE |
209 NVD0_DMA_CONF0_PAGE_LP,
210 }, sizeof(struct nv_dma_class), &object);
211 if (ret)
212 goto out;
213
214 ret = nouveau_object_new(client, dmac->base.handle, NvEvoFB32,
215 NV_DMA_IN_MEMORY_CLASS,
216 &(struct nv_dma_class) {
217 .flags = NV_DMA_TARGET_VRAM |
218 NV_DMA_ACCESS_RDWR,
219 .start = 0,
220 .limit = pfb->ram.size - 1,
221 .conf0 = 0x00fe |
222 NVD0_DMA_CONF0_ENABLE |
223 NVD0_DMA_CONF0_PAGE_LP,
224 }, sizeof(struct nv_dma_class), &object);
225out:
226 if (ret)
227 nvd0_dmac_destroy(core, dmac);
228 return ret;
229}
230
231struct nvd0_mast {
232 struct nvd0_dmac base;
233};
234
235struct nvd0_curs {
236 struct nvd0_pioc base;
237};
238
239struct nvd0_sync {
240 struct nvd0_dmac base;
241 struct {
242 u32 offset;
243 u16 value;
244 } sem;
245};
246
247struct nvd0_ovly {
248 struct nvd0_dmac base;
249};
250
251struct nvd0_oimm {
252 struct nvd0_pioc base;
253};
254
255struct nvd0_head {
256 struct nouveau_crtc base;
257 struct nvd0_curs curs;
258 struct nvd0_sync sync;
259 struct nvd0_ovly ovly;
260 struct nvd0_oimm oimm;
261};
262
263#define nvd0_head(c) ((struct nvd0_head *)nouveau_crtc(c))
264#define nvd0_curs(c) (&nvd0_head(c)->curs)
265#define nvd0_sync(c) (&nvd0_head(c)->sync)
266#define nvd0_ovly(c) (&nvd0_head(c)->ovly)
267#define nvd0_oimm(c) (&nvd0_head(c)->oimm)
268#define nvd0_chan(c) (&(c)->base.base)
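/* nvd0_vers() returns the hardware class of a channel's backing object, so
 * the code below can branch on the display generation in use (NV50/NV84/
 * NVD0/NVE0 classes) and pick the matching method layout.
 */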
269#define nvd0_vers(c) nv_mclass(nvd0_chan(c)->user)
270
271struct nvd0_disp {
272 struct nouveau_object *core;
273 struct nvd0_mast mast;
274
275 u32 modeset;
276
277 struct nouveau_bo *sync;
278};
279
280static struct nvd0_disp *
281nvd0_disp(struct drm_device *dev)
282{
283 return nouveau_display(dev)->priv;
284}
285
286#define nvd0_mast(d) (&nvd0_disp(d)->mast)
287
288static struct drm_crtc *
289nvd0_display_crtc_get(struct drm_encoder *encoder)
290{
291 return nouveau_encoder(encoder)->crtc;
292}
293
294/******************************************************************************
295 * EVO channel helpers
296 *****************************************************************************/
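/* evo_wait() reserves room for 'nr' words in the channel's 4KiB push buffer
 * and returns a pointer at the current PUT offset; when the buffer is nearly
 * full it writes a jump back to the start (0x20000000), resets PUT and waits
 * for the channel to catch up.  evo_kick() advances PUT to submit whatever
 * was written since evo_wait().
 */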
297static u32 *
298evo_wait(void *evoc, int nr)
299{
300 struct nvd0_dmac *dmac = evoc;
301 u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;
302
303 if (put + nr >= (PAGE_SIZE / 4) - 8) {
304 dmac->ptr[put] = 0x20000000;
305
306 nv_wo32(dmac->base.user, 0x0000, 0x00000000);
307 if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
308 NV_ERROR(dmac->base.user, "channel stalled\n");
309 return NULL;
310 }
311
312 put = 0;
313 }
314
315 return dmac->ptr + put;
316}
317
318static void
319evo_kick(u32 *push, void *evoc)
320{
321 struct nvd0_dmac *dmac = evoc;
322 nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
323}
324
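/* EVO method header format: data word count in the high bits (<< 18),
 * method offset in the low bits; the data words follow immediately.
 */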
325#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
326#define evo_data(p,d) *((p)++) = (d)
327
328static bool
329evo_sync_wait(void *data)
330{
331 return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
332}
333
334static int
335evo_sync(struct drm_device *dev)
336{
337 struct nouveau_device *device = nouveau_dev(dev);
338 struct nvd0_disp *disp = nvd0_disp(dev);
339 struct nvd0_mast *mast = nvd0_mast(dev);
340 u32 *push = evo_wait(mast, 8);
341 if (push) {
342 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
343 evo_mthd(push, 0x0084, 1);
344 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
345 evo_mthd(push, 0x0080, 2);
346 evo_data(push, 0x00000000);
347 evo_data(push, 0x00000000);
348 evo_kick(push, mast);
349 if (nv_wait_cb(device, evo_sync_wait, disp->sync))
350 return 0;
351 }
352
353 return -EBUSY;
354}
355
356/******************************************************************************
357 * Page flipping channel
358 *****************************************************************************/
359struct nouveau_bo *
360nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
361{
362 return nvd0_disp(dev)->sync;
363}
364
365void
366nvd0_display_flip_stop(struct drm_crtc *crtc)
367{
368 struct nvd0_sync *sync = nvd0_sync(crtc);
369 u32 *push;
370
371 push = evo_wait(sync, 8);
372 if (push) {
373 evo_mthd(push, 0x0084, 1);
374 evo_data(push, 0x00000000);
375 evo_mthd(push, 0x0094, 1);
376 evo_data(push, 0x00000000);
377 evo_mthd(push, 0x00c0, 1);
378 evo_data(push, 0x00000000);
379 evo_mthd(push, 0x0080, 1);
380 evo_data(push, 0x00000000);
381 evo_kick(push, sync);
382 }
383}
384
385int
386nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
387 struct nouveau_channel *chan, u32 swap_interval)
388{
389 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
390 struct nvd0_disp *disp = nvd0_disp(crtc->dev);
391 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
392 struct nvd0_sync *sync = nvd0_sync(crtc);
393 u64 offset;
394 u32 *push;
395 int ret;
396
397 swap_interval <<= 4;
398 if (swap_interval == 0)
399 swap_interval |= 0x100;
400
401 push = evo_wait(sync, 128);
402 if (unlikely(push == NULL))
403 return -EBUSY;
404
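/* Flips are hand-shaken through semaphores in the shared sync buffer:
 * 0xf00d0000|sem.value is written (by the render channel, or by the CPU
 * below) to tell EVO the new framebuffer is ready, and 0x74b1e000 is used
 * to signal flip completion back to the renderer.  sem.offset alternates
 * between two slots (^ 0x10) and sem.value increments after each flip.
 */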
405 /* synchronise with the rendering channel, if necessary */
406 if (likely(chan)) {
407 ret = RING_SPACE(chan, 10);
408 if (ret)
409 return ret;
410
411
412 offset = nvc0_fence_crtc(chan, nv_crtc->index);
413 offset += sync->sem.offset;
414
415 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
416 OUT_RING (chan, upper_32_bits(offset));
417 OUT_RING (chan, lower_32_bits(offset));
418 OUT_RING (chan, 0xf00d0000 | sync->sem.value);
419 OUT_RING (chan, 0x1002);
420 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
421 OUT_RING (chan, upper_32_bits(offset));
422 OUT_RING (chan, lower_32_bits(offset ^ 0x10));
423 OUT_RING (chan, 0x74b1e000);
424 OUT_RING (chan, 0x1001);
425 FIRE_RING (chan);
426 } else {
427 nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
428 0xf00d0000 | sync->sem.value);
429 evo_sync(crtc->dev);
430 }
431
432 /* queue the flip */
433 evo_mthd(push, 0x0100, 1);
434 evo_data(push, 0xfffe0000);
435 evo_mthd(push, 0x0084, 1);
436 evo_data(push, swap_interval);
437 if (!(swap_interval & 0x00000100)) {
438 evo_mthd(push, 0x00e0, 1);
439 evo_data(push, 0x40000000);
440 }
441 evo_mthd(push, 0x0088, 4);
442 evo_data(push, sync->sem.offset);
443 evo_data(push, 0xf00d0000 | sync->sem.value);
444 evo_data(push, 0x74b1e000);
445 evo_data(push, NvEvoSync);
446 evo_mthd(push, 0x00a0, 2);
447 evo_data(push, 0x00000000);
448 evo_data(push, 0x00000000);
449 evo_mthd(push, 0x00c0, 1);
450 evo_data(push, nv_fb->r_dma);
451 evo_mthd(push, 0x0110, 2);
452 evo_data(push, 0x00000000);
453 evo_data(push, 0x00000000);
454 evo_mthd(push, 0x0400, 5);
455 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
456 evo_data(push, 0);
457 evo_data(push, (fb->height << 16) | fb->width);
458 evo_data(push, nv_fb->r_pitch);
459 evo_data(push, nv_fb->r_format);
460 evo_mthd(push, 0x0080, 1);
461 evo_data(push, 0x00000000);
462 evo_kick(push, sync);
463
464 sync->sem.offset ^= 0x10;
465 sync->sem.value++;
466 return 0;
467}
468
469/******************************************************************************
470 * CRTC
471 *****************************************************************************/
472static int
473nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
474{
475 struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
476 struct nouveau_connector *nv_connector;
477 struct drm_connector *connector;
478 u32 *push, mode = 0x00;
479
480 nv_connector = nouveau_crtc_connector_get(nv_crtc);
481 connector = &nv_connector->base;
482 if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
483 if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
484 mode = DITHERING_MODE_DYNAMIC2X2;
485 } else {
486 mode = nv_connector->dithering_mode;
487 }
488
489 if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
490 if (connector->display_info.bpc >= 8)
491 mode |= DITHERING_DEPTH_8BPC;
492 } else {
493 mode |= nv_connector->dithering_depth;
494 }
495
496 push = evo_wait(mast, 4);
497 if (push) {
498 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
499 evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
500 evo_data(push, mode);
501 } else
502 if (nvd0_vers(mast) < NVE0_DISP_MAST_CLASS) {
503 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
504 evo_data(push, mode);
505 } else {
506 evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
507 evo_data(push, mode);
508 }
509
510 if (update) {
511 evo_mthd(push, 0x0080, 1);
512 evo_data(push, 0x00000000);
513 }
514 evo_kick(push, mast);
515 }
516
517 return 0;
518}
519
520static int
521nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
522{
523 struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
524 struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
525 struct drm_crtc *crtc = &nv_crtc->base;
526 struct nouveau_connector *nv_connector;
527 int mode = DRM_MODE_SCALE_NONE;
528 u32 oX, oY, *push;
529
530 /* start off at the resolution we programmed the crtc for, this
531 * effectively handles NONE/FULL scaling
532 */
533 nv_connector = nouveau_crtc_connector_get(nv_crtc);
534 if (nv_connector && nv_connector->native_mode)
535 mode = nv_connector->scaling_mode;
536
537 if (mode != DRM_MODE_SCALE_NONE)
538 omode = nv_connector->native_mode;
539 else
540 omode = umode;
541
542 oX = omode->hdisplay;
543 oY = omode->vdisplay;
544 if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
545 oY *= 2;
546
547 /* add overscan compensation if necessary, will keep the aspect
548 * ratio the same as the backend mode unless overridden by the
549 * user setting both hborder and vborder properties.
550 */
551 if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
552 (nv_connector->underscan == UNDERSCAN_AUTO &&
553 nv_connector->edid &&
554 drm_detect_hdmi_monitor(nv_connector->edid)))) {
555 u32 bX = nv_connector->underscan_hborder;
556 u32 bY = nv_connector->underscan_vborder;
557 u32 aspect = (oY << 19) / oX;
558
559 if (bX) {
560 oX -= (bX * 2);
561 if (bY) oY -= (bY * 2);
562 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
563 } else {
564 oX -= (oX >> 4) + 32;
565 if (bY) oY -= (bY * 2);
566 else oY = ((oX * aspect) + (aspect / 2)) >> 19;
567 }
568 }
569
570 /* handle CENTER/ASPECT scaling, taking into account the areas
571 * removed already for overscan compensation
572 */
573 switch (mode) {
574 case DRM_MODE_SCALE_CENTER:
575 oX = min((u32)umode->hdisplay, oX);
576 oY = min((u32)umode->vdisplay, oY);
577 /* fall-through */
578 case DRM_MODE_SCALE_ASPECT:
579 if (oY < oX) {
580 u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
581 oX = ((oY * aspect) + (aspect / 2)) >> 19;
582 } else {
583 u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
584 oY = ((oX * aspect) + (aspect / 2)) >> 19;
585 }
586 break;
587 default:
588 break;
589 }
590
591 push = evo_wait(mast, 8);
592 if (push) {
593 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
594 /*XXX: SCALE_CTRL_ACTIVE??? */
595 evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
596 evo_data(push, (oY << 16) | oX);
597 evo_data(push, (oY << 16) | oX);
598 evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
599 evo_data(push, 0x00000000);
600 evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
601 evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
602 } else {
603 evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
604 evo_data(push, (oY << 16) | oX);
605 evo_data(push, (oY << 16) | oX);
606 evo_data(push, (oY << 16) | oX);
607 evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
608 evo_data(push, 0x00000000);
609 evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
610 evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
611 }
612
613 evo_kick(push, mast);
614
615 if (update) {
616 nvd0_display_flip_stop(crtc);
617 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
618 }
619 }
620
621 return 0;
622}
623
624static int
625nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
626 int x, int y, bool update)
627{
628 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
629 struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
630 u32 *push;
631
632 push = evo_wait(mast, 16);
633 if (push) {
634 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
635 evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
636 evo_data(push, nvfb->nvbo->bo.offset >> 8);
637 evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
638 evo_data(push, (fb->height << 16) | fb->width);
639 evo_data(push, nvfb->r_pitch);
640 evo_data(push, nvfb->r_format);
641 evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
642 evo_data(push, (y << 16) | x);
643 if (nvd0_vers(mast) > NV50_DISP_MAST_CLASS) {
644 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
645 evo_data(push, nvfb->r_dma);
646 }
647 } else {
648 evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
649 evo_data(push, nvfb->nvbo->bo.offset >> 8);
650 evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
651 evo_data(push, (fb->height << 16) | fb->width);
652 evo_data(push, nvfb->r_pitch);
653 evo_data(push, nvfb->r_format);
654 evo_data(push, nvfb->r_dma);
655 evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
656 evo_data(push, (y << 16) | x);
657 }
658
659 if (update) {
660 evo_mthd(push, 0x0080, 1);
661 evo_data(push, 0x00000000);
662 }
663 evo_kick(push, mast);
664 }
665
666 nv_crtc->fb.tile_flags = nvfb->r_dma;
667 return 0;
668}
669
670static void
671nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
672{
673 struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
674 u32 *push = evo_wait(mast, 16);
675 if (push) {
676 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
677 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
678 evo_data(push, 0x85000000);
679 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
680 } else
681 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
682 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
683 evo_data(push, 0x85000000);
684 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
685 evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
686 evo_data(push, NvEvoVRAM);
687 } else {
688 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
689 evo_data(push, 0x85000000);
690 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
691 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
692 evo_data(push, NvEvoVRAM);
693 }
694 evo_kick(push, mast);
695 }
696}
697
698static void
699nvd0_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
700{
701 struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
702 u32 *push = evo_wait(mast, 16);
703 if (push) {
704 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
705 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
706 evo_data(push, 0x05000000);
707 } else
708 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
709 evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
710 evo_data(push, 0x05000000);
711 evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
712 evo_data(push, 0x00000000);
713 } else {
714 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
715 evo_data(push, 0x05000000);
716 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
717 evo_data(push, 0x00000000);
718 }
719 evo_kick(push, mast);
720 }
721}
722
723static void
724nvd0_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
725{
726 struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
727
728 if (show)
729 nvd0_crtc_cursor_show(nv_crtc);
730 else
731 nvd0_crtc_cursor_hide(nv_crtc);
732
733 if (update) {
734 u32 *push = evo_wait(mast, 2);
735 if (push) {
736 evo_mthd(push, 0x0080, 1);
737 evo_data(push, 0x00000000);
738 evo_kick(push, mast);
739 }
740 }
741}
742
743static void
744nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
745{
746}
747
748static void
749nvd0_crtc_prepare(struct drm_crtc *crtc)
750{
751 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
752 struct nvd0_mast *mast = nvd0_mast(crtc->dev);
753 u32 *push;
754
755 nvd0_display_flip_stop(crtc);
756
757 push = evo_wait(mast, 2);
758 if (push) {
759 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
760 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
761 evo_data(push, 0x00000000);
762 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
763 evo_data(push, 0x40000000);
764 } else
765 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
766 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
767 evo_data(push, 0x00000000);
768 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
769 evo_data(push, 0x40000000);
770 evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
771 evo_data(push, 0x00000000);
772 } else {
773 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
774 evo_data(push, 0x00000000);
775 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
776 evo_data(push, 0x03000000);
777 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
778 evo_data(push, 0x00000000);
779 }
780
781 evo_kick(push, mast);
782 }
783
784 nvd0_crtc_cursor_show_hide(nv_crtc, false, false);
785}
786
787static void
788nvd0_crtc_commit(struct drm_crtc *crtc)
789{
790 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
791 struct nvd0_mast *mast = nvd0_mast(crtc->dev);
792 u32 *push;
793
794 push = evo_wait(mast, 32);
795 if (push) {
796 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
797 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
798 evo_data(push, NvEvoVRAM_LP);
799 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
800 evo_data(push, 0xc0000000);
801 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
802 } else
803 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
804 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
805 evo_data(push, nv_crtc->fb.tile_flags);
806 evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
807 evo_data(push, 0xc0000000);
808 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
809 evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
810 evo_data(push, NvEvoVRAM);
811 } else {
812 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
813 evo_data(push, nv_crtc->fb.tile_flags);
814 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
815 evo_data(push, 0x83000000);
816 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
817 evo_data(push, 0x00000000);
818 evo_data(push, 0x00000000);
819 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
820 evo_data(push, NvEvoVRAM);
821 evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
822 evo_data(push, 0xffffff00);
823 }
824
825 evo_kick(push, mast);
826 }
827
828 nvd0_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
829 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
830}
831
832static bool
833nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
834 struct drm_display_mode *adjusted_mode)
835{
836 return true;
837}
838
839static int
840nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
841{
842 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
843 int ret;
844
845 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
846 if (ret)
847 return ret;
848
849 if (old_fb) {
850 nvfb = nouveau_framebuffer(old_fb);
851 nouveau_bo_unpin(nvfb->nvbo);
852 }
853
854 return 0;
855}
856
857static int
858nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
859 struct drm_display_mode *mode, int x, int y,
860 struct drm_framebuffer *old_fb)
861{
862 struct nvd0_mast *mast = nvd0_mast(crtc->dev);
863 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
864 struct nouveau_connector *nv_connector;
865 u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
866 u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
867 u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
868 u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
869 u32 vblan2e = 0, vblan2s = 1;
870 u32 *push;
871 int ret;
872
873 hactive = mode->htotal;
874 hsynce = mode->hsync_end - mode->hsync_start - 1;
875 hbackp = mode->htotal - mode->hsync_end;
876 hblanke = hsynce + hbackp;
877 hfrontp = mode->hsync_start - mode->hdisplay;
878 hblanks = mode->htotal - hfrontp - 1;
879
880 vactive = mode->vtotal * vscan / ilace;
881 vsynce = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
882 vbackp = (mode->vtotal - mode->vsync_end) * vscan / ilace;
883 vblanke = vsynce + vbackp;
884 vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
885 vblanks = vactive - vfrontp - 1;
886 if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
887 vblan2e = vactive + vsynce + vbackp;
888 vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
889 vactive = (vactive * 2) + 1;
890 }
891
892 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
893 if (ret)
894 return ret;
895
896 push = evo_wait(mast, 64);
897 if (push) {
898 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
899 evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
900 evo_data(push, 0x00800000 | mode->clock);
901 evo_data(push, (ilace == 2) ? 2 : 0);
902 evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
903 evo_data(push, 0x00000000);
904 evo_data(push, (vactive << 16) | hactive);
905 evo_data(push, ( vsynce << 16) | hsynce);
906 evo_data(push, (vblanke << 16) | hblanke);
907 evo_data(push, (vblanks << 16) | hblanks);
908 evo_data(push, (vblan2e << 16) | vblan2s);
909 evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
910 evo_data(push, 0x00000000);
911 evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
912 evo_data(push, 0x00000311);
913 evo_data(push, 0x00000100);
914 } else {
915 evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
916 evo_data(push, 0x00000000);
917 evo_data(push, (vactive << 16) | hactive);
918 evo_data(push, ( vsynce << 16) | hsynce);
919 evo_data(push, (vblanke << 16) | hblanke);
920 evo_data(push, (vblanks << 16) | hblanks);
921 evo_data(push, (vblan2e << 16) | vblan2s);
922 evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
923 evo_data(push, 0x00000000); /* ??? */
924 evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
925 evo_data(push, mode->clock * 1000);
926 evo_data(push, 0x00200000); /* ??? */
927 evo_data(push, mode->clock * 1000);
928 evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
929 evo_data(push, 0x00000311);
930 evo_data(push, 0x00000100);
931 }
932
933 evo_kick(push, mast);
934 }
935
936 nv_connector = nouveau_crtc_connector_get(nv_crtc);
937 nvd0_crtc_set_dither(nv_crtc, false);
938 nvd0_crtc_set_scale(nv_crtc, false);
939 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
940 return 0;
941}
942
943static int
944nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
945 struct drm_framebuffer *old_fb)
946{
947 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
948 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
949 int ret;
950
951 if (!crtc->fb) {
952 NV_DEBUG(drm, "No FB bound\n");
953 return 0;
954 }
955
956 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
957 if (ret)
958 return ret;
959
960 nvd0_display_flip_stop(crtc);
961 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
962 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
963 return 0;
964}
965
966static int
967nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
968 struct drm_framebuffer *fb, int x, int y,
969 enum mode_set_atomic state)
970{
971 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
972 nvd0_display_flip_stop(crtc);
973 nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
974 return 0;
975}
976
977static void
978nvd0_crtc_lut_load(struct drm_crtc *crtc)
979{
980 struct nvd0_disp *disp = nvd0_disp(crtc->dev);
981 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
982 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
983 int i;
984
985 for (i = 0; i < 256; i++) {
986 u16 r = nv_crtc->lut.r[i] >> 2;
987 u16 g = nv_crtc->lut.g[i] >> 2;
988 u16 b = nv_crtc->lut.b[i] >> 2;
989
990 if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
991 writew(r + 0x0000, lut + (i * 0x08) + 0);
992 writew(g + 0x0000, lut + (i * 0x08) + 2);
993 writew(b + 0x0000, lut + (i * 0x08) + 4);
994 } else {
995 writew(r + 0x6000, lut + (i * 0x20) + 0);
996 writew(g + 0x6000, lut + (i * 0x20) + 2);
997 writew(b + 0x6000, lut + (i * 0x20) + 4);
998 }
999 }
1000}
1001
1002static int
1003nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1004 uint32_t handle, uint32_t width, uint32_t height)
1005{
1006 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1007 struct drm_device *dev = crtc->dev;
1008 struct drm_gem_object *gem;
1009 struct nouveau_bo *nvbo;
1010 bool visible = (handle != 0);
1011 int i, ret = 0;
1012
1013 if (visible) {
1014 if (width != 64 || height != 64)
1015 return -EINVAL;
1016
1017 gem = drm_gem_object_lookup(dev, file_priv, handle);
1018 if (unlikely(!gem))
1019 return -ENOENT;
1020 nvbo = nouveau_gem_object(gem);
1021
1022 ret = nouveau_bo_map(nvbo);
1023 if (ret == 0) {
1024 for (i = 0; i < 64 * 64; i++) {
1025 u32 v = nouveau_bo_rd32(nvbo, i);
1026 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
1027 }
1028 nouveau_bo_unmap(nvbo);
1029 }
1030
1031 drm_gem_object_unreference_unlocked(gem);
1032 }
1033
1034 if (visible != nv_crtc->cursor.visible) {
1035 nvd0_crtc_cursor_show_hide(nv_crtc, visible, true);
1036 nv_crtc->cursor.visible = visible;
1037 }
1038
1039 return ret;
1040}
1041
1042static int
1043nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1044{
1045 struct nvd0_curs *curs = nvd0_curs(crtc);
1046 struct nvd0_chan *chan = nvd0_chan(curs);
1047 nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
1048 nv_wo32(chan->user, 0x0080, 0x00000000);
1049 return 0;
1050}
1051
1052static void
1053nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1054 uint32_t start, uint32_t size)
1055{
1056 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1057 u32 end = max(start + size, (u32)256);
1058 u32 i;
1059
1060 for (i = start; i < end; i++) {
1061 nv_crtc->lut.r[i] = r[i];
1062 nv_crtc->lut.g[i] = g[i];
1063 nv_crtc->lut.b[i] = b[i];
1064 }
1065
1066 nvd0_crtc_lut_load(crtc);
1067}
1068
1069static void
1070nvd0_crtc_destroy(struct drm_crtc *crtc)
1071{
1072 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1073 struct nvd0_disp *disp = nvd0_disp(crtc->dev);
1074 struct nvd0_head *head = nvd0_head(crtc);
1075 nvd0_dmac_destroy(disp->core, &head->ovly.base);
1076 nvd0_pioc_destroy(disp->core, &head->oimm.base);
1077 nvd0_dmac_destroy(disp->core, &head->sync.base);
1078 nvd0_pioc_destroy(disp->core, &head->curs.base);
1079 nouveau_bo_unmap(nv_crtc->cursor.nvbo);
1080 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1081 nouveau_bo_unmap(nv_crtc->lut.nvbo);
1082 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1083 drm_crtc_cleanup(crtc);
1084 kfree(crtc);
1085}
1086
1087static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
1088 .dpms = nvd0_crtc_dpms,
1089 .prepare = nvd0_crtc_prepare,
1090 .commit = nvd0_crtc_commit,
1091 .mode_fixup = nvd0_crtc_mode_fixup,
1092 .mode_set = nvd0_crtc_mode_set,
1093 .mode_set_base = nvd0_crtc_mode_set_base,
1094 .mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
1095 .load_lut = nvd0_crtc_lut_load,
1096};
1097
1098static const struct drm_crtc_funcs nvd0_crtc_func = {
1099 .cursor_set = nvd0_crtc_cursor_set,
1100 .cursor_move = nvd0_crtc_cursor_move,
1101 .gamma_set = nvd0_crtc_gamma_set,
1102 .set_config = drm_crtc_helper_set_config,
1103 .destroy = nvd0_crtc_destroy,
1104 .page_flip = nouveau_crtc_page_flip,
1105};
1106
1107static void
1108nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
1109{
1110}
1111
1112static void
1113nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
1114{
1115}
1116
1117static int
1118nvd0_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
1119{
1120 struct nvd0_disp *disp = nvd0_disp(dev);
1121 struct nvd0_head *head;
1122 struct drm_crtc *crtc;
1123 int ret, i;
1124
1125 head = kzalloc(sizeof(*head), GFP_KERNEL);
1126 if (!head)
1127 return -ENOMEM;
1128
1129 head->base.index = index;
1130 head->base.set_dither = nvd0_crtc_set_dither;
1131 head->base.set_scale = nvd0_crtc_set_scale;
1132 head->base.cursor.set_offset = nvd0_cursor_set_offset;
1133 head->base.cursor.set_pos = nvd0_cursor_set_pos;
1134 for (i = 0; i < 256; i++) {
1135 head->base.lut.r[i] = i << 8;
1136 head->base.lut.g[i] = i << 8;
1137 head->base.lut.b[i] = i << 8;
1138 }
1139
1140 crtc = &head->base.base;
1141 drm_crtc_init(dev, crtc, &nvd0_crtc_func);
1142 drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
1143 drm_mode_crtc_set_gamma_size(crtc, 256);
1144
1145 ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1146 0, 0x0000, NULL, &head->base.lut.nvbo);
1147 if (!ret) {
1148 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1149 if (!ret)
1150 ret = nouveau_bo_map(head->base.lut.nvbo);
1151 if (ret)
1152 nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1153 }
1154
1155 if (ret)
1156 goto out;
1157
1158 nvd0_crtc_lut_load(crtc);
1159
1160 /* allocate cursor resources */
1161 ret = nvd0_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
1162 &(struct nv50_display_curs_class) {
1163 .head = index,
1164 }, sizeof(struct nv50_display_curs_class),
1165 &head->curs.base);
1166 if (ret)
1167 goto out;
1168
1169 ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1170 0, 0x0000, NULL, &head->base.cursor.nvbo);
1171 if (!ret) {
1172 ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1173 if (!ret)
1174 ret = nouveau_bo_map(head->base.cursor.nvbo);
1175 if (ret)
1176 nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1177 }
1178
1179 if (ret)
1180 goto out;
1181
1182 /* allocate page flip / sync resources */
1183 ret = nvd0_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
1184 &(struct nv50_display_sync_class) {
1185 .pushbuf = EVO_PUSH_HANDLE(SYNC, index),
1186 .head = index,
1187 }, sizeof(struct nv50_display_sync_class),
1188 disp->sync->bo.offset, &head->sync.base);
1189 if (ret)
1190 goto out;
1191
1192 head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);
1193
1194 /* allocate overlay resources */
1195 ret = nvd0_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
1196 &(struct nv50_display_oimm_class) {
1197 .head = index,
1198 }, sizeof(struct nv50_display_oimm_class),
1199 &head->oimm.base);
1200 if (ret)
1201 goto out;
1202
1203 ret = nvd0_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
1204 &(struct nv50_display_ovly_class) {
1205 .pushbuf = EVO_PUSH_HANDLE(OVLY, index),
1206 .head = index,
1207 }, sizeof(struct nv50_display_ovly_class),
1208 disp->sync->bo.offset, &head->ovly.base);
1209 if (ret)
1210 goto out;
1211
1212out:
1213 if (ret)
1214 nvd0_crtc_destroy(crtc);
1215 return ret;
1216}
1217
1218/******************************************************************************
1219 * DAC
1220 *****************************************************************************/
1221static void
1222nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
1223{
1224 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1225 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1226 int or = nv_encoder->or;
1227 u32 dpms_ctrl;
1228
1229 dpms_ctrl = 0x00000000;
1230 if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1231 dpms_ctrl |= 0x00000001;
1232 if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1233 dpms_ctrl |= 0x00000004;
1234
1235 nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
1236}
1237
1238static bool
1239nvd0_dac_mode_fixup(struct drm_encoder *encoder,
1240 const struct drm_display_mode *mode,
8eaa9669
BS
1241 struct drm_display_mode *adjusted_mode)
1242{
1243 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1244 struct nouveau_connector *nv_connector;
1245
1246 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1247 if (nv_connector && nv_connector->native_mode) {
1248 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1249 int id = adjusted_mode->base.id;
1250 *adjusted_mode = *nv_connector->native_mode;
1251 adjusted_mode->base.id = id;
1252 }
1253 }
1254
1255 return true;
1256}
1257
1258static void
1259nvd0_dac_commit(struct drm_encoder *encoder)
1260{
1261}
1262
1263static void
1264nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1265 struct drm_display_mode *adjusted_mode)
1266{
1267 struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1268 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1269 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1270 u32 *push;
1271
1272 nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1273
1274 push = evo_wait(mast, 8);
1275 if (push) {
1276 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1277 u32 syncs = 0x00000000;
1278
1279 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1280 syncs |= 0x00000001;
1281 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1282 syncs |= 0x00000002;
1283
1284 evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
1285 evo_data(push, 1 << nv_crtc->index);
1286 evo_data(push, syncs);
1287 } else {
1288 u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1289 u32 syncs = 0x00000001;
1290
1291 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1292 syncs |= 0x00000008;
1293 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1294 syncs |= 0x00000010;
1295
1296 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1297 magic |= 0x00000001;
1298
1299 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1300 evo_data(push, syncs);
1301 evo_data(push, magic);
1302 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
1303 evo_data(push, 1 << nv_crtc->index);
1304 }
1305
1306 evo_kick(push, mast);
1307 }
1308
1309 nv_encoder->crtc = encoder->crtc;
1310}
1311
1312static void
1313nvd0_dac_disconnect(struct drm_encoder *encoder)
1314{
1315 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1316 struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1317 const int or = nv_encoder->or;
1318 u32 *push;
1319
1320 if (nv_encoder->crtc) {
1321 nvd0_crtc_prepare(nv_encoder->crtc);
1322
1323 push = evo_wait(mast, 4);
1324 if (push) {
1325 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1326 evo_mthd(push, 0x0400 + (or * 0x080), 1);
1327 evo_data(push, 0x00000000);
1328 } else {
1329 evo_mthd(push, 0x0180 + (or * 0x020), 1);
1330 evo_data(push, 0x00000000);
1331 }
1332
1333 evo_mthd(push, 0x0080, 1);
1334 evo_data(push, 0x00000000);
1335 evo_kick(push, mast);
1336 }
1337 }
1338
1339 nv_encoder->crtc = NULL;
1340}
1341
1342static enum drm_connector_status
1343nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1344{
1345 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1346 int ret, or = nouveau_encoder(encoder)->or;
1347 u32 load = 0;
1348
1349 ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
1350 if (ret || load != 7)
1351 return connector_status_disconnected;
1352
1353 return connector_status_connected;
1354}
1355
1356static void
1357nvd0_dac_destroy(struct drm_encoder *encoder)
1358{
1359 drm_encoder_cleanup(encoder);
1360 kfree(encoder);
1361}
1362
1363static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
1364 .dpms = nvd0_dac_dpms,
1365 .mode_fixup = nvd0_dac_mode_fixup,
1366 .prepare = nvd0_dac_disconnect,
1367 .commit = nvd0_dac_commit,
1368 .mode_set = nvd0_dac_mode_set,
1369 .disable = nvd0_dac_disconnect,
1370 .get_crtc = nvd0_display_crtc_get,
1371 .detect = nvd0_dac_detect
1372};
1373
1374static const struct drm_encoder_funcs nvd0_dac_func = {
1375 .destroy = nvd0_dac_destroy,
1376};
1377
1378static int
1379nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1380{
1381 struct drm_device *dev = connector->dev;
1382 struct nouveau_encoder *nv_encoder;
1383 struct drm_encoder *encoder;
1384
1385 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1386 if (!nv_encoder)
1387 return -ENOMEM;
1388 nv_encoder->dcb = dcbe;
1389 nv_encoder->or = ffs(dcbe->or) - 1;
1390
1391 encoder = to_drm_encoder(nv_encoder);
1392 encoder->possible_crtcs = dcbe->heads;
1393 encoder->possible_clones = 0;
1394 drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1395 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1396
1397 drm_mode_connector_attach_encoder(connector, encoder);
1398 return 0;
1399}
1400
1401/******************************************************************************
1402 * Audio
1403 *****************************************************************************/
1404static void
1405nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1406{
1407 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1408 struct nouveau_connector *nv_connector;
1409 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1410
1411 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1412 if (!drm_detect_monitor_audio(nv_connector->edid))
1413 return;
1414
1415 drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1416
1417 nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
1418 nv_connector->base.eld,
1419 nv_connector->base.eld[2] * 4);
1420}
1421
1422static void
1423nvd0_audio_disconnect(struct drm_encoder *encoder)
1424{
1425 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1426 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1427
1428 nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
1429}
1430
1431/******************************************************************************
1432 * HDMI
1433 *****************************************************************************/
1434static void
1435nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1436{
1437 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1438 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1439 struct nouveau_connector *nv_connector;
1440 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1441 const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1442 u32 rekey = 56; /* binary driver, and tegra constant */
1443 u32 max_ac_packet;
1444
1445 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1446 if (!drm_detect_hdmi_monitor(nv_connector->edid))
1447 return;
1448
1449 max_ac_packet = mode->htotal - mode->hdisplay;
1450 max_ac_packet -= rekey;
1451 max_ac_packet -= 18; /* constant from tegra */
1452 max_ac_packet /= 32;
1453
1454 nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
1455 NV84_DISP_SOR_HDMI_PWR_STATE_ON |
1456 (max_ac_packet << 16) | rekey);
1457
1458 nvd0_audio_mode_set(encoder, mode);
1459}
1460
1461static void
1462nvd0_hdmi_disconnect(struct drm_encoder *encoder)
1463{
1464 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1465 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1466 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1467 const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1468
1469 nvd0_audio_disconnect(encoder);
1470
1471 nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
1472}
1473
1474/******************************************************************************
1475 * SOR
1476 *****************************************************************************/
1477static void
1478nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
1479{
1480 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1481 struct drm_device *dev = encoder->dev;
1482 struct nvd0_disp *disp = nvd0_disp(dev);
1483 struct drm_encoder *partner;
1484 int or = nv_encoder->or;
1485
1486 nv_encoder->last_dpms = mode;
1487
1488 list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1489 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1490
1491 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1492 continue;
1493
1494 if (nv_partner != nv_encoder &&
1495 nv_partner->dcb->or == nv_encoder->dcb->or) {
1496 if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1497 return;
1498 break;
1499 }
1500 }
1501
1502 nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));
1503
1504 if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
1505 nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, disp->core);
1506}
1507
1508static bool
1509nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1510 const struct drm_display_mode *mode,
1511 struct drm_display_mode *adjusted_mode)
1512{
1513 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1514 struct nouveau_connector *nv_connector;
1515
1516 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1517 if (nv_connector && nv_connector->native_mode) {
1518 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1519 int id = adjusted_mode->base.id;
1520 *adjusted_mode = *nv_connector->native_mode;
1521 adjusted_mode->base.id = id;
1522 }
1523 }
1524
1525 return true;
1526}
1527
1528static void
1529nvd0_sor_disconnect(struct drm_encoder *encoder)
1530{
1531 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1532 struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1533 const int or = nv_encoder->or;
1534 u32 *push;
1535
1536 if (nv_encoder->crtc) {
1537 nvd0_crtc_prepare(nv_encoder->crtc);
1538
1539 push = evo_wait(mast, 4);
1540 if (push) {
1541 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1542 evo_mthd(push, 0x0600 + (or * 0x40), 1);
1543 evo_data(push, 0x00000000);
1544 } else {
1545 evo_mthd(push, 0x0200 + (or * 0x20), 1);
1546 evo_data(push, 0x00000000);
1547 }
1548
1549 evo_mthd(push, 0x0080, 1);
1550 evo_data(push, 0x00000000);
1551 evo_kick(push, mast);
1552 }
1553
1554 nvd0_hdmi_disconnect(encoder);
1555 }
1556
1557 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1558 nv_encoder->crtc = NULL;
1559}
1560
1561static void
1562nvd0_sor_prepare(struct drm_encoder *encoder)
1563{
1564 nvd0_sor_disconnect(encoder);
1565 if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
1566 evo_sync(encoder->dev);
1567}
1568
1569static void
1570nvd0_sor_commit(struct drm_encoder *encoder)
1571{
1572}
1573
1574static void
1575nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1576 struct drm_display_mode *mode)
1577{
1578 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1579 struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1580 struct drm_device *dev = encoder->dev;
1581 struct nouveau_drm *drm = nouveau_drm(dev);
1582 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1583 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1584 struct nouveau_connector *nv_connector;
1585 struct nvbios *bios = &drm->vbios;
1586 u32 *push, lvds = 0;
1587 u8 owner = 1 << nv_crtc->index;
1588 u8 proto = 0xf;
1589 u8 depth = 0x0;
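/* proto selects the SOR protocol for this output (LVDS, single- or
 * dual-link TMDS, DP on link A or B) and depth the DP pixel depth code;
 * both are derived from the DCB entry and mode below, then written with
 * the class-specific method layout.
 */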
1590
1591 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1592 switch (nv_encoder->dcb->type) {
1593 case DCB_OUTPUT_TMDS:
1594 if (nv_encoder->dcb->sorconf.link & 1) {
1595 if (mode->clock < 165000)
1596 proto = 0x1;
1597 else
1598 proto = 0x5;
1599 } else {
1600 proto = 0x2;
1601 }
1602
1603 nvd0_hdmi_mode_set(encoder, mode);
1604 break;
1605 case DCB_OUTPUT_LVDS:
1606 proto = 0x0;
1607
1608 if (bios->fp_no_ddc) {
1609 if (bios->fp.dual_link)
1610 lvds |= 0x0100;
1611 if (bios->fp.if_is_24bit)
1612 lvds |= 0x0200;
1613 } else {
1614 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1615 if (((u8 *)nv_connector->edid)[121] == 2)
1616 lvds |= 0x0100;
1617 } else
1618 if (mode->clock >= bios->fp.duallink_transition_clk) {
1619 lvds |= 0x0100;
1620 }
1621
1622 if (lvds & 0x0100) {
1623 if (bios->fp.strapless_is_24bit & 2)
1624 lvds |= 0x0200;
1625 } else {
1626 if (bios->fp.strapless_is_24bit & 1)
1627 lvds |= 0x0200;
1628 }
1629
1630 if (nv_connector->base.display_info.bpc == 8)
1631 lvds |= 0x0200;
1632 }
1633
1634 nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
1635 break;
1636 case DCB_OUTPUT_DP:
1637 if (nv_connector->base.display_info.bpc == 6) {
1638 nv_encoder->dp.datarate = mode->clock * 18 / 8;
1639 depth = 0x2;
1640 } else {
1641 nv_encoder->dp.datarate = mode->clock * 24 / 8;
1642 depth = 0x5;
1643 }
1644
1645 if (nv_encoder->dcb->sorconf.link & 1)
1646 proto = 0x8;
1647 else
1648 proto = 0x9;
1649 break;
1650 default:
1651 BUG_ON(1);
1652 break;
1653 }
1654
1655 nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
1656
1657 push = evo_wait(nvd0_mast(dev), 8);
1658 if (push) {
1659 if (nvd0_vers(mast) < NVD0_DISP_CLASS) {
1660 evo_mthd(push, 0x0600 + (nv_encoder->or * 0x040), 1);
1661 evo_data(push, (depth << 16) | (proto << 8) | owner);
1662 } else {
1663 u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1664 u32 syncs = 0x00000001;
1665
1666 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1667 syncs |= 0x00000008;
1668 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1669 syncs |= 0x00000010;
1670
1671 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1672 magic |= 0x00000001;
1673
1674 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1675 evo_data(push, syncs | (depth << 6));
1676 evo_data(push, magic);
1677 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 1);
1678 evo_data(push, owner | (proto << 8));
1679 }
1680
1681 evo_kick(push, mast);
1682 }
1683
1684 nv_encoder->crtc = encoder->crtc;
1685}
1686
1687static void
1688nvd0_sor_destroy(struct drm_encoder *encoder)
1689{
1690 drm_encoder_cleanup(encoder);
1691 kfree(encoder);
1692}
1693
1694static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
1695 .dpms = nvd0_sor_dpms,
1696 .mode_fixup = nvd0_sor_mode_fixup,
1697 .prepare = nvd0_sor_prepare,
1698 .commit = nvd0_sor_commit,
1699 .mode_set = nvd0_sor_mode_set,
1700 .disable = nvd0_sor_disconnect,
1701 .get_crtc = nvd0_display_crtc_get,
1702};
1703
1704static const struct drm_encoder_funcs nvd0_sor_func = {
1705 .destroy = nvd0_sor_destroy,
1706};
1707
1708static int
1709nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1710{
1711 struct drm_device *dev = connector->dev;
1712 struct nouveau_encoder *nv_encoder;
1713 struct drm_encoder *encoder;
1714
1715 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1716 if (!nv_encoder)
1717 return -ENOMEM;
1718 nv_encoder->dcb = dcbe;
1719 nv_encoder->or = ffs(dcbe->or) - 1;
1720 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1721
1722 encoder = to_drm_encoder(nv_encoder);
1723 encoder->possible_crtcs = dcbe->heads;
1724 encoder->possible_clones = 0;
1725 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1726 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1727
1728 drm_mode_connector_attach_encoder(connector, encoder);
1729 return 0;
1730}
1731
1732/******************************************************************************
1733 * Init
1734 *****************************************************************************/
1735void
1736nvd0_display_fini(struct drm_device *dev)
1737{
1738}
1739
1740int
1741nvd0_display_init(struct drm_device *dev)
1742{
1743 u32 *push = evo_wait(nvd0_mast(dev), 32);
1744 if (push) {
1745 evo_mthd(push, 0x0088, 1);
1746 evo_data(push, NvEvoSync);
1747 evo_mthd(push, 0x0084, 1);
1748 evo_data(push, 0x00000000);
1749 evo_mthd(push, 0x0084, 1);
1750 evo_data(push, 0x80000000);
1751 evo_mthd(push, 0x008c, 1);
1752 evo_data(push, 0x00000000);
1753 evo_kick(push, nvd0_mast(dev));
1754 return 0;
1755 }
1756
1757 return -EBUSY;
1758}
1759
1760void
1761nvd0_display_destroy(struct drm_device *dev)
1762{
1763 struct nvd0_disp *disp = nvd0_disp(dev);
1764
1765 nvd0_dmac_destroy(disp->core, &disp->mast.base);
1766
1767 nouveau_bo_unmap(disp->sync);
1768 nouveau_bo_ref(NULL, &disp->sync);
1769
1770 nouveau_display(dev)->priv = NULL;
1771 kfree(disp);
1772}
1773
1774int
1775nvd0_display_create(struct drm_device *dev)
1776{
1777 static const u16 oclass[] = {
1778 NVE0_DISP_CLASS,
1779 NVD0_DISP_CLASS,
1780 };
1781 struct nouveau_device *device = nouveau_dev(dev);
1782 struct nouveau_drm *drm = nouveau_drm(dev);
1783 struct dcb_table *dcb = &drm->vbios.dcb;
1784 struct drm_connector *connector, *tmp;
1785 struct nvd0_disp *disp;
1786 struct dcb_output *dcbe;
1787 int crtcs, ret, i;
1788
1789 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
1790 if (!disp)
1791 return -ENOMEM;
1792
1793 nouveau_display(dev)->priv = disp;
1794 nouveau_display(dev)->dtor = nvd0_display_destroy;
1795 nouveau_display(dev)->init = nvd0_display_init;
1796 nouveau_display(dev)->fini = nvd0_display_fini;
1797
1798 /* small shared memory area we use for notifiers and semaphores */
1799 ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
1800 0, 0x0000, NULL, &disp->sync);
1801 if (!ret) {
1802 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
1803 if (!ret)
1804 ret = nouveau_bo_map(disp->sync);
1805 if (ret)
1806 nouveau_bo_ref(NULL, &disp->sync);
1807 }
1808
1809 if (ret)
1810 goto out;
1811
1812 /* attempt to allocate a supported evo display class */
1813 ret = -ENODEV;
1814 for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
1815 ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
1816 0xd1500000, oclass[i], NULL, 0,
1817 &disp->core);
1818 }
1819
1820 if (ret)
1821 goto out;
1822
1823 /* allocate master evo channel */
1824 ret = nvd0_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
1825 &(struct nv50_display_mast_class) {
1826 .pushbuf = EVO_PUSH_HANDLE(MAST, 0),
1827 }, sizeof(struct nv50_display_mast_class),
1828 disp->sync->bo.offset, &disp->mast.base);
1829 if (ret)
1830 goto out;
1831
1832 /* create crtc objects to represent the hw heads */
1833 crtcs = nv_rd32(device, 0x022448);
1834 for (i = 0; i < crtcs; i++) {
1835 ret = nvd0_crtc_create(dev, disp->core, i);
1836 if (ret)
1837 goto out;
1838 }
1839
1840 /* create encoder/connector objects based on VBIOS DCB table */
1841 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
1842 connector = nouveau_connector_create(dev, dcbe->connector);
1843 if (IS_ERR(connector))
1844 continue;
1845
1846 if (dcbe->location != DCB_LOC_ON_CHIP) {
1847 NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
1848 dcbe->type, ffs(dcbe->or) - 1);
1849 continue;
1850 }
1851
1852 switch (dcbe->type) {
1853 case DCB_OUTPUT_TMDS:
1854 case DCB_OUTPUT_LVDS:
1855 case DCB_OUTPUT_DP:
1856 nvd0_sor_create(connector, dcbe);
1857 break;
1858 case DCB_OUTPUT_ANALOG:
1859 nvd0_dac_create(connector, dcbe);
1860 break;
1861 default:
1862 NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
1863 dcbe->type, ffs(dcbe->or) - 1);
1864 continue;
1865 }
1866 }
1867
1868 /* cull any connectors we created that don't have an encoder */
1869 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
1870 if (connector->encoder_ids[0])
1871 continue;
1872
1873 NV_WARN(drm, "%s has no encoders, removing\n",
1874 drm_get_connector_name(connector));
1875 connector->funcs->destroy(connector);
1876 }
1877
1878out:
1879 if (ret)
1880 nvd0_display_destroy(dev);
1881 return ret;
1882}