nv50_display.c revision 410f3ec63570bea8efe00826a2b83ceb353553b1
1/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25#include <linux/dma-mapping.h>
26
27#include <drm/drmP.h>
28#include <drm/drm_crtc_helper.h>
29#include <drm/drm_dp_helper.h>
30
31#include <nvif/class.h>
32
33#include "nouveau_drm.h"
34#include "nouveau_dma.h"
35#include "nouveau_gem.h"
36#include "nouveau_connector.h"
37#include "nouveau_encoder.h"
38#include "nouveau_crtc.h"
39#include "nouveau_fence.h"
40#include "nv50_display.h"
41
42#define EVO_DMA_NR 9
43
44#define EVO_MASTER  (0x00)
45#define EVO_FLIP(c) (0x01 + (c))
46#define EVO_OVLY(c) (0x05 + (c))
47#define EVO_OIMM(c) (0x09 + (c))
48#define EVO_CURS(c) (0x0d + (c))
49
50/* offsets of the various structures within the shared sync bo */
51#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
52#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
53#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
54#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
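/* (layout sketch, as implied by the macros above: the shared sync bo is
 * carved into 0x100-byte slots; slot 0 holds the core-channel notifier,
 * and slot c+1 holds the two page-flip semaphores for head c at +0x00
 * and +0x10.)
 */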
55
56#define EVO_CORE_HANDLE      (0xd1500000)
57#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
58#define EVO_CHAN_OCLASS(t,c) (((c)->oclass & 0xff00) | ((t) & 0x00ff))
59#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
60			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
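/* (the 0xd15xxxxx values appear to be arbitrary but unique nvif object
 * handles; the low bits encode the channel type/class and head index so
 * related objects are easy to tell apart.)
 */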
61
62/******************************************************************************
63 * EVO channel
64 *****************************************************************************/
65
66struct nv50_chan {
67	struct nvif_object user;
68};
69
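/* nv50_chan_create() walks a zero-terminated list of candidate display
 * classes (newest first, see the oclass[] tables further down) and binds
 * the first one the disp object accepts, so a single code path covers
 * NV50 through GM107.
 */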
70static int
71nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head,
72		 void *data, u32 size, struct nv50_chan *chan)
73{
74	while (oclass[0]) {
75		int ret = nvif_object_init(disp, NULL, (oclass[0] << 16) | head,
76					   oclass[0], data, size,
77					  &chan->user);
78		if (oclass++, ret == 0)
79			return ret;
80	}
81	return -ENOSYS;
82}
83
84static void
85nv50_chan_destroy(struct nv50_chan *chan)
86{
87	nvif_object_fini(&chan->user);
88}
89
90/******************************************************************************
91 * PIO EVO channel
92 *****************************************************************************/
93
94struct nv50_pioc {
95	struct nv50_chan base;
96};
97
98static void
99nv50_pioc_destroy(struct nv50_pioc *pioc)
100{
101	nv50_chan_destroy(&pioc->base);
102}
103
104static int
105nv50_pioc_create(struct nvif_object *disp, const u32 *oclass, u8 head,
106		 void *data, u32 size, struct nv50_pioc *pioc)
107{
108	return nv50_chan_create(disp, oclass, head, data, size, &pioc->base);
109}
110
111/******************************************************************************
112 * Cursor Immediate
113 *****************************************************************************/
114
115struct nv50_curs {
116	struct nv50_pioc base;
117};
118
119static int
120nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs)
121{
122	struct nv50_display_curs_class args = {
123		.head = head,
124	};
125	static const u32 oclass[] = {
126		GM107_DISP_CURS_CLASS,
127		NVF0_DISP_CURS_CLASS,
128		NVE0_DISP_CURS_CLASS,
129		NVD0_DISP_CURS_CLASS,
130		NVA3_DISP_CURS_CLASS,
131		NV94_DISP_CURS_CLASS,
132		NVA0_DISP_CURS_CLASS,
133		NV84_DISP_CURS_CLASS,
134		NV50_DISP_CURS_CLASS,
135		0
136	};
137
138	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
139			       &curs->base);
140}
141
142/******************************************************************************
143 * Overlay Immediate
144 *****************************************************************************/
145
146struct nv50_oimm {
147	struct nv50_pioc base;
148};
149
150static int
151nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm)
152{
153	struct nv50_display_oimm_class args = {
154		.head = head,
155	};
156	static const u32 oclass[] = {
157		GM107_DISP_OIMM_CLASS,
158		NVF0_DISP_OIMM_CLASS,
159		NVE0_DISP_OIMM_CLASS,
160		NVD0_DISP_OIMM_CLASS,
161		NVA3_DISP_OIMM_CLASS,
162		NV94_DISP_OIMM_CLASS,
163		NVA0_DISP_OIMM_CLASS,
164		NV84_DISP_OIMM_CLASS,
165		NV50_DISP_OIMM_CLASS,
166		0
167	};
168
169	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
170			       &oimm->base);
171}
172
173/******************************************************************************
174 * DMA EVO channel
175 *****************************************************************************/
176
177struct nv50_dmac {
178	struct nv50_chan base;
179	dma_addr_t handle;
180	u32 *ptr;
181
182	struct nvif_object sync;
183	struct nvif_object vram;
184
185	/* Protects against concurrent pushbuf access to this channel; the lock
186	 * is taken by evo_wait() (when the pushbuf reservation succeeds) and
187	 * dropped again by evo_kick(). */
188	struct mutex lock;
189};
190
191static void
192nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
193{
194	nvif_object_fini(&dmac->vram);
195	nvif_object_fini(&dmac->sync);
196
197	nv50_chan_destroy(&dmac->base);
198
199	if (dmac->ptr) {
200		struct pci_dev *pdev = nvkm_device(nvif_device(disp))->pdev;
201		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
202	}
203}
204
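/* A DMA EVO channel gets one page of coherent system memory as its push
 * buffer, exposed to the display engine through a temporary
 * NV_DMA_FROM_MEMORY object, plus two context DMAs bound to the channel:
 * 0xf0000000 windowing the shared sync bo and 0xf0000001 covering all of
 * VRAM (rough summary of nv50_dmac_create() below).
 */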
205static int
206nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head,
207		 void *data, u32 size, u64 syncbuf,
208		 struct nv50_dmac *dmac)
209{
210	struct nouveau_fb *pfb = nvkm_fb(nvif_device(disp));
211	struct nvif_object pushbuf;
212	u32 handle = *(u32 *)data;
213	int ret;
214
215	mutex_init(&dmac->lock);
216
217	dmac->ptr = pci_alloc_consistent(nvkm_device(nvif_device(disp))->pdev,
218					 PAGE_SIZE, &dmac->handle);
219	if (!dmac->ptr)
220		return -ENOMEM;
221
222	ret = nvif_object_init(nvif_object(nvif_device(disp)), NULL, handle,
223			       NV_DMA_FROM_MEMORY,
224			       &(struct nv_dma_v0) {
225					.target = NV_DMA_V0_TARGET_PCI_US,
226					.access = NV_DMA_V0_ACCESS_RD,
227					.start = dmac->handle + 0x0000,
228					.limit = dmac->handle + 0x0fff,
229			       }, sizeof(struct nv_dma_v0), &pushbuf);
230	if (ret)
231		return ret;
232
233	ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base);
234	nvif_object_fini(&pushbuf);
235	if (ret)
236		return ret;
237
238	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000,
239			       NV_DMA_IN_MEMORY,
240			       &(struct nv_dma_v0) {
241					.target = NV_DMA_V0_TARGET_VRAM,
242					.access = NV_DMA_V0_ACCESS_RDWR,
243					.start = syncbuf + 0x0000,
244					.limit = syncbuf + 0x0fff,
245			       }, sizeof(struct nv_dma_v0),
246			       &dmac->sync);
247	if (ret)
248		return ret;
249
250	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001,
251			       NV_DMA_IN_MEMORY,
252			       &(struct nv_dma_v0) {
253					.target = NV_DMA_V0_TARGET_VRAM,
254					.access = NV_DMA_V0_ACCESS_RDWR,
255					.start = 0,
256					.limit = pfb->ram->size - 1,
257			       }, sizeof(struct nv_dma_v0),
258			       &dmac->vram);
259	if (ret)
260		return ret;
261
262	return ret;
263}
264
265/******************************************************************************
266 * Core
267 *****************************************************************************/
268
269struct nv50_mast {
270	struct nv50_dmac base;
271};
272
273static int
274nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core)
275{
276	struct nv50_display_mast_class args = {
277		.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
278	};
279	static const u32 oclass[] = {
280		GM107_DISP_MAST_CLASS,
281		NVF0_DISP_MAST_CLASS,
282		NVE0_DISP_MAST_CLASS,
283		NVD0_DISP_MAST_CLASS,
284		NVA3_DISP_MAST_CLASS,
285		NV94_DISP_MAST_CLASS,
286		NVA0_DISP_MAST_CLASS,
287		NV84_DISP_MAST_CLASS,
288		NV50_DISP_MAST_CLASS,
289		0
290	};
291
292	return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf,
293			       &core->base);
294}
295
296/******************************************************************************
297 * Base
298 *****************************************************************************/
299
300struct nv50_sync {
301	struct nv50_dmac base;
302	u32 addr;
303	u32 data;
304};
305
306static int
307nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf,
308		 struct nv50_sync *base)
309{
310	struct nv50_display_sync_class args = {
311		.pushbuf = EVO_PUSH_HANDLE(SYNC, head),
312		.head = head,
313	};
314	static const u32 oclass[] = {
315		GM107_DISP_SYNC_CLASS,
316		NVF0_DISP_SYNC_CLASS,
317		NVE0_DISP_SYNC_CLASS,
318		NVD0_DISP_SYNC_CLASS,
319		NVA3_DISP_SYNC_CLASS,
320		NV94_DISP_SYNC_CLASS,
321		NVA0_DISP_SYNC_CLASS,
322		NV84_DISP_SYNC_CLASS,
323		NV50_DISP_SYNC_CLASS,
324		0
325	};
326
327	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
328				syncbuf, &base->base);
329}
330
331/******************************************************************************
332 * Overlay
333 *****************************************************************************/
334
335struct nv50_ovly {
336	struct nv50_dmac base;
337};
338
339static int
340nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf,
341		 struct nv50_ovly *ovly)
342{
343	struct nv50_display_ovly_class args = {
344		.pushbuf = EVO_PUSH_HANDLE(OVLY, head),
345		.head = head,
346	};
347	static const u32 oclass[] = {
348		GM107_DISP_OVLY_CLASS,
349		NVF0_DISP_OVLY_CLASS,
350		NVE0_DISP_OVLY_CLASS,
351		NVD0_DISP_OVLY_CLASS,
352		NVA3_DISP_OVLY_CLASS,
353		NV94_DISP_OVLY_CLASS,
354		NVA0_DISP_OVLY_CLASS,
355		NV84_DISP_OVLY_CLASS,
356		NV50_DISP_OVLY_CLASS,
357		0
358	};
359
360	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
361				syncbuf, &ovly->base);
362}
363
364struct nv50_head {
365	struct nouveau_crtc base;
366	struct nouveau_bo *image;
367	struct nv50_curs curs;
368	struct nv50_sync sync;
369	struct nv50_ovly ovly;
370	struct nv50_oimm oimm;
371};
372
373#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
374#define nv50_curs(c) (&nv50_head(c)->curs)
375#define nv50_sync(c) (&nv50_head(c)->sync)
376#define nv50_ovly(c) (&nv50_head(c)->ovly)
377#define nv50_oimm(c) (&nv50_head(c)->oimm)
378#define nv50_chan(c) (&(c)->base.base)
379#define nv50_vers(c) nv50_chan(c)->user.oclass
380
381struct nv50_fbdma {
382	struct list_head head;
383	struct nvif_object core;
384	struct nvif_object base[4];
385};
386
387struct nv50_disp {
388	struct nvif_object *disp;
389	struct nv50_mast mast;
390
391	struct list_head fbdma;
392
393	struct nouveau_bo *sync;
394};
395
396static struct nv50_disp *
397nv50_disp(struct drm_device *dev)
398{
399	return nouveau_display(dev)->priv;
400}
401
402#define nv50_mast(d) (&nv50_disp(d)->mast)
403
404static struct drm_crtc *
405nv50_display_crtc_get(struct drm_encoder *encoder)
406{
407	return nouveau_encoder(encoder)->crtc;
408}
409
410/******************************************************************************
411 * EVO channel helpers
412 *****************************************************************************/
413static u32 *
414evo_wait(void *evoc, int nr)
415{
416	struct nv50_dmac *dmac = evoc;
417	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
418
419	mutex_lock(&dmac->lock);
420	if (put + nr >= (PAGE_SIZE / 4) - 8) {
421		dmac->ptr[put] = 0x20000000;
422
423		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
424		if (!nvkm_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) {
425			mutex_unlock(&dmac->lock);
426			nv_error(nvkm_object(&dmac->base.user), "channel stalled\n");
427			return NULL;
428		}
429
430		put = 0;
431	}
432
433	return dmac->ptr + put;
434}
435
436static void
437evo_kick(u32 *push, void *evoc)
438{
439	struct nv50_dmac *dmac = evoc;
440	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
441	mutex_unlock(&dmac->lock);
442}
443
444#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
445#define evo_data(p,d)   *((p)++) = (d)
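/* evo_mthd() packs an EVO method header (count << 18 | method) and
 * evo_data() appends its payload words.  Typical submission, mirroring
 * the callers in this file, looks roughly like:
 *
 *	u32 *push = evo_wait(mast, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	- 0x0080/0 is the "update" commit used throughout
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, mast);
 *	}
 */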
446
447static bool
448evo_sync_wait(void *data)
449{
450	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
451		return true;
452	usleep_range(1, 2);
453	return false;
454}
455
456static int
457evo_sync(struct drm_device *dev)
458{
459	struct nvif_device *device = &nouveau_drm(dev)->device;
460	struct nv50_disp *disp = nv50_disp(dev);
461	struct nv50_mast *mast = nv50_mast(dev);
462	u32 *push = evo_wait(mast, 8);
463	if (push) {
464		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
465		evo_mthd(push, 0x0084, 1);
466		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
467		evo_mthd(push, 0x0080, 2);
468		evo_data(push, 0x00000000);
469		evo_data(push, 0x00000000);
470		evo_kick(push, mast);
471		if (nv_wait_cb(nvkm_device(device), evo_sync_wait, disp->sync))
472			return 0;
473	}
474
475	return -EBUSY;
476}
477
478/******************************************************************************
479 * Page flipping channel
480 *****************************************************************************/
481struct nouveau_bo *
482nv50_display_crtc_sema(struct drm_device *dev, int crtc)
483{
484	return nv50_disp(dev)->sync;
485}
486
487struct nv50_display_flip {
488	struct nv50_disp *disp;
489	struct nv50_sync *chan;
490};
491
492static bool
493nv50_display_flip_wait(void *data)
494{
495	struct nv50_display_flip *flip = data;
496	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
497					      flip->chan->data)
498		return true;
499	usleep_range(1, 2);
500	return false;
501}
502
503void
504nv50_display_flip_stop(struct drm_crtc *crtc)
505{
506	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
507	struct nv50_display_flip flip = {
508		.disp = nv50_disp(crtc->dev),
509		.chan = nv50_sync(crtc),
510	};
511	u32 *push;
512
513	push = evo_wait(flip.chan, 8);
514	if (push) {
515		evo_mthd(push, 0x0084, 1);
516		evo_data(push, 0x00000000);
517		evo_mthd(push, 0x0094, 1);
518		evo_data(push, 0x00000000);
519		evo_mthd(push, 0x00c0, 1);
520		evo_data(push, 0x00000000);
521		evo_mthd(push, 0x0080, 1);
522		evo_data(push, 0x00000000);
523		evo_kick(push, flip.chan);
524	}
525
526	nv_wait_cb(nvkm_device(device), nv50_display_flip_wait, &flip);
527}
528
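/* Rough shape of the flip handshake implemented below: each head owns a
 * pair of semaphores in the shared sync bo (EVO_FLIP_SEM0/1, 0x10 apart).
 * The rendering channel releases sync->data + 1 into the "other" slot
 * (sync->addr ^ 0x10) and acquires sync->data from the current one; the
 * EVO base channel is handed the same address/values via methods
 * 0x0088..0x0094 so the scanout flip stays ordered against rendering.
 * sync->addr is toggled and sync->data bumped on every flip, and
 * nv50_display_flip_wait() simply polls for the expected value.
 */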
529int
530nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
531		       struct nouveau_channel *chan, u32 swap_interval)
532{
533	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
534	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
535	struct nv50_head *head = nv50_head(crtc);
536	struct nv50_sync *sync = nv50_sync(crtc);
537	u32 *push;
538	int ret;
539
540	swap_interval <<= 4;
541	if (swap_interval == 0)
542		swap_interval |= 0x100;
543	if (chan == NULL)
544		evo_sync(crtc->dev);
545
546	push = evo_wait(sync, 128);
547	if (unlikely(push == NULL))
548		return -EBUSY;
549
550	if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) {
551		ret = RING_SPACE(chan, 8);
552		if (ret)
553			return ret;
554
555		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
556		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
557		OUT_RING  (chan, sync->addr ^ 0x10);
558		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
559		OUT_RING  (chan, sync->data + 1);
560		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
561		OUT_RING  (chan, sync->addr);
562		OUT_RING  (chan, sync->data);
563	} else
564	if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) {
565		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
566		ret = RING_SPACE(chan, 12);
567		if (ret)
568			return ret;
569
570		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
571		OUT_RING  (chan, chan->vram.handle);
572		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
573		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
574		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
575		OUT_RING  (chan, sync->data + 1);
576		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
577		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
578		OUT_RING  (chan, upper_32_bits(addr));
579		OUT_RING  (chan, lower_32_bits(addr));
580		OUT_RING  (chan, sync->data);
581		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
582	} else
583	if (chan) {
584		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
585		ret = RING_SPACE(chan, 10);
586		if (ret)
587			return ret;
588
589		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
590		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
591		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
592		OUT_RING  (chan, sync->data + 1);
593		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
594				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
595		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
596		OUT_RING  (chan, upper_32_bits(addr));
597		OUT_RING  (chan, lower_32_bits(addr));
598		OUT_RING  (chan, sync->data);
599		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
600				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
601	}
602
603	if (chan) {
604		sync->addr ^= 0x10;
605		sync->data++;
606		FIRE_RING (chan);
607	}
608
609	/* queue the flip */
610	evo_mthd(push, 0x0100, 1);
611	evo_data(push, 0xfffe0000);
612	evo_mthd(push, 0x0084, 1);
613	evo_data(push, swap_interval);
614	if (!(swap_interval & 0x00000100)) {
615		evo_mthd(push, 0x00e0, 1);
616		evo_data(push, 0x40000000);
617	}
618	evo_mthd(push, 0x0088, 4);
619	evo_data(push, sync->addr);
620	evo_data(push, sync->data++);
621	evo_data(push, sync->data);
622	evo_data(push, sync->base.sync.handle);
623	evo_mthd(push, 0x00a0, 2);
624	evo_data(push, 0x00000000);
625	evo_data(push, 0x00000000);
626	evo_mthd(push, 0x00c0, 1);
627	evo_data(push, nv_fb->r_handle);
628	evo_mthd(push, 0x0110, 2);
629	evo_data(push, 0x00000000);
630	evo_data(push, 0x00000000);
631	if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
632		evo_mthd(push, 0x0800, 5);
633		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
634		evo_data(push, 0);
635		evo_data(push, (fb->height << 16) | fb->width);
636		evo_data(push, nv_fb->r_pitch);
637		evo_data(push, nv_fb->r_format);
638	} else {
639		evo_mthd(push, 0x0400, 5);
640		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
641		evo_data(push, 0);
642		evo_data(push, (fb->height << 16) | fb->width);
643		evo_data(push, nv_fb->r_pitch);
644		evo_data(push, nv_fb->r_format);
645	}
646	evo_mthd(push, 0x0080, 1);
647	evo_data(push, 0x00000000);
648	evo_kick(push, sync);
649
650	nouveau_bo_ref(nv_fb->nvbo, &head->image);
651	return 0;
652}
653
654/******************************************************************************
655 * CRTC
656 *****************************************************************************/
657static int
658nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
659{
660	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
661	struct nouveau_connector *nv_connector;
662	struct drm_connector *connector;
663	u32 *push, mode = 0x00;
664
665	nv_connector = nouveau_crtc_connector_get(nv_crtc);
666	connector = &nv_connector->base;
667	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
668		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
669			mode = DITHERING_MODE_DYNAMIC2X2;
670	} else {
671		mode = nv_connector->dithering_mode;
672	}
673
674	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
675		if (connector->display_info.bpc >= 8)
676			mode |= DITHERING_DEPTH_8BPC;
677	} else {
678		mode |= nv_connector->dithering_depth;
679	}
680
681	push = evo_wait(mast, 4);
682	if (push) {
683		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
684			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
685			evo_data(push, mode);
686		} else
687		if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
688			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
689			evo_data(push, mode);
690		} else {
691			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
692			evo_data(push, mode);
693		}
694
695		if (update) {
696			evo_mthd(push, 0x0080, 1);
697			evo_data(push, 0x00000000);
698		}
699		evo_kick(push, mast);
700	}
701
702	return 0;
703}
704
705static int
706nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
707{
708	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
709	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
710	struct drm_crtc *crtc = &nv_crtc->base;
711	struct nouveau_connector *nv_connector;
712	int mode = DRM_MODE_SCALE_NONE;
713	u32 oX, oY, *push;
714
715	/* start off at the resolution we programmed the crtc for; this
716	 * effectively handles NONE/FULL scaling
717	 */
718	nv_connector = nouveau_crtc_connector_get(nv_crtc);
719	if (nv_connector && nv_connector->native_mode)
720		mode = nv_connector->scaling_mode;
721
722	if (mode != DRM_MODE_SCALE_NONE)
723		omode = nv_connector->native_mode;
724	else
725		omode = umode;
726
727	oX = omode->hdisplay;
728	oY = omode->vdisplay;
729	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
730		oY *= 2;
731
732	/* add overscan compensation if necessary; this will keep the aspect
733	 * ratio the same as the backend mode unless overridden by the
734	 * user setting both hborder and vborder properties.
735	 */
736	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
737			     (nv_connector->underscan == UNDERSCAN_AUTO &&
738			      nv_connector->edid &&
739			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
740		u32 bX = nv_connector->underscan_hborder;
741		u32 bY = nv_connector->underscan_vborder;
742		u32 aspect = (oY << 19) / oX;
743
744		if (bX) {
745			oX -= (bX * 2);
746			if (bY) oY -= (bY * 2);
747			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
748		} else {
749			oX -= (oX >> 4) + 32;
750			if (bY) oY -= (bY * 2);
751			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
752		}
753	}
754
755	/* handle CENTER/ASPECT scaling, taking into account the areas
756	 * removed already for overscan compensation
757	 */
758	switch (mode) {
759	case DRM_MODE_SCALE_CENTER:
760		oX = min((u32)umode->hdisplay, oX);
761		oY = min((u32)umode->vdisplay, oY);
762		/* fall-through */
763	case DRM_MODE_SCALE_ASPECT:
764		if (oY < oX) {
765			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
766			oX = ((oY * aspect) + (aspect / 2)) >> 19;
767		} else {
768			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
769			oY = ((oX * aspect) + (aspect / 2)) >> 19;
770		}
771		break;
772	default:
773		break;
774	}
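	/* Worked example (sketch): a 1280x1024 mode on a 1920x1080 native
	 * panel with DRM_MODE_SCALE_ASPECT ends up with oY (1080) < oX
	 * (1920), aspect = (1280 << 19) / 1024, and
	 * oX = ((1080 * aspect) + aspect / 2) >> 19 = 1350, i.e. a
	 * 1350x1080 viewport that keeps the 5:4 ratio.
	 */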
775
776	push = evo_wait(mast, 8);
777	if (push) {
778		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
779			/*XXX: SCALE_CTRL_ACTIVE??? */
780			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
781			evo_data(push, (oY << 16) | oX);
782			evo_data(push, (oY << 16) | oX);
783			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
784			evo_data(push, 0x00000000);
785			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
786			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
787		} else {
788			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
789			evo_data(push, (oY << 16) | oX);
790			evo_data(push, (oY << 16) | oX);
791			evo_data(push, (oY << 16) | oX);
792			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
793			evo_data(push, 0x00000000);
794			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
795			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
796		}
797
798		evo_kick(push, mast);
799
800		if (update) {
801			nv50_display_flip_stop(crtc);
802			nv50_display_flip_next(crtc, crtc->primary->fb,
803					       NULL, 1);
804		}
805	}
806
807	return 0;
808}
809
810static int
811nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
812{
813	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
814	u32 *push, hue, vib;
815	int adj;
816
817	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
818	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
819	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;
820
821	push = evo_wait(mast, 16);
822	if (push) {
823		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
824			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
825			evo_data(push, (hue << 20) | (vib << 8));
826		} else {
827			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
828			evo_data(push, (hue << 20) | (vib << 8));
829		}
830
831		if (update) {
832			evo_mthd(push, 0x0080, 1);
833			evo_data(push, 0x00000000);
834		}
835		evo_kick(push, mast);
836	}
837
838	return 0;
839}
840
841static int
842nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
843		    int x, int y, bool update)
844{
845	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
846	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
847	u32 *push;
848
849	push = evo_wait(mast, 16);
850	if (push) {
851		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
852			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
853			evo_data(push, nvfb->nvbo->bo.offset >> 8);
854			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
855			evo_data(push, (fb->height << 16) | fb->width);
856			evo_data(push, nvfb->r_pitch);
857			evo_data(push, nvfb->r_format);
858			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
859			evo_data(push, (y << 16) | x);
860			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
861				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
862				evo_data(push, nvfb->r_handle);
863			}
864		} else {
865			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
866			evo_data(push, nvfb->nvbo->bo.offset >> 8);
867			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
868			evo_data(push, (fb->height << 16) | fb->width);
869			evo_data(push, nvfb->r_pitch);
870			evo_data(push, nvfb->r_format);
871			evo_data(push, nvfb->r_handle);
872			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
873			evo_data(push, (y << 16) | x);
874		}
875
876		if (update) {
877			evo_mthd(push, 0x0080, 1);
878			evo_data(push, 0x00000000);
879		}
880		evo_kick(push, mast);
881	}
882
883	nv_crtc->fb.handle = nvfb->r_handle;
884	return 0;
885}
886
887static void
888nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
889{
890	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
891	u32 *push = evo_wait(mast, 16);
892	if (push) {
893		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
894			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
895			evo_data(push, 0x85000000);
896			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
897		} else
898		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
899			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
900			evo_data(push, 0x85000000);
901			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
902			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
903			evo_data(push, mast->base.vram.handle);
904		} else {
905			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
906			evo_data(push, 0x85000000);
907			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
908			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
909			evo_data(push, mast->base.vram.handle);
910		}
911		evo_kick(push, mast);
912	}
913}
914
915static void
916nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
917{
918	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
919	u32 *push = evo_wait(mast, 16);
920	if (push) {
921		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
922			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
923			evo_data(push, 0x05000000);
924		} else
925		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
926			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
927			evo_data(push, 0x05000000);
928			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
929			evo_data(push, 0x00000000);
930		} else {
931			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
932			evo_data(push, 0x05000000);
933			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
934			evo_data(push, 0x00000000);
935		}
936		evo_kick(push, mast);
937	}
938}
939
940static void
941nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
942{
943	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
944
945	if (show)
946		nv50_crtc_cursor_show(nv_crtc);
947	else
948		nv50_crtc_cursor_hide(nv_crtc);
949
950	if (update) {
951		u32 *push = evo_wait(mast, 2);
952		if (push) {
953			evo_mthd(push, 0x0080, 1);
954			evo_data(push, 0x00000000);
955			evo_kick(push, mast);
956		}
957	}
958}
959
960static void
961nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
962{
963}
964
965static void
966nv50_crtc_prepare(struct drm_crtc *crtc)
967{
968	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
969	struct nv50_mast *mast = nv50_mast(crtc->dev);
970	u32 *push;
971
972	nv50_display_flip_stop(crtc);
973
974	push = evo_wait(mast, 6);
975	if (push) {
976		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
977			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
978			evo_data(push, 0x00000000);
979			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
980			evo_data(push, 0x40000000);
981		} else
982		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
983			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
984			evo_data(push, 0x00000000);
985			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
986			evo_data(push, 0x40000000);
987			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
988			evo_data(push, 0x00000000);
989		} else {
990			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
991			evo_data(push, 0x00000000);
992			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
993			evo_data(push, 0x03000000);
994			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
995			evo_data(push, 0x00000000);
996		}
997
998		evo_kick(push, mast);
999	}
1000
1001	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
1002}
1003
1004static void
1005nv50_crtc_commit(struct drm_crtc *crtc)
1006{
1007	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1008	struct nv50_mast *mast = nv50_mast(crtc->dev);
1009	u32 *push;
1010
1011	push = evo_wait(mast, 32);
1012	if (push) {
1013		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
1014			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1015			evo_data(push, nv_crtc->fb.handle);
1016			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1017			evo_data(push, 0xc0000000);
1018			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1019		} else
1020		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1021			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1022			evo_data(push, nv_crtc->fb.handle);
1023			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1024			evo_data(push, 0xc0000000);
1025			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1026			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
1027			evo_data(push, mast->base.vram.handle);
1028		} else {
1029			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
1030			evo_data(push, nv_crtc->fb.handle);
1031			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
1032			evo_data(push, 0x83000000);
1033			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1034			evo_data(push, 0x00000000);
1035			evo_data(push, 0x00000000);
1036			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
1037			evo_data(push, mast->base.vram.handle);
1038			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
1039			evo_data(push, 0xffffff00);
1040		}
1041
1042		evo_kick(push, mast);
1043	}
1044
1045	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
1046	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1047}
1048
1049static bool
1050nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
1051		     struct drm_display_mode *adjusted_mode)
1052{
1053	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
1054	return true;
1055}
1056
1057static int
1058nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1059{
1060	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1061	struct nv50_head *head = nv50_head(crtc);
1062	int ret;
1063
1064	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
1065	if (ret == 0) {
1066		if (head->image)
1067			nouveau_bo_unpin(head->image);
1068		nouveau_bo_ref(nvfb->nvbo, &head->image);
1069	}
1070
1071	return ret;
1072}
1073
1074static int
1075nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
1076		   struct drm_display_mode *mode, int x, int y,
1077		   struct drm_framebuffer *old_fb)
1078{
1079	struct nv50_mast *mast = nv50_mast(crtc->dev);
1080	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1081	struct nouveau_connector *nv_connector;
1082	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
1083	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
1084	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
1085	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
1086	u32 vblan2e = 0, vblan2s = 1;
1087	u32 *push;
1088	int ret;
1089
1090	hactive = mode->htotal;
1091	hsynce  = mode->hsync_end - mode->hsync_start - 1;
1092	hbackp  = mode->htotal - mode->hsync_end;
1093	hblanke = hsynce + hbackp;
1094	hfrontp = mode->hsync_start - mode->hdisplay;
1095	hblanks = mode->htotal - hfrontp - 1;
1096
1097	vactive = mode->vtotal * vscan / ilace;
1098	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
1099	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
1100	vblanke = vsynce + vbackp;
1101	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
1102	vblanks = vactive - vfrontp - 1;
1103	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1104		vblan2e = vactive + vsynce + vbackp;
1105		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
1106		vactive = (vactive * 2) + 1;
1107	}
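	/* e.g. (sketch) for the standard CEA 1920x1080@60 timing
	 * (htotal 2200, hsync 2008..2052, vtotal 1125, vsync 1084..1089)
	 * this yields hactive=2200 hsynce=43 hblanke=191 hblanks=2111 and
	 * vactive=1125 vsynce=4 vblanke=40 vblanks=1120.
	 */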
1108
1109	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1110	if (ret)
1111		return ret;
1112
1113	push = evo_wait(mast, 64);
1114	if (push) {
1115		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1116			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
1117			evo_data(push, 0x00800000 | mode->clock);
1118			evo_data(push, (ilace == 2) ? 2 : 0);
1119			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
1120			evo_data(push, 0x00000000);
1121			evo_data(push, (vactive << 16) | hactive);
1122			evo_data(push, ( vsynce << 16) | hsynce);
1123			evo_data(push, (vblanke << 16) | hblanke);
1124			evo_data(push, (vblanks << 16) | hblanks);
1125			evo_data(push, (vblan2e << 16) | vblan2s);
1126			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
1127			evo_data(push, 0x00000000);
1128			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
1129			evo_data(push, 0x00000311);
1130			evo_data(push, 0x00000100);
1131		} else {
1132			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
1133			evo_data(push, 0x00000000);
1134			evo_data(push, (vactive << 16) | hactive);
1135			evo_data(push, ( vsynce << 16) | hsynce);
1136			evo_data(push, (vblanke << 16) | hblanke);
1137			evo_data(push, (vblanks << 16) | hblanks);
1138			evo_data(push, (vblan2e << 16) | vblan2s);
1139			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
1140			evo_data(push, 0x00000000); /* ??? */
1141			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
1142			evo_data(push, mode->clock * 1000);
1143			evo_data(push, 0x00200000); /* ??? */
1144			evo_data(push, mode->clock * 1000);
1145			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
1146			evo_data(push, 0x00000311);
1147			evo_data(push, 0x00000100);
1148		}
1149
1150		evo_kick(push, mast);
1151	}
1152
1153	nv_connector = nouveau_crtc_connector_get(nv_crtc);
1154	nv50_crtc_set_dither(nv_crtc, false);
1155	nv50_crtc_set_scale(nv_crtc, false);
1156	nv50_crtc_set_color_vibrance(nv_crtc, false);
1157	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
1158	return 0;
1159}
1160
1161static int
1162nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1163			struct drm_framebuffer *old_fb)
1164{
1165	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1166	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1167	int ret;
1168
1169	if (!crtc->primary->fb) {
1170		NV_DEBUG(drm, "No FB bound\n");
1171		return 0;
1172	}
1173
1174	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1175	if (ret)
1176		return ret;
1177
1178	nv50_display_flip_stop(crtc);
1179	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1180	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1181	return 0;
1182}
1183
1184static int
1185nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1186			       struct drm_framebuffer *fb, int x, int y,
1187			       enum mode_set_atomic state)
1188{
1189	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1190	nv50_display_flip_stop(crtc);
1191	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
1192	return 0;
1193}
1194
1195static void
1196nv50_crtc_lut_load(struct drm_crtc *crtc)
1197{
1198	struct nv50_disp *disp = nv50_disp(crtc->dev);
1199	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1200	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1201	int i;
1202
1203	for (i = 0; i < 256; i++) {
1204		u16 r = nv_crtc->lut.r[i] >> 2;
1205		u16 g = nv_crtc->lut.g[i] >> 2;
1206		u16 b = nv_crtc->lut.b[i] >> 2;
1207
1208		if (disp->disp->oclass < NVD0_DISP_CLASS) {
1209			writew(r + 0x0000, lut + (i * 0x08) + 0);
1210			writew(g + 0x0000, lut + (i * 0x08) + 2);
1211			writew(b + 0x0000, lut + (i * 0x08) + 4);
1212		} else {
1213			writew(r + 0x6000, lut + (i * 0x20) + 0);
1214			writew(g + 0x6000, lut + (i * 0x20) + 2);
1215			writew(b + 0x6000, lut + (i * 0x20) + 4);
1216		}
1217	}
1218}
1219
1220static void
1221nv50_crtc_disable(struct drm_crtc *crtc)
1222{
1223	struct nv50_head *head = nv50_head(crtc);
1224	evo_sync(crtc->dev);
1225	if (head->image)
1226		nouveau_bo_unpin(head->image);
1227	nouveau_bo_ref(NULL, &head->image);
1228}
1229
1230static int
1231nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1232		     uint32_t handle, uint32_t width, uint32_t height)
1233{
1234	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1235	struct drm_device *dev = crtc->dev;
1236	struct drm_gem_object *gem;
1237	struct nouveau_bo *nvbo;
1238	bool visible = (handle != 0);
1239	int i, ret = 0;
1240
1241	if (visible) {
1242		if (width != 64 || height != 64)
1243			return -EINVAL;
1244
1245		gem = drm_gem_object_lookup(dev, file_priv, handle);
1246		if (unlikely(!gem))
1247			return -ENOENT;
1248		nvbo = nouveau_gem_object(gem);
1249
1250		ret = nouveau_bo_map(nvbo);
1251		if (ret == 0) {
1252			for (i = 0; i < 64 * 64; i++) {
1253				u32 v = nouveau_bo_rd32(nvbo, i);
1254				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
1255			}
1256			nouveau_bo_unmap(nvbo);
1257		}
1258
1259		drm_gem_object_unreference_unlocked(gem);
1260	}
1261
1262	if (visible != nv_crtc->cursor.visible) {
1263		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
1264		nv_crtc->cursor.visible = visible;
1265	}
1266
1267	return ret;
1268}
1269
1270static int
1271nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1272{
1273	struct nv50_curs *curs = nv50_curs(crtc);
1274	struct nv50_chan *chan = nv50_chan(curs);
1275	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
1276	nvif_wr32(&chan->user, 0x0080, 0x00000000);
1277	return 0;
1278}
1279
1280static void
1281nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1282		    uint32_t start, uint32_t size)
1283{
1284	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1285	u32 end = min_t(u32, start + size, 256);
1286	u32 i;
1287
1288	for (i = start; i < end; i++) {
1289		nv_crtc->lut.r[i] = r[i];
1290		nv_crtc->lut.g[i] = g[i];
1291		nv_crtc->lut.b[i] = b[i];
1292	}
1293
1294	nv50_crtc_lut_load(crtc);
1295}
1296
1297static void
1298nv50_crtc_destroy(struct drm_crtc *crtc)
1299{
1300	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1301	struct nv50_disp *disp = nv50_disp(crtc->dev);
1302	struct nv50_head *head = nv50_head(crtc);
1303	struct nv50_fbdma *fbdma;
1304
1305	list_for_each_entry(fbdma, &disp->fbdma, head) {
1306		nvif_object_fini(&fbdma->base[nv_crtc->index]);
1307	}
1308
1309	nv50_dmac_destroy(&head->ovly.base, disp->disp);
1310	nv50_pioc_destroy(&head->oimm.base);
1311	nv50_dmac_destroy(&head->sync.base, disp->disp);
1312	nv50_pioc_destroy(&head->curs.base);
1313
1314	/*XXX: this shouldn't be necessary, but the core doesn't call
1315	 *     disconnect() during the cleanup paths
1316	 */
1317	if (head->image)
1318		nouveau_bo_unpin(head->image);
1319	nouveau_bo_ref(NULL, &head->image);
1320
1321	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
1322	if (nv_crtc->cursor.nvbo)
1323		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1324	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1325
1326	nouveau_bo_unmap(nv_crtc->lut.nvbo);
1327	if (nv_crtc->lut.nvbo)
1328		nouveau_bo_unpin(nv_crtc->lut.nvbo);
1329	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1330
1331	drm_crtc_cleanup(crtc);
1332	kfree(crtc);
1333}
1334
1335static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
1336	.dpms = nv50_crtc_dpms,
1337	.prepare = nv50_crtc_prepare,
1338	.commit = nv50_crtc_commit,
1339	.mode_fixup = nv50_crtc_mode_fixup,
1340	.mode_set = nv50_crtc_mode_set,
1341	.mode_set_base = nv50_crtc_mode_set_base,
1342	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
1343	.load_lut = nv50_crtc_lut_load,
1344	.disable = nv50_crtc_disable,
1345};
1346
1347static const struct drm_crtc_funcs nv50_crtc_func = {
1348	.cursor_set = nv50_crtc_cursor_set,
1349	.cursor_move = nv50_crtc_cursor_move,
1350	.gamma_set = nv50_crtc_gamma_set,
1351	.set_config = nouveau_crtc_set_config,
1352	.destroy = nv50_crtc_destroy,
1353	.page_flip = nouveau_crtc_page_flip,
1354};
1355
1356static void
1357nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
1358{
1359}
1360
1361static void
1362nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
1363{
1364}
1365
1366static int
1367nv50_crtc_create(struct drm_device *dev, int index)
1368{
1369	struct nv50_disp *disp = nv50_disp(dev);
1370	struct nv50_head *head;
1371	struct drm_crtc *crtc;
1372	int ret, i;
1373
1374	head = kzalloc(sizeof(*head), GFP_KERNEL);
1375	if (!head)
1376		return -ENOMEM;
1377
1378	head->base.index = index;
1379	head->base.set_dither = nv50_crtc_set_dither;
1380	head->base.set_scale = nv50_crtc_set_scale;
1381	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
1382	head->base.color_vibrance = 50;
1383	head->base.vibrant_hue = 0;
1384	head->base.cursor.set_offset = nv50_cursor_set_offset;
1385	head->base.cursor.set_pos = nv50_cursor_set_pos;
1386	for (i = 0; i < 256; i++) {
1387		head->base.lut.r[i] = i << 8;
1388		head->base.lut.g[i] = i << 8;
1389		head->base.lut.b[i] = i << 8;
1390	}
1391
1392	crtc = &head->base.base;
1393	drm_crtc_init(dev, crtc, &nv50_crtc_func);
1394	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1395	drm_mode_crtc_set_gamma_size(crtc, 256);
1396
1397	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1398			     0, 0x0000, NULL, &head->base.lut.nvbo);
1399	if (!ret) {
1400		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1401		if (!ret) {
1402			ret = nouveau_bo_map(head->base.lut.nvbo);
1403			if (ret)
1404				nouveau_bo_unpin(head->base.lut.nvbo);
1405		}
1406		if (ret)
1407			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1408	}
1409
1410	if (ret)
1411		goto out;
1412
1413	nv50_crtc_lut_load(crtc);
1414
1415	/* allocate cursor resources */
1416	ret = nv50_curs_create(disp->disp, index, &head->curs);
1417	if (ret)
1418		goto out;
1419
1420	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1421			     0, 0x0000, NULL, &head->base.cursor.nvbo);
1422	if (!ret) {
1423		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1424		if (!ret) {
1425			ret = nouveau_bo_map(head->base.cursor.nvbo);
1426			if (ret)
1427				nouveau_bo_unpin(head->base.cursor.nvbo);
1428		}
1429		if (ret)
1430			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1431	}
1432
1433	if (ret)
1434		goto out;
1435
1436	/* allocate page flip / sync resources */
1437	ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset,
1438			      &head->sync);
1439	if (ret)
1440		goto out;
1441
1442	head->sync.addr = EVO_FLIP_SEM0(index);
1443	head->sync.data = 0x00000000;
1444
1445	/* allocate overlay resources */
1446	ret = nv50_oimm_create(disp->disp, index, &head->oimm);
1447	if (ret)
1448		goto out;
1449
1450	ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset,
1451			      &head->ovly);
1452	if (ret)
1453		goto out;
1454
1455out:
1456	if (ret)
1457		nv50_crtc_destroy(crtc);
1458	return ret;
1459}
1460
1461/******************************************************************************
1462 * DAC
1463 *****************************************************************************/
1464static void
1465nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1466{
1467	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1468	struct nv50_disp *disp = nv50_disp(encoder->dev);
1469	int or = nv_encoder->or;
1470	u32 dpms_ctrl;
1471
1472	dpms_ctrl = 0x00000000;
1473	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1474		dpms_ctrl |= 0x00000001;
1475	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1476		dpms_ctrl |= 0x00000004;
1477
1478	nvif_exec(disp->disp, NV50_DISP_DAC_PWR + or, &dpms_ctrl, sizeof(dpms_ctrl));
1479}
1480
1481static bool
1482nv50_dac_mode_fixup(struct drm_encoder *encoder,
1483		    const struct drm_display_mode *mode,
1484		    struct drm_display_mode *adjusted_mode)
1485{
1486	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1487	struct nouveau_connector *nv_connector;
1488
1489	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1490	if (nv_connector && nv_connector->native_mode) {
1491		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1492			int id = adjusted_mode->base.id;
1493			*adjusted_mode = *nv_connector->native_mode;
1494			adjusted_mode->base.id = id;
1495		}
1496	}
1497
1498	return true;
1499}
1500
1501static void
1502nv50_dac_commit(struct drm_encoder *encoder)
1503{
1504}
1505
1506static void
1507nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1508		  struct drm_display_mode *adjusted_mode)
1509{
1510	struct nv50_mast *mast = nv50_mast(encoder->dev);
1511	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1512	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1513	u32 *push;
1514
1515	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1516
1517	push = evo_wait(mast, 8);
1518	if (push) {
1519		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1520			u32 syncs = 0x00000000;
1521
1522			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1523				syncs |= 0x00000001;
1524			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1525				syncs |= 0x00000002;
1526
1527			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
1528			evo_data(push, 1 << nv_crtc->index);
1529			evo_data(push, syncs);
1530		} else {
1531			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1532			u32 syncs = 0x00000001;
1533
1534			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1535				syncs |= 0x00000008;
1536			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1537				syncs |= 0x00000010;
1538
1539			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1540				magic |= 0x00000001;
1541
1542			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1543			evo_data(push, syncs);
1544			evo_data(push, magic);
1545			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
1546			evo_data(push, 1 << nv_crtc->index);
1547		}
1548
1549		evo_kick(push, mast);
1550	}
1551
1552	nv_encoder->crtc = encoder->crtc;
1553}
1554
1555static void
1556nv50_dac_disconnect(struct drm_encoder *encoder)
1557{
1558	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1559	struct nv50_mast *mast = nv50_mast(encoder->dev);
1560	const int or = nv_encoder->or;
1561	u32 *push;
1562
1563	if (nv_encoder->crtc) {
1564		nv50_crtc_prepare(nv_encoder->crtc);
1565
1566		push = evo_wait(mast, 4);
1567		if (push) {
1568			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1569				evo_mthd(push, 0x0400 + (or * 0x080), 1);
1570				evo_data(push, 0x00000000);
1571			} else {
1572				evo_mthd(push, 0x0180 + (or * 0x020), 1);
1573				evo_data(push, 0x00000000);
1574			}
1575			evo_kick(push, mast);
1576		}
1577	}
1578
1579	nv_encoder->crtc = NULL;
1580}
1581
1582static enum drm_connector_status
1583nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1584{
1585	struct nv50_disp *disp = nv50_disp(encoder->dev);
1586	int ret, or = nouveau_encoder(encoder)->or;
1587	u32 load = nouveau_drm(encoder->dev)->vbios.dactestval;
1588	if (load == 0)
1589		load = 340;
1590
1591	ret = nvif_exec(disp->disp, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
1592	if (ret || !load)
1593		return connector_status_disconnected;
1594
1595	return connector_status_connected;
1596}
1597
1598static void
1599nv50_dac_destroy(struct drm_encoder *encoder)
1600{
1601	drm_encoder_cleanup(encoder);
1602	kfree(encoder);
1603}
1604
1605static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
1606	.dpms = nv50_dac_dpms,
1607	.mode_fixup = nv50_dac_mode_fixup,
1608	.prepare = nv50_dac_disconnect,
1609	.commit = nv50_dac_commit,
1610	.mode_set = nv50_dac_mode_set,
1611	.disable = nv50_dac_disconnect,
1612	.get_crtc = nv50_display_crtc_get,
1613	.detect = nv50_dac_detect
1614};
1615
1616static const struct drm_encoder_funcs nv50_dac_func = {
1617	.destroy = nv50_dac_destroy,
1618};
1619
1620static int
1621nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1622{
1623	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1624	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
1625	struct nouveau_encoder *nv_encoder;
1626	struct drm_encoder *encoder;
1627	int type = DRM_MODE_ENCODER_DAC;
1628
1629	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1630	if (!nv_encoder)
1631		return -ENOMEM;
1632	nv_encoder->dcb = dcbe;
1633	nv_encoder->or = ffs(dcbe->or) - 1;
1634	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
1635
1636	encoder = to_drm_encoder(nv_encoder);
1637	encoder->possible_crtcs = dcbe->heads;
1638	encoder->possible_clones = 0;
1639	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
1640	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);
1641
1642	drm_mode_connector_attach_encoder(connector, encoder);
1643	return 0;
1644}
1645
1646/******************************************************************************
1647 * Audio
1648 *****************************************************************************/
1649static void
1650nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1651{
1652	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1653	struct nouveau_connector *nv_connector;
1654	struct nv50_disp *disp = nv50_disp(encoder->dev);
1655
1656	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1657	if (!drm_detect_monitor_audio(nv_connector->edid))
1658		return;
1659
1660	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1661
1662	nvif_exec(disp->disp, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
1663			      nv_connector->base.eld,
1664			      nv_connector->base.eld[2] * 4);
1665}
1666
1667static void
1668nv50_audio_disconnect(struct drm_encoder *encoder)
1669{
1670	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1671	struct nv50_disp *disp = nv50_disp(encoder->dev);
1672
1673	nvif_exec(disp->disp, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
1674}
1675
1676/******************************************************************************
1677 * HDMI
1678 *****************************************************************************/
1679static void
1680nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1681{
1682	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1683	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1684	struct nouveau_connector *nv_connector;
1685	struct nv50_disp *disp = nv50_disp(encoder->dev);
1686	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1687	u32 rekey = 56; /* binary driver, and tegra constant */
1688	u32 max_ac_packet;
1689	u32 data;
1690
1691	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1692	if (!drm_detect_hdmi_monitor(nv_connector->edid))
1693		return;
1694
1695	max_ac_packet  = mode->htotal - mode->hdisplay;
1696	max_ac_packet -= rekey;
1697	max_ac_packet -= 18; /* constant from tegra */
1698	max_ac_packet /= 32;
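	/* e.g. (sketch): with the CEA 1920x1080@60 timing, htotal - hdisplay
	 * is 280, so max_ac_packet = (280 - 56 - 18) / 32 = 6.
	 */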
1699
1700	data = NV84_DISP_SOR_HDMI_PWR_STATE_ON | (max_ac_packet << 16) | rekey;
1701	nvif_exec(disp->disp, NV84_DISP_SOR_HDMI_PWR + moff, &data, sizeof(data));
1702
1703	nv50_audio_mode_set(encoder, mode);
1704}
1705
1706static void
1707nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
1708{
1709	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1710	struct nv50_disp *disp = nv50_disp(encoder->dev);
1711	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1712	u32 data = 0;
1713
1714	nv50_audio_disconnect(encoder);
1715
1716	nvif_exec(disp->disp, NV84_DISP_SOR_HDMI_PWR + moff, &data, sizeof(data));
1717}
1718
1719/******************************************************************************
1720 * SOR
1721 *****************************************************************************/
1722static void
1723nv50_sor_dpms(struct drm_encoder *encoder, int mode)
1724{
1725	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1726	struct drm_device *dev = encoder->dev;
1727	struct nv50_disp *disp = nv50_disp(dev);
1728	struct drm_encoder *partner;
1729	u32 mthd, data;
1730
1731	nv_encoder->last_dpms = mode;
1732
1733	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1734		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1735
1736		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1737			continue;
1738
1739		if (nv_partner != nv_encoder &&
1740		    nv_partner->dcb->or == nv_encoder->dcb->or) {
1741			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1742				return;
1743			break;
1744		}
1745	}
1746
1747	mthd  = (ffs(nv_encoder->dcb->heads) - 1) << 3;
1748	mthd |= (ffs(nv_encoder->dcb->sorconf.link) - 1) << 2;
1749	mthd |= nv_encoder->or;
1750
1751	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1752		data = 1;
1753		nvif_exec(disp->disp, NV50_DISP_SOR_PWR | mthd, &data, sizeof(data));
1754		mthd |= NV94_DISP_SOR_DP_PWR;
1755	} else {
1756		mthd |= NV50_DISP_SOR_PWR;
1757	}
1758
1759	data = (mode == DRM_MODE_DPMS_ON);
1760	nvif_exec(disp->disp, mthd, &data, sizeof(data));
1761}
1762
1763static bool
1764nv50_sor_mode_fixup(struct drm_encoder *encoder,
1765		    const struct drm_display_mode *mode,
1766		    struct drm_display_mode *adjusted_mode)
1767{
1768	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1769	struct nouveau_connector *nv_connector;
1770
1771	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1772	if (nv_connector && nv_connector->native_mode) {
1773		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1774			int id = adjusted_mode->base.id;
1775			*adjusted_mode = *nv_connector->native_mode;
1776			adjusted_mode->base.id = id;
1777		}
1778	}
1779
1780	return true;
1781}
1782
1783static void
1784nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
1785{
1786	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
1787	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
1788	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
1789		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1790			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
1791			evo_data(push, (nv_encoder->ctrl = temp));
1792		} else {
1793			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1794			evo_data(push, (nv_encoder->ctrl = temp));
1795		}
1796		evo_kick(push, mast);
1797	}
1798}
1799
1800static void
1801nv50_sor_disconnect(struct drm_encoder *encoder)
1802{
1803	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1804	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1805
1806	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1807	nv_encoder->crtc = NULL;
1808
1809	if (nv_crtc) {
1810		nv50_crtc_prepare(&nv_crtc->base);
1811		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
1812		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
1813	}
1814}
1815
1816static void
1817nv50_sor_commit(struct drm_encoder *encoder)
1818{
1819}
1820
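/* Program the SOR for a new mode: pick the protocol from the DCB output type
 * (single/dual-link TMDS, LVDS, DP at the connector's bpc), run the LVDS
 * script or HDMI setup where needed, then write the sync-polarity/depth state
 * (per-head methods on NVD0+, folded into the control word on earlier chips)
 * and finally update the SOR control word for the owning head.
 */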
1821static void
1822nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1823		  struct drm_display_mode *mode)
1824{
1825	struct nv50_disp *disp = nv50_disp(encoder->dev);
1826	struct nv50_mast *mast = nv50_mast(encoder->dev);
1827	struct drm_device *dev = encoder->dev;
1828	struct nouveau_drm *drm = nouveau_drm(dev);
1829	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1830	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1831	struct nouveau_connector *nv_connector;
1832	struct nvbios *bios = &drm->vbios;
1833	u32 lvds = 0, mask, ctrl;
1834	u8 owner = 1 << nv_crtc->index;
1835	u8 proto = 0xf;
1836	u8 depth = 0x0;
1837
1838	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1839	nv_encoder->crtc = encoder->crtc;
1840
1841	switch (nv_encoder->dcb->type) {
1842	case DCB_OUTPUT_TMDS:
1843		if (nv_encoder->dcb->sorconf.link & 1) {
1844			if (mode->clock < 165000)
1845				proto = 0x1;
1846			else
1847				proto = 0x5;
1848		} else {
1849			proto = 0x2;
1850		}
1851
1852		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
1853		break;
1854	case DCB_OUTPUT_LVDS:
1855		proto = 0x0;
1856
1857		if (bios->fp_no_ddc) {
1858			if (bios->fp.dual_link)
1859				lvds |= 0x0100;
1860			if (bios->fp.if_is_24bit)
1861				lvds |= 0x0200;
1862		} else {
1863			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1864				if (((u8 *)nv_connector->edid)[121] == 2)
1865					lvds |= 0x0100;
1866			} else
1867			if (mode->clock >= bios->fp.duallink_transition_clk) {
1868				lvds |= 0x0100;
1869			}
1870
1871			if (lvds & 0x0100) {
1872				if (bios->fp.strapless_is_24bit & 2)
1873					lvds |= 0x0200;
1874			} else {
1875				if (bios->fp.strapless_is_24bit & 1)
1876					lvds |= 0x0200;
1877			}
1878
1879			if (nv_connector->base.display_info.bpc == 8)
1880				lvds |= 0x0200;
1881		}
1882
1883		nvif_exec(disp->disp, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or,
			  &lvds, sizeof(lvds));
1884		break;
1885	case DCB_OUTPUT_DP:
1886		if (nv_connector->base.display_info.bpc == 6) {
1887			nv_encoder->dp.datarate = mode->clock * 18 / 8;
1888			depth = 0x2;
1889		} else
1890		if (nv_connector->base.display_info.bpc == 8) {
1891			nv_encoder->dp.datarate = mode->clock * 24 / 8;
1892			depth = 0x5;
1893		} else {
1894			nv_encoder->dp.datarate = mode->clock * 30 / 8;
1895			depth = 0x6;
1896		}
1897
1898		if (nv_encoder->dcb->sorconf.link & 1)
1899			proto = 0x8;
1900		else
1901			proto = 0x9;
1902		break;
1903	default:
1904		BUG_ON(1);
1905		break;
1906	}
1907
1908	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);
1909
1910	if (nv50_vers(mast) >= NVD0_DISP_CLASS) {
1911		u32 *push = evo_wait(mast, 3);
1912		if (push) {
1913			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1914			u32 syncs = 0x00000001;
1915
1916			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1917				syncs |= 0x00000008;
1918			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1919				syncs |= 0x00000010;
1920
1921			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1922				magic |= 0x00000001;
1923
1924			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1925			evo_data(push, syncs | (depth << 6));
1926			evo_data(push, magic);
1927			evo_kick(push, mast);
1928		}
1929
1930		ctrl = proto << 8;
1931		mask = 0x00000f00;
1932	} else {
1933		ctrl = (depth << 16) | (proto << 8);
1934		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1935			ctrl |= 0x00001000;
1936		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1937			ctrl |= 0x00002000;
1938		mask = 0x000f3f00;
1939	}
1940
1941	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
1942}
1943
1944static void
1945nv50_sor_destroy(struct drm_encoder *encoder)
1946{
1947	drm_encoder_cleanup(encoder);
1948	kfree(encoder);
1949}
1950
1951static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
1952	.dpms = nv50_sor_dpms,
1953	.mode_fixup = nv50_sor_mode_fixup,
1954	.prepare = nv50_sor_disconnect,
1955	.commit = nv50_sor_commit,
1956	.mode_set = nv50_sor_mode_set,
1957	.disable = nv50_sor_disconnect,
1958	.get_crtc = nv50_display_crtc_get,
1959};
1960
1961static const struct drm_encoder_funcs nv50_sor_func = {
1962	.destroy = nv50_sor_destroy,
1963};
1964
1965static int
1966nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1967{
1968	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1969	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
1970	struct nouveau_encoder *nv_encoder;
1971	struct drm_encoder *encoder;
1972	int type;
1973
1974	switch (dcbe->type) {
1975	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
1976	case DCB_OUTPUT_TMDS:
1977	case DCB_OUTPUT_DP:
1978	default:
1979		type = DRM_MODE_ENCODER_TMDS;
1980		break;
1981	}
1982
1983	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1984	if (!nv_encoder)
1985		return -ENOMEM;
1986	nv_encoder->dcb = dcbe;
1987	nv_encoder->or = ffs(dcbe->or) - 1;
1988	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
1989	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1990
1991	encoder = to_drm_encoder(nv_encoder);
1992	encoder->possible_crtcs = dcbe->heads;
1993	encoder->possible_clones = 0;
1994	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
1995	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);
1996
1997	drm_mode_connector_attach_encoder(connector, encoder);
1998	return 0;
1999}
2000
2001/******************************************************************************
2002 * PIOR
2003 *****************************************************************************/
2004
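/* Power control for external (PIOR-routed) encoders; the method offset encodes
 * the DCB output type and the OR index.
 */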
2005static void
2006nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2007{
2008	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2009	struct nv50_disp *disp = nv50_disp(encoder->dev);
2010	u32 mthd = (nv_encoder->dcb->type << 12) | nv_encoder->or;
2011	u32 ctrl = (mode == DRM_MODE_DPMS_ON);
2012	nvif_exec(disp->disp, NV50_DISP_PIOR_PWR + mthd, &ctrl, sizeof(ctrl));
2013}
2014
2015static bool
2016nv50_pior_mode_fixup(struct drm_encoder *encoder,
2017		     const struct drm_display_mode *mode,
2018		     struct drm_display_mode *adjusted_mode)
2019{
2020	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2021	struct nouveau_connector *nv_connector;
2022
2023	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2024	if (nv_connector && nv_connector->native_mode) {
2025		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
2026			int id = adjusted_mode->base.id;
2027			*adjusted_mode = *nv_connector->native_mode;
2028			adjusted_mode->base.id = id;
2029		}
2030	}
2031
2032	adjusted_mode->clock *= 2;
2033	return true;
2034}
2035
2036static void
2037nv50_pior_commit(struct drm_encoder *encoder)
2038{
2039}
2040
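/* Program the PIOR control word (depth, protocol, sync polarity, owning head)
 * through the core channel; only the pre-NVD0 method layout is written here.
 */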
2041static void
2042nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2043		   struct drm_display_mode *adjusted_mode)
2044{
2045	struct nv50_mast *mast = nv50_mast(encoder->dev);
2046	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2047	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2048	struct nouveau_connector *nv_connector;
2049	u8 owner = 1 << nv_crtc->index;
2050	u8 proto, depth;
2051	u32 *push;
2052
2053	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2054	switch (nv_connector->base.display_info.bpc) {
2055	case 10: depth = 0x6; break;
2056	case  8: depth = 0x5; break;
2057	case  6: depth = 0x2; break;
2058	default: depth = 0x0; break;
2059	}
2060
2061	switch (nv_encoder->dcb->type) {
2062	case DCB_OUTPUT_TMDS:
2063	case DCB_OUTPUT_DP:
2064		proto = 0x0;
2065		break;
2066	default:
2067		BUG_ON(1);
2068		break;
2069	}
2070
2071	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2072
2073	push = evo_wait(mast, 8);
2074	if (push) {
2075		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2076			u32 ctrl = (depth << 16) | (proto << 8) | owner;
2077			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2078				ctrl |= 0x00001000;
2079			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2080				ctrl |= 0x00002000;
2081			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2082			evo_data(push, ctrl);
2083		}
2084
2085		evo_kick(push, mast);
2086	}
2087
2088	nv_encoder->crtc = encoder->crtc;
2089}
2090
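/* Blank the head and clear the PIOR control word before detaching from the CRTC. */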
2091static void
2092nv50_pior_disconnect(struct drm_encoder *encoder)
2093{
2094	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2095	struct nv50_mast *mast = nv50_mast(encoder->dev);
2096	const int or = nv_encoder->or;
2097	u32 *push;
2098
2099	if (nv_encoder->crtc) {
2100		nv50_crtc_prepare(nv_encoder->crtc);
2101
2102		push = evo_wait(mast, 4);
2103		if (push) {
2104			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2105				evo_mthd(push, 0x0700 + (or * 0x040), 1);
2106				evo_data(push, 0x00000000);
2107			}
2108			evo_kick(push, mast);
2109		}
2110	}
2111
2112	nv_encoder->crtc = NULL;
2113}
2114
2115static void
2116nv50_pior_destroy(struct drm_encoder *encoder)
2117{
2118	drm_encoder_cleanup(encoder);
2119	kfree(encoder);
2120}
2121
2122static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
2123	.dpms = nv50_pior_dpms,
2124	.mode_fixup = nv50_pior_mode_fixup,
2125	.prepare = nv50_pior_disconnect,
2126	.commit = nv50_pior_commit,
2127	.mode_set = nv50_pior_mode_set,
2128	.disable = nv50_pior_disconnect,
2129	.get_crtc = nv50_display_crtc_get,
2130};
2131
2132static const struct drm_encoder_funcs nv50_pior_func = {
2133	.destroy = nv50_pior_destroy,
2134};
2135
2136static int
2137nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
2138{
2139	struct nouveau_drm *drm = nouveau_drm(connector->dev);
2140	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
2141	struct nouveau_i2c_port *ddc = NULL;
2142	struct nouveau_encoder *nv_encoder;
2143	struct drm_encoder *encoder;
2144	int type;
2145
2146	switch (dcbe->type) {
2147	case DCB_OUTPUT_TMDS:
2148		ddc  = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
2149		type = DRM_MODE_ENCODER_TMDS;
2150		break;
2151	case DCB_OUTPUT_DP:
2152		ddc  = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
2153		type = DRM_MODE_ENCODER_TMDS;
2154		break;
2155	default:
2156		return -ENODEV;
2157	}
2158
2159	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2160	if (!nv_encoder)
2161		return -ENOMEM;
2162	nv_encoder->dcb = dcbe;
2163	nv_encoder->or = ffs(dcbe->or) - 1;
2164	nv_encoder->i2c = ddc;
2165
2166	encoder = to_drm_encoder(nv_encoder);
2167	encoder->possible_crtcs = dcbe->heads;
2168	encoder->possible_clones = 0;
2169	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
2170	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
2171
2172	drm_mode_connector_attach_encoder(connector, encoder);
2173	return 0;
2174}
2175
2176/******************************************************************************
2177 * Framebuffer
2178 *****************************************************************************/
2179
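/* Each framebuffer handle is backed by a set of VRAM DMA objects (one per head
 * plus one on the core channel) so the display engine can scan out of it;
 * nv50_fbdma_fini() tears such a set down and drops it from the disp->fbdma list.
 */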
2180static void
2181nv50_fbdma_fini(struct nv50_fbdma *fbdma)
2182{
2183	int i;
2184	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
2185		nvif_object_fini(&fbdma->base[i]);
2186	nvif_object_fini(&fbdma->core);
2187	list_del(&fbdma->head);
2188	kfree(fbdma);
2189}
2190
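/* Instantiate the DMA objects for a framebuffer handle, unless a set with this
 * name already exists.  The nv_dma_v0 args describe a read/write VRAM range;
 * the chipset-specific tail (nv50/gf100/gf110) carries page/kind information.
 */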
2191static int
2192nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2193{
2194	struct nouveau_drm *drm = nouveau_drm(dev);
2195	struct nv50_disp *disp = nv50_disp(dev);
2196	struct nv50_mast *mast = nv50_mast(dev);
2197	struct __attribute__ ((packed)) {
2198		struct nv_dma_v0 base;
2199		union {
2200			struct nv50_dma_v0 nv50;
2201			struct gf100_dma_v0 gf100;
2202			struct gf110_dma_v0 gf110;
2203		};
2204	} args = {};
2205	struct nv50_fbdma *fbdma;
2206	struct drm_crtc *crtc;
2207	u32 size = sizeof(args.base);
2208	int ret;
2209
2210	list_for_each_entry(fbdma, &disp->fbdma, head) {
2211		if (fbdma->core.handle == name)
2212			return 0;
2213	}
2214
2215	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2216	if (!fbdma)
2217		return -ENOMEM;
2218	list_add(&fbdma->head, &disp->fbdma);
2219
2220	args.base.target = NV_DMA_V0_TARGET_VRAM;
2221	args.base.access = NV_DMA_V0_ACCESS_RDWR;
2222	args.base.start = offset;
2223	args.base.limit = offset + length - 1;
2224
2225	if (drm->device.info.chipset < 0x80) {
2226		args.nv50.part = NV50_DMA_V0_PART_256;
2227		size += sizeof(args.nv50);
2228	} else
2229	if (drm->device.info.chipset < 0xc0) {
2230		args.nv50.part = NV50_DMA_V0_PART_256;
2231		args.nv50.kind = kind;
2232		size += sizeof(args.nv50);
2233	} else
2234	if (drm->device.info.chipset < 0xd0) {
2235		args.gf100.kind = kind;
2236		size += sizeof(args.gf100);
2237	} else {
2238		args.gf110.page = GF110_DMA_V0_PAGE_LP;
2239		args.gf110.kind = kind;
2240		size += sizeof(args.gf110);
2241	}
2242
2243	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2244		struct nv50_head *head = nv50_head(crtc);
2245		int ret = nvif_object_init(&head->sync.base.base.user, NULL,
2246					    name, NV_DMA_IN_MEMORY, &args, size,
2247					   &fbdma->base[head->base.index]);
2248		if (ret) {
2249			nv50_fbdma_fini(fbdma);
2250			return ret;
2251		}
2252	}
2253
2254	ret = nvif_object_init(&mast->base.base.user, NULL, name,
2255				NV_DMA_IN_MEMORY, &args, size,
2256			       &fbdma->core);
2257	if (ret) {
2258		nv50_fbdma_fini(fbdma);
2259		return ret;
2260	}
2261
2262	return 0;
2263}
2264
2265static void
2266nv50_fb_dtor(struct drm_framebuffer *fb)
2267{
2268}
2269
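/* Pre-compute the EVO-facing framebuffer state: contiguous buffers only, depth
 * translated to the hardware format code, pitch encoded for linear vs. tiled
 * layouts, and a matching VRAM DMA object (handle 0xffff0000 | kind) created
 * up front.
 */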
2270static int
2271nv50_fb_ctor(struct drm_framebuffer *fb)
2272{
2273	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
2274	struct nouveau_drm *drm = nouveau_drm(fb->dev);
2275	struct nouveau_bo *nvbo = nv_fb->nvbo;
2276	struct nv50_disp *disp = nv50_disp(fb->dev);
2277	struct nouveau_fb *pfb = nvkm_fb(&drm->device);
2278	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
2279	u8 tile = nvbo->tile_mode;
2280
2281	if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) {
2282		NV_ERROR(drm, "framebuffer requires contiguous bo\n");
2283		return -EINVAL;
2284	}
2285
2286	if (drm->device.info.chipset >= 0xc0)
2287		tile >>= 4; /* yep.. */
2288
2289	switch (fb->depth) {
2290	case  8: nv_fb->r_format = 0x1e00; break;
2291	case 15: nv_fb->r_format = 0xe900; break;
2292	case 16: nv_fb->r_format = 0xe800; break;
2293	case 24:
2294	case 32: nv_fb->r_format = 0xcf00; break;
2295	case 30: nv_fb->r_format = 0xd100; break;
2296	default:
2297		 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
2298		 return -EINVAL;
2299	}
2300
2301	if (disp->disp->oclass < NV84_DISP_CLASS) {
2302		nv_fb->r_pitch   = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2303					    (fb->pitches[0] | 0x00100000);
2304		nv_fb->r_format |= kind << 16;
2305	} else
2306	if (disp->disp->oclass < NVD0_DISP_CLASS) {
2307		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2308					   (fb->pitches[0] | 0x00100000);
2309	} else {
2310		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2311					   (fb->pitches[0] | 0x01000000);
2312	}
2313	nv_fb->r_handle = 0xffff0000 | kind;
2314
2315	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, pfb->ram->size, kind);
2316}
2317
2318/******************************************************************************
2319 * Init
2320 *****************************************************************************/
2321
2322void
2323nv50_display_fini(struct drm_device *dev)
2324{
2325}
2326
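/* Re-seed each CRTC's flip semaphore in the shared sync buffer, then bind the
 * core channel's sync-buffer DMA handle via method 0x0088.
 */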
2327int
2328nv50_display_init(struct drm_device *dev)
2329{
2330	struct nv50_disp *disp = nv50_disp(dev);
2331	struct drm_crtc *crtc;
2332	u32 *push;
2333
2334	push = evo_wait(nv50_mast(dev), 32);
2335	if (!push)
2336		return -EBUSY;
2337
2338	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2339		struct nv50_sync *sync = nv50_sync(crtc);
2340		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
2341	}
2342
2343	evo_mthd(push, 0x0088, 1);
2344	evo_data(push, nv50_mast(dev)->base.sync.handle);
2345	evo_kick(push, nv50_mast(dev));
2346	return 0;
2347}
2348
2349void
2350nv50_display_destroy(struct drm_device *dev)
2351{
2352	struct nv50_disp *disp = nv50_disp(dev);
2353	struct nv50_fbdma *fbdma, *fbtmp;
2354
2355	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
2356		nv50_fbdma_fini(fbdma);
2357	}
2358
2359	nv50_dmac_destroy(&disp->mast.base, disp->disp);
2360
2361	nouveau_bo_unmap(disp->sync);
2362	if (disp->sync)
2363		nouveau_bo_unpin(disp->sync);
2364	nouveau_bo_ref(NULL, &disp->sync);
2365
2366	nouveau_display(dev)->priv = NULL;
2367	kfree(disp);
2368}
2369
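/* Build the nv50+ display state: hook the display callbacks, allocate the
 * shared sync/semaphore buffer and the core EVO channel, create one CRTC per
 * hardware head (two on pre-NVD0, read from the hardware otherwise),
 * instantiate encoders from the VBIOS DCB table (SOR/DAC on-chip, PIOR for
 * external encoders) and finally cull connectors that ended up with no encoder.
 */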
2370int
2371nv50_display_create(struct drm_device *dev)
2372{
2373	struct nvif_device *device = &nouveau_drm(dev)->device;
2374	struct nouveau_drm *drm = nouveau_drm(dev);
2375	struct dcb_table *dcb = &drm->vbios.dcb;
2376	struct drm_connector *connector, *tmp;
2377	struct nv50_disp *disp;
2378	struct dcb_output *dcbe;
2379	int crtcs, ret, i;
2380
2381	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2382	if (!disp)
2383		return -ENOMEM;
2384	INIT_LIST_HEAD(&disp->fbdma);
2385
2386	nouveau_display(dev)->priv = disp;
2387	nouveau_display(dev)->dtor = nv50_display_destroy;
2388	nouveau_display(dev)->init = nv50_display_init;
2389	nouveau_display(dev)->fini = nv50_display_fini;
2390	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
2391	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
2392	disp->disp = &nouveau_display(dev)->disp;
2393
2394	/* small shared memory area we use for notifiers and semaphores */
2395	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2396			     0, 0x0000, NULL, &disp->sync);
2397	if (!ret) {
2398		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
2399		if (!ret) {
2400			ret = nouveau_bo_map(disp->sync);
2401			if (ret)
2402				nouveau_bo_unpin(disp->sync);
2403		}
2404		if (ret)
2405			nouveau_bo_ref(NULL, &disp->sync);
2406	}
2407
2408	if (ret)
2409		goto out;
2410
2411	/* allocate master evo channel */
2412	ret = nv50_core_create(disp->disp, disp->sync->bo.offset,
2413			      &disp->mast);
2414	if (ret)
2415		goto out;
2416
2417	/* create crtc objects to represent the hw heads */
2418	if (disp->disp->oclass >= NVD0_DISP_CLASS)
2419		crtcs = nvif_rd32(device, 0x022448);
2420	else
2421		crtcs = 2;
2422
2423	for (i = 0; i < crtcs; i++) {
2424		ret = nv50_crtc_create(dev, i);
2425		if (ret)
2426			goto out;
2427	}
2428
2429	/* create encoder/connector objects based on VBIOS DCB table */
2430	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2431		connector = nouveau_connector_create(dev, dcbe->connector);
2432		if (IS_ERR(connector))
2433			continue;
2434
2435		if (dcbe->location == DCB_LOC_ON_CHIP) {
2436			switch (dcbe->type) {
2437			case DCB_OUTPUT_TMDS:
2438			case DCB_OUTPUT_LVDS:
2439			case DCB_OUTPUT_DP:
2440				ret = nv50_sor_create(connector, dcbe);
2441				break;
2442			case DCB_OUTPUT_ANALOG:
2443				ret = nv50_dac_create(connector, dcbe);
2444				break;
2445			default:
2446				ret = -ENODEV;
2447				break;
2448			}
2449		} else {
2450			ret = nv50_pior_create(connector, dcbe);
2451		}
2452
2453		if (ret) {
2454			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2455				     dcbe->location, dcbe->type,
2456				     ffs(dcbe->or) - 1, ret);
2457			ret = 0;
2458		}
2459	}
2460
2461	/* cull any connectors we created that don't have an encoder */
2462	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2463		if (connector->encoder_ids[0])
2464			continue;
2465
2466		NV_WARN(drm, "%s has no encoders, removing\n",
2467			connector->name);
2468		connector->funcs->destroy(connector);
2469	}
2470
2471out:
2472	if (ret)
2473		nv50_display_destroy(dev);
2474	return ret;
2475}
2476