nv50_display.c revision 120b0c39c75688864e4a25e71cf3ed40e8e18651
1/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25#include <linux/dma-mapping.h>
26
27#include <drm/drmP.h>
28#include <drm/drm_crtc_helper.h>
29#include <drm/drm_dp_helper.h>
30
31#include <nvif/class.h>
32
33#include "nouveau_drm.h"
34#include "nouveau_dma.h"
35#include "nouveau_gem.h"
36#include "nouveau_connector.h"
37#include "nouveau_encoder.h"
38#include "nouveau_crtc.h"
39#include "nouveau_fence.h"
40#include "nv50_display.h"
41
42#define EVO_DMA_NR 9
43
44#define EVO_MASTER  (0x00)
45#define EVO_FLIP(c) (0x01 + (c))
46#define EVO_OVLY(c) (0x05 + (c))
47#define EVO_OIMM(c) (0x09 + (c))
48#define EVO_CURS(c) (0x0d + (c))
49
50/* offsets of various structures within the shared sync bo */
51#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
52#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
53#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
54#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
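
/* A sketch of the resulting layout (derived from the macros above; byte
 * offsets within the shared sync bo):
 *
 *	0x0000           EVO_MAST_NTFY       core channel notifier
 *	0x0100 / 0x0110  EVO_FLIP_SEM0/1(0)  page-flip semaphores, head 0
 *	0x0200 / 0x0210  EVO_FLIP_SEM0/1(1)  page-flip semaphores, head 1
 *	...one 0x100-byte slot per additional head.
 */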
55
56#define EVO_CORE_HANDLE      (0xd1500000)
57#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
58#define EVO_CHAN_OCLASS(t,c) (((c)->oclass & 0xff00) | ((t) & 0x00ff))
59#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
60			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
61
62/******************************************************************************
63 * EVO channel
64 *****************************************************************************/
65
66struct nv50_chan {
67	struct nvif_object user;
68};
69
70static int
71nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head,
72		 void *data, u32 size, struct nv50_chan *chan)
73{
74	while (oclass[0]) {
75		int ret = nvif_object_init(disp, NULL, (oclass[0] << 16) | head,
76					   oclass[0], data, size,
77					  &chan->user);
78		if (oclass++, ret == 0)
79			return ret;
80	}
81	return -ENOSYS;
82}
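
/* Note on the probe loop above: oclass is a zero-terminated list ordered
 * newest-class-first (see the *_create() helpers below); the comma
 * expression advances the cursor after every attempt, so the first class
 * the kernel accepts wins and 0 is returned, and we only fall through to
 * -ENOSYS once the whole list has been tried.
 */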
83
84static void
85nv50_chan_destroy(struct nv50_chan *chan)
86{
87	nvif_object_fini(&chan->user);
88}
89
90/******************************************************************************
91 * PIO EVO channel
92 *****************************************************************************/
93
94struct nv50_pioc {
95	struct nv50_chan base;
96};
97
98static void
99nv50_pioc_destroy(struct nv50_pioc *pioc)
100{
101	nv50_chan_destroy(&pioc->base);
102}
103
104static int
105nv50_pioc_create(struct nvif_object *disp, const u32 *oclass, u8 head,
106		 void *data, u32 size, struct nv50_pioc *pioc)
107{
108	return nv50_chan_create(disp, oclass, head, data, size, &pioc->base);
109}
110
111/******************************************************************************
112 * Cursor Immediate
113 *****************************************************************************/
114
115struct nv50_curs {
116	struct nv50_pioc base;
117};
118
119static int
120nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs)
121{
122	struct nv50_display_curs_class args = {
123		.head = head,
124	};
125	static const u32 oclass[] = {
126		GM107_DISP_CURS_CLASS,
127		NVF0_DISP_CURS_CLASS,
128		NVE0_DISP_CURS_CLASS,
129		NVD0_DISP_CURS_CLASS,
130		NVA3_DISP_CURS_CLASS,
131		NV94_DISP_CURS_CLASS,
132		NVA0_DISP_CURS_CLASS,
133		NV84_DISP_CURS_CLASS,
134		NV50_DISP_CURS_CLASS,
135		0
136	};
137
138	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
139			       &curs->base);
140}
141
142/******************************************************************************
143 * Overlay Immediate
144 *****************************************************************************/
145
146struct nv50_oimm {
147	struct nv50_pioc base;
148};
149
150static int
151nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm)
152{
153	struct nv50_display_oimm_class args = {
154		.head = head,
155	};
156	static const u32 oclass[] = {
157		GM107_DISP_OIMM_CLASS,
158		NVF0_DISP_OIMM_CLASS,
159		NVE0_DISP_OIMM_CLASS,
160		NVD0_DISP_OIMM_CLASS,
161		NVA3_DISP_OIMM_CLASS,
162		NV94_DISP_OIMM_CLASS,
163		NVA0_DISP_OIMM_CLASS,
164		NV84_DISP_OIMM_CLASS,
165		NV50_DISP_OIMM_CLASS,
166		0
167	};
168
169	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
170			       &oimm->base);
171}
172
173/******************************************************************************
174 * DMA EVO channel
175 *****************************************************************************/
176
177struct nv50_dmac {
178	struct nv50_chan base;
179	dma_addr_t handle;
180	u32 *ptr;
181
182	struct nvif_object sync;
183	struct nvif_object vram;
184
185	/* Protects against concurrent pushbuf access to this channel; the lock
186	 * is taken by evo_wait (if the pushbuf reservation is successful) and
187	 * dropped again by evo_kick. */
188	struct mutex lock;
189};
190
191static void
192nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
193{
194	nvif_object_fini(&dmac->vram);
195	nvif_object_fini(&dmac->sync);
196
197	nv50_chan_destroy(&dmac->base);
198
199	if (dmac->ptr) {
200		struct pci_dev *pdev = nvkm_device(nvif_device(disp))->pdev;
201		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
202	}
203}
204
205static int
206nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head,
207		 void *data, u32 size, u64 syncbuf,
208		 struct nv50_dmac *dmac)
209{
210	struct nouveau_fb *pfb = nvkm_fb(nvif_device(disp));
211	struct nvif_object pushbuf;
212	u32 handle = *(u32 *)data;
213	int ret;
214
215	mutex_init(&dmac->lock);
216
217	dmac->ptr = pci_alloc_consistent(nvkm_device(nvif_device(disp))->pdev,
218					 PAGE_SIZE, &dmac->handle);
219	if (!dmac->ptr)
220		return -ENOMEM;
221
222	ret = nvif_object_init(nvif_object(nvif_device(disp)), NULL, handle,
223			       NV_DMA_FROM_MEMORY,
224			       &(struct nv_dma_v0) {
225					.target = NV_DMA_V0_TARGET_PCI_US,
226					.access = NV_DMA_V0_ACCESS_RD,
227					.start = dmac->handle + 0x0000,
228					.limit = dmac->handle + 0x0fff,
229			       }, sizeof(struct nv_dma_v0), &pushbuf);
230	if (ret)
231		return ret;
232
233	ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base);
234	nvif_object_fini(&pushbuf);
235	if (ret)
236		return ret;
237
238	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000,
239			       NV_DMA_IN_MEMORY,
240			       &(struct nv_dma_v0) {
241					.target = NV_DMA_V0_TARGET_VRAM,
242					.access = NV_DMA_V0_ACCESS_RDWR,
243					.start = syncbuf + 0x0000,
244					.limit = syncbuf + 0x0fff,
245			       }, sizeof(struct nv_dma_v0),
246			       &dmac->sync);
247	if (ret)
248		return ret;
249
250	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001,
251			       NV_DMA_IN_MEMORY,
252			       &(struct nv_dma_v0) {
253					.target = NV_DMA_V0_TARGET_VRAM,
254					.access = NV_DMA_V0_ACCESS_RDWR,
255					.start = 0,
256					.limit = pfb->ram->size - 1,
257			       }, sizeof(struct nv_dma_v0),
258			       &dmac->vram);
259	if (ret)
260		return ret;
261
262	return ret;
263}
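
/* nv50_dmac_create() above leaves each DMA channel holding three nvif
 * objects: a pushbuf DMA object pointing at the page of coherent system
 * memory allocated here (GPU read-only), a "sync" DMA object covering 4KiB
 * of VRAM at syncbuf, and a "vram" DMA object spanning all of VRAM.  The
 * pushbuf object is finalised again straight after channel creation,
 * presumably because the channel keeps whatever reference it needs; the
 * sync/vram handles (0xf0000000/0xf0000001) are the ones referenced by
 * methods later in this file.
 */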
264
265/******************************************************************************
266 * Core
267 *****************************************************************************/
268
269struct nv50_mast {
270	struct nv50_dmac base;
271};
272
273static int
274nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core)
275{
276	struct nv50_display_mast_class args = {
277		.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
278	};
279	static const u32 oclass[] = {
280		GM107_DISP_MAST_CLASS,
281		NVF0_DISP_MAST_CLASS,
282		NVE0_DISP_MAST_CLASS,
283		NVD0_DISP_MAST_CLASS,
284		NVA3_DISP_MAST_CLASS,
285		NV94_DISP_MAST_CLASS,
286		NVA0_DISP_MAST_CLASS,
287		NV84_DISP_MAST_CLASS,
288		NV50_DISP_MAST_CLASS,
289		0
290	};
291
292	return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf,
293			       &core->base);
294}
295
296/******************************************************************************
297 * Base
298 *****************************************************************************/
299
300struct nv50_sync {
301	struct nv50_dmac base;
302	u32 addr;
303	u32 data;
304};
305
306static int
307nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf,
308		 struct nv50_sync *base)
309{
310	struct nv50_display_sync_class args = {
311		.pushbuf = EVO_PUSH_HANDLE(SYNC, head),
312		.head = head,
313	};
314	static const u32 oclass[] = {
315		GM107_DISP_SYNC_CLASS,
316		NVF0_DISP_SYNC_CLASS,
317		NVE0_DISP_SYNC_CLASS,
318		NVD0_DISP_SYNC_CLASS,
319		NVA3_DISP_SYNC_CLASS,
320		NV94_DISP_SYNC_CLASS,
321		NVA0_DISP_SYNC_CLASS,
322		NV84_DISP_SYNC_CLASS,
323		NV50_DISP_SYNC_CLASS,
324		0
325	};
326
327	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
328				syncbuf, &base->base);
329}
330
331/******************************************************************************
332 * Overlay
333 *****************************************************************************/
334
335struct nv50_ovly {
336	struct nv50_dmac base;
337};
338
339static int
340nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf,
341		 struct nv50_ovly *ovly)
342{
343	struct nv50_display_ovly_class args = {
344		.pushbuf = EVO_PUSH_HANDLE(OVLY, head),
345		.head = head,
346	};
347	static const u32 oclass[] = {
348		GM107_DISP_OVLY_CLASS,
349		NVF0_DISP_OVLY_CLASS,
350		NVE0_DISP_OVLY_CLASS,
351		NVD0_DISP_OVLY_CLASS,
352		NVA3_DISP_OVLY_CLASS,
353		NV94_DISP_OVLY_CLASS,
354		NVA0_DISP_OVLY_CLASS,
355		NV84_DISP_OVLY_CLASS,
356		NV50_DISP_OVLY_CLASS,
357		0
358	};
359
360	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
361				syncbuf, &ovly->base);
362}
363
364struct nv50_head {
365	struct nouveau_crtc base;
366	struct nouveau_bo *image;
367	struct nv50_curs curs;
368	struct nv50_sync sync;
369	struct nv50_ovly ovly;
370	struct nv50_oimm oimm;
371};
372
373#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
374#define nv50_curs(c) (&nv50_head(c)->curs)
375#define nv50_sync(c) (&nv50_head(c)->sync)
376#define nv50_ovly(c) (&nv50_head(c)->ovly)
377#define nv50_oimm(c) (&nv50_head(c)->oimm)
378#define nv50_chan(c) (&(c)->base.base)
379#define nv50_vers(c) nv50_chan(c)->user.oclass
380
381struct nv50_fbdma {
382	struct list_head head;
383	struct nvif_object core;
384	struct nvif_object base[4];
385};
386
387struct nv50_disp {
388	struct nvif_object *disp;
389	struct nv50_mast mast;
390
391	struct list_head fbdma;
392
393	struct nouveau_bo *sync;
394};
395
396static struct nv50_disp *
397nv50_disp(struct drm_device *dev)
398{
399	return nouveau_display(dev)->priv;
400}
401
402#define nv50_mast(d) (&nv50_disp(d)->mast)
403
404static struct drm_crtc *
405nv50_display_crtc_get(struct drm_encoder *encoder)
406{
407	return nouveau_encoder(encoder)->crtc;
408}
409
410/******************************************************************************
411 * EVO channel helpers
412 *****************************************************************************/
413static u32 *
414evo_wait(void *evoc, int nr)
415{
416	struct nv50_dmac *dmac = evoc;
417	u32 put;
418
419	mutex_lock(&dmac->lock);
	/* read PUT only once the lock is held, so a concurrent evo_kick
	 * cannot advance it underneath us */
	put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
420	if (put + nr >= (PAGE_SIZE / 4) - 8) {
421		dmac->ptr[put] = 0x20000000;
422
423		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
424		if (!nvkm_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) {
425			mutex_unlock(&dmac->lock);
426			nv_error(nvkm_object(&dmac->base.user), "channel stalled\n");
427			return NULL;
428		}
429
430		put = 0;
431	}
432
433	return dmac->ptr + put;
434}
435
436static void
437evo_kick(u32 *push, void *evoc)
438{
439	struct nv50_dmac *dmac = evoc;
440	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
441	mutex_unlock(&dmac->lock);
442}
443
444#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
445#define evo_data(p,d)   *((p)++) = (d)
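
/* A minimal usage sketch of the helpers above (illustration only, not
 * driver code).  The pattern used throughout this file is: reserve space,
 * emit method/data pairs, kick the channel.  Method 0x0080 with data 0
 * appears to act as the "update/commit" request in the callers below.
 *
 *	u32 *push = evo_wait(mast, 2);
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);
 *		evo_data(push, 0x00000000);
 *		evo_kick(push, mast);
 *	}
 */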
446
447static bool
448evo_sync_wait(void *data)
449{
450	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
451		return true;
452	usleep_range(1, 2);
453	return false;
454}
455
456static int
457evo_sync(struct drm_device *dev)
458{
459	struct nvif_device *device = &nouveau_drm(dev)->device;
460	struct nv50_disp *disp = nv50_disp(dev);
461	struct nv50_mast *mast = nv50_mast(dev);
462	u32 *push = evo_wait(mast, 8);
463	if (push) {
464		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
465		evo_mthd(push, 0x0084, 1);
466		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
467		evo_mthd(push, 0x0080, 2);
468		evo_data(push, 0x00000000);
469		evo_data(push, 0x00000000);
470		evo_kick(push, mast);
471		if (nv_wait_cb(nvkm_device(device), evo_sync_wait, disp->sync))
472			return 0;
473	}
474
475	return -EBUSY;
476}
477
478/******************************************************************************
479 * Page flipping channel
480 *****************************************************************************/
481struct nouveau_bo *
482nv50_display_crtc_sema(struct drm_device *dev, int crtc)
483{
484	return nv50_disp(dev)->sync;
485}
486
487struct nv50_display_flip {
488	struct nv50_disp *disp;
489	struct nv50_sync *chan;
490};
491
492static bool
493nv50_display_flip_wait(void *data)
494{
495	struct nv50_display_flip *flip = data;
496	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
497					      flip->chan->data)
498		return true;
499	usleep_range(1, 2);
500	return false;
501}
502
503void
504nv50_display_flip_stop(struct drm_crtc *crtc)
505{
506	struct nvif_device *device = &nouveau_drm(crtc->dev)->device;
507	struct nv50_display_flip flip = {
508		.disp = nv50_disp(crtc->dev),
509		.chan = nv50_sync(crtc),
510	};
511	u32 *push;
512
513	push = evo_wait(flip.chan, 8);
514	if (push) {
515		evo_mthd(push, 0x0084, 1);
516		evo_data(push, 0x00000000);
517		evo_mthd(push, 0x0094, 1);
518		evo_data(push, 0x00000000);
519		evo_mthd(push, 0x00c0, 1);
520		evo_data(push, 0x00000000);
521		evo_mthd(push, 0x0080, 1);
522		evo_data(push, 0x00000000);
523		evo_kick(push, flip.chan);
524	}
525
526	nv_wait_cb(nvkm_device(device), nv50_display_flip_wait, &flip);
527}
528
529int
530nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
531		       struct nouveau_channel *chan, u32 swap_interval)
532{
533	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
534	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
535	struct nv50_head *head = nv50_head(crtc);
536	struct nv50_sync *sync = nv50_sync(crtc);
537	u32 *push;
538	int ret;
539
540	swap_interval <<= 4;
541	if (swap_interval == 0)
542		swap_interval |= 0x100;
543	if (chan == NULL)
544		evo_sync(crtc->dev);
545
546	push = evo_wait(sync, 128);
547	if (unlikely(push == NULL))
548		return -EBUSY;
549
550	if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) {
551		ret = RING_SPACE(chan, 8);
552		if (ret)
553			return ret;
554
555		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
556		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
557		OUT_RING  (chan, sync->addr ^ 0x10);
558		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
559		OUT_RING  (chan, sync->data + 1);
560		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
561		OUT_RING  (chan, sync->addr);
562		OUT_RING  (chan, sync->data);
563	} else
564	if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) {
565		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
566		ret = RING_SPACE(chan, 12);
567		if (ret)
568			return ret;
569
570		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
571		OUT_RING  (chan, chan->vram.handle);
572		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
573		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
574		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
575		OUT_RING  (chan, sync->data + 1);
576		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
577		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
578		OUT_RING  (chan, upper_32_bits(addr));
579		OUT_RING  (chan, lower_32_bits(addr));
580		OUT_RING  (chan, sync->data);
581		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
582	} else
583	if (chan) {
584		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
585		ret = RING_SPACE(chan, 10);
586		if (ret)
587			return ret;
588
589		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
590		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
591		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
592		OUT_RING  (chan, sync->data + 1);
593		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
594				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
595		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
596		OUT_RING  (chan, upper_32_bits(addr));
597		OUT_RING  (chan, lower_32_bits(addr));
598		OUT_RING  (chan, sync->data);
599		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
600				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
601	}
602
603	if (chan) {
604		sync->addr ^= 0x10;
605		sync->data++;
606		FIRE_RING (chan);
607	}
608
609	/* queue the flip */
610	evo_mthd(push, 0x0100, 1);
611	evo_data(push, 0xfffe0000);
612	evo_mthd(push, 0x0084, 1);
613	evo_data(push, swap_interval);
614	if (!(swap_interval & 0x00000100)) {
615		evo_mthd(push, 0x00e0, 1);
616		evo_data(push, 0x40000000);
617	}
618	evo_mthd(push, 0x0088, 4);
619	evo_data(push, sync->addr);
620	evo_data(push, sync->data++);
621	evo_data(push, sync->data);
622	evo_data(push, sync->base.sync.handle);
623	evo_mthd(push, 0x00a0, 2);
624	evo_data(push, 0x00000000);
625	evo_data(push, 0x00000000);
626	evo_mthd(push, 0x00c0, 1);
627	evo_data(push, nv_fb->r_handle);
628	evo_mthd(push, 0x0110, 2);
629	evo_data(push, 0x00000000);
630	evo_data(push, 0x00000000);
631	if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
632		evo_mthd(push, 0x0800, 5);
633		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
634		evo_data(push, 0);
635		evo_data(push, (fb->height << 16) | fb->width);
636		evo_data(push, nv_fb->r_pitch);
637		evo_data(push, nv_fb->r_format);
638	} else {
639		evo_mthd(push, 0x0400, 5);
640		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
641		evo_data(push, 0);
642		evo_data(push, (fb->height << 16) | fb->width);
643		evo_data(push, nv_fb->r_pitch);
644		evo_data(push, nv_fb->r_format);
645	}
646	evo_mthd(push, 0x0080, 1);
647	evo_data(push, 0x00000000);
648	evo_kick(push, sync);
649
650	nouveau_bo_ref(nv_fb->nvbo, &head->image);
651	return 0;
652}
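
/* Rough shape of the handshake above, as implied by the code rather than
 * any documentation: the rendering channel releases sync->data + 1 into the
 * "other" semaphore slot (sync->addr ^ 0x10) once its commands complete,
 * then acquires sync->data at sync->addr, which EVO released when the
 * previous flip finished.  The flip queued on the sync channel is handed
 * the new addr/data pair (methods 0x0088.. above), and addr/data are
 * toggled and incremented so successive frames ping-pong between
 * EVO_FLIP_SEM0 and EVO_FLIP_SEM1.
 */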
653
654/******************************************************************************
655 * CRTC
656 *****************************************************************************/
657static int
658nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
659{
660	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
661	struct nouveau_connector *nv_connector;
662	struct drm_connector *connector;
663	u32 *push, mode = 0x00;
664
665	nv_connector = nouveau_crtc_connector_get(nv_crtc);
666	connector = &nv_connector->base;
667	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
668		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
669			mode = DITHERING_MODE_DYNAMIC2X2;
670	} else {
671		mode = nv_connector->dithering_mode;
672	}
673
674	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
675		if (connector->display_info.bpc >= 8)
676			mode |= DITHERING_DEPTH_8BPC;
677	} else {
678		mode |= nv_connector->dithering_depth;
679	}
680
681	push = evo_wait(mast, 4);
682	if (push) {
683		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
684			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
685			evo_data(push, mode);
686		} else
687		if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
688			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
689			evo_data(push, mode);
690		} else {
691			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
692			evo_data(push, mode);
693		}
694
695		if (update) {
696			evo_mthd(push, 0x0080, 1);
697			evo_data(push, 0x00000000);
698		}
699		evo_kick(push, mast);
700	}
701
702	return 0;
703}
704
705static int
706nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
707{
708	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
709	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
710	struct drm_crtc *crtc = &nv_crtc->base;
711	struct nouveau_connector *nv_connector;
712	int mode = DRM_MODE_SCALE_NONE;
713	u32 oX, oY, *push;
714
715	/* Start off at the resolution we programmed the crtc for; this
716	 * effectively handles NONE/FULL scaling.
717	 */
718	nv_connector = nouveau_crtc_connector_get(nv_crtc);
719	if (nv_connector && nv_connector->native_mode)
720		mode = nv_connector->scaling_mode;
721
722	if (mode != DRM_MODE_SCALE_NONE)
723		omode = nv_connector->native_mode;
724	else
725		omode = umode;
726
727	oX = omode->hdisplay;
728	oY = omode->vdisplay;
729	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
730		oY *= 2;
731
732	/* Add overscan compensation if necessary.  This keeps the aspect
733	 * ratio the same as the backend mode unless the user overrides it
734	 * by setting both the hborder and vborder properties.
735	 */
736	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
737			     (nv_connector->underscan == UNDERSCAN_AUTO &&
738			      nv_connector->edid &&
739			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
740		u32 bX = nv_connector->underscan_hborder;
741		u32 bY = nv_connector->underscan_vborder;
742		u32 aspect = (oY << 19) / oX;
743
744		if (bX) {
745			oX -= (bX * 2);
746			if (bY) oY -= (bY * 2);
747			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
748		} else {
749			oX -= (oX >> 4) + 32;
750			if (bY) oY -= (bY * 2);
751			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
752		}
753	}
754
755	/* handle CENTER/ASPECT scaling, taking into account the areas
756	 * removed already for overscan compensation
757	 */
758	switch (mode) {
759	case DRM_MODE_SCALE_CENTER:
760		oX = min((u32)umode->hdisplay, oX);
761		oY = min((u32)umode->vdisplay, oY);
762		/* fall-through */
763	case DRM_MODE_SCALE_ASPECT:
764		if (oY < oX) {
765			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
766			oX = ((oY * aspect) + (aspect / 2)) >> 19;
767		} else {
768			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
769			oY = ((oX * aspect) + (aspect / 2)) >> 19;
770		}
771		break;
772	default:
773		break;
774	}
775
776	push = evo_wait(mast, 8);
777	if (push) {
778		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
779			/*XXX: SCALE_CTRL_ACTIVE??? */
780			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
781			evo_data(push, (oY << 16) | oX);
782			evo_data(push, (oY << 16) | oX);
783			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
784			evo_data(push, 0x00000000);
785			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
786			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
787		} else {
788			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
789			evo_data(push, (oY << 16) | oX);
790			evo_data(push, (oY << 16) | oX);
791			evo_data(push, (oY << 16) | oX);
792			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
793			evo_data(push, 0x00000000);
794			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
795			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
796		}
797
798		evo_kick(push, mast);
799
800		if (update) {
801			nv50_display_flip_stop(crtc);
802			nv50_display_flip_next(crtc, crtc->primary->fb,
803					       NULL, 1);
804		}
805	}
806
807	return 0;
808}
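
/* Worked example of the underscan math above (illustration only): for a
 * 1920x1080 output with underscan enabled and no explicit borders,
 * aspect = (1080 << 19) / 1920 = 294912 (0.5625 in 0.19 fixed point),
 * oX becomes 1920 - (1920 >> 4) - 32 = 1768, and oY is recomputed as
 * (1768 * 294912 + 147456) >> 19 = 994, i.e. roughly the same aspect
 * ratio as the full mode.
 */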
809
810static int
811nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
812{
813	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
814	u32 *push, hue, vib;
815	int adj;
816
817	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
818	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
819	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;
820
821	push = evo_wait(mast, 16);
822	if (push) {
823		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
824			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
825			evo_data(push, (hue << 20) | (vib << 8));
826		} else {
827			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
828			evo_data(push, (hue << 20) | (vib << 8));
829		}
830
831		if (update) {
832			evo_mthd(push, 0x0080, 1);
833			evo_data(push, 0x00000000);
834		}
835		evo_kick(push, mast);
836	}
837
838	return 0;
839}
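
/* For reference, with the defaults set in nv50_crtc_create() below
 * (color_vibrance = 50, vibrant_hue = 0), the math above yields
 * vib = (50 * 2047 + 50) / 100 = 1024 and hue = 0, packed as
 * (hue << 20) | (vib << 8).
 */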
840
841static int
842nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
843		    int x, int y, bool update)
844{
845	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
846	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
847	u32 *push;
848
849	push = evo_wait(mast, 16);
850	if (push) {
851		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
852			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
853			evo_data(push, nvfb->nvbo->bo.offset >> 8);
854			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
855			evo_data(push, (fb->height << 16) | fb->width);
856			evo_data(push, nvfb->r_pitch);
857			evo_data(push, nvfb->r_format);
858			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
859			evo_data(push, (y << 16) | x);
860			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
861				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
862				evo_data(push, nvfb->r_handle);
863			}
864		} else {
865			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
866			evo_data(push, nvfb->nvbo->bo.offset >> 8);
867			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
868			evo_data(push, (fb->height << 16) | fb->width);
869			evo_data(push, nvfb->r_pitch);
870			evo_data(push, nvfb->r_format);
871			evo_data(push, nvfb->r_handle);
872			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
873			evo_data(push, (y << 16) | x);
874		}
875
876		if (update) {
877			evo_mthd(push, 0x0080, 1);
878			evo_data(push, 0x00000000);
879		}
880		evo_kick(push, mast);
881	}
882
883	nv_crtc->fb.handle = nvfb->r_handle;
884	return 0;
885}
886
887static void
888nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
889{
890	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
891	u32 *push = evo_wait(mast, 16);
892	if (push) {
893		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
894			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
895			evo_data(push, 0x85000000);
896			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
897		} else
898		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
899			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
900			evo_data(push, 0x85000000);
901			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
902			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
903			evo_data(push, mast->base.vram.handle);
904		} else {
905			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
906			evo_data(push, 0x85000000);
907			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
908			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
909			evo_data(push, mast->base.vram.handle);
910		}
911		evo_kick(push, mast);
912	}
913}
914
915static void
916nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
917{
918	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
919	u32 *push = evo_wait(mast, 16);
920	if (push) {
921		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
922			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
923			evo_data(push, 0x05000000);
924		} else
925		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
926			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
927			evo_data(push, 0x05000000);
928			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
929			evo_data(push, 0x00000000);
930		} else {
931			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
932			evo_data(push, 0x05000000);
933			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
934			evo_data(push, 0x00000000);
935		}
936		evo_kick(push, mast);
937	}
938}
939
940static void
941nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
942{
943	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
944
945	if (show)
946		nv50_crtc_cursor_show(nv_crtc);
947	else
948		nv50_crtc_cursor_hide(nv_crtc);
949
950	if (update) {
951		u32 *push = evo_wait(mast, 2);
952		if (push) {
953			evo_mthd(push, 0x0080, 1);
954			evo_data(push, 0x00000000);
955			evo_kick(push, mast);
956		}
957	}
958}
959
960static void
961nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
962{
963}
964
965static void
966nv50_crtc_prepare(struct drm_crtc *crtc)
967{
968	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
969	struct nv50_mast *mast = nv50_mast(crtc->dev);
970	u32 *push;
971
972	nv50_display_flip_stop(crtc);
973
974	push = evo_wait(mast, 6);
975	if (push) {
976		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
977			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
978			evo_data(push, 0x00000000);
979			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
980			evo_data(push, 0x40000000);
981		} else
982		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
983			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
984			evo_data(push, 0x00000000);
985			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
986			evo_data(push, 0x40000000);
987			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
988			evo_data(push, 0x00000000);
989		} else {
990			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
991			evo_data(push, 0x00000000);
992			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
993			evo_data(push, 0x03000000);
994			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
995			evo_data(push, 0x00000000);
996		}
997
998		evo_kick(push, mast);
999	}
1000
1001	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
1002}
1003
1004static void
1005nv50_crtc_commit(struct drm_crtc *crtc)
1006{
1007	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1008	struct nv50_mast *mast = nv50_mast(crtc->dev);
1009	u32 *push;
1010
1011	push = evo_wait(mast, 32);
1012	if (push) {
1013		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
1014			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1015			evo_data(push, nv_crtc->fb.handle);
1016			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1017			evo_data(push, 0xc0000000);
1018			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1019		} else
1020		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1021			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
1022			evo_data(push, nv_crtc->fb.handle);
1023			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
1024			evo_data(push, 0xc0000000);
1025			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1026			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
1027			evo_data(push, mast->base.vram.handle);
1028		} else {
1029			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
1030			evo_data(push, nv_crtc->fb.handle);
1031			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
1032			evo_data(push, 0x83000000);
1033			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
1034			evo_data(push, 0x00000000);
1035			evo_data(push, 0x00000000);
1036			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
1037			evo_data(push, mast->base.vram.handle);
1038			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
1039			evo_data(push, 0xffffff00);
1040		}
1041
1042		evo_kick(push, mast);
1043	}
1044
1045	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
1046	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1047}
1048
1049static bool
1050nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
1051		     struct drm_display_mode *adjusted_mode)
1052{
1053	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
1054	return true;
1055}
1056
1057static int
1058nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1059{
1060	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1061	struct nv50_head *head = nv50_head(crtc);
1062	int ret;
1063
1064	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
1065	if (ret == 0) {
1066		if (head->image)
1067			nouveau_bo_unpin(head->image);
1068		nouveau_bo_ref(nvfb->nvbo, &head->image);
1069	}
1070
1071	return ret;
1072}
1073
1074static int
1075nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
1076		   struct drm_display_mode *mode, int x, int y,
1077		   struct drm_framebuffer *old_fb)
1078{
1079	struct nv50_mast *mast = nv50_mast(crtc->dev);
1080	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1081	struct nouveau_connector *nv_connector;
1082	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
1083	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
1084	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
1085	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
1086	u32 vblan2e = 0, vblan2s = 1;
1087	u32 *push;
1088	int ret;
1089
1090	hactive = mode->htotal;
1091	hsynce  = mode->hsync_end - mode->hsync_start - 1;
1092	hbackp  = mode->htotal - mode->hsync_end;
1093	hblanke = hsynce + hbackp;
1094	hfrontp = mode->hsync_start - mode->hdisplay;
1095	hblanks = mode->htotal - hfrontp - 1;
1096
1097	vactive = mode->vtotal * vscan / ilace;
1098	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
1099	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
1100	vblanke = vsynce + vbackp;
1101	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
1102	vblanks = vactive - vfrontp - 1;
1103	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1104		vblan2e = vactive + vsynce + vbackp;
1105		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
1106		vactive = (vactive * 2) + 1;
1107	}
1108
1109	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1110	if (ret)
1111		return ret;
1112
1113	push = evo_wait(mast, 64);
1114	if (push) {
1115		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1116			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
1117			evo_data(push, 0x00800000 | mode->clock);
1118			evo_data(push, (ilace == 2) ? 2 : 0);
1119			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
1120			evo_data(push, 0x00000000);
1121			evo_data(push, (vactive << 16) | hactive);
1122			evo_data(push, ( vsynce << 16) | hsynce);
1123			evo_data(push, (vblanke << 16) | hblanke);
1124			evo_data(push, (vblanks << 16) | hblanks);
1125			evo_data(push, (vblan2e << 16) | vblan2s);
1126			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
1127			evo_data(push, 0x00000000);
1128			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
1129			evo_data(push, 0x00000311);
1130			evo_data(push, 0x00000100);
1131		} else {
1132			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
1133			evo_data(push, 0x00000000);
1134			evo_data(push, (vactive << 16) | hactive);
1135			evo_data(push, ( vsynce << 16) | hsynce);
1136			evo_data(push, (vblanke << 16) | hblanke);
1137			evo_data(push, (vblanks << 16) | hblanks);
1138			evo_data(push, (vblan2e << 16) | vblan2s);
1139			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
1140			evo_data(push, 0x00000000); /* ??? */
1141			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
1142			evo_data(push, mode->clock * 1000);
1143			evo_data(push, 0x00200000); /* ??? */
1144			evo_data(push, mode->clock * 1000);
1145			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
1146			evo_data(push, 0x00000311);
1147			evo_data(push, 0x00000100);
1148		}
1149
1150		evo_kick(push, mast);
1151	}
1152
1153	nv_connector = nouveau_crtc_connector_get(nv_crtc);
1154	nv50_crtc_set_dither(nv_crtc, false);
1155	nv50_crtc_set_scale(nv_crtc, false);
1156	nv50_crtc_set_color_vibrance(nv_crtc, false);
1157	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
1158	return 0;
1159}
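
/* Worked example of the timing derivation above (illustration only),
 * using the standard CEA 1080p60 mode (1920 2008 2052 2200 /
 * 1080 1084 1089 1125, progressive, no doublescan):
 *	hactive = 2200, hsynce = 43, hbackp = 148, hblanke = 191,
 *	hfrontp = 88, hblanks = 2111,
 *	vactive = 1125, vsynce = 4, vbackp = 36, vblanke = 40,
 *	vfrontp = 4, vblanks = 1120, vblan2e = 0, vblan2s = 1.
 */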
1160
1161static int
1162nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1163			struct drm_framebuffer *old_fb)
1164{
1165	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1166	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1167	int ret;
1168
1169	if (!crtc->primary->fb) {
1170		NV_DEBUG(drm, "No FB bound\n");
1171		return 0;
1172	}
1173
1174	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1175	if (ret)
1176		return ret;
1177
1178	nv50_display_flip_stop(crtc);
1179	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
1180	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
1181	return 0;
1182}
1183
1184static int
1185nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1186			       struct drm_framebuffer *fb, int x, int y,
1187			       enum mode_set_atomic state)
1188{
1189	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1190	nv50_display_flip_stop(crtc);
1191	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
1192	return 0;
1193}
1194
1195static void
1196nv50_crtc_lut_load(struct drm_crtc *crtc)
1197{
1198	struct nv50_disp *disp = nv50_disp(crtc->dev);
1199	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1200	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1201	int i;
1202
1203	for (i = 0; i < 256; i++) {
1204		u16 r = nv_crtc->lut.r[i] >> 2;
1205		u16 g = nv_crtc->lut.g[i] >> 2;
1206		u16 b = nv_crtc->lut.b[i] >> 2;
1207
1208		if (disp->disp->oclass < NVD0_DISP_CLASS) {
1209			writew(r + 0x0000, lut + (i * 0x08) + 0);
1210			writew(g + 0x0000, lut + (i * 0x08) + 2);
1211			writew(b + 0x0000, lut + (i * 0x08) + 4);
1212		} else {
1213			writew(r + 0x6000, lut + (i * 0x20) + 0);
1214			writew(g + 0x6000, lut + (i * 0x20) + 2);
1215			writew(b + 0x6000, lut + (i * 0x20) + 4);
1216		}
1217	}
1218}
1219
1220static void
1221nv50_crtc_disable(struct drm_crtc *crtc)
1222{
1223	struct nv50_head *head = nv50_head(crtc);
1224	evo_sync(crtc->dev);
1225	if (head->image)
1226		nouveau_bo_unpin(head->image);
1227	nouveau_bo_ref(NULL, &head->image);
1228}
1229
1230static int
1231nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1232		     uint32_t handle, uint32_t width, uint32_t height)
1233{
1234	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1235	struct drm_device *dev = crtc->dev;
1236	struct drm_gem_object *gem;
1237	struct nouveau_bo *nvbo;
1238	bool visible = (handle != 0);
1239	int i, ret = 0;
1240
1241	if (visible) {
1242		if (width != 64 || height != 64)
1243			return -EINVAL;
1244
1245		gem = drm_gem_object_lookup(dev, file_priv, handle);
1246		if (unlikely(!gem))
1247			return -ENOENT;
1248		nvbo = nouveau_gem_object(gem);
1249
1250		ret = nouveau_bo_map(nvbo);
1251		if (ret == 0) {
1252			for (i = 0; i < 64 * 64; i++) {
1253				u32 v = nouveau_bo_rd32(nvbo, i);
1254				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
1255			}
1256			nouveau_bo_unmap(nvbo);
1257		}
1258
1259		drm_gem_object_unreference_unlocked(gem);
1260	}
1261
1262	if (visible != nv_crtc->cursor.visible) {
1263		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
1264		nv_crtc->cursor.visible = visible;
1265	}
1266
1267	return ret;
1268}
1269
1270static int
1271nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1272{
1273	struct nv50_curs *curs = nv50_curs(crtc);
1274	struct nv50_chan *chan = nv50_chan(curs);
1275	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
1276	nvif_wr32(&chan->user, 0x0080, 0x00000000);
1277	return 0;
1278}
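
/* The cursor channel is one of the PIO (immediate) channels: there is no
 * pushbuf here; the position is written straight to its 0x0084 register as
 * (y << 16) | x, and the follow-up write to 0x0080 appears to latch the
 * update.
 */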
1279
1280static void
1281nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1282		    uint32_t start, uint32_t size)
1283{
1284	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1285	u32 end = min_t(u32, start + size, 256);
1286	u32 i;
1287
1288	for (i = start; i < end; i++) {
1289		nv_crtc->lut.r[i] = r[i];
1290		nv_crtc->lut.g[i] = g[i];
1291		nv_crtc->lut.b[i] = b[i];
1292	}
1293
1294	nv50_crtc_lut_load(crtc);
1295}
1296
1297static void
1298nv50_crtc_destroy(struct drm_crtc *crtc)
1299{
1300	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1301	struct nv50_disp *disp = nv50_disp(crtc->dev);
1302	struct nv50_head *head = nv50_head(crtc);
1303	struct nv50_fbdma *fbdma;
1304
1305	list_for_each_entry(fbdma, &disp->fbdma, head) {
1306		nvif_object_fini(&fbdma->base[nv_crtc->index]);
1307	}
1308
1309	nv50_dmac_destroy(&head->ovly.base, disp->disp);
1310	nv50_pioc_destroy(&head->oimm.base);
1311	nv50_dmac_destroy(&head->sync.base, disp->disp);
1312	nv50_pioc_destroy(&head->curs.base);
1313
1314	/*XXX: this shouldn't be necessary, but the core doesn't call
1315	 *     disconnect() during the cleanup paths
1316	 */
1317	if (head->image)
1318		nouveau_bo_unpin(head->image);
1319	nouveau_bo_ref(NULL, &head->image);
1320
1321	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
1322	if (nv_crtc->cursor.nvbo)
1323		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
1324	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1325
1326	nouveau_bo_unmap(nv_crtc->lut.nvbo);
1327	if (nv_crtc->lut.nvbo)
1328		nouveau_bo_unpin(nv_crtc->lut.nvbo);
1329	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1330
1331	drm_crtc_cleanup(crtc);
1332	kfree(crtc);
1333}
1334
1335static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
1336	.dpms = nv50_crtc_dpms,
1337	.prepare = nv50_crtc_prepare,
1338	.commit = nv50_crtc_commit,
1339	.mode_fixup = nv50_crtc_mode_fixup,
1340	.mode_set = nv50_crtc_mode_set,
1341	.mode_set_base = nv50_crtc_mode_set_base,
1342	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
1343	.load_lut = nv50_crtc_lut_load,
1344	.disable = nv50_crtc_disable,
1345};
1346
1347static const struct drm_crtc_funcs nv50_crtc_func = {
1348	.cursor_set = nv50_crtc_cursor_set,
1349	.cursor_move = nv50_crtc_cursor_move,
1350	.gamma_set = nv50_crtc_gamma_set,
1351	.set_config = nouveau_crtc_set_config,
1352	.destroy = nv50_crtc_destroy,
1353	.page_flip = nouveau_crtc_page_flip,
1354};
1355
1356static void
1357nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
1358{
1359}
1360
1361static void
1362nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
1363{
1364}
1365
1366static int
1367nv50_crtc_create(struct drm_device *dev, int index)
1368{
1369	struct nv50_disp *disp = nv50_disp(dev);
1370	struct nv50_head *head;
1371	struct drm_crtc *crtc;
1372	int ret, i;
1373
1374	head = kzalloc(sizeof(*head), GFP_KERNEL);
1375	if (!head)
1376		return -ENOMEM;
1377
1378	head->base.index = index;
1379	head->base.set_dither = nv50_crtc_set_dither;
1380	head->base.set_scale = nv50_crtc_set_scale;
1381	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
1382	head->base.color_vibrance = 50;
1383	head->base.vibrant_hue = 0;
1384	head->base.cursor.set_offset = nv50_cursor_set_offset;
1385	head->base.cursor.set_pos = nv50_cursor_set_pos;
1386	for (i = 0; i < 256; i++) {
1387		head->base.lut.r[i] = i << 8;
1388		head->base.lut.g[i] = i << 8;
1389		head->base.lut.b[i] = i << 8;
1390	}
1391
1392	crtc = &head->base.base;
1393	drm_crtc_init(dev, crtc, &nv50_crtc_func);
1394	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1395	drm_mode_crtc_set_gamma_size(crtc, 256);
1396
1397	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1398			     0, 0x0000, NULL, &head->base.lut.nvbo);
1399	if (!ret) {
1400		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1401		if (!ret) {
1402			ret = nouveau_bo_map(head->base.lut.nvbo);
1403			if (ret)
1404				nouveau_bo_unpin(head->base.lut.nvbo);
1405		}
1406		if (ret)
1407			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1408	}
1409
1410	if (ret)
1411		goto out;
1412
1413	nv50_crtc_lut_load(crtc);
1414
1415	/* allocate cursor resources */
1416	ret = nv50_curs_create(disp->disp, index, &head->curs);
1417	if (ret)
1418		goto out;
1419
1420	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1421			     0, 0x0000, NULL, &head->base.cursor.nvbo);
1422	if (!ret) {
1423		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1424		if (!ret) {
1425			ret = nouveau_bo_map(head->base.cursor.nvbo);
1426			if (ret)
1427				nouveau_bo_unpin(head->base.cursor.nvbo);
1428		}
1429		if (ret)
1430			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1431	}
1432
1433	if (ret)
1434		goto out;
1435
1436	/* allocate page flip / sync resources */
1437	ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset,
1438			      &head->sync);
1439	if (ret)
1440		goto out;
1441
1442	head->sync.addr = EVO_FLIP_SEM0(index);
1443	head->sync.data = 0x00000000;
1444
1445	/* allocate overlay resources */
1446	ret = nv50_oimm_create(disp->disp, index, &head->oimm);
1447	if (ret)
1448		goto out;
1449
1450	ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset,
1451			      &head->ovly);
1452	if (ret)
1453		goto out;
1454
1455out:
1456	if (ret)
1457		nv50_crtc_destroy(crtc);
1458	return ret;
1459}
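
/* Each head therefore owns four EVO channels: cursor-immediate and
 * overlay-immediate PIO channels, plus "sync" (base/page-flip) and overlay
 * DMA channels, the latter two sharing the display's sync bo for their
 * semaphores/notifiers.  Failure anywhere above funnels into
 * nv50_crtc_destroy(), which also tears down partially-created state.
 */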
1460
1461/******************************************************************************
1462 * DAC
1463 *****************************************************************************/
1464static void
1465nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1466{
1467	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1468	struct nv50_disp *disp = nv50_disp(encoder->dev);
1469	struct {
1470		struct nv50_disp_mthd_v1 base;
1471		struct nv50_disp_dac_pwr_v0 pwr;
1472	} args = {
1473		.base.version = 1,
1474		.base.method = NV50_DISP_MTHD_V1_DAC_PWR,
1475		.base.hasht  = nv_encoder->dcb->hasht,
1476		.base.hashm  = nv_encoder->dcb->hashm,
1477		.pwr.state = 1,
1478		.pwr.data  = 1,
1479		.pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND &&
1480			      mode != DRM_MODE_DPMS_OFF),
1481		.pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY &&
1482			      mode != DRM_MODE_DPMS_OFF),
1483	};
1484
1485	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1486}
1487
1488static bool
1489nv50_dac_mode_fixup(struct drm_encoder *encoder,
1490		    const struct drm_display_mode *mode,
1491		    struct drm_display_mode *adjusted_mode)
1492{
1493	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1494	struct nouveau_connector *nv_connector;
1495
1496	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1497	if (nv_connector && nv_connector->native_mode) {
1498		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1499			int id = adjusted_mode->base.id;
1500			*adjusted_mode = *nv_connector->native_mode;
1501			adjusted_mode->base.id = id;
1502		}
1503	}
1504
1505	return true;
1506}
1507
1508static void
1509nv50_dac_commit(struct drm_encoder *encoder)
1510{
1511}
1512
1513static void
1514nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1515		  struct drm_display_mode *adjusted_mode)
1516{
1517	struct nv50_mast *mast = nv50_mast(encoder->dev);
1518	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1519	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1520	u32 *push;
1521
1522	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1523
1524	push = evo_wait(mast, 8);
1525	if (push) {
1526		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1527			u32 syncs = 0x00000000;
1528
1529			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1530				syncs |= 0x00000001;
1531			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1532				syncs |= 0x00000002;
1533
1534			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
1535			evo_data(push, 1 << nv_crtc->index);
1536			evo_data(push, syncs);
1537		} else {
1538			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1539			u32 syncs = 0x00000001;
1540
1541			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1542				syncs |= 0x00000008;
1543			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1544				syncs |= 0x00000010;
1545
1546			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1547				magic |= 0x00000001;
1548
1549			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1550			evo_data(push, syncs);
1551			evo_data(push, magic);
1552			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
1553			evo_data(push, 1 << nv_crtc->index);
1554		}
1555
1556		evo_kick(push, mast);
1557	}
1558
1559	nv_encoder->crtc = encoder->crtc;
1560}
1561
1562static void
1563nv50_dac_disconnect(struct drm_encoder *encoder)
1564{
1565	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1566	struct nv50_mast *mast = nv50_mast(encoder->dev);
1567	const int or = nv_encoder->or;
1568	u32 *push;
1569
1570	if (nv_encoder->crtc) {
1571		nv50_crtc_prepare(nv_encoder->crtc);
1572
1573		push = evo_wait(mast, 4);
1574		if (push) {
1575			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1576				evo_mthd(push, 0x0400 + (or * 0x080), 1);
1577				evo_data(push, 0x00000000);
1578			} else {
1579				evo_mthd(push, 0x0180 + (or * 0x020), 1);
1580				evo_data(push, 0x00000000);
1581			}
1582			evo_kick(push, mast);
1583		}
1584	}
1585
1586	nv_encoder->crtc = NULL;
1587}
1588
1589static enum drm_connector_status
1590nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1591{
1592	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1593	struct nv50_disp *disp = nv50_disp(encoder->dev);
1594	struct {
1595		struct nv50_disp_mthd_v1 base;
1596		struct nv50_disp_dac_load_v0 load;
1597	} args = {
1598		.base.version = 1,
1599		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
1600		.base.hasht  = nv_encoder->dcb->hasht,
1601		.base.hashm  = nv_encoder->dcb->hashm,
1602	};
1603	int ret;
1604
1605	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
1606	if (args.load.data == 0)
1607		args.load.data = 340;
1608
1609	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
1610	if (ret || !args.load.load)
1611		return connector_status_disconnected;
1612
1613	return connector_status_connected;
1614}
1615
1616static void
1617nv50_dac_destroy(struct drm_encoder *encoder)
1618{
1619	drm_encoder_cleanup(encoder);
1620	kfree(encoder);
1621}
1622
1623static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
1624	.dpms = nv50_dac_dpms,
1625	.mode_fixup = nv50_dac_mode_fixup,
1626	.prepare = nv50_dac_disconnect,
1627	.commit = nv50_dac_commit,
1628	.mode_set = nv50_dac_mode_set,
1629	.disable = nv50_dac_disconnect,
1630	.get_crtc = nv50_display_crtc_get,
1631	.detect = nv50_dac_detect
1632};
1633
1634static const struct drm_encoder_funcs nv50_dac_func = {
1635	.destroy = nv50_dac_destroy,
1636};
1637
1638static int
1639nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1640{
1641	struct nouveau_drm *drm = nouveau_drm(connector->dev);
1642	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
1643	struct nouveau_encoder *nv_encoder;
1644	struct drm_encoder *encoder;
1645	int type = DRM_MODE_ENCODER_DAC;
1646
1647	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1648	if (!nv_encoder)
1649		return -ENOMEM;
1650	nv_encoder->dcb = dcbe;
1651	nv_encoder->or = ffs(dcbe->or) - 1;
1652	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
1653
1654	encoder = to_drm_encoder(nv_encoder);
1655	encoder->possible_crtcs = dcbe->heads;
1656	encoder->possible_clones = 0;
1657	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
1658	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);
1659
1660	drm_mode_connector_attach_encoder(connector, encoder);
1661	return 0;
1662}
1663
1664/******************************************************************************
1665 * Audio
1666 *****************************************************************************/
1667static void
1668nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1669{
1670	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1671	struct nouveau_connector *nv_connector;
1672	struct nv50_disp *disp = nv50_disp(encoder->dev);
1673	struct {
1674		struct nv50_disp_mthd_v1 base;
1675		struct nv50_disp_sor_hda_eld_v0 eld;
1676		u8 data[sizeof(nv_connector->base.eld)];
1677	} args = {
1678		.base.version = 1,
1679		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
1680		.base.hasht   = nv_encoder->dcb->hasht,
1681		.base.hashm   = nv_encoder->dcb->hashm,
1682	};
1683
1684	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1685	if (!drm_detect_monitor_audio(nv_connector->edid))
1686		return;
1687
1688	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1689	memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
1690
1691	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1692}
1693
1694static void
1695nv50_audio_disconnect(struct drm_encoder *encoder)
1696{
1697	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1698	struct nv50_disp *disp = nv50_disp(encoder->dev);
1699	struct {
1700		struct nv50_disp_mthd_v1 base;
1701		struct nv50_disp_sor_hda_eld_v0 eld;
1702	} args = {
1703		.base.version = 1,
1704		.base.method  = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
1705		.base.hasht   = nv_encoder->dcb->hasht,
1706		.base.hashm   = nv_encoder->dcb->hashm,
1707	};
1708
1709	nvif_mthd(disp->disp, 0, &args, sizeof(args));
1710}
1711
1712/******************************************************************************
1713 * HDMI
1714 *****************************************************************************/
1715static void
1716nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1717{
1718	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1719	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1720	struct nouveau_connector *nv_connector;
1721	struct nv50_disp *disp = nv50_disp(encoder->dev);
1722	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1723	u32 rekey = 56; /* binary driver, and tegra constant */
1724	u32 max_ac_packet;
1725	u32 data;
1726
1727	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1728	if (!drm_detect_hdmi_monitor(nv_connector->edid))
1729		return;
1730
1731	max_ac_packet  = mode->htotal - mode->hdisplay;
1732	max_ac_packet -= rekey;
1733	max_ac_packet -= 18; /* constant from tegra */
1734	max_ac_packet /= 32;
1735
1736	data = NV84_DISP_SOR_HDMI_PWR_STATE_ON | (max_ac_packet << 16) | rekey;
1737	nvif_exec(disp->disp, NV84_DISP_SOR_HDMI_PWR + moff, &data, sizeof(data));
1738
1739	nv50_audio_mode_set(encoder, mode);
1740}
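
/* Worked example of the header value above (illustration only): for CEA
 * 1080p60, htotal - hdisplay = 280, so max_ac_packet = (280 - 56 - 18) / 32
 * = 6 and data becomes PWR_STATE_ON | (6 << 16) | 56.
 */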
1741
1742static void
1743nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
1744{
1745	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1746	struct nv50_disp *disp = nv50_disp(encoder->dev);
1747	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1748	u32 data = 0;
1749
1750	nv50_audio_disconnect(encoder);
1751
1752	nvif_exec(disp->disp, NV84_DISP_SOR_HDMI_PWR + moff, &data, sizeof(data));
1753}
1754
1755/******************************************************************************
1756 * SOR
1757 *****************************************************************************/
1758static void
1759nv50_sor_dpms(struct drm_encoder *encoder, int mode)
1760{
1761	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1762	struct nv50_disp *disp = nv50_disp(encoder->dev);
1763	struct {
1764		struct nv50_disp_mthd_v1 base;
1765		struct nv50_disp_sor_pwr_v0 pwr;
1766	} args = {
1767		.base.version = 1,
1768		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
1769		.base.hasht  = nv_encoder->dcb->hasht,
1770		.base.hashm  = nv_encoder->dcb->hashm,
1771		.pwr.state = mode == DRM_MODE_DPMS_ON,
1772	};
1773	struct drm_device *dev = encoder->dev;
1774	struct drm_encoder *partner;
1775	u32 mthd, data;
1776
1777	nv_encoder->last_dpms = mode;
1778
1779	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1780		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1781
1782		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1783			continue;
1784
1785		if (nv_partner != nv_encoder &&
1786		    nv_partner->dcb->or == nv_encoder->dcb->or) {
1787			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1788				return;
1789			break;
1790		}
1791	}
1792
1793	mthd  = (ffs(nv_encoder->dcb->heads) - 1) << 3;
1794	mthd |= (ffs(nv_encoder->dcb->sorconf.link) - 1) << 2;
1795	mthd |= nv_encoder->or;
1796
1797	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1798		args.pwr.state = 1;
1799		nvif_mthd(disp->disp, 0, &args, sizeof(args));
1800		data  = (mode == DRM_MODE_DPMS_ON);
1801		mthd |= NV94_DISP_SOR_DP_PWR;
1802		nvif_exec(disp->disp, mthd, &data, sizeof(data));
1803	} else {
1804		nvif_mthd(disp->disp, 0, &args, sizeof(args));
1805	}
1806}
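
/* The partner walk above handles the case where another TMDS encoder is
 * routed to the same OR: if that encoder is still DPMS_ON we return early
 * rather than touch the shared SOR's power state underneath it.
 */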
1807
1808static bool
1809nv50_sor_mode_fixup(struct drm_encoder *encoder,
1810		    const struct drm_display_mode *mode,
1811		    struct drm_display_mode *adjusted_mode)
1812{
1813	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1814	struct nouveau_connector *nv_connector;
1815
1816	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1817	if (nv_connector && nv_connector->native_mode) {
1818		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1819			int id = adjusted_mode->base.id;
1820			*adjusted_mode = *nv_connector->native_mode;
1821			adjusted_mode->base.id = id;
1822		}
1823	}
1824
1825	return true;
1826}
1827
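/* Update the cached SOR control word and, if it changed, push the new value
 * through the core channel (method 0x0600 + or*0x40 before GF110, 0x0200 +
 * or*0x20 on GF110 and newer).
 */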
1828static void
1829nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
1830{
1831	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
1832	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
1833	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
1834		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
1835			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
1836			evo_data(push, (nv_encoder->ctrl = temp));
1837		} else {
1838			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1839			evo_data(push, (nv_encoder->ctrl = temp));
1840		}
1841		evo_kick(push, mast);
1842	}
1843}
1844
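/* Detach the SOR from its CRTC: prepare (blank) the head, clear this head's
 * bit from the OR control word, and shut down any HDMI state.
 */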
1845static void
1846nv50_sor_disconnect(struct drm_encoder *encoder)
1847{
1848	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1849	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1850
1851	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1852	nv_encoder->crtc = NULL;
1853
1854	if (nv_crtc) {
1855		nv50_crtc_prepare(&nv_crtc->base);
1856		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
1857		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
1858	}
1859}
1860
1861static void
1862nv50_sor_commit(struct drm_encoder *encoder)
1863{
1864}
1865
1866static void
1867nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1868		  struct drm_display_mode *mode)
1869{
1870	struct nv50_disp *disp = nv50_disp(encoder->dev);
1871	struct nv50_mast *mast = nv50_mast(encoder->dev);
1872	struct drm_device *dev = encoder->dev;
1873	struct nouveau_drm *drm = nouveau_drm(dev);
1874	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1875	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1876	struct nouveau_connector *nv_connector;
1877	struct nvbios *bios = &drm->vbios;
1878	u32 lvds = 0, mask, ctrl;
1879	u8 owner = 1 << nv_crtc->index;
1880	u8 proto = 0xf;
1881	u8 depth = 0x0;
1882
1883	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1884	nv_encoder->crtc = encoder->crtc;
1885
1886	switch (nv_encoder->dcb->type) {
1887	case DCB_OUTPUT_TMDS:
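		/* Single-link TMDS tops out at a 165MHz pixel clock; above
		 * that the dual-link protocol is selected instead.
		 */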
1888		if (nv_encoder->dcb->sorconf.link & 1) {
1889			if (mode->clock < 165000)
1890				proto = 0x1;
1891			else
1892				proto = 0x5;
1893		} else {
1894			proto = 0x2;
1895		}
1896
1897		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
1898		break;
1899	case DCB_OUTPUT_LVDS:
1900		proto = 0x0;
1901
1902		if (bios->fp_no_ddc) {
1903			if (bios->fp.dual_link)
1904				lvds |= 0x0100;
1905			if (bios->fp.if_is_24bit)
1906				lvds |= 0x0200;
1907		} else {
1908			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1909				if (((u8 *)nv_connector->edid)[121] == 2)
1910					lvds |= 0x0100;
1911			} else
1912			if (mode->clock >= bios->fp.duallink_transition_clk) {
1913				lvds |= 0x0100;
1914			}
1915
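			/* Select an 18- or 24-bit panel interface from the
			 * VBIOS straps (separate strap bits for single- and
			 * dual-link), or from the EDID-reported bpc.
			 */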
1916			if (lvds & 0x0100) {
1917				if (bios->fp.strapless_is_24bit & 2)
1918					lvds |= 0x0200;
1919			} else {
1920				if (bios->fp.strapless_is_24bit & 1)
1921					lvds |= 0x0200;
1922			}
1923
1924			if (nv_connector->base.display_info.bpc == 8)
1925				lvds |= 0x0200;
1926		}
1927
1928		nvif_exec(disp->disp, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or,
			  &lvds, sizeof(lvds));
1929		break;
1930	case DCB_OUTPUT_DP:
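		/* Link bandwidth required for the mode: pixel clock scaled
		 * by bits-per-pixel / 8.
		 */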
1931		if (nv_connector->base.display_info.bpc == 6) {
1932			nv_encoder->dp.datarate = mode->clock * 18 / 8;
1933			depth = 0x2;
1934		} else
1935		if (nv_connector->base.display_info.bpc == 8) {
1936			nv_encoder->dp.datarate = mode->clock * 24 / 8;
1937			depth = 0x5;
1938		} else {
1939			nv_encoder->dp.datarate = mode->clock * 30 / 8;
1940			depth = 0x6;
1941		}
1942
1943		if (nv_encoder->dcb->sorconf.link & 1)
1944			proto = 0x8;
1945		else
1946			proto = 0x9;
1947		break;
1948	default:
1949		BUG_ON(1);
1950		break;
1951	}
1952
1953	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);
1954
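	/* GF110 and newer take sync polarity, depth and interlace via a
	 * per-head method; older cores pack the sync flags and depth into
	 * the SOR control word instead.
	 */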
1955	if (nv50_vers(mast) >= NVD0_DISP_CLASS) {
1956		u32 *push = evo_wait(mast, 3);
1957		if (push) {
1958			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1959			u32 syncs = 0x00000001;
1960
1961			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1962				syncs |= 0x00000008;
1963			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1964				syncs |= 0x00000010;
1965
1966			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1967				magic |= 0x00000001;
1968
1969			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1970			evo_data(push, syncs | (depth << 6));
1971			evo_data(push, magic);
1972			evo_kick(push, mast);
1973		}
1974
1975		ctrl = proto << 8;
1976		mask = 0x00000f00;
1977	} else {
1978		ctrl = (depth << 16) | (proto << 8);
1979		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1980			ctrl |= 0x00001000;
1981		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1982			ctrl |= 0x00002000;
1983		mask = 0x000f3f00;
1984	}
1985
1986	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
1987}
1988
1989static void
1990nv50_sor_destroy(struct drm_encoder *encoder)
1991{
1992	drm_encoder_cleanup(encoder);
1993	kfree(encoder);
1994}
1995
1996static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
1997	.dpms = nv50_sor_dpms,
1998	.mode_fixup = nv50_sor_mode_fixup,
1999	.prepare = nv50_sor_disconnect,
2000	.commit = nv50_sor_commit,
2001	.mode_set = nv50_sor_mode_set,
2002	.disable = nv50_sor_disconnect,
2003	.get_crtc = nv50_display_crtc_get,
2004};
2005
2006static const struct drm_encoder_funcs nv50_sor_func = {
2007	.destroy = nv50_sor_destroy,
2008};
2009
2010static int
2011nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
2012{
2013	struct nouveau_drm *drm = nouveau_drm(connector->dev);
2014	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
2015	struct nouveau_encoder *nv_encoder;
2016	struct drm_encoder *encoder;
2017	int type;
2018
2019	switch (dcbe->type) {
2020	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
2021	case DCB_OUTPUT_TMDS:
2022	case DCB_OUTPUT_DP:
2023	default:
2024		type = DRM_MODE_ENCODER_TMDS;
2025		break;
2026	}
2027
2028	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2029	if (!nv_encoder)
2030		return -ENOMEM;
2031	nv_encoder->dcb = dcbe;
2032	nv_encoder->or = ffs(dcbe->or) - 1;
2033	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
2034	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
2035
2036	encoder = to_drm_encoder(nv_encoder);
2037	encoder->possible_crtcs = dcbe->heads;
2038	encoder->possible_clones = 0;
2039	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
2040	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);
2041
2042	drm_mode_connector_attach_encoder(connector, encoder);
2043	return 0;
2044}
2045
2046/******************************************************************************
2047 * PIOR
2048 *****************************************************************************/
2049
2050static void
2051nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2052{
2053	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2054	struct nv50_disp *disp = nv50_disp(encoder->dev);
2055	u32 mthd = (nv_encoder->dcb->type << 12) | nv_encoder->or;
2056	u32 ctrl = (mode == DRM_MODE_DPMS_ON);
2057	nvif_exec(disp->disp, NV50_DISP_PIOR_PWR + mthd, &ctrl, sizeof(ctrl));
2058}
2059
2060static bool
2061nv50_pior_mode_fixup(struct drm_encoder *encoder,
2062		     const struct drm_display_mode *mode,
2063		     struct drm_display_mode *adjusted_mode)
2064{
2065	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2066	struct nouveau_connector *nv_connector;
2067
2068	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2069	if (nv_connector && nv_connector->native_mode) {
2070		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
2071			int id = adjusted_mode->base.id;
2072			*adjusted_mode = *nv_connector->native_mode;
2073			adjusted_mode->base.id = id;
2074		}
2075	}
2076
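	/* External encoders are driven at twice the pixel clock. */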
2077	adjusted_mode->clock *= 2;
2078	return true;
2079}
2080
2081static void
2082nv50_pior_commit(struct drm_encoder *encoder)
2083{
2084}
2085
2086static void
2087nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2088		   struct drm_display_mode *adjusted_mode)
2089{
2090	struct nv50_mast *mast = nv50_mast(encoder->dev);
2091	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2092	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2093	struct nouveau_connector *nv_connector;
2094	u8 owner = 1 << nv_crtc->index;
2095	u8 proto, depth;
2096	u32 *push;
2097
2098	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2099	switch (nv_connector->base.display_info.bpc) {
2100	case 10: depth = 0x6; break;
2101	case  8: depth = 0x5; break;
2102	case  6: depth = 0x2; break;
2103	default: depth = 0x0; break;
2104	}
2105
2106	switch (nv_encoder->dcb->type) {
2107	case DCB_OUTPUT_TMDS:
2108	case DCB_OUTPUT_DP:
2109		proto = 0x0;
2110		break;
2111	default:
2112		BUG_ON(1);
2113		break;
2114	}
2115
2116	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2117
2118	push = evo_wait(mast, 8);
2119	if (push) {
2120		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2121			u32 ctrl = (depth << 16) | (proto << 8) | owner;
2122			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2123				ctrl |= 0x00001000;
2124			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2125				ctrl |= 0x00002000;
2126			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2127			evo_data(push, ctrl);
2128		}
2129
2130		evo_kick(push, mast);
2131	}
2132
2133	nv_encoder->crtc = encoder->crtc;
2134}
2135
2136static void
2137nv50_pior_disconnect(struct drm_encoder *encoder)
2138{
2139	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2140	struct nv50_mast *mast = nv50_mast(encoder->dev);
2141	const int or = nv_encoder->or;
2142	u32 *push;
2143
2144	if (nv_encoder->crtc) {
2145		nv50_crtc_prepare(nv_encoder->crtc);
2146
2147		push = evo_wait(mast, 4);
2148		if (push) {
2149			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2150				evo_mthd(push, 0x0700 + (or * 0x040), 1);
2151				evo_data(push, 0x00000000);
2152			}
2153			evo_kick(push, mast);
2154		}
2155	}
2156
2157	nv_encoder->crtc = NULL;
2158}
2159
2160static void
2161nv50_pior_destroy(struct drm_encoder *encoder)
2162{
2163	drm_encoder_cleanup(encoder);
2164	kfree(encoder);
2165}
2166
2167static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
2168	.dpms = nv50_pior_dpms,
2169	.mode_fixup = nv50_pior_mode_fixup,
2170	.prepare = nv50_pior_disconnect,
2171	.commit = nv50_pior_commit,
2172	.mode_set = nv50_pior_mode_set,
2173	.disable = nv50_pior_disconnect,
2174	.get_crtc = nv50_display_crtc_get,
2175};
2176
2177static const struct drm_encoder_funcs nv50_pior_func = {
2178	.destroy = nv50_pior_destroy,
2179};
2180
2181static int
2182nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
2183{
2184	struct nouveau_drm *drm = nouveau_drm(connector->dev);
2185	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
2186	struct nouveau_i2c_port *ddc = NULL;
2187	struct nouveau_encoder *nv_encoder;
2188	struct drm_encoder *encoder;
2189	int type;
2190
2191	switch (dcbe->type) {
2192	case DCB_OUTPUT_TMDS:
2193		ddc  = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
2194		type = DRM_MODE_ENCODER_TMDS;
2195		break;
2196	case DCB_OUTPUT_DP:
2197		ddc  = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
2198		type = DRM_MODE_ENCODER_TMDS;
2199		break;
2200	default:
2201		return -ENODEV;
2202	}
2203
2204	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2205	if (!nv_encoder)
2206		return -ENOMEM;
2207	nv_encoder->dcb = dcbe;
2208	nv_encoder->or = ffs(dcbe->or) - 1;
2209	nv_encoder->i2c = ddc;
2210
2211	encoder = to_drm_encoder(nv_encoder);
2212	encoder->possible_crtcs = dcbe->heads;
2213	encoder->possible_clones = 0;
2214	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
2215	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
2216
2217	drm_mode_connector_attach_encoder(connector, encoder);
2218	return 0;
2219}
2220
2221/******************************************************************************
2222 * Framebuffer
2223 *****************************************************************************/
2224
2225static void
2226nv50_fbdma_fini(struct nv50_fbdma *fbdma)
2227{
2228	int i;
2229	for (i = 0; i < ARRAY_SIZE(fbdma->base); i++)
2230		nvif_object_fini(&fbdma->base[i]);
2231	nvif_object_fini(&fbdma->core);
2232	list_del(&fbdma->head);
2233	kfree(fbdma);
2234}
2235
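/* Create a DMA object covering the framebuffer's VRAM and bind it to the
 * core channel and to each head's sync channel; the class-specific argument
 * layout depends on the chipset generation.  An existing set is reused if
 * one with the same handle was already created.
 */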
2236static int
2237nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind)
2238{
2239	struct nouveau_drm *drm = nouveau_drm(dev);
2240	struct nv50_disp *disp = nv50_disp(dev);
2241	struct nv50_mast *mast = nv50_mast(dev);
2242	struct __attribute__ ((packed)) {
2243		struct nv_dma_v0 base;
2244		union {
2245			struct nv50_dma_v0 nv50;
2246			struct gf100_dma_v0 gf100;
2247			struct gf110_dma_v0 gf110;
2248		};
2249	} args = {};
2250	struct nv50_fbdma *fbdma;
2251	struct drm_crtc *crtc;
2252	u32 size = sizeof(args.base);
2253	int ret;
2254
2255	list_for_each_entry(fbdma, &disp->fbdma, head) {
2256		if (fbdma->core.handle == name)
2257			return 0;
2258	}
2259
2260	fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL);
2261	if (!fbdma)
2262		return -ENOMEM;
2263	list_add(&fbdma->head, &disp->fbdma);
2264
2265	args.base.target = NV_DMA_V0_TARGET_VRAM;
2266	args.base.access = NV_DMA_V0_ACCESS_RDWR;
2267	args.base.start = offset;
2268	args.base.limit = offset + length - 1;
2269
2270	if (drm->device.info.chipset < 0x80) {
2271		args.nv50.part = NV50_DMA_V0_PART_256;
2272		size += sizeof(args.nv50);
2273	} else
2274	if (drm->device.info.chipset < 0xc0) {
2275		args.nv50.part = NV50_DMA_V0_PART_256;
2276		args.nv50.kind = kind;
2277		size += sizeof(args.nv50);
2278	} else
2279	if (drm->device.info.chipset < 0xd0) {
2280		args.gf100.kind = kind;
2281		size += sizeof(args.gf100);
2282	} else {
2283		args.gf110.page = GF110_DMA_V0_PAGE_LP;
2284		args.gf110.kind = kind;
2285		size += sizeof(args.gf110);
2286	}
2287
2288	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2289		struct nv50_head *head = nv50_head(crtc);
2290		int ret = nvif_object_init(&head->sync.base.base.user, NULL,
2291					    name, NV_DMA_IN_MEMORY, &args, size,
2292					   &fbdma->base[head->base.index]);
2293		if (ret) {
2294			nv50_fbdma_fini(fbdma);
2295			return ret;
2296		}
2297	}
2298
2299	ret = nvif_object_init(&mast->base.base.user, NULL, name,
2300				NV_DMA_IN_MEMORY, &args, size,
2301			       &fbdma->core);
2302	if (ret) {
2303		nv50_fbdma_fini(fbdma);
2304		return ret;
2305	}
2306
2307	return 0;
2308}
2309
2310static void
2311nv50_fb_dtor(struct drm_framebuffer *fb)
2312{
2313}
2314
2315static int
2316nv50_fb_ctor(struct drm_framebuffer *fb)
2317{
2318	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
2319	struct nouveau_drm *drm = nouveau_drm(fb->dev);
2320	struct nouveau_bo *nvbo = nv_fb->nvbo;
2321	struct nv50_disp *disp = nv50_disp(fb->dev);
2322	struct nouveau_fb *pfb = nvkm_fb(&drm->device);
2323	u8 kind = nouveau_bo_tile_layout(nvbo) >> 8;
2324	u8 tile = nvbo->tile_mode;
2325
2326	if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) {
2327		NV_ERROR(drm, "framebuffer requires contiguous bo\n");
2328		return -EINVAL;
2329	}
2330
2331	if (drm->device.info.chipset >= 0xc0)
2332		tile >>= 4; /* GF100+ stores the tile mode shifted up by 4 */
2333
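	/* Translate the framebuffer depth into the EVO surface format code. */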
2334	switch (fb->depth) {
2335	case  8: nv_fb->r_format = 0x1e00; break;
2336	case 15: nv_fb->r_format = 0xe900; break;
2337	case 16: nv_fb->r_format = 0xe800; break;
2338	case 24:
2339	case 32: nv_fb->r_format = 0xcf00; break;
2340	case 30: nv_fb->r_format = 0xd100; break;
2341	default:
2342		 NV_ERROR(drm, "unknown depth %d\n", fb->depth);
2343		 return -EINVAL;
2344	}
2345
2346	if (disp->disp->oclass < NV84_DISP_CLASS) {
2347		nv_fb->r_pitch   = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2348					    (fb->pitches[0] | 0x00100000);
2349		nv_fb->r_format |= kind << 16;
2350	} else
2351	if (disp->disp->oclass < NVD0_DISP_CLASS) {
2352		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2353					   (fb->pitches[0] | 0x00100000);
2354	} else {
2355		nv_fb->r_pitch  = kind ? (((fb->pitches[0] / 4) << 4) | tile) :
2356					   (fb->pitches[0] | 0x01000000);
2357	}
2358	nv_fb->r_handle = 0xffff0000 | kind;
2359
2360	return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, pfb->ram->size, kind);
2361}
2362
2363/******************************************************************************
2364 * Init
2365 *****************************************************************************/
2366
2367void
2368nv50_display_fini(struct drm_device *dev)
2369{
2370}
2371
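/* Restore each head's flip semaphore value and re-attach the shared sync
 * buffer to the core channel.
 */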
2372int
2373nv50_display_init(struct drm_device *dev)
2374{
2375	struct nv50_disp *disp = nv50_disp(dev);
2376	struct drm_crtc *crtc;
2377	u32 *push;
2378
2379	push = evo_wait(nv50_mast(dev), 32);
2380	if (!push)
2381		return -EBUSY;
2382
2383	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2384		struct nv50_sync *sync = nv50_sync(crtc);
2385		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
2386	}
2387
2388	evo_mthd(push, 0x0088, 1);
2389	evo_data(push, nv50_mast(dev)->base.sync.handle);
2390	evo_kick(push, nv50_mast(dev));
2391	return 0;
2392}
2393
2394void
2395nv50_display_destroy(struct drm_device *dev)
2396{
2397	struct nv50_disp *disp = nv50_disp(dev);
2398	struct nv50_fbdma *fbdma, *fbtmp;
2399
2400	list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) {
2401		nv50_fbdma_fini(fbdma);
2402	}
2403
2404	nv50_dmac_destroy(&disp->mast.base, disp->disp);
2405
2406	nouveau_bo_unmap(disp->sync);
2407	if (disp->sync)
2408		nouveau_bo_unpin(disp->sync);
2409	nouveau_bo_ref(NULL, &disp->sync);
2410
2411	nouveau_display(dev)->priv = NULL;
2412	kfree(disp);
2413}
2414
2415int
2416nv50_display_create(struct drm_device *dev)
2417{
2418	struct nvif_device *device = &nouveau_drm(dev)->device;
2419	struct nouveau_drm *drm = nouveau_drm(dev);
2420	struct dcb_table *dcb = &drm->vbios.dcb;
2421	struct drm_connector *connector, *tmp;
2422	struct nv50_disp *disp;
2423	struct dcb_output *dcbe;
2424	int crtcs, ret, i;
2425
2426	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
2427	if (!disp)
2428		return -ENOMEM;
2429	INIT_LIST_HEAD(&disp->fbdma);
2430
2431	nouveau_display(dev)->priv = disp;
2432	nouveau_display(dev)->dtor = nv50_display_destroy;
2433	nouveau_display(dev)->init = nv50_display_init;
2434	nouveau_display(dev)->fini = nv50_display_fini;
2435	nouveau_display(dev)->fb_ctor = nv50_fb_ctor;
2436	nouveau_display(dev)->fb_dtor = nv50_fb_dtor;
2437	disp->disp = &nouveau_display(dev)->disp;
2438
2439	/* small shared memory area we use for notifiers and semaphores */
2440	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
2441			     0, 0x0000, NULL, &disp->sync);
2442	if (!ret) {
2443		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
2444		if (!ret) {
2445			ret = nouveau_bo_map(disp->sync);
2446			if (ret)
2447				nouveau_bo_unpin(disp->sync);
2448		}
2449		if (ret)
2450			nouveau_bo_ref(NULL, &disp->sync);
2451	}
2452
2453	if (ret)
2454		goto out;
2455
2456	/* allocate master evo channel */
2457	ret = nv50_core_create(disp->disp, disp->sync->bo.offset,
2458			      &disp->mast);
2459	if (ret)
2460		goto out;
2461
2462	/* create crtc objects to represent the hw heads */
2463	if (disp->disp->oclass >= NVD0_DISP_CLASS)
2464		crtcs = nvif_rd32(device, 0x022448);
2465	else
2466		crtcs = 2;
2467
2468	for (i = 0; i < crtcs; i++) {
2469		ret = nv50_crtc_create(dev, i);
2470		if (ret)
2471			goto out;
2472	}
2473
2474	/* create encoder/connector objects based on VBIOS DCB table */
2475	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
2476		connector = nouveau_connector_create(dev, dcbe->connector);
2477		if (IS_ERR(connector))
2478			continue;
2479
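		/* On-chip outputs are driven by SORs/DACs; anything else is
		 * routed through an external encoder chip (PIOR).
		 */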
2480		if (dcbe->location == DCB_LOC_ON_CHIP) {
2481			switch (dcbe->type) {
2482			case DCB_OUTPUT_TMDS:
2483			case DCB_OUTPUT_LVDS:
2484			case DCB_OUTPUT_DP:
2485				ret = nv50_sor_create(connector, dcbe);
2486				break;
2487			case DCB_OUTPUT_ANALOG:
2488				ret = nv50_dac_create(connector, dcbe);
2489				break;
2490			default:
2491				ret = -ENODEV;
2492				break;
2493			}
2494		} else {
2495			ret = nv50_pior_create(connector, dcbe);
2496		}
2497
2498		if (ret) {
2499			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
2500				     dcbe->location, dcbe->type,
2501				     ffs(dcbe->or) - 1, ret);
2502			ret = 0;
2503		}
2504	}
2505
2506	/* cull any connectors we created that don't have an encoder */
2507	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
2508		if (connector->encoder_ids[0])
2509			continue;
2510
2511		NV_WARN(drm, "%s has no encoders, removing\n",
2512			connector->name);
2513		connector->funcs->destroy(connector);
2514	}
2515
2516out:
2517	if (ret)
2518		nv50_display_destroy(dev);
2519	return ret;
2520}
2521