/* nv50_display.c — revision 5a885f0b757ba4483d790c40813d8a66278bdda7 */
1	/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25#include <linux/dma-mapping.h>
26
27#include <drm/drmP.h>
28#include <drm/drm_crtc_helper.h>
29
30#include "nouveau_drm.h"
31#include "nouveau_dma.h"
32#include "nouveau_gem.h"
33#include "nouveau_connector.h"
34#include "nouveau_encoder.h"
35#include "nouveau_crtc.h"
36#include "nouveau_fence.h"
37#include "nv50_display.h"
38
39#include <core/client.h>
40#include <core/gpuobj.h>
41#include <core/class.h>
42
43#include <subdev/timer.h>
44#include <subdev/bar.h>
45#include <subdev/fb.h>
46#include <subdev/i2c.h>
47
#define EVO_DMA_NR 9

/* per-head EVO channel numbering: one master, then flip/overlay/
 * overlay-immediate/cursor channels per head (4 of each — confirm
 * against channel users) */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC(  0, 0x00)
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c), 0x00)
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c), 0x10)

/* object handles used when instantiating EVO channels and pushbuffers;
 * the 0xd15xxxxx prefixes are arbitrary driver-chosen namespace tags */
#define EVO_CORE_HANDLE      (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
67
68/******************************************************************************
69 * EVO channel
70 *****************************************************************************/
71
/* State for a single EVO (display) channel: the channel object itself
 * plus the handle it was created under (0 == never created, which
 * nv50_chan_destroy() relies on). */
struct nv50_chan {
	struct nouveau_object *user;
	u32 handle;
};
76
77static int
78nv50_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
79		 void *data, u32 size, struct nv50_chan *chan)
80{
81	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
82	const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
83	const u32 handle = EVO_CHAN_HANDLE(bclass, head);
84	int ret;
85
86	ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
87				 oclass, data, size, &chan->user);
88	if (ret)
89		return ret;
90
91	chan->handle = handle;
92	return 0;
93}
94
95static void
96nv50_chan_destroy(struct nouveau_object *core, struct nv50_chan *chan)
97{
98	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
99	if (chan->handle)
100		nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
101}
102
103/******************************************************************************
104 * PIO EVO channel
105 *****************************************************************************/
106
/* A PIO-mode EVO channel (no DMA pushbuffer, unlike nv50_dmac). */
struct nv50_pioc {
	struct nv50_chan base;
};
110
/* Destroy a PIO channel; thin wrapper over nv50_chan_destroy(). */
static void
nv50_pioc_destroy(struct nouveau_object *core, struct nv50_pioc *pioc)
{
	nv50_chan_destroy(core, &pioc->base);
}
116
/* Create a PIO channel; thin wrapper over nv50_chan_create().
 * Returns 0 on success or a negative error code. */
static int
nv50_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nv50_pioc *pioc)
{
	return nv50_chan_create(core, bclass, head, data, size, &pioc->base);
}
123
124/******************************************************************************
125 * DMA EVO channel
126 *****************************************************************************/
127
/* A DMA-mode EVO channel: base channel plus a PAGE_SIZE pushbuffer in
 * coherent PCI memory (ptr = CPU mapping, handle = bus address). */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;
	u32 *ptr;
};
133
/* Tear down a DMA channel: release the pushbuffer (if it was ever
 * allocated), then the channel object itself. */
static void
nv50_dmac_destroy(struct nouveau_object *core, struct nv50_dmac *dmac)
{
	if (dmac->ptr) {
		struct pci_dev *pdev = nv_device(core)->pdev;
		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}

	nv50_chan_destroy(core, &dmac->base);
}
144
/* Create the scanout DMA objects (NvEvoVRAM_LP, NvEvoFB16, NvEvoFB32)
 * under channel 'parent' for pre-NVC0 hardware.  Each object spans all
 * of VRAM; the conf0 values 0x70/0x7a presumably select the 16bpp/32bpp
 * tiled-surface kinds — confirm against hardware documentation.
 * Returns 0 on success or the first failing nouveau_object_new() error;
 * partially-created objects are cleaned up with the parent channel.
 */
static int
nv50_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE |
					         NV50_DMA_CONF0_PART_256,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB16,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE | 0x70 |
					         NV50_DMA_CONF0_PART_256,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE | 0x7a |
					         NV50_DMA_CONF0_PART_256,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}
189
/* NVC0 (Fermi) variant of the scanout DMA object setup.  Note that both
 * NvEvoFB16 and NvEvoFB32 use conf0 0xfe here — presumably the Fermi
 * tile kind is format-independent; confirm against hardware docs.
 * Returns 0 on success or the first failing nouveau_object_new() error.
 */
static int
nvc0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB16,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}
231
/* NVD0+ (Kepler-era display) variant: only NvEvoVRAM_LP and NvEvoFB32
 * are needed (no separate 16bpp object on this generation), both using
 * large-page config.  Returns 0 or the first creation error.
 */
static int
nvd0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVD0_DMA_CONF0_ENABLE |
						 NVD0_DMA_CONF0_PAGE_LP,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
					.conf0 = NVD0_DMA_CONF0_ENABLE | 0xfe |
						 NVD0_DMA_CONF0_PAGE_LP,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}
263
/* Create a DMA-mode EVO channel:
 *  - allocate a page of coherent memory for the pushbuffer,
 *  - create the pushbuffer DMA object under the device (handle taken
 *    from the first word of 'data', i.e. the class args' pushbuf field),
 *  - create the channel itself,
 *  - create sync/VRAM/framebuffer DMA objects under the channel,
 *    choosing the fbdma variant by chipset generation.
 * Returns 0 on success or a negative error code.
 * NOTE(review): on failure the pushbuffer page is not freed here —
 * callers appear responsible for invoking nv50_dmac_destroy(); confirm.
 */
static int
nv50_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	u32 pushbuf = *(u32 *)data;	/* pushbuf handle is first in class args */
	int ret;

	dmac->ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE,
					&dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	/* DMA object covering the pushbuffer page, read-only for the GPU */
	ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
				 NV_DMA_FROM_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_PCI_US |
						 NV_DMA_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nv50_chan_create(core, bclass, head, data, size, &dmac->base);
	if (ret)
		return ret;

	/* window onto the shared sync buffer, for notifiers/semaphores */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* linear view of all of VRAM */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram.size - 1,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* generation-specific framebuffer DMA objects */
	if (nv_device(core)->card_type < NV_C0)
		ret = nv50_dmac_create_fbdma(core, dmac->base.handle);
	else
	if (nv_device(core)->card_type < NV_D0)
		ret = nvc0_dmac_create_fbdma(core, dmac->base.handle);
	else
		ret = nvd0_dmac_create_fbdma(core, dmac->base.handle);
	return ret;
}
326
/* Per-purpose wrappers around the two EVO channel flavours. */
struct nv50_mast {
	struct nv50_dmac base;
};

struct nv50_curs {
	struct nv50_pioc base;
};

/* Flip (base) channel.  'sem' tracks the flip-completion semaphore:
 * offset toggles between the two EVO_FLIP_SEM* slots and value is
 * bumped per flip — see nv50_display_flip_next(). */
struct nv50_sync {
	struct nv50_dmac base;
	struct {
		u32 offset;
		u16 value;
	} sem;
};

struct nv50_ovly {
	struct nv50_dmac base;
};

struct nv50_oimm {
	struct nv50_pioc base;
};

/* One CRTC together with all of its per-head EVO channels. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

/* downcast helpers from a drm_crtc to the nv50 head and its channels */
#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
/* object class of a channel — used below for generation checks */
#define nv50_vers(c) nv_mclass(nv50_chan(c)->user)
366
/* Global display state: the core EVO object, the master channel, and
 * the shared sync buffer used for notifiers and flip semaphores. */
struct nv50_disp {
	struct nouveau_object *core;
	struct nv50_mast mast;

	u32 modeset;	/* modeset bookkeeping — semantics not visible here; confirm at users */

	struct nouveau_bo *sync;
};
375
/* Fetch the nv50 display state hung off the generic nouveau display. */
static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

/* shorthand for the master EVO channel of a device */
#define nv50_mast(d) (&nv50_disp(d)->mast)
383
/* drm encoder helper: report the CRTC an encoder is currently bound to. */
static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}
389
390/******************************************************************************
391 * EVO channel helpers
392 *****************************************************************************/
/* Reserve room for 'nr' words in the channel's pushbuffer and return a
 * pointer to write them at.  If fewer than nr+8 words remain before the
 * end of the page, write 0x20000000 (presumably an EVO jump-to-0 token —
 * confirm against EVO docs), reset PUT and wait for the channel to drain.
 * Returns NULL if the channel stalls; pair with evo_kick() on success.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;	/* PUT, in words */

	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;

		nv_wo32(dmac->base.user, 0x0000, 0x00000000);
		/* wait for the read pointer (0x0004) to follow back to 0 */
		if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
			NV_ERROR(dmac->base.user, "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
413
414static void
415evo_kick(u32 *push, void *evoc)
416{
417	struct nv50_dmac *dmac = evoc;
418	nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
419}
420
/* emit a method header (word count in bits 18+, method address) / a data word */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)
423
424static bool
425evo_sync_wait(void *data)
426{
427	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
428		return true;
429	usleep_range(1, 2);
430	return false;
431}
432
/* Drain the master EVO channel: clear the notifier slot, request a
 * notify (method 0x0084) plus an update (0x0080), kick, then poll the
 * sync bo until hardware writes the notifier.
 * Returns 0 on success, -EBUSY if the ring is stalled or the wait
 * times out.
 */
static int
evo_sync(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(device, evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}
454
455/******************************************************************************
456 * Page flipping channel
457 *****************************************************************************/
/* Return the buffer object holding flip semaphores; currently the one
 * shared sync bo regardless of which 'crtc' is asked for. */
struct nouveau_bo *
nv50_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nv50_disp(dev)->sync;
}
463
/* Cancel any pending page flip on this CRTC's sync (base) channel by
 * zeroing its notify/semaphore/buffer state and committing with an
 * update (0x0080 — inferred from its use as the final method in every
 * programming sequence in this file). */
void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;

	push = evo_wait(sync, 8);
	if (push) {
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, sync);
	}
}
483
/* Queue a page flip to 'fb' on this CRTC's sync channel, optionally
 * synchronised against rendering in 'chan' via a semaphore in the
 * shared sync bo (release 0xf00dxxxx by the renderer, acquire by EVO;
 * the alternate slot is primed with 0x74b1e000 for the next cycle).
 * The channel-sync sequence differs per channel generation (<NV84,
 * NV84..NVC0, NVC0+).  With no channel the semaphore is released by
 * the CPU and the master channel is drained instead.
 * Returns 0 on success, -EBUSY if the flip ring stalls, or a ring-space
 * error from the render channel.
 */
int
nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_sync *sync = nv50_sync(crtc);
	u32 *push;
	int ret;

	/* interval field sits at bit 4; 0x100 presumably means "no vblank
	 * wait" for interval 0 — confirm against method docs */
	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;

		if (nv_mclass(chan->object) < NV84_CHANNEL_IND_CLASS) {
			/* NV11-style DMA semaphores, one object per head */
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
			OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
			OUT_RING  (chan, sync->sem.offset);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
			OUT_RING  (chan, sync->sem.offset ^ 0x10);
			OUT_RING  (chan, 0x74b1e000);
			BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
			OUT_RING  (chan, NvSema);
		} else
		if (nv_mclass(chan->object) < NVC0_CHANNEL_IND_CLASS) {
			/* NV84+: 64-bit semaphore addresses via the fence bo */
			u64 offset = nv84_fence_crtc(chan, nv_crtc->index);
			offset += sync->sem.offset;

			BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset));
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			OUT_RING  (chan, 0x00000002);
			BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
			OUT_RING  (chan, 0x74b1e000);
			OUT_RING  (chan, 0x00000001);
		} else {
			/* NVC0+: same layout, Fermi method encoding */
			u64 offset = nv84_fence_crtc(chan, nv_crtc->index);
			offset += sync->sem.offset;

			BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset));
			OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
			OUT_RING  (chan, 0x00001002);
			BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
			OUT_RING  (chan, upper_32_bits(offset));
			OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
			OUT_RING  (chan, 0x74b1e000);
			OUT_RING  (chan, 0x00001001);
		}

		FIRE_RING (chan);
	} else {
		/* no render channel: release the semaphore from the CPU */
		nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
				0xf00d0000 | sync->sem.value);
		evo_sync(crtc->dev);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	/* semaphore acquire: offset, release value, alternate token, ctxdma */
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->sem.offset);
	evo_data(push, 0xf00d0000 | sync->sem.value);
	evo_data(push, 0x74b1e000);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	/* surface description lives at different methods pre/post NVD0 */
	if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
		evo_mthd(push, 0x0800, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	} else {
		evo_mthd(push, 0x0400, 5);
		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
		evo_data(push, 0);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nv_fb->r_pitch);
		evo_data(push, nv_fb->r_format);
	}
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	/* flip to the other semaphore slot for the next cycle */
	sync->sem.offset ^= 0x10;
	sync->sem.value++;
	return 0;
}
603
604/******************************************************************************
605 * CRTC
606 *****************************************************************************/
/* Program the head's dithering mode from the connector's properties.
 * AUTO enables dynamic 2x2 dithering when the framebuffer has more
 * depth than the panel can display; AUTO depth picks 8bpc for >=8bpc
 * panels.  If 'update' is set, the change is committed immediately.
 * Always returns 0.
 */
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* dither-control method moved between display generations */
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
654
/* Compute and program the head's output (viewport) size according to
 * the connector's scaling mode and underscan properties, then restart
 * flipping if 'update' is set.  Always returns 0.
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19.13-ish fixed point */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit border: shave ~6% + 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}
758
/* Program the head's colour vibrance and vibrant hue, scaling the
 * [-100, 100]-style property values into the hardware's 12-bit fields
 * (with round-to-nearest for positive vibrance).  Commits immediately
 * when 'update' is set.  Always returns 0.
 */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
789
/* Point the head at framebuffer 'fb', scanning out from (x, y), and
 * remember the fb's ctxdma/tile flags for nv50_crtc_commit().  Commits
 * immediately when 'update' is set.  Always returns 0.
 */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* NV84+ takes the fb ctxdma directly; plain NV50 does not */
			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_dma);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_dma);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	/* cached for re-programming in nv50_crtc_commit() */
	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
835
/* Enable the hardware cursor on this head, pointing it at the cursor
 * bo.  Post-NV50 generations additionally take the cursor ctxdma; the
 * method layout shifts again at NVD0.  Does not commit — callers use
 * nv50_crtc_cursor_show_hide() for that. */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		}
		evo_kick(push, mast);
	}
}
863
/* Disable the hardware cursor on this head (and clear its ctxdma on
 * generations that have one).  Mirror of nv50_crtc_cursor_show();
 * does not commit. */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}
888
889static void
890nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
891{
892	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
893
894	if (show)
895		nv50_crtc_cursor_show(nv_crtc);
896	else
897		nv50_crtc_cursor_hide(nv_crtc);
898
899	if (update) {
900		u32 *push = evo_wait(mast, 2);
901		if (push) {
902			evo_mthd(push, 0x0080, 1);
903			evo_data(push, 0x00000000);
904			evo_kick(push, mast);
905		}
906	}
907}
908
/* drm crtc helper: intentionally a no-op — this driver does not act on
 * CRTC-level DPMS requests (power handling happens elsewhere). */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
913
/* drm crtc helper: quiesce the head before a modeset — stop flipping,
 * detach the framebuffer/LUT ctxdmas and hide the cursor. */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 2);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) <  NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}
952
/* drm crtc helper: re-enable the head after a modeset — reattach the
 * framebuffer ctxdma (cached in fb.tile_flags by nv50_crtc_set_image)
 * and the gamma LUT, restore cursor visibility, and restart flipping. */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM_LP);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
}
997
/* drm crtc helper: no adjustments needed — accept every mode as-is. */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
1004
1005static int
1006nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1007{
1008	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
1009	int ret;
1010
1011	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
1012	if (ret)
1013		return ret;
1014
1015	if (old_fb) {
1016		nvfb = nouveau_framebuffer(old_fb);
1017		nouveau_bo_unpin(nvfb->nvbo);
1018	}
1019
1020	return 0;
1021}
1022
/* Program a full display mode on this head.
 *
 * The DRM mode timings are converted into the hardware's representation
 * (sync-relative blanking start/end values; the -1 adjustments make the
 * values inclusive), the new framebuffer is pinned, and the timing/clock
 * methods are pushed to the core (master) EVO channel.  Dithering,
 * scaling, colour vibrance and the scanout surface itself are then
 * (re)applied through the nv50_crtc_set_*() helpers.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	/* interlace halves the per-field line count, doublescan doubles it */
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;	/* second-field blanking (interlace only) */
	u32 *push;
	int ret;

	/* horizontal timings, measured in pixels from the start of sync */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, scaled for doublescan/interlace as above */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* second field's blanking window, offset by one field */
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* pre-NVD0 core channel layout (0x400 bytes per head) */
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			/* NVD0+ core channel layout (0x300 bytes per head) */
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	/* NOTE(review): nv_connector is fetched but not used below --
	 * the set_* helpers look the connector up themselves. */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);
	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
1109
1110static int
1111nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1112			struct drm_framebuffer *old_fb)
1113{
1114	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1115	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1116	int ret;
1117
1118	if (!crtc->fb) {
1119		NV_DEBUG(drm, "No FB bound\n");
1120		return 0;
1121	}
1122
1123	ret = nv50_crtc_swap_fbs(crtc, old_fb);
1124	if (ret)
1125		return ret;
1126
1127	nv50_display_flip_stop(crtc);
1128	nv50_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
1129	nv50_display_flip_next(crtc, crtc->fb, NULL, 1);
1130	return 0;
1131}
1132
/* Atomic (kdb/kgdb entry) variant of mode_set_base: no pinning or flip
 * re-arm, just stop flips and program the given framebuffer immediately.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
1143
1144static void
1145nv50_crtc_lut_load(struct drm_crtc *crtc)
1146{
1147	struct nv50_disp *disp = nv50_disp(crtc->dev);
1148	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1149	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1150	int i;
1151
1152	for (i = 0; i < 256; i++) {
1153		u16 r = nv_crtc->lut.r[i] >> 2;
1154		u16 g = nv_crtc->lut.g[i] >> 2;
1155		u16 b = nv_crtc->lut.b[i] >> 2;
1156
1157		if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
1158			writew(r + 0x0000, lut + (i * 0x08) + 0);
1159			writew(g + 0x0000, lut + (i * 0x08) + 2);
1160			writew(b + 0x0000, lut + (i * 0x08) + 4);
1161		} else {
1162			writew(r + 0x6000, lut + (i * 0x20) + 0);
1163			writew(g + 0x6000, lut + (i * 0x20) + 2);
1164			writew(b + 0x6000, lut + (i * 0x20) + 4);
1165		}
1166	}
1167}
1168
/* Set (or clear) the hardware cursor image for this CRTC.
 *
 * A @handle of 0 hides the cursor.  Otherwise the GEM object must be a
 * 64x64 cursor image; its contents are copied word-by-word into the
 * CRTC's pre-allocated cursor buffer, so the caller's object is not
 * referenced after this call returns.
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* copy the image into the per-head cursor bo */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	/* only touch hardware state when visibility actually changes */
	if (visible != nv_crtc->cursor.visible) {
		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
1208
/* Move the hardware cursor by writing directly to the cursor PIO
 * channel: 0x0084 takes the packed (y << 16 | x) position, and the
 * write to 0x0080 appears to latch/apply the update -- NOTE(review):
 * method semantics inferred from usage, confirm against class docs.
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}
1218
1219static void
1220nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1221		    uint32_t start, uint32_t size)
1222{
1223	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1224	u32 end = max(start + size, (u32)256);
1225	u32 i;
1226
1227	for (i = start; i < end; i++) {
1228		nv_crtc->lut.r[i] = r[i];
1229		nv_crtc->lut.g[i] = g[i];
1230		nv_crtc->lut.b[i] = b[i];
1231	}
1232
1233	nv50_crtc_lut_load(crtc);
1234}
1235
/* Tear down a head: destroy its four EVO channels (overlay, overlay
 * immediate, sync/flip, cursor), release the cursor and LUT buffer
 * objects (unmap, unpin if allocated, drop reference), then free the
 * DRM CRTC itself.  Also used on the nv50_crtc_create() error path,
 * where some of these resources may not exist yet -- NOTE(review):
 * nouveau_bo_unmap() is called before the NULL check, so it is
 * presumably NULL-safe; verify.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	nv50_dmac_destroy(disp->core, &head->ovly.base);
	nv50_pioc_destroy(disp->core, &head->oimm.base);
	nv50_dmac_destroy(disp->core, &head->sync.base);
	nv50_pioc_destroy(disp->core, &head->curs.base);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
1257
/* CRTC helper vtable: modeset plumbing for each head. */
static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};
1268
/* CRTC core vtable: cursor, gamma, config and page-flip entry points. */
static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
1277
/* Intentionally empty: cursor position is programmed through the DRM
 * cursor_move hook (nv50_crtc_cursor_move) on this hardware, so the
 * generic nouveau_crtc callback has nothing to do.
 */
static void
nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}
1282
/* Intentionally empty: the cursor buffer offset is fixed at creation
 * time on this hardware, so there is nothing to reprogram here.
 */
static void
nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
1287
1288static int
1289nv50_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
1290{
1291	struct nv50_disp *disp = nv50_disp(dev);
1292	struct nv50_head *head;
1293	struct drm_crtc *crtc;
1294	int ret, i;
1295
1296	head = kzalloc(sizeof(*head), GFP_KERNEL);
1297	if (!head)
1298		return -ENOMEM;
1299
1300	head->base.index = index;
1301	head->base.set_dither = nv50_crtc_set_dither;
1302	head->base.set_scale = nv50_crtc_set_scale;
1303	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
1304	head->base.color_vibrance = 50;
1305	head->base.vibrant_hue = 0;
1306	head->base.cursor.set_offset = nv50_cursor_set_offset;
1307	head->base.cursor.set_pos = nv50_cursor_set_pos;
1308	for (i = 0; i < 256; i++) {
1309		head->base.lut.r[i] = i << 8;
1310		head->base.lut.g[i] = i << 8;
1311		head->base.lut.b[i] = i << 8;
1312	}
1313
1314	crtc = &head->base.base;
1315	drm_crtc_init(dev, crtc, &nv50_crtc_func);
1316	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1317	drm_mode_crtc_set_gamma_size(crtc, 256);
1318
1319	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1320			     0, 0x0000, NULL, &head->base.lut.nvbo);
1321	if (!ret) {
1322		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1323		if (!ret) {
1324			ret = nouveau_bo_map(head->base.lut.nvbo);
1325			if (ret)
1326				nouveau_bo_unpin(head->base.lut.nvbo);
1327		}
1328		if (ret)
1329			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1330	}
1331
1332	if (ret)
1333		goto out;
1334
1335	nv50_crtc_lut_load(crtc);
1336
1337	/* allocate cursor resources */
1338	ret = nv50_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
1339			      &(struct nv50_display_curs_class) {
1340					.head = index,
1341			      }, sizeof(struct nv50_display_curs_class),
1342			      &head->curs.base);
1343	if (ret)
1344		goto out;
1345
1346	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1347			     0, 0x0000, NULL, &head->base.cursor.nvbo);
1348	if (!ret) {
1349		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1350		if (!ret) {
1351			ret = nouveau_bo_map(head->base.cursor.nvbo);
1352			if (ret)
1353				nouveau_bo_unpin(head->base.lut.nvbo);
1354		}
1355		if (ret)
1356			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1357	}
1358
1359	if (ret)
1360		goto out;
1361
1362	/* allocate page flip / sync resources */
1363	ret = nv50_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
1364			      &(struct nv50_display_sync_class) {
1365					.pushbuf = EVO_PUSH_HANDLE(SYNC, index),
1366					.head = index,
1367			      }, sizeof(struct nv50_display_sync_class),
1368			      disp->sync->bo.offset, &head->sync.base);
1369	if (ret)
1370		goto out;
1371
1372	head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);
1373
1374	/* allocate overlay resources */
1375	ret = nv50_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
1376			      &(struct nv50_display_oimm_class) {
1377					.head = index,
1378			      }, sizeof(struct nv50_display_oimm_class),
1379			      &head->oimm.base);
1380	if (ret)
1381		goto out;
1382
1383	ret = nv50_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
1384			      &(struct nv50_display_ovly_class) {
1385					.pushbuf = EVO_PUSH_HANDLE(OVLY, index),
1386					.head = index,
1387			      }, sizeof(struct nv50_display_ovly_class),
1388			      disp->sync->bo.offset, &head->ovly.base);
1389	if (ret)
1390		goto out;
1391
1392out:
1393	if (ret)
1394		nv50_crtc_destroy(crtc);
1395	return ret;
1396}
1397
1398/******************************************************************************
1399 * DAC
1400 *****************************************************************************/
1401static void
1402nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1403{
1404	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1405	struct nv50_disp *disp = nv50_disp(encoder->dev);
1406	int or = nv_encoder->or;
1407	u32 dpms_ctrl;
1408
1409	dpms_ctrl = 0x00000000;
1410	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1411		dpms_ctrl |= 0x00000001;
1412	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1413		dpms_ctrl |= 0x00000004;
1414
1415	nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
1416}
1417
1418static bool
1419nv50_dac_mode_fixup(struct drm_encoder *encoder,
1420		    const struct drm_display_mode *mode,
1421		    struct drm_display_mode *adjusted_mode)
1422{
1423	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1424	struct nouveau_connector *nv_connector;
1425
1426	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1427	if (nv_connector && nv_connector->native_mode) {
1428		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1429			int id = adjusted_mode->base.id;
1430			*adjusted_mode = *nv_connector->native_mode;
1431			adjusted_mode->base.id = id;
1432		}
1433	}
1434
1435	return true;
1436}
1437
/* Intentionally empty: all DAC programming happens in mode_set/dpms, so
 * the commit stage has nothing left to do.
 */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}
1442
/* Program the DAC output for the given mode: power the DAC on, then
 * push the output-routing/sync-polarity methods to the core EVO channel
 * (pre-NVD0 and NVD0+ use different method layouts), and record which
 * CRTC now drives this encoder.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* sync-polarity bits for negative h/v sync */
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			/* NVD0+: per-head control word; low bit = interlace.
			 * NOTE(review): 0x31ec6000 base value taken from the
			 * binary driver, exact meaning undocumented. */
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}
1491
/* Detach the DAC from its CRTC: clear the output-routing method for
 * this OR in the core channel and flush with an UPDATE (0x0080), then
 * forget the CRTC association.  No-op if the encoder wasn't bound.
 */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}

			/* UPDATE: make the routing change take effect */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
1521
/* DAC load detection: ask the display core to perform a load-detect on
 * this OR.  A result of 7 presumably means all three R/G/B lines sensed
 * a load -- NOTE(review): value inferred from usage, confirm against
 * the DAC_LOAD method definition.  Anything else (or an error) is
 * reported as disconnected.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	int ret, or = nouveau_encoder(encoder)->or;
	u32 load = 0;

	ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
	if (ret || load != 7)
		return connector_status_disconnected;

	return connector_status_connected;
}
1535
/* Unregister and free a DAC encoder (allocated in nv50_dac_create). */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1542
/* DAC encoder helper vtable; prepare/disable both route through
 * nv50_dac_disconnect to drop the CRTC binding.
 */
static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};
1553
/* DAC encoder core vtable. */
static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
1557
/* Create a DAC (analog) encoder for the given DCB entry and attach it
 * to @connector.  The OR index is derived from the DCB 'or' bitmask and
 * the matching i2c port is looked up for DDC.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a one-hot mask; convert to the OR's index */
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1583
1584/******************************************************************************
1585 * Audio
1586 *****************************************************************************/
/* Enable HDMI/DP audio: if the attached monitor advertises audio
 * support in its EDID, build the ELD from the EDID and upload it to the
 * display core's HDA ELD method for this OR.  eld[2] holds the baseline
 * block length in 4-byte units, hence the * 4 for the payload size --
 * NOTE(review): per the HDA ELD layout, confirm against the spec.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
			    nv_connector->base.eld,
			    nv_connector->base.eld[2] * 4);
}
1604
/* Disable audio on this OR by clearing the ELD (zero-length payload). */
static void
nv50_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
}
1613
1614/******************************************************************************
1615 * HDMI
1616 *****************************************************************************/
/* Enable HDMI infoframe/audio transmission for this head+OR pair if the
 * monitor is HDMI-capable.  max_ac_packet is the audio-content-packet
 * budget derived from the horizontal blanking period, minus the HDCP
 * rekey window and a fixed overhead, in units of 32 pixels (formula and
 * constants mirror the binary driver / tegra code).
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	/* method offset encodes head index (bits 3+) and OR (bits 0-2) */
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
			    NV84_DISP_SOR_HDMI_PWR_STATE_ON |
			    (max_ac_packet << 16) | rekey);

	nv50_audio_mode_set(encoder, mode);
}
1643
/* Disable HDMI output for this head+OR: tear down audio first, then
 * power off the HDMI block.  Uses nv_encoder->crtc (not encoder->crtc)
 * because the caller, nv50_sor_disconnect(), clears the encoder's CRTC
 * binding only after this runs.
 */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;

	nv50_audio_disconnect(encoder);

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
}
1656
1657/******************************************************************************
1658 * SOR
1659 *****************************************************************************/
/* SOR DPMS hook.  Multiple TMDS encoders can share one OR, so before
 * powering the OR down, scan the other encoders: if a partner on the
 * same OR is still DPMS_ON, leave the hardware alone and only record
 * our own state.  Otherwise program SOR_PWR for the requested mode.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_encoder *partner;
	int or = nv_encoder->or;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			/* shared OR still in use elsewhere: don't touch hw */
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));
}
1687
1688static bool
1689nv50_sor_mode_fixup(struct drm_encoder *encoder,
1690		    const struct drm_display_mode *mode,
1691		    struct drm_display_mode *adjusted_mode)
1692{
1693	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1694	struct nouveau_connector *nv_connector;
1695
1696	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1697	if (nv_connector && nv_connector->native_mode) {
1698		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1699			int id = adjusted_mode->base.id;
1700			*adjusted_mode = *nv_connector->native_mode;
1701			adjusted_mode->base.id = id;
1702		}
1703	}
1704
1705	return true;
1706}
1707
/* Detach the SOR from its CRTC: clear the OR's control method in the
 * core channel, flush with UPDATE (0x0080), and shut down HDMI for this
 * path.  Finally mark the encoder powered off and unbound.  The HDMI
 * teardown must run before nv_encoder->crtc is cleared, since
 * nv50_hdmi_disconnect() reads it.
 */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0600 + (or * 0x40), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0200 + (or * 0x20), 1);
				evo_data(push, 0x00000000);
			}

			/* UPDATE: commit the routing change */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}

		nv50_hdmi_disconnect(encoder);
	}

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;
}
1740
/* Intentionally empty: SOR programming is completed in mode_set/dpms,
 * so there is nothing to do at the commit stage.
 */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
1745
1746static void
1747nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1748		  struct drm_display_mode *mode)
1749{
1750	struct nv50_disp *disp = nv50_disp(encoder->dev);
1751	struct nv50_mast *mast = nv50_mast(encoder->dev);
1752	struct drm_device *dev = encoder->dev;
1753	struct nouveau_drm *drm = nouveau_drm(dev);
1754	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1755	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1756	struct nouveau_connector *nv_connector;
1757	struct nvbios *bios = &drm->vbios;
1758	u32 *push, lvds = 0;
1759	u8 owner = 1 << nv_crtc->index;
1760	u8 proto = 0xf;
1761	u8 depth = 0x0;
1762
1763	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1764	switch (nv_encoder->dcb->type) {
1765	case DCB_OUTPUT_TMDS:
1766		if (nv_encoder->dcb->sorconf.link & 1) {
1767			if (mode->clock < 165000)
1768				proto = 0x1;
1769			else
1770				proto = 0x5;
1771		} else {
1772			proto = 0x2;
1773		}
1774
1775		nv50_hdmi_mode_set(encoder, mode);
1776		break;
1777	case DCB_OUTPUT_LVDS:
1778		proto = 0x0;
1779
1780		if (bios->fp_no_ddc) {
1781			if (bios->fp.dual_link)
1782				lvds |= 0x0100;
1783			if (bios->fp.if_is_24bit)
1784				lvds |= 0x0200;
1785		} else {
1786			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1787				if (((u8 *)nv_connector->edid)[121] == 2)
1788					lvds |= 0x0100;
1789			} else
1790			if (mode->clock >= bios->fp.duallink_transition_clk) {
1791				lvds |= 0x0100;
1792			}
1793
1794			if (lvds & 0x0100) {
1795				if (bios->fp.strapless_is_24bit & 2)
1796					lvds |= 0x0200;
1797			} else {
1798				if (bios->fp.strapless_is_24bit & 1)
1799					lvds |= 0x0200;
1800			}
1801
1802			if (nv_connector->base.display_info.bpc == 8)
1803				lvds |= 0x0200;
1804		}
1805
1806		nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
1807		break;
1808	case DCB_OUTPUT_DP:
1809		if (nv_connector->base.display_info.bpc == 6) {
1810			nv_encoder->dp.datarate = mode->clock * 18 / 8;
1811			depth = 0x2;
1812		} else
1813		if (nv_connector->base.display_info.bpc == 8) {
1814			nv_encoder->dp.datarate = mode->clock * 24 / 8;
1815			depth = 0x5;
1816		} else {
1817			nv_encoder->dp.datarate = mode->clock * 30 / 8;
1818			depth = 0x6;
1819		}
1820
1821		if (nv_encoder->dcb->sorconf.link & 1)
1822			proto = 0x8;
1823		else
1824			proto = 0x9;
1825		break;
1826	default:
1827		BUG_ON(1);
1828		break;
1829	}
1830
1831	nv50_sor_dpms(encoder, DRM_MODE_DPMS_ON);
1832
1833	push = evo_wait(nv50_mast(dev), 8);
1834	if (push) {
1835		if (nv50_vers(mast) < NVD0_DISP_CLASS) {
1836			u32 ctrl = (depth << 16) | (proto << 8) | owner;
1837			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1838				ctrl |= 0x00001000;
1839			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1840				ctrl |= 0x00002000;
1841			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x040), 1);
1842			evo_data(push, ctrl);
1843		} else {
1844			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1845			u32 syncs = 0x00000001;
1846
1847			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1848				syncs |= 0x00000008;
1849			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1850				syncs |= 0x00000010;
1851
1852			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1853				magic |= 0x00000001;
1854
1855			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1856			evo_data(push, syncs | (depth << 6));
1857			evo_data(push, magic);
1858			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 1);
1859			evo_data(push, owner | (proto << 8));
1860		}
1861
1862		evo_kick(push, mast);
1863	}
1864
1865	nv_encoder->crtc = encoder->crtc;
1866}
1867
/* Unregister and free a SOR encoder (allocated in nv50_sor_create). */
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1874
/* SOR encoder helper vtable; prepare/disable both route through
 * nv50_sor_disconnect to drop the CRTC binding.
 */
static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
1884
/* SOR encoder core vtable. */
static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
1888
/* Create a SOR encoder (TMDS/LVDS/DP) for the given DCB entry and
 * attach it to @connector.  DP and unknown types are exposed to DRM as
 * TMDS encoders; the real protocol is chosen later in mode_set from the
 * DCB type.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a one-hot mask; convert to the OR's index */
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1924
1925/******************************************************************************
1926 * Init
1927 *****************************************************************************/
/* Display teardown hook: nothing to do on suspend/fini for this
 * hardware; channel state is re-established by nv50_display_init().
 */
void
nv50_display_fini(struct drm_device *dev)
{
}
1932
1933int
1934nv50_display_init(struct drm_device *dev)
1935{
1936	u32 *push = evo_wait(nv50_mast(dev), 32);
1937	if (push) {
1938		evo_mthd(push, 0x0088, 1);
1939		evo_data(push, NvEvoSync);
1940		evo_kick(push, nv50_mast(dev));
1941		return 0;
1942	}
1943
1944	return -EBUSY;
1945}
1946
/* Destroy the display engine: tear down the master EVO channel, release
 * the shared sync buffer (unmap, unpin if allocated, drop reference),
 * and free the nv50_disp.  Also used as the error path of
 * nv50_display_create(), so disp->sync may be NULL here --
 * NOTE(review): nouveau_bo_unmap() runs before the NULL check and is
 * presumably NULL-safe; verify.
 */
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(disp->core, &disp->mast.base);

	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
1962
/* Create and initialise the whole display engine.
 *
 * Allocates the nv50_disp state, a small shared VRAM buffer used for
 * notifiers/semaphores, the newest supported display class object, and
 * the master EVO channel; then creates one CRTC per hardware head and
 * encoders/connectors from the VBIOS DCB table.  On any failure the
 * partially constructed state is torn down via nv50_display_destroy().
 */
int
nv50_display_create(struct drm_device *dev)
{
	/* candidate display classes, newest first; first that probes wins */
	static const u16 oclass[] = {
		NVE0_DISP_CLASS,
		NVD0_DISP_CLASS,
		NVA3_DISP_CLASS,
		NV94_DISP_CLASS,
		NVA0_DISP_CLASS,
		NV84_DISP_CLASS,
		NV50_DISP_CLASS,
	};
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* attempt to allocate a supported evo display class */
	ret = -ENODEV;
	for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
		ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
					 0xd1500000, oclass[i], NULL, 0,
					 &disp->core);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
			      &(struct nv50_display_mast_class) {
					.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
			      }, sizeof(struct nv50_display_mast_class),
			      disp->sync->bo.offset, &disp->mast.base);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	if (nv_mclass(disp->core) >= NVD0_DISP_CLASS)
		crtcs = nv_rd32(device, 0x022448); /* head count register */
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, disp->core, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case DCB_OUTPUT_TMDS:
		case DCB_OUTPUT_LVDS:
		case DCB_OUTPUT_DP:
			nv50_sor_create(connector, dcbe);
			break;
		case DCB_OUTPUT_ANALOG:
			nv50_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}
2084