nv50_display.c revision e84a35a8054397b0a4efc86ba82d9bc8b3895c75
1/*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25#include <linux/dma-mapping.h>
26
27#include <drm/drmP.h>
28#include <drm/drm_crtc_helper.h>
29#include <drm/drm_dp_helper.h>
30
31#include "nouveau_drm.h"
32#include "nouveau_dma.h"
33#include "nouveau_gem.h"
34#include "nouveau_connector.h"
35#include "nouveau_encoder.h"
36#include "nouveau_crtc.h"
37#include "nouveau_fence.h"
38#include "nv50_display.h"
39
40#include <core/client.h>
41#include <core/gpuobj.h>
42#include <core/class.h>
43
44#include <subdev/timer.h>
45#include <subdev/bar.h>
46#include <subdev/fb.h>
47#include <subdev/i2c.h>
48
49#define EVO_DMA_NR 9
50
51#define EVO_MASTER  (0x00)
52#define EVO_FLIP(c) (0x01 + (c))
53#define EVO_OVLY(c) (0x05 + (c))
54#define EVO_OIMM(c) (0x09 + (c))
55#define EVO_CURS(c) (0x0d + (c))
56
57/* offsets in shared sync bo of various structures */
58#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
59#define EVO_MAST_NTFY     EVO_SYNC(      0, 0x00)
60#define EVO_FLIP_SEM0(c)  EVO_SYNC((c) + 1, 0x00)
61#define EVO_FLIP_SEM1(c)  EVO_SYNC((c) + 1, 0x10)
62
63#define EVO_CORE_HANDLE      (0xd1500000)
64#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
65#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
66#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
67			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
68
69/******************************************************************************
70 * EVO channel
71 *****************************************************************************/
72
/* Handle to an EVO display channel object created against the core device. */
struct nv50_chan {
	struct nouveau_object *user;	/* channel control object */
	u32 handle;			/* object handle; 0 until created */
};
77
78static int
79nv50_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
80		 void *data, u32 size, struct nv50_chan *chan)
81{
82	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
83	const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
84	const u32 handle = EVO_CHAN_HANDLE(bclass, head);
85	int ret;
86
87	ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
88				 oclass, data, size, &chan->user);
89	if (ret)
90		return ret;
91
92	chan->handle = handle;
93	return 0;
94}
95
96static void
97nv50_chan_destroy(struct nouveau_object *core, struct nv50_chan *chan)
98{
99	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
100	if (chan->handle)
101		nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
102}
103
104/******************************************************************************
105 * PIO EVO channel
106 *****************************************************************************/
107
/* PIO-mode EVO channel (no pushbuffer); just wraps the base channel. */
struct nv50_pioc {
	struct nv50_chan base;
};
111
112static void
113nv50_pioc_destroy(struct nouveau_object *core, struct nv50_pioc *pioc)
114{
115	nv50_chan_destroy(core, &pioc->base);
116}
117
118static int
119nv50_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
120		 void *data, u32 size, struct nv50_pioc *pioc)
121{
122	return nv50_chan_create(core, bclass, head, data, size, &pioc->base);
123}
124
125/******************************************************************************
126 * DMA EVO channel
127 *****************************************************************************/
128
/* DMA-mode EVO channel: a base channel plus a CPU-visible pushbuffer page. */
struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* bus address of the pushbuffer page */
	u32 *ptr;		/* CPU mapping of the pushbuffer page */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};
139
140static void
141nv50_dmac_destroy(struct nouveau_object *core, struct nv50_dmac *dmac)
142{
143	if (dmac->ptr) {
144		struct pci_dev *pdev = nv_device(core)->pdev;
145		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
146	}
147
148	nv50_chan_destroy(core, &dmac->base);
149}
150
/* Create the per-channel framebuffer DMA objects for pre-NVC0 hardware:
 * a linear VRAM view (NvEvoVRAM_LP) plus 16bpp/32bpp tiled views (NvEvoFB16/
 * NvEvoFB32).  The conf0 values (0x70/0x7a kind bits, PART_256) select the
 * memory kind/partition layout for each format.  'parent' is the owning EVO
 * channel's handle.  Returns 0 or the first allocation error.
 */
static int
nv50_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE |
					         NV50_DMA_CONF0_PART_256,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB16,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE | 0x70 |
					         NV50_DMA_CONF0_PART_256,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NV50_DMA_CONF0_ENABLE | 0x7a |
					         NV50_DMA_CONF0_PART_256,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}
195
/* NVC0 variant of the framebuffer DMA object setup; same three views as
 * nv50_dmac_create_fbdma() but with Fermi conf0 encoding (0xfe kind for the
 * tiled 16/32bpp views).  Returns 0 or the first allocation error.
 */
static int
nvc0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB16,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}
237
/* NVD0+ variant: only two views are needed (linear + 32bpp tiled, both with
 * the large-page PAGE_LP flag); there is no separate FB16 object here.
 * Returns 0 or the first allocation error.
 */
static int
nvd0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
				     NV_DMA_IN_MEMORY_CLASS,
				     &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NVD0_DMA_CONF0_ENABLE |
						 NVD0_DMA_CONF0_PAGE_LP,
				     }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nouveau_object_new(client, parent, NvEvoFB32,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
					.conf0 = NVD0_DMA_CONF0_ENABLE | 0xfe |
						 NVD0_DMA_CONF0_PAGE_LP,
				 }, sizeof(struct nv_dma_class), &object);
	return ret;
}
269
/* Create a DMA-mode EVO channel:
 *  - allocate a coherent pushbuffer page and a DMA object covering it
 *    (handle 'pushbuf', taken from the first word of 'data'),
 *  - create the channel itself,
 *  - create the sync-buffer and whole-of-VRAM DMA objects, plus the
 *    generation-specific framebuffer DMA objects.
 *
 * NOTE(review): the error paths here do not free dmac->ptr or the objects
 * already created; presumably the caller invokes nv50_dmac_destroy() on
 * failure — confirm against the (out-of-view) callers.
 */
static int
nv50_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_fb *pfb = nouveau_fb(core);
	struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
	struct nouveau_object *object;
	u32 pushbuf = *(u32 *)data;	/* handle for the pushbuffer dma object */
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE,
					&dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	/* DMA object through which the display engine fetches the pushbuf */
	ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
				 NV_DMA_FROM_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_PCI_US |
						 NV_DMA_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	ret = nv50_chan_create(core, bclass, head, data, size, &dmac->base);
	if (ret)
		return ret;

	/* notifier/semaphore window into the shared sync buffer */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* linear view of all of VRAM */
	ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
				 NV_DMA_IN_MEMORY_CLASS,
				 &(struct nv_dma_class) {
					.flags = NV_DMA_TARGET_VRAM |
						 NV_DMA_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
				 }, sizeof(struct nv_dma_class), &object);
	if (ret)
		return ret;

	/* framebuffer DMA objects differ per GPU generation */
	if (nv_device(core)->card_type < NV_C0)
		ret = nv50_dmac_create_fbdma(core, dmac->base.handle);
	else
	if (nv_device(core)->card_type < NV_D0)
		ret = nvc0_dmac_create_fbdma(core, dmac->base.handle);
	else
		ret = nvd0_dmac_create_fbdma(core, dmac->base.handle);
	return ret;
}
334
/* Master (core) EVO channel. */
struct nv50_mast {
	struct nv50_dmac base;
};
338
/* Per-head cursor channel (PIO). */
struct nv50_curs {
	struct nv50_pioc base;
};
342
/* Per-head flip channel plus the current flip-semaphore location/value
 * within the shared sync buffer object. */
struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;	/* byte offset of the active semaphore slot */
	u32 data;	/* next semaphore value */
};
348
/* Per-head overlay channel (DMA). */
struct nv50_ovly {
	struct nv50_dmac base;
};
352
/* Per-head overlay-immediate channel (PIO). */
struct nv50_oimm {
	struct nv50_pioc base;
};
356
/* Per-CRTC state: the generic nouveau CRTC, the currently-displayed
 * framebuffer bo, and the head's four EVO channels. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;	/* pinned bo of the scanned-out fb */
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};
365
366#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
367#define nv50_curs(c) (&nv50_head(c)->curs)
368#define nv50_sync(c) (&nv50_head(c)->sync)
369#define nv50_ovly(c) (&nv50_head(c)->ovly)
370#define nv50_oimm(c) (&nv50_head(c)->oimm)
371#define nv50_chan(c) (&(c)->base.base)
372#define nv50_vers(c) nv_mclass(nv50_chan(c)->user)
373
/* Per-device display state: core object, master channel, and the shared
 * sync buffer used for notifiers and flip semaphores. */
struct nv50_disp {
	struct nouveau_object *core;
	struct nv50_mast mast;

	u32 modeset;	/* bitmask used during modeset (see callers) */

	struct nouveau_bo *sync;	/* shared sync/semaphore buffer */
};
382
/* Fetch the nv50 display state hung off the generic nouveau display. */
static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}
388
389#define nv50_mast(d) (&nv50_disp(d)->mast)
390
391static struct drm_crtc *
392nv50_display_crtc_get(struct drm_encoder *encoder)
393{
394	return nouveau_encoder(encoder)->crtc;
395}
396
397/******************************************************************************
398 * EVO channel helpers
399 *****************************************************************************/
400static u32 *
401evo_wait(void *evoc, int nr)
402{
403	struct nv50_dmac *dmac = evoc;
404	u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;
405
406	mutex_lock(&dmac->lock);
407	if (put + nr >= (PAGE_SIZE / 4) - 8) {
408		dmac->ptr[put] = 0x20000000;
409
410		nv_wo32(dmac->base.user, 0x0000, 0x00000000);
411		if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
412			mutex_unlock(&dmac->lock);
413			NV_ERROR(dmac->base.user, "channel stalled\n");
414			return NULL;
415		}
416
417		put = 0;
418	}
419
420	return dmac->ptr + put;
421}
422
/* Publish commands written after evo_wait(): update PUT (byte offset) and
 * drop the pushbuf lock taken by evo_wait().
 */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}
430
431#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
432#define evo_data(p,d)   *((p)++) = (d)
433
434static bool
435evo_sync_wait(void *data)
436{
437	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
438		return true;
439	usleep_range(1, 2);
440	return false;
441}
442
/* Flush the master EVO channel: clear the notifier, queue a request for the
 * GPU to write it (method 0x0084) followed by an update (0x0080), then wait
 * for the write to land.  Returns 0 on success, -EBUSY on timeout or if
 * pushbuf space couldn't be reserved.
 */
static int
evo_sync(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(device, evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}
464
465/******************************************************************************
466 * Page flipping channel
467 *****************************************************************************/
468struct nouveau_bo *
469nv50_display_crtc_sema(struct drm_device *dev, int crtc)
470{
471	return nv50_disp(dev)->sync;
472}
473
/* Context passed to nv50_display_flip_wait(): which display and which
 * head's flip channel to poll. */
struct nv50_display_flip {
	struct nv50_disp *disp;
	struct nv50_sync *chan;
};
478
479static bool
480nv50_display_flip_wait(void *data)
481{
482	struct nv50_display_flip *flip = data;
483	if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) ==
484					      flip->chan->data)
485		return true;
486	usleep_range(1, 2);
487	return false;
488}
489
/* Cancel semaphore-driven flipping on a CRTC's flip channel and wait for
 * the last queued flip's semaphore to be released.
 */
void
nv50_display_flip_stop(struct drm_crtc *crtc)
{
	struct nouveau_device *device = nouveau_dev(crtc->dev);
	struct nv50_display_flip flip = {
		.disp = nv50_disp(crtc->dev),
		.chan = nv50_sync(crtc),
	};
	u32 *push;

	push = evo_wait(flip.chan, 8);
	if (push) {
		/* clear notifier/semaphore/fb-dma state, then update */
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, flip.chan);
	}

	/* wait for any in-flight flip to signal completion */
	nv_wait_cb(device, nv50_display_flip_wait, &flip);
}
515
516int
517nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
518		       struct nouveau_channel *chan, u32 swap_interval)
519{
520	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
521	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
522	struct nv50_head *head = nv50_head(crtc);
523	struct nv50_sync *sync = nv50_sync(crtc);
524	u32 *push;
525	int ret;
526
527	swap_interval <<= 4;
528	if (swap_interval == 0)
529		swap_interval |= 0x100;
530	if (chan == NULL)
531		evo_sync(crtc->dev);
532
533	push = evo_wait(sync, 128);
534	if (unlikely(push == NULL))
535		return -EBUSY;
536
537	if (chan && nv_mclass(chan->object) < NV84_CHANNEL_IND_CLASS) {
538		ret = RING_SPACE(chan, 8);
539		if (ret)
540			return ret;
541
542		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
543		OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
544		OUT_RING  (chan, sync->addr ^ 0x10);
545		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
546		OUT_RING  (chan, sync->data + 1);
547		BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
548		OUT_RING  (chan, sync->addr);
549		OUT_RING  (chan, sync->data);
550	} else
551	if (chan && nv_mclass(chan->object) < NVC0_CHANNEL_IND_CLASS) {
552		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
553		ret = RING_SPACE(chan, 12);
554		if (ret)
555			return ret;
556
557		BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
558		OUT_RING  (chan, chan->vram);
559		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
560		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
561		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
562		OUT_RING  (chan, sync->data + 1);
563		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG);
564		BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
565		OUT_RING  (chan, upper_32_bits(addr));
566		OUT_RING  (chan, lower_32_bits(addr));
567		OUT_RING  (chan, sync->data);
568		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL);
569	} else
570	if (chan) {
571		u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr;
572		ret = RING_SPACE(chan, 10);
573		if (ret)
574			return ret;
575
576		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
577		OUT_RING  (chan, upper_32_bits(addr ^ 0x10));
578		OUT_RING  (chan, lower_32_bits(addr ^ 0x10));
579		OUT_RING  (chan, sync->data + 1);
580		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG |
581				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
582		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
583		OUT_RING  (chan, upper_32_bits(addr));
584		OUT_RING  (chan, lower_32_bits(addr));
585		OUT_RING  (chan, sync->data);
586		OUT_RING  (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL |
587				 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD);
588	}
589
590	if (chan) {
591		sync->addr ^= 0x10;
592		sync->data++;
593		FIRE_RING (chan);
594	}
595
596	/* queue the flip */
597	evo_mthd(push, 0x0100, 1);
598	evo_data(push, 0xfffe0000);
599	evo_mthd(push, 0x0084, 1);
600	evo_data(push, swap_interval);
601	if (!(swap_interval & 0x00000100)) {
602		evo_mthd(push, 0x00e0, 1);
603		evo_data(push, 0x40000000);
604	}
605	evo_mthd(push, 0x0088, 4);
606	evo_data(push, sync->addr);
607	evo_data(push, sync->data++);
608	evo_data(push, sync->data);
609	evo_data(push, NvEvoSync);
610	evo_mthd(push, 0x00a0, 2);
611	evo_data(push, 0x00000000);
612	evo_data(push, 0x00000000);
613	evo_mthd(push, 0x00c0, 1);
614	evo_data(push, nv_fb->r_dma);
615	evo_mthd(push, 0x0110, 2);
616	evo_data(push, 0x00000000);
617	evo_data(push, 0x00000000);
618	if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) {
619		evo_mthd(push, 0x0800, 5);
620		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
621		evo_data(push, 0);
622		evo_data(push, (fb->height << 16) | fb->width);
623		evo_data(push, nv_fb->r_pitch);
624		evo_data(push, nv_fb->r_format);
625	} else {
626		evo_mthd(push, 0x0400, 5);
627		evo_data(push, nv_fb->nvbo->bo.offset >> 8);
628		evo_data(push, 0);
629		evo_data(push, (fb->height << 16) | fb->width);
630		evo_data(push, nv_fb->r_pitch);
631		evo_data(push, nv_fb->r_format);
632	}
633	evo_mthd(push, 0x0080, 1);
634	evo_data(push, 0x00000000);
635	evo_kick(push, sync);
636
637	nouveau_bo_ref(nv_fb->nvbo, &head->image);
638	return 0;
639}
640
641/******************************************************************************
642 * CRTC
643 *****************************************************************************/
/* Program the dithering mode/depth for a CRTC from its connector's
 * properties (AUTO picks dynamic 2x2 when the fb depth exceeds the sink's
 * bpc, and 8bpc depth for >=8bpc sinks).  If 'update' is set, a core
 * channel update is queued too.
 *
 * NOTE(review): nv_connector is dereferenced without a NULL check, unlike
 * nv50_crtc_set_scale() below — confirm a connector is always bound when
 * this is called.
 */
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* method offset/stride differs per display class generation */
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
691
/* Program the CRTC scaler.  Computes the output size (oX,oY) from the
 * connector's scaling mode, applies optional underscan borders (keeping
 * aspect unless both borders are given), then handles CENTER/ASPECT
 * scaling before writing the scaler window.  If 'update' is set, flipping
 * is stopped and re-queued so the new scaling takes effect.
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19-bit fixed point */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;	/* default ~6% + 32px border */
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}
796
/* Program colour vibrance and vibrant hue for a CRTC.  The -100..+100
 * user values are scaled to 12-bit hardware fields (positive vibrance is
 * rounded up via 'adj').  If 'update' is set, a core update is queued.
 */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}
827
/* Point a CRTC at a framebuffer: program surface offset, size, pitch,
 * format, panning position (x,y) and (on >NV50 classes) the fb DMA object.
 * If 'update' is set, a core update is queued.  Also caches the fb's DMA
 * handle in nv_crtc->fb.tile_flags for later commits.
 */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* fb DMA object method only exists on >NV50 classes */
			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_dma);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_dma);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
873
/* Enable the hardware cursor for a CRTC, pointing it at the cursor bo.
 * NV84..pre-NVD0 and NVD0+ additionally program a cursor DMA object.
 * Note: does not queue a core update; see nv50_crtc_cursor_show_hide().
 */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);	/* cursor enable */
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		}
		evo_kick(push, mast);
	}
}
901
/* Disable the hardware cursor for a CRTC (clears the cursor DMA object on
 * generations that have one).  Does not queue a core update; see
 * nv50_crtc_cursor_show_hide().
 */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);	/* cursor disable */
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}
926
927static void
928nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
929{
930	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
931
932	if (show)
933		nv50_crtc_cursor_show(nv_crtc);
934	else
935		nv50_crtc_cursor_hide(nv_crtc);
936
937	if (update) {
938		u32 *push = evo_wait(mast, 2);
939		if (push) {
940			evo_mthd(push, 0x0080, 1);
941			evo_data(push, 0x00000000);
942			evo_kick(push, mast);
943		}
944	}
945}
946
/* Intentionally empty stub for the drm_crtc_helper_funcs .dpms hook; no
 * per-CRTC DPMS action is taken here (presumably handled elsewhere in the
 * driver — confirm against the encoder/display code).
 */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
951
/* Helper .prepare hook: stop flipping, blank the CRTC (clear fb/LUT DMA
 * state per generation) and hide the cursor before a modeset.
 */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) <  NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}
990
/* Helper .commit hook: re-enable the CRTC after a modeset — restore the
 * fb/LUT DMA objects per generation, restore cursor visibility and queue
 * the first flip.
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM_LP);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);	/* core enable + LUT */
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.tile_flags);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}
1035
/* CRTC helper .mode_fixup: populate the hardware (crtc_*) timing fields,
 * halving vertical timings for interlaced modes.  Never rejects a mode.
 */
static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}
1043
1044static int
1045nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
1046{
1047	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
1048	struct nv50_head *head = nv50_head(crtc);
1049	int ret;
1050
1051	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
1052	if (ret == 0) {
1053		if (head->image)
1054			nouveau_bo_unpin(head->image);
1055		nouveau_bo_ref(nvfb->nvbo, &head->image);
1056	}
1057
1058	return ret;
1059}
1060
/* CRTC helper .mode_set: compute raster timings from the DRM mode and
 * program them into the core EVO channel, then apply the per-CRTC
 * properties (dither/scale/vibrance) and scan out the framebuffer.
 *
 * Horizontal/vertical blanking values are derived in hardware units;
 * vertical values are scaled for doublescan and interlace.  Returns 0
 * on success or a negative errno from nv50_crtc_swap_fbs().
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	/* ilace==2 halves vertical timings; vscan==2 doubles them */
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 *push;
	int ret;

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	/* interlace needs a second vblank region for the odd field */
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/* pre-NVD0: clock in the pixel-clock method itself */
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			/* NVD0+: timings first, clock (in Hz) separately */
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	/* re-apply connector-derived per-CRTC state without kicking;
	 * nv50_crtc_set_image() below does the update */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);
	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}
1147
/* CRTC helper .mode_set_base: re-scan-out the (possibly new) framebuffer
 * at the given panning offset without a full modeset.  Any in-flight
 * page flip is stopped first, then flipping is re-armed on the new fb.
 */
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->primary->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
	return 0;
}
1170
/* CRTC helper .mode_set_base_atomic: kdb/panic-safe scanout switch.
 * Unlike mode_set_base() it does not pin/swap the fb BO or re-arm page
 * flipping, since it may run in atomic (non-sleeping) context.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
1181
/* Upload the software gamma LUT (256 entries of 16-bit r/g/b) into the
 * hardware LUT buffer.  Entries are truncated to 14 bits (>> 2); the
 * in-memory layout differs by display class: 8 bytes per entry before
 * NVD0, 32 bytes per entry (with a 0x6000 bias) from NVD0 on.
 */
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08) + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}
1206
/* CRTC helper .disable: release the pin and reference held on the
 * head's currently scanned-out framebuffer BO, if any.
 */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}
1215
/* CRTC .cursor_set: set or clear the hardware cursor image.
 * A non-zero @handle names a userspace GEM BO whose contents (must be
 * 64x64, 32bpp) are copied word-by-word into the CRTC's private cursor
 * BO; handle==0 hides the cursor.  Cursor visibility in hardware is
 * only touched when it actually changes.
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* copy the full 64x64 ARGB image into our cursor BO */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
1255
/* CRTC .cursor_move: write the cursor position directly into the PIOC
 * cursor channel's user area (SET_CURSOR_HOT_SPOT-style y<<16|x at
 * 0x84, then 0x80 to submit/update).  Always succeeds.
 */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}
1265
1266static void
1267nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1268		    uint32_t start, uint32_t size)
1269{
1270	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1271	u32 end = min_t(u32, start + size, 256);
1272	u32 i;
1273
1274	for (i = start; i < end; i++) {
1275		nv_crtc->lut.r[i] = r[i];
1276		nv_crtc->lut.g[i] = g[i];
1277		nv_crtc->lut.b[i] = b[i];
1278	}
1279
1280	nv50_crtc_lut_load(crtc);
1281}
1282
/* CRTC .destroy: tear down the head's EVO channels, drop the scanout
 * image pin, and release the cursor and LUT BOs.  Also used as the
 * error-unwind path of nv50_crtc_create(), so every resource release
 * must tolerate the resource never having been allocated.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);

	nv50_dmac_destroy(disp->core, &head->ovly.base);
	nv50_pioc_destroy(disp->core, &head->oimm.base);
	nv50_dmac_destroy(disp->core, &head->sync.base);
	nv50_pioc_destroy(disp->core, &head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 *     disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}
1315
/* CRTC helper vtable: modeset/dpms plumbing for nv50+ heads */
static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};
1327
/* CRTC core vtable: userspace-facing cursor/gamma/flip entry points */
static const struct drm_crtc_funcs nv50_crtc_func = {
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = nouveau_crtc_set_config,
	.destroy = nv50_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
1336
/* Intentionally empty: nv50+ cursor position is programmed through the
 * DRM cursor_move hook instead of the legacy nouveau_crtc callback.
 */
static void
nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}
1341
/* Intentionally empty: the cursor BO offset is handled elsewhere on
 * nv50+; this legacy nouveau_crtc callback is a required no-op.
 */
static void
nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
1346
1347static int
1348nv50_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
1349{
1350	struct nv50_disp *disp = nv50_disp(dev);
1351	struct nv50_head *head;
1352	struct drm_crtc *crtc;
1353	int ret, i;
1354
1355	head = kzalloc(sizeof(*head), GFP_KERNEL);
1356	if (!head)
1357		return -ENOMEM;
1358
1359	head->base.index = index;
1360	head->base.set_dither = nv50_crtc_set_dither;
1361	head->base.set_scale = nv50_crtc_set_scale;
1362	head->base.set_color_vibrance = nv50_crtc_set_color_vibrance;
1363	head->base.color_vibrance = 50;
1364	head->base.vibrant_hue = 0;
1365	head->base.cursor.set_offset = nv50_cursor_set_offset;
1366	head->base.cursor.set_pos = nv50_cursor_set_pos;
1367	for (i = 0; i < 256; i++) {
1368		head->base.lut.r[i] = i << 8;
1369		head->base.lut.g[i] = i << 8;
1370		head->base.lut.b[i] = i << 8;
1371	}
1372
1373	crtc = &head->base.base;
1374	drm_crtc_init(dev, crtc, &nv50_crtc_func);
1375	drm_crtc_helper_add(crtc, &nv50_crtc_hfunc);
1376	drm_mode_crtc_set_gamma_size(crtc, 256);
1377
1378	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1379			     0, 0x0000, NULL, &head->base.lut.nvbo);
1380	if (!ret) {
1381		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1382		if (!ret) {
1383			ret = nouveau_bo_map(head->base.lut.nvbo);
1384			if (ret)
1385				nouveau_bo_unpin(head->base.lut.nvbo);
1386		}
1387		if (ret)
1388			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1389	}
1390
1391	if (ret)
1392		goto out;
1393
1394	nv50_crtc_lut_load(crtc);
1395
1396	/* allocate cursor resources */
1397	ret = nv50_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
1398			      &(struct nv50_display_curs_class) {
1399					.head = index,
1400			      }, sizeof(struct nv50_display_curs_class),
1401			      &head->curs.base);
1402	if (ret)
1403		goto out;
1404
1405	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1406			     0, 0x0000, NULL, &head->base.cursor.nvbo);
1407	if (!ret) {
1408		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1409		if (!ret) {
1410			ret = nouveau_bo_map(head->base.cursor.nvbo);
1411			if (ret)
1412				nouveau_bo_unpin(head->base.lut.nvbo);
1413		}
1414		if (ret)
1415			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1416	}
1417
1418	if (ret)
1419		goto out;
1420
1421	/* allocate page flip / sync resources */
1422	ret = nv50_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
1423			      &(struct nv50_display_sync_class) {
1424					.pushbuf = EVO_PUSH_HANDLE(SYNC, index),
1425					.head = index,
1426			      }, sizeof(struct nv50_display_sync_class),
1427			      disp->sync->bo.offset, &head->sync.base);
1428	if (ret)
1429		goto out;
1430
1431	head->sync.addr = EVO_FLIP_SEM0(index);
1432	head->sync.data = 0x00000000;
1433
1434	/* allocate overlay resources */
1435	ret = nv50_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
1436			      &(struct nv50_display_oimm_class) {
1437					.head = index,
1438			      }, sizeof(struct nv50_display_oimm_class),
1439			      &head->oimm.base);
1440	if (ret)
1441		goto out;
1442
1443	ret = nv50_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
1444			      &(struct nv50_display_ovly_class) {
1445					.pushbuf = EVO_PUSH_HANDLE(OVLY, index),
1446					.head = index,
1447			      }, sizeof(struct nv50_display_ovly_class),
1448			      disp->sync->bo.offset, &head->ovly.base);
1449	if (ret)
1450		goto out;
1451
1452out:
1453	if (ret)
1454		nv50_crtc_destroy(crtc);
1455	return ret;
1456}
1457
1458/******************************************************************************
1459 * DAC
1460 *****************************************************************************/
1461static void
1462nv50_dac_dpms(struct drm_encoder *encoder, int mode)
1463{
1464	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1465	struct nv50_disp *disp = nv50_disp(encoder->dev);
1466	int or = nv_encoder->or;
1467	u32 dpms_ctrl;
1468
1469	dpms_ctrl = 0x00000000;
1470	if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1471		dpms_ctrl |= 0x00000001;
1472	if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1473		dpms_ctrl |= 0x00000004;
1474
1475	nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
1476}
1477
1478static bool
1479nv50_dac_mode_fixup(struct drm_encoder *encoder,
1480		    const struct drm_display_mode *mode,
1481		    struct drm_display_mode *adjusted_mode)
1482{
1483	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1484	struct nouveau_connector *nv_connector;
1485
1486	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1487	if (nv_connector && nv_connector->native_mode) {
1488		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1489			int id = adjusted_mode->base.id;
1490			*adjusted_mode = *nv_connector->native_mode;
1491			adjusted_mode->base.id = id;
1492		}
1493	}
1494
1495	return true;
1496}
1497
/* Intentionally empty: all DAC state is programmed in mode_set/dpms,
 * but the helper framework requires a .commit hook.
 */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}
1502
/* Encoder .mode_set for DACs: power the DAC on, then attach it to the
 * CRTC in the core channel with the appropriate sync polarity flags.
 * Pre-NVD0 uses per-OR methods; NVD0+ programs per-head sync/"magic"
 * state and a separate per-OR owner mask.
 */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			u32 syncs = 0x00000000;

			/* negative-going sync polarities */
			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	/* remember which CRTC drives us, for disconnect() */
	nv_encoder->crtc = encoder->crtc;
}
1551
/* Detach the DAC from its CRTC: quiesce the head via
 * nv50_crtc_prepare(), clear the OR's owner mask in the core channel,
 * and forget the CRTC binding.  Safe to call when already detached.
 */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}
1578
1579static enum drm_connector_status
1580nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1581{
1582	struct nv50_disp *disp = nv50_disp(encoder->dev);
1583	int ret, or = nouveau_encoder(encoder)->or;
1584	u32 load = nouveau_drm(encoder->dev)->vbios.dactestval;
1585	if (load == 0)
1586		load = 340;
1587
1588	ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
1589	if (ret || !load)
1590		return connector_status_disconnected;
1591
1592	return connector_status_connected;
1593}
1594
/* Encoder .destroy for DACs: unregister from DRM and free the
 * nouveau_encoder allocated in nv50_dac_create().
 */
static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1601
/* DAC encoder helper vtable; disconnect doubles as prepare/disable */
static const struct drm_encoder_helper_funcs nv50_dac_hfunc = {
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};
1612
/* DAC encoder core vtable */
static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};
1616
/* Create and register a DRM analog (DAC) encoder for the given DCB
 * output entry and attach it to @connector.  The OR index and i2c bus
 * come from the DCB entry.  Returns 0 or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a bitmask; convert to a 0-based OR index */
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1642
1643/******************************************************************************
1644 * Audio
1645 *****************************************************************************/
/* Push the connector's ELD (EDID-like data for the audio driver) to
 * the display core for this SOR, enabling HDA audio.  No-op when the
 * monitor's EDID does not advertise audio support.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);

	/* eld[2] holds the baseline block length in 4-byte units */
	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
			    nv_connector->base.eld,
			    nv_connector->base.eld[2] * 4);
}
1663
/* Disable HDA audio on this SOR by submitting an empty ELD. */
static void
nv50_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
}
1672
1673/******************************************************************************
1674 * HDMI
1675 *****************************************************************************/
/* Enable the HDMI infoframe/audio path on this SOR+head when the sink
 * is an HDMI monitor.  max_ac_packet is derived from the hblank period
 * minus the rekey window and an 18-cycle constant (values taken from
 * the binary driver / Tegra).  Also kicks off ELD/audio setup.
 */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	/* method offset encodes head index and OR */
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
			    NV84_DISP_SOR_HDMI_PWR_STATE_ON |
			    (max_ac_packet << 16) | rekey);

	nv50_audio_mode_set(encoder, mode);
}
1702
/* Tear down HDMI on this SOR+head: disable audio first, then power
 * down the HDMI block via the display core.
 */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;

	nv50_audio_disconnect(encoder);

	nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
}
1714
1715/******************************************************************************
1716 * SOR
1717 *****************************************************************************/
/* Encoder .dpms for SORs.  Two TMDS encoders can share a physical OR
 * (one per link); if the partner encoder on the same OR is still ON we
 * must not power the OR down, so bail out early in that case.  For DP
 * outputs the SOR is powered first, then the DP-specific power method
 * is used for the requested state.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nv50_disp *disp = nv50_disp(dev);
	struct drm_encoder *partner;
	u32 mthd;

	nv_encoder->last_dpms = mode;

	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			/* shared OR still in use by the other link */
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	/* method offset encodes link (bits 3:2) and OR index */
	mthd  = (ffs(nv_encoder->dcb->sorconf.link) - 1) << 2;
	mthd |= nv_encoder->or;

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		nv_call(disp->core, NV50_DISP_SOR_PWR | mthd, 1);
		mthd |= NV94_DISP_SOR_DP_PWR;
	} else {
		mthd |= NV50_DISP_SOR_PWR;
	}

	nv_call(disp->core, mthd, (mode == DRM_MODE_DPMS_ON));
}
1755
1756static bool
1757nv50_sor_mode_fixup(struct drm_encoder *encoder,
1758		    const struct drm_display_mode *mode,
1759		    struct drm_display_mode *adjusted_mode)
1760{
1761	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1762	struct nouveau_connector *nv_connector;
1763
1764	nv_connector = nouveau_encoder_connector_get(nv_encoder);
1765	if (nv_connector && nv_connector->native_mode) {
1766		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1767			int id = adjusted_mode->base.id;
1768			*adjusted_mode = *nv_connector->native_mode;
1769			adjusted_mode->base.id = id;
1770		}
1771	}
1772
1773	return true;
1774}
1775
/* Read-modify-write the SOR control word in the core channel.  The
 * driver keeps a shadow copy in nv_encoder->ctrl and only touches the
 * hardware when (ctrl & ~mask) | (data & mask) actually changes it.
 * Note the intentional assignment inside the condition: push is only
 * fetched when a write is needed.
 */
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}
1792
/* Detach the SOR from its CRTC: mark it off, quiesce the head, clear
 * this head's bit from the SOR owner mask, and tear down HDMI/audio.
 * Safe to call when the encoder is not bound to a CRTC.
 */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}
1808
/* Intentionally empty: all SOR state is programmed in mode_set/dpms,
 * but the helper framework requires a .commit hook.
 */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}
1813
/* Encoder .mode_set for SORs (TMDS/LVDS/DP): select the output
 * protocol and (for DP) the colour depth, run any output-specific
 * setup (HDMI enable, LVDS script, DP link-rate bookkeeping), power
 * the SOR on, and finally attach the SOR to the head through
 * nv50_sor_ctrl().
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 lvds = 0, mask, ctrl;
	u8 owner = 1 << nv_crtc->index;	/* head bit in SOR control word */
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* single-link (<165MHz) vs dual-link protocol selection */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* build LVDS script argument: bit 8 = dual link,
		 * bit 9 = 24-bit panel */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG EDID extension: byte 121 flags dual link */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds |= 0x0100;
			}

			if (lvds & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds |= 0x0200;
		}

		nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
		break;
	case DCB_OUTPUT_DP:
		/* datarate = pixel clock * bits-per-pixel / 8 */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= NVD0_DISP_CLASS) {
		/* NVD0+: per-head sync/"magic" state, depth in sync word */
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* pre-NVD0: depth and sync polarity live in the SOR ctrl word */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}
1936
/* Encoder .destroy for SORs: unregister from DRM and free the
 * nouveau_encoder allocated in nv50_sor_create().
 */
static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1943
/* SOR encoder helper vtable; disconnect doubles as prepare/disable */
static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
1953
/* SOR encoder core vtable */
static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};
1957
/* Create and register a DRM SOR encoder (LVDS/TMDS/DP) for the given
 * DCB output entry and attach it to @connector.  DP outputs are
 * registered as TMDS at the DRM level.  Returns 0 or -ENOMEM.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
	case DCB_OUTPUT_TMDS:
	case DCB_OUTPUT_DP:
	default:
		type = DRM_MODE_ENCODER_TMDS;
		break;
	}

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	/* dcbe->or is a bitmask; convert to a 0-based OR index */
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);
	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type);
	drm_encoder_helper_add(encoder, &nv50_sor_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}
1993
1994/******************************************************************************
1995 * PIOR
1996 *****************************************************************************/
1997
1998static void
1999nv50_pior_dpms(struct drm_encoder *encoder, int mode)
2000{
2001	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2002	struct nv50_disp *disp = nv50_disp(encoder->dev);
2003	u32 mthd = (nv_encoder->dcb->type << 12) | nv_encoder->or;
2004	u32 ctrl = (mode == DRM_MODE_DPMS_ON);
2005	nv_call(disp->core, NV50_DISP_PIOR_PWR + mthd, ctrl);
2006}
2007
2008static bool
2009nv50_pior_mode_fixup(struct drm_encoder *encoder,
2010		     const struct drm_display_mode *mode,
2011		     struct drm_display_mode *adjusted_mode)
2012{
2013	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2014	struct nouveau_connector *nv_connector;
2015
2016	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2017	if (nv_connector && nv_connector->native_mode) {
2018		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
2019			int id = adjusted_mode->base.id;
2020			*adjusted_mode = *nv_connector->native_mode;
2021			adjusted_mode->base.id = id;
2022		}
2023	}
2024
2025	adjusted_mode->clock *= 2;
2026	return true;
2027}
2028
/* Intentionally empty: the encoder helper framework requires a .commit
 * hook, but the PIOR is fully programmed in nv50_pior_mode_set().
 */
static void
nv50_pior_commit(struct drm_encoder *encoder)
{
}
2033
2034static void
2035nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
2036		   struct drm_display_mode *adjusted_mode)
2037{
2038	struct nv50_mast *mast = nv50_mast(encoder->dev);
2039	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2040	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2041	struct nouveau_connector *nv_connector;
2042	u8 owner = 1 << nv_crtc->index;
2043	u8 proto, depth;
2044	u32 *push;
2045
2046	nv_connector = nouveau_encoder_connector_get(nv_encoder);
2047	switch (nv_connector->base.display_info.bpc) {
2048	case 10: depth = 0x6; break;
2049	case  8: depth = 0x5; break;
2050	case  6: depth = 0x2; break;
2051	default: depth = 0x0; break;
2052	}
2053
2054	switch (nv_encoder->dcb->type) {
2055	case DCB_OUTPUT_TMDS:
2056	case DCB_OUTPUT_DP:
2057		proto = 0x0;
2058		break;
2059	default:
2060		BUG_ON(1);
2061		break;
2062	}
2063
2064	nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON);
2065
2066	push = evo_wait(mast, 8);
2067	if (push) {
2068		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2069			u32 ctrl = (depth << 16) | (proto << 8) | owner;
2070			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2071				ctrl |= 0x00001000;
2072			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2073				ctrl |= 0x00002000;
2074			evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
2075			evo_data(push, ctrl);
2076		}
2077
2078		evo_kick(push, mast);
2079	}
2080
2081	nv_encoder->crtc = encoder->crtc;
2082}
2083
2084static void
2085nv50_pior_disconnect(struct drm_encoder *encoder)
2086{
2087	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2088	struct nv50_mast *mast = nv50_mast(encoder->dev);
2089	const int or = nv_encoder->or;
2090	u32 *push;
2091
2092	if (nv_encoder->crtc) {
2093		nv50_crtc_prepare(nv_encoder->crtc);
2094
2095		push = evo_wait(mast, 4);
2096		if (push) {
2097			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
2098				evo_mthd(push, 0x0700 + (or * 0x040), 1);
2099				evo_data(push, 0x00000000);
2100			}
2101			evo_kick(push, mast);
2102		}
2103	}
2104
2105	nv_encoder->crtc = NULL;
2106}
2107
/* Tear down and free an encoder allocated by nv50_pior_create(). */
static void
nv50_pior_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	/* encoder is the first member of struct nouveau_encoder, so this
	 * frees the whole nv50_pior_create() allocation */
	kfree(encoder);
}
2114
/* Encoder helper vtable for PIOR (off-chip) outputs.  Note that both
 * .prepare and .disable point at nv50_pior_disconnect: detaching from
 * the head is the same operation in either case.
 */
static const struct drm_encoder_helper_funcs nv50_pior_hfunc = {
	.dpms = nv50_pior_dpms,
	.mode_fixup = nv50_pior_mode_fixup,
	.prepare = nv50_pior_disconnect,
	.commit = nv50_pior_commit,
	.mode_set = nv50_pior_mode_set,
	.disable = nv50_pior_disconnect,
	.get_crtc = nv50_display_crtc_get,
};
2124
/* Core encoder vtable for PIOR outputs. */
static const struct drm_encoder_funcs nv50_pior_func = {
	.destroy = nv50_pior_destroy,
};
2128
2129static int
2130nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
2131{
2132	struct nouveau_drm *drm = nouveau_drm(connector->dev);
2133	struct nouveau_i2c *i2c = nouveau_i2c(drm->device);
2134	struct nouveau_i2c_port *ddc = NULL;
2135	struct nouveau_encoder *nv_encoder;
2136	struct drm_encoder *encoder;
2137	int type;
2138
2139	switch (dcbe->type) {
2140	case DCB_OUTPUT_TMDS:
2141		ddc  = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev));
2142		type = DRM_MODE_ENCODER_TMDS;
2143		break;
2144	case DCB_OUTPUT_DP:
2145		ddc  = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev));
2146		type = DRM_MODE_ENCODER_TMDS;
2147		break;
2148	default:
2149		return -ENODEV;
2150	}
2151
2152	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2153	if (!nv_encoder)
2154		return -ENOMEM;
2155	nv_encoder->dcb = dcbe;
2156	nv_encoder->or = ffs(dcbe->or) - 1;
2157	nv_encoder->i2c = ddc;
2158
2159	encoder = to_drm_encoder(nv_encoder);
2160	encoder->possible_crtcs = dcbe->heads;
2161	encoder->possible_clones = 0;
2162	drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type);
2163	drm_encoder_helper_add(encoder, &nv50_pior_hfunc);
2164
2165	drm_mode_connector_attach_encoder(connector, encoder);
2166	return 0;
2167}
2168
2169/******************************************************************************
2170 * Init
2171 *****************************************************************************/
/* Intentionally empty: nothing needs doing at display fini time; the
 * hook exists to satisfy the nouveau_display fini callback.
 */
void
nv50_display_fini(struct drm_device *dev)
{
}
2176
2177int
2178nv50_display_init(struct drm_device *dev)
2179{
2180	struct nv50_disp *disp = nv50_disp(dev);
2181	struct drm_crtc *crtc;
2182	u32 *push;
2183
2184	push = evo_wait(nv50_mast(dev), 32);
2185	if (!push)
2186		return -EBUSY;
2187
2188	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
2189		struct nv50_sync *sync = nv50_sync(crtc);
2190		nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data);
2191	}
2192
2193	evo_mthd(push, 0x0088, 1);
2194	evo_data(push, NvEvoSync);
2195	evo_kick(push, nv50_mast(dev));
2196	return 0;
2197}
2198
/* Tear down everything nv50_display_create() built: the master evo
 * channel, the shared sync buffer, and the disp structure itself.
 * Also called by nv50_display_create() on its own failure paths, so
 * it must cope with a partially-initialised disp.
 */
void
nv50_display_destroy(struct drm_device *dev)
{
	struct nv50_disp *disp = nv50_disp(dev);

	nv50_dmac_destroy(disp->core, &disp->mast.base);

	/* unmap before unpin before dropping the last ref.
	 * NOTE(review): unmap runs even when disp->sync is NULL while
	 * unpin is guarded — presumably nouveau_bo_unmap() tolerates
	 * NULL; confirm against its implementation. */
	nouveau_bo_unmap(disp->sync);
	if (disp->sync)
		nouveau_bo_unpin(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
2214
/* Build the nv50 display state: allocate the disp structure, the shared
 * sync buffer and master evo channel, then create CRTCs, encoders and
 * connectors from the VBIOS DCB table.  On any failure after the disp
 * allocation, nv50_display_destroy() unwinds whatever was set up.
 * Returns 0 on success or a negative errno.
 */
int
nv50_display_create(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nv50_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	/* register ourselves (and our callbacks) with the generic layer */
	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nv50_display_destroy;
	nouveau_display(dev)->init = nv50_display_init;
	nouveau_display(dev)->fini = nv50_display_fini;
	disp->core = nouveau_display(dev)->core;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret) {
			ret = nouveau_bo_map(disp->sync);
			if (ret)
				nouveau_bo_unpin(disp->sync);
		}
		/* on any failure drop the bo so destroy() sees NULL */
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nv50_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
			      &(struct nv50_display_mast_class) {
					.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
			      }, sizeof(struct nv50_display_mast_class),
			      disp->sync->bo.offset, &disp->mast.base);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	/* NOTE(review): 0x022448 presumably reports the head count on
	 * NVD0+; pre-NVD0 hardware is fixed at two heads */
	if (nv_mclass(disp->core) >= NVD0_DISP_CLASS)
		crtcs = nv_rd32(device, 0x022448);
	else
		crtcs = 2;

	for (i = 0; i < crtcs; i++) {
		ret = nv50_crtc_create(dev, disp->core, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		/* on-chip encoders go through the SOR/DAC paths, anything
		 * off-chip is driven through a PIOR */
		if (dcbe->location == DCB_LOC_ON_CHIP) {
			switch (dcbe->type) {
			case DCB_OUTPUT_TMDS:
			case DCB_OUTPUT_LVDS:
			case DCB_OUTPUT_DP:
				ret = nv50_sor_create(connector, dcbe);
				break;
			case DCB_OUTPUT_ANALOG:
				ret = nv50_dac_create(connector, dcbe);
				break;
			default:
				ret = -ENODEV;
				break;
			}
		} else {
			ret = nv50_pior_create(connector, dcbe);
		}

		/* a bad DCB entry isn't fatal; warn and carry on */
		if (ret) {
			NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
				     dcbe->location, dcbe->type,
				     ffs(dcbe->or) - 1, ret);
			ret = 0;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			connector->name);
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nv50_display_destroy(dev);
	return ret;
}
2321