nv50_display.c revision d55b4af909bc16f7982c2b8b8656f0898158627b
/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_dp_helper.h>

#include <nvif/class.h>

#include "nouveau_drm.h"
#include "nouveau_dma.h"
#include "nouveau_gem.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fence.h"
#include "nv50_display.h"

#define EVO_DMA_NR 9

/* Per-head EVO channel indices: one master (core) channel, then one
 * flip (base), overlay, overlay-immediate and cursor channel per head.
 */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures
 * (slot 0 is the master notifier; flip semaphores follow, one 0x100-byte
 * slot per head, with two 16-byte semaphores per slot)
 */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
#define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00)
#define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10)

/* Object-handle encodings used for the display objects created below;
 * the low bits carry the channel type/index so handles stay unique.
 */
#define EVO_CORE_HANDLE (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) (((c)->oclass & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) | \
			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))

/******************************************************************************
 * EVO channel
 *****************************************************************************/

struct nv50_chan {
	struct nvif_object user;	/* the display channel object itself */
};

/* Instantiate a display channel object, trying each class in the
 * NULL(0)-terminated @oclass list in order (lists in this file are sorted
 * newest chipset first) until one succeeds.  Returns 0 on success, or
 * -ENOSYS when no class in the list could be created.
 */
static int
nv50_chan_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, struct nv50_chan *chan)
{
	while (oclass[0]) {
		int ret = nvif_object_init(disp, NULL, (oclass[0] << 16) | head,
					   oclass[0], data, size,
					   &chan->user);
		/* advance to the next candidate class before testing ret */
		if (oclass++, ret == 0)
			return ret;
	}
	return -ENOSYS;
}

static void
nv50_chan_destroy(struct nv50_chan *chan)
{
	nvif_object_fini(&chan->user);
}

/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/

struct nv50_pioc {
	struct nv50_chan base;
};

static void
nv50_pioc_destroy(struct nv50_pioc *pioc)
{
	nv50_chan_destroy(&pioc->base);
}

/* PIO channels have no pushbuf; creation is just channel-object creation. */
static int
nv50_pioc_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, struct nv50_pioc *pioc)
{
	return nv50_chan_create(disp, oclass, head, data, size, &pioc->base);
}

/******************************************************************************
 * Cursor Immediate
 *****************************************************************************/

struct nv50_curs {
	struct nv50_pioc base;
};

static int
nv50_curs_create(struct nvif_object *disp, int head, struct nv50_curs *curs)
{
	struct nv50_display_curs_class args = {
		.head = head,
	};
	/* candidate classes, newest chipset first; 0-terminated */
	static const u32 oclass[] = {
		GM107_DISP_CURS_CLASS,
		NVF0_DISP_CURS_CLASS,
		NVE0_DISP_CURS_CLASS,
		NVD0_DISP_CURS_CLASS,
		NVA3_DISP_CURS_CLASS,
		NV94_DISP_CURS_CLASS,
		NVA0_DISP_CURS_CLASS,
		NV84_DISP_CURS_CLASS,
		NV50_DISP_CURS_CLASS,
		0
	};

	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
				&curs->base);
}

/******************************************************************************
 * Overlay Immediate
 *****************************************************************************/

struct nv50_oimm {
	struct nv50_pioc base;
};

static int
nv50_oimm_create(struct nvif_object *disp, int head, struct nv50_oimm *oimm)
{
	struct nv50_display_oimm_class args = {
		.head = head,
	};
	/* candidate classes, newest chipset first; 0-terminated */
	static const u32 oclass[] = {
		GM107_DISP_OIMM_CLASS,
		NVF0_DISP_OIMM_CLASS,
		NVE0_DISP_OIMM_CLASS,
		NVD0_DISP_OIMM_CLASS,
		NVA3_DISP_OIMM_CLASS,
		NV94_DISP_OIMM_CLASS,
		NVA0_DISP_OIMM_CLASS,
		NV84_DISP_OIMM_CLASS,
		NV50_DISP_OIMM_CLASS,
		0
	};

	return nv50_pioc_create(disp, oclass, head, &args, sizeof(args),
				&oimm->base);
}

/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/

struct nv50_dmac {
	struct nv50_chan base;
	dma_addr_t handle;	/* bus address of the pushbuf page */
	u32 *ptr;		/* CPU mapping of the pushbuf page */

	struct nvif_object sync;	/* ctxdma over the shared sync bo */
	struct nvif_object vram;	/* ctxdma over all of VRAM */

	/* Protects against concurrent pushbuf access to this channel, lock is
	 * grabbed by evo_wait (if the pushbuf reservation is successful) and
	 * dropped again by evo_kick. */
	struct mutex lock;
};

static void
nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
{
	nvif_object_fini(&dmac->vram);
	nvif_object_fini(&dmac->sync);

	nv50_chan_destroy(&dmac->base);

	if (dmac->ptr) {
		struct pci_dev *pdev = nvkm_device(nvif_device(disp))->pdev;
		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}
}

/* Create a DMA-mode EVO channel: allocate a one-page pushbuf, wrap it in a
 * temporary ctxdma (handle taken from the first u32 of @data, i.e. the
 * class args' pushbuf field), create the channel itself, then the sync and
 * vram ctxdmas used by methods queued later.  On failure the caller is
 * expected to invoke nv50_dmac_destroy() to release the pushbuf page.
 */
static int
nv50_dmac_create(struct nvif_object *disp, const u32 *oclass, u8 head,
		 void *data, u32 size, u64 syncbuf,
		 struct nv50_dmac *dmac)
{
	struct nouveau_fb *pfb = nvkm_fb(nvif_device(disp));
	struct nvif_object pushbuf;
	u32 handle = *(u32 *)data;	/* pushbuf handle from class args */
	int ret;

	mutex_init(&dmac->lock);

	dmac->ptr = pci_alloc_consistent(nvkm_device(nvif_device(disp))->pdev,
					 PAGE_SIZE, &dmac->handle);
	if (!dmac->ptr)
		return -ENOMEM;

	/* ctxdma covering the pushbuf page, only needed during creation */
	ret = nvif_object_init(nvif_object(nvif_device(disp)), NULL, handle,
			       NV_DMA_FROM_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_PCI_US,
					.access = NV_DMA_V0_ACCESS_RD,
					.start = dmac->handle + 0x0000,
					.limit = dmac->handle + 0x0fff,
			       }, sizeof(struct nv_dma_v0), &pushbuf);
	if (ret)
		return ret;

	ret = nv50_chan_create(disp, oclass, head, data, size, &dmac->base);
	nvif_object_fini(&pushbuf);
	if (ret)
		return ret;

	/* ctxdma over the shared sync bo at @syncbuf */
	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000000,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = syncbuf + 0x0000,
					.limit = syncbuf + 0x0fff,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->sync);
	if (ret)
		return ret;

	/* ctxdma spanning all of VRAM */
	ret = nvif_object_init(&dmac->base.user, NULL, 0xf0000001,
			       NV_DMA_IN_MEMORY,
			       &(struct nv_dma_v0) {
					.target = NV_DMA_V0_TARGET_VRAM,
					.access = NV_DMA_V0_ACCESS_RDWR,
					.start = 0,
					.limit = pfb->ram->size - 1,
			       }, sizeof(struct nv_dma_v0),
			       &dmac->vram);
	if (ret)
		return ret;

	return ret;
}

/******************************************************************************
 * Core
 *****************************************************************************/

struct nv50_mast {
	struct nv50_dmac base;
};

static int
nv50_core_create(struct nvif_object *disp, u64 syncbuf, struct nv50_mast *core)
{
	struct nv50_display_mast_class args = {
		.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
	};
	/* candidate classes, newest chipset first; 0-terminated */
	static const u32 oclass[] = {
		GM107_DISP_MAST_CLASS,
		NVF0_DISP_MAST_CLASS,
		NVE0_DISP_MAST_CLASS,
		NVD0_DISP_MAST_CLASS,
		NVA3_DISP_MAST_CLASS,
		NV94_DISP_MAST_CLASS,
		NVA0_DISP_MAST_CLASS,
		NV84_DISP_MAST_CLASS,
		NV50_DISP_MAST_CLASS,
		0
	};

	return nv50_dmac_create(disp, oclass, 0, &args, sizeof(args), syncbuf,
				&core->base);
}

/******************************************************************************
 * Base
 *****************************************************************************/

struct nv50_sync {
	struct nv50_dmac base;
	u32 addr;	/* current flip-semaphore offset in the sync bo */
	u32 data;	/* current flip-semaphore sequence value */
};

static int
nv50_base_create(struct nvif_object *disp, int head, u64 syncbuf,
		 struct nv50_sync *base)
{
	struct nv50_display_sync_class args = {
		.pushbuf = EVO_PUSH_HANDLE(SYNC, head),
		.head = head,
	};
	/* candidate classes, newest chipset first; 0-terminated */
	static const
u32 oclass[] = {
		GM107_DISP_SYNC_CLASS,
		NVF0_DISP_SYNC_CLASS,
		NVE0_DISP_SYNC_CLASS,
		NVD0_DISP_SYNC_CLASS,
		NVA3_DISP_SYNC_CLASS,
		NV94_DISP_SYNC_CLASS,
		NVA0_DISP_SYNC_CLASS,
		NV84_DISP_SYNC_CLASS,
		NV50_DISP_SYNC_CLASS,
		0
	};

	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
				syncbuf, &base->base);
}

/******************************************************************************
 * Overlay
 *****************************************************************************/

struct nv50_ovly {
	struct nv50_dmac base;
};

static int
nv50_ovly_create(struct nvif_object *disp, int head, u64 syncbuf,
		 struct nv50_ovly *ovly)
{
	struct nv50_display_ovly_class args = {
		.pushbuf = EVO_PUSH_HANDLE(OVLY, head),
		.head = head,
	};
	/* candidate classes, newest chipset first; 0-terminated */
	static const u32 oclass[] = {
		GM107_DISP_OVLY_CLASS,
		NVF0_DISP_OVLY_CLASS,
		NVE0_DISP_OVLY_CLASS,
		NVD0_DISP_OVLY_CLASS,
		NVA3_DISP_OVLY_CLASS,
		NV94_DISP_OVLY_CLASS,
		NVA0_DISP_OVLY_CLASS,
		NV84_DISP_OVLY_CLASS,
		NV50_DISP_OVLY_CLASS,
		0
	};

	return nv50_dmac_create(disp, oclass, head, &args, sizeof(args),
				syncbuf, &ovly->base);
}

/* Per-head state: the DRM crtc plus all EVO channels serving this head. */
struct nv50_head {
	struct nouveau_crtc base;
	struct nouveau_bo *image;	/* currently-pinned scanout buffer */
	struct nv50_curs curs;
	struct nv50_sync sync;
	struct nv50_ovly ovly;
	struct nv50_oimm oimm;
};

#define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
#define nv50_curs(c) (&nv50_head(c)->curs)
#define nv50_sync(c) (&nv50_head(c)->sync)
#define nv50_ovly(c) (&nv50_head(c)->ovly)
#define nv50_oimm(c) (&nv50_head(c)->oimm)
#define nv50_chan(c) (&(c)->base.base)
#define nv50_vers(c) nv50_chan(c)->user.oclass

/* Framebuffer ctxdmas instantiated in the core and each base channel. */
struct nv50_fbdma {
	struct list_head head;
	struct nvif_object core;
	struct nvif_object base[4];
};

struct nv50_disp {
	struct nvif_object *disp;
	struct nv50_mast mast;

	struct list_head fbdma;

	struct nouveau_bo *sync;	/* shared sync bo (see EVO_SYNC) */
};

static struct nv50_disp *
nv50_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nv50_mast(d) (&nv50_disp(d)->mast)

static struct drm_crtc *
nv50_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * EVO channel helpers
 *****************************************************************************/
/* Reserve @nr dwords of pushbuf space.  On success, returns the write
 * pointer and leaves dmac->lock HELD (released later by evo_kick); on
 * failure (channel stalled), the lock is dropped and NULL returned.  If
 * the request would run past the end of the page, a jump-to-start method
 * is written and the hardware is waited on to consume the buffer first.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nv50_dmac *dmac = evoc;
	u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;

	mutex_lock(&dmac->lock);
	if (put + nr >= (PAGE_SIZE / 4) - 8) {
		dmac->ptr[put] = 0x20000000;	/* jump to pushbuf start */

		nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
		if (!nvkm_wait(&dmac->base.user, 0x0004, ~0, 0x00000000)) {
			mutex_unlock(&dmac->lock);
			nv_error(nvkm_object(&dmac->base.user), "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}

/* Submit everything written since evo_wait() by advancing the put pointer,
 * and drop the pushbuf lock taken by evo_wait().
 */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nv50_dmac *dmac = evoc;
	nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
	mutex_unlock(&dmac->lock);
}

#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)

/* Polled by nv_wait_cb(): true once the master notifier has been written. */
static bool
evo_sync_wait(void *data)
{
	if (nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000)
		return true;
	usleep_range(1, 2);
	return false;
}

/* Quiesce the core channel: queue a notifier write and busy-wait on it. */
static int
evo_sync(struct drm_device *dev)
{
	struct nvif_device *device = &nouveau_drm(dev)->device;
	struct nv50_disp *disp = nv50_disp(dev);
	struct nv50_mast *mast = nv50_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
evo_mthd(push, 0x0080, 2); 468 evo_data(push, 0x00000000); 469 evo_data(push, 0x00000000); 470 evo_kick(push, mast); 471 if (nv_wait_cb(nvkm_device(device), evo_sync_wait, disp->sync)) 472 return 0; 473 } 474 475 return -EBUSY; 476} 477 478/****************************************************************************** 479 * Page flipping channel 480 *****************************************************************************/ 481struct nouveau_bo * 482nv50_display_crtc_sema(struct drm_device *dev, int crtc) 483{ 484 return nv50_disp(dev)->sync; 485} 486 487struct nv50_display_flip { 488 struct nv50_disp *disp; 489 struct nv50_sync *chan; 490}; 491 492static bool 493nv50_display_flip_wait(void *data) 494{ 495 struct nv50_display_flip *flip = data; 496 if (nouveau_bo_rd32(flip->disp->sync, flip->chan->addr / 4) == 497 flip->chan->data) 498 return true; 499 usleep_range(1, 2); 500 return false; 501} 502 503void 504nv50_display_flip_stop(struct drm_crtc *crtc) 505{ 506 struct nvif_device *device = &nouveau_drm(crtc->dev)->device; 507 struct nv50_display_flip flip = { 508 .disp = nv50_disp(crtc->dev), 509 .chan = nv50_sync(crtc), 510 }; 511 u32 *push; 512 513 push = evo_wait(flip.chan, 8); 514 if (push) { 515 evo_mthd(push, 0x0084, 1); 516 evo_data(push, 0x00000000); 517 evo_mthd(push, 0x0094, 1); 518 evo_data(push, 0x00000000); 519 evo_mthd(push, 0x00c0, 1); 520 evo_data(push, 0x00000000); 521 evo_mthd(push, 0x0080, 1); 522 evo_data(push, 0x00000000); 523 evo_kick(push, flip.chan); 524 } 525 526 nv_wait_cb(nvkm_device(device), nv50_display_flip_wait, &flip); 527} 528 529int 530nv50_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb, 531 struct nouveau_channel *chan, u32 swap_interval) 532{ 533 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); 534 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc); 535 struct nv50_head *head = nv50_head(crtc); 536 struct nv50_sync *sync = nv50_sync(crtc); 537 u32 *push; 538 int ret; 539 540 swap_interval <<= 
4; 541 if (swap_interval == 0) 542 swap_interval |= 0x100; 543 if (chan == NULL) 544 evo_sync(crtc->dev); 545 546 push = evo_wait(sync, 128); 547 if (unlikely(push == NULL)) 548 return -EBUSY; 549 550 if (chan && chan->object->oclass < G82_CHANNEL_GPFIFO) { 551 ret = RING_SPACE(chan, 8); 552 if (ret) 553 return ret; 554 555 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2); 556 OUT_RING (chan, NvEvoSema0 + nv_crtc->index); 557 OUT_RING (chan, sync->addr ^ 0x10); 558 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1); 559 OUT_RING (chan, sync->data + 1); 560 BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2); 561 OUT_RING (chan, sync->addr); 562 OUT_RING (chan, sync->data); 563 } else 564 if (chan && chan->object->oclass < FERMI_CHANNEL_GPFIFO) { 565 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr; 566 ret = RING_SPACE(chan, 12); 567 if (ret) 568 return ret; 569 570 BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1); 571 OUT_RING (chan, chan->vram.handle); 572 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 573 OUT_RING (chan, upper_32_bits(addr ^ 0x10)); 574 OUT_RING (chan, lower_32_bits(addr ^ 0x10)); 575 OUT_RING (chan, sync->data + 1); 576 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG); 577 BEGIN_NV04(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 578 OUT_RING (chan, upper_32_bits(addr)); 579 OUT_RING (chan, lower_32_bits(addr)); 580 OUT_RING (chan, sync->data); 581 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL); 582 } else 583 if (chan) { 584 u64 addr = nv84_fence_crtc(chan, nv_crtc->index) + sync->addr; 585 ret = RING_SPACE(chan, 10); 586 if (ret) 587 return ret; 588 589 BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 590 OUT_RING (chan, upper_32_bits(addr ^ 0x10)); 591 OUT_RING (chan, lower_32_bits(addr ^ 0x10)); 592 OUT_RING (chan, sync->data + 1); 593 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_WRITE_LONG | 594 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD); 595 BEGIN_NVC0(chan, 0, 
NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4); 596 OUT_RING (chan, upper_32_bits(addr)); 597 OUT_RING (chan, lower_32_bits(addr)); 598 OUT_RING (chan, sync->data); 599 OUT_RING (chan, NV84_SUBCHAN_SEMAPHORE_TRIGGER_ACQUIRE_EQUAL | 600 NVC0_SUBCHAN_SEMAPHORE_TRIGGER_YIELD); 601 } 602 603 if (chan) { 604 sync->addr ^= 0x10; 605 sync->data++; 606 FIRE_RING (chan); 607 } 608 609 /* queue the flip */ 610 evo_mthd(push, 0x0100, 1); 611 evo_data(push, 0xfffe0000); 612 evo_mthd(push, 0x0084, 1); 613 evo_data(push, swap_interval); 614 if (!(swap_interval & 0x00000100)) { 615 evo_mthd(push, 0x00e0, 1); 616 evo_data(push, 0x40000000); 617 } 618 evo_mthd(push, 0x0088, 4); 619 evo_data(push, sync->addr); 620 evo_data(push, sync->data++); 621 evo_data(push, sync->data); 622 evo_data(push, sync->base.sync.handle); 623 evo_mthd(push, 0x00a0, 2); 624 evo_data(push, 0x00000000); 625 evo_data(push, 0x00000000); 626 evo_mthd(push, 0x00c0, 1); 627 evo_data(push, nv_fb->r_handle); 628 evo_mthd(push, 0x0110, 2); 629 evo_data(push, 0x00000000); 630 evo_data(push, 0x00000000); 631 if (nv50_vers(sync) < NVD0_DISP_SYNC_CLASS) { 632 evo_mthd(push, 0x0800, 5); 633 evo_data(push, nv_fb->nvbo->bo.offset >> 8); 634 evo_data(push, 0); 635 evo_data(push, (fb->height << 16) | fb->width); 636 evo_data(push, nv_fb->r_pitch); 637 evo_data(push, nv_fb->r_format); 638 } else { 639 evo_mthd(push, 0x0400, 5); 640 evo_data(push, nv_fb->nvbo->bo.offset >> 8); 641 evo_data(push, 0); 642 evo_data(push, (fb->height << 16) | fb->width); 643 evo_data(push, nv_fb->r_pitch); 644 evo_data(push, nv_fb->r_format); 645 } 646 evo_mthd(push, 0x0080, 1); 647 evo_data(push, 0x00000000); 648 evo_kick(push, sync); 649 650 nouveau_bo_ref(nv_fb->nvbo, &head->image); 651 return 0; 652} 653 654/****************************************************************************** 655 * CRTC 656 *****************************************************************************/ 657static int 658nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, 
		     bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;

	/* Resolve AUTO dithering mode/depth against the framebuffer depth
	 * and the sink's reported bpc before programming.
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.primary->fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	push = evo_wait(mast, 4);
	if (push) {
		/* per-head dither method; offset/stride differ per class */
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
			evo_data(push, mode);
		} else
		if (nv50_vers(mast) < NVE0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		} else {
			evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
			evo_data(push, mode);
		}

		if (update) {
			/* 0x0080: commit, as used after state changes
			 * throughout this file */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}

/* Program the head's scaler according to the connector's scaling mode,
 * with optional underscan border compensation.
 */
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 13.19 fixed point */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit border: shrink ~6% plus 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			/*XXX: SCALE_CTRL_ACTIVE??? */
			evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		} else {
			evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_data(push, (oY << 16) | oX);
			evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
			evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
		}

		evo_kick(push, mast);

		if (update) {
			nv50_display_flip_stop(crtc);
			nv50_display_flip_next(crtc, crtc->primary->fb,
					       NULL, 1);
		}
	}

	return 0;
}

/* Program colour vibrance/hue; values are mapped from the -100..100 /
 * 0..100 property range into the hardware's 12-bit fields.
 */
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push, hue, vib;
	int adj;

	/* rounding adjustment for positive vibrance values */
	adj = (nv_crtc->color_vibrance > 0) ?
	      50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;
	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x08a8 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		} else {
			evo_mthd(push, 0x0498 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (hue << 20) | (vib << 8));
		}

		if (update) {
			/* 0x0080: commit, as used after state changes
			 * throughout this file */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	return 0;
}

/* Point the head's core-channel scanout at @fb with pan offset (@x, @y),
 * optionally committing immediately when @update is set.
 */
static int
nv50_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push;

	push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
			evo_data(push, (y << 16) | x);
			/* fb ctxdma handle only exists as a separate method
			 * on post-NV50 classes */
			if (nv50_vers(mast) > NV50_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
				evo_data(push, nvfb->r_handle);
			}
		} else {
			evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nvfb->nvbo->bo.offset >> 8);
			evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
			evo_data(push, (fb->height << 16) | fb->width);
			evo_data(push, nvfb->r_pitch);
			evo_data(push, nvfb->r_format);
			evo_data(push, nvfb->r_handle);
			evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
			evo_data(push, (y << 16) | x);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}

	/* remember the ctxdma in use, restored by nv50_crtc_commit() */
	nv_crtc->fb.handle = nvfb->r_handle;
	return 0;
}

/* Enable the hardware cursor for this head; method layout differs per
 * display class generation (0x85000000 appears to select the visible
 * 64x64 ARGB cursor format — inherited magic, unverified here).
 */
static void
nv50_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
		}
		evo_kick(push, mast);
	}
}

/* Disable the hardware cursor for this head. */
static void
nv50_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);
	u32 *push = evo_wait(mast, 16);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, mast);
	}
}

static void
nv50_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct nv50_mast *mast = nv50_mast(nv_crtc->base.dev);

	if (show)
		nv50_crtc_cursor_show(nv_crtc);
	else
		nv50_crtc_cursor_hide(nv_crtc);

	if (update) {
		/* commit the change immediately */
		u32 *push = evo_wait(mast, 2);
		if (push) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, mast);
		}
	}
}

/* DPMS is a no-op for these heads; power handling happens elsewhere. */
static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

/* drm_crtc_helper .prepare: stop flips and detach the head's scanout/LUT
 * state ahead of a modeset (per-class method layout, values mirror those
 * re-programmed in nv50_crtc_commit()).
 */
static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	nv50_display_flip_stop(crtc);

	push = evo_wait(mast, 6);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x40000000);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x03000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, false, false);
}

/* drm_crtc_helper .commit: re-attach framebuffer and LUT, restore cursor
 * visibility, and restart page flipping.
 */
static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	u32 *push;

	push = evo_wait(mast, 32);
	if (push) {
		if (nv50_vers(mast) < NV84_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		} else
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0xc0000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
			evo_data(push, mast->base.vram.handle);
		} else {
			evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
			evo_data(push, nv_crtc->fb.handle);
			evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
			evo_data(push, 0x83000000);
			evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
			evo_data(push, 0x00000000);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
			evo_data(push, mast->base.vram.handle);
			evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0xffffff00);
		}

		evo_kick(push, mast);
	}

	nv50_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	drm_mode_set_crtcinfo(adjusted_mode, CRTC_INTERLACE_HALVE_V);
	return true;
}

/* Pin the new primary fb in VRAM and, on success, swap it into
 * head->image (unpinning whatever was displayed before).
 */
static int
nv50_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->primary->fb);
	struct nv50_head *head = nv50_head(crtc);
	int ret;

	ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
	if (ret == 0) {
		if (head->image)
			nouveau_bo_unpin(head->image);
		nouveau_bo_ref(nvfb->nvbo, &head->image);
	}

	return ret;
}

/* Program the full CRTC timing set (raster, blanking, clock) for this
 * head.  Timing parameters are derived from the DRM mode in terms the
 * EVO core channel expects; ilace/vscan scale the vertical values for
 * interlaced/doublescan modes.
 */
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nv50_mast *mast = nv50_mast(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 *push;
	int ret;

	/* horizontal timings, measured from the start of sync */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, scaled for doublescan/interlace */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* second-field blanking window for interlaced modes */
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(mast, 64);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00800000 | mode->clock);
			evo_data(push, (ilace == 2) ? 2 : 0);
			evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
			evo_data(push, 0x00000000);
			evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		} else {
			evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
			evo_data(push, 0x00000000);
			evo_data(push, (vactive << 16) | hactive);
			evo_data(push, ( vsynce << 16) | hsynce);
			evo_data(push, (vblanke << 16) | hblanke);
			evo_data(push, (vblanks << 16) | hblanks);
			evo_data(push, (vblan2e << 16) | vblan2s);
			evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000); /* ??? */
			evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
			evo_data(push, mode->clock * 1000);
			evo_data(push, 0x00200000); /* ??? */
			evo_data(push, mode->clock * 1000);
			evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x00000311);
			evo_data(push, 0x00000100);
		}

		evo_kick(push, mast);
	}

	/* reapply per-connector output properties for the new mode */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nv50_crtc_set_dither(nv_crtc, false);
	nv50_crtc_set_scale(nv_crtc, false);
	nv50_crtc_set_color_vibrance(nv_crtc, false);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, false);
	return 0;
}

/* Change scanout offset/framebuffer without a full modeset. */
static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	struct nouveau_drm *drm = nouveau_drm(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	int ret;

	if (!crtc->primary->fb) {
		NV_DEBUG(drm, "No FB bound\n");
		return 0;
	}

	ret = nv50_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, crtc->primary->fb, x, y, true);
	nv50_display_flip_next(crtc, crtc->primary->fb, NULL, 1);
	return 0;
}

/* kgdb/kdb entry point: force scanout to the given fb immediately;
 * deliberately skips the bo pin bookkeeping done by the normal path.
 */
static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nv50_display_flip_stop(crtc);
	nv50_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

/* Upload the software gamma table into the hardware LUT buffer object.
 * Entry stride and value bias differ between pre-NVD0 and NVD0+.
 */
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		u16 r = nv_crtc->lut.r[i] >> 2;
		u16 g = nv_crtc->lut.g[i] >> 2;
		u16 b = nv_crtc->lut.b[i] >> 2;

		if (disp->disp->oclass < NVD0_DISP_CLASS) {
			writew(r + 0x0000, lut + (i * 0x08) + 0);
			writew(g + 0x0000, lut + (i * 0x08)
			       + 2);
			writew(b + 0x0000, lut + (i * 0x08) + 4);
		} else {
			/* NVD0+ LUT: 0x20-byte stride, 0x6000 bias */
			writew(r + 0x6000, lut + (i * 0x20) + 0);
			writew(g + 0x6000, lut + (i * 0x20) + 2);
			writew(b + 0x6000, lut + (i * 0x20) + 4);
		}
	}
}

/* Turn the head off, releasing the currently pinned scanout image. */
static void
nv50_crtc_disable(struct drm_crtc *crtc)
{
	struct nv50_head *head = nv50_head(crtc);
	evo_sync(crtc->dev);
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);
}

/* Legacy cursor ioctl: copy a 64x64 ARGB image from the user's GEM
 * object into the head's pre-allocated cursor bo, then toggle
 * visibility.  handle == 0 means "hide cursor".
 */
static int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nv50_crtc_cursor_show_hide(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

/* Move the hardware cursor via direct writes to the cursor channel. */
static int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nv50_curs *curs = nv50_curs(crtc);
	struct nv50_chan *chan = nv50_chan(curs);
	nvif_wr32(&chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nvif_wr32(&chan->user, 0x0080, 0x00000000);
	return 0;
}

/* Store the new gamma ramp and push it to hardware. */
static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 end = min_t(u32, start + size, 256);
	u32 i;

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	nv50_crtc_lut_load(crtc);
}

/* Tear down all per-head channels and buffer objects.  Also used as
 * the error-unwind path of nv50_crtc_create(), so every release below
 * must tolerate a partially constructed head.
 */
static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nv50_disp *disp = nv50_disp(crtc->dev);
	struct nv50_head *head = nv50_head(crtc);
	struct nv50_fbdma *fbdma;

	list_for_each_entry(fbdma, &disp->fbdma, head) {
		nvif_object_fini(&fbdma->base[nv_crtc->index]);
	}

	nv50_dmac_destroy(&head->ovly.base, disp->disp);
	nv50_pioc_destroy(&head->oimm.base);
	nv50_dmac_destroy(&head->sync.base, disp->disp);
	nv50_pioc_destroy(&head->curs.base);

	/*XXX: this shouldn't be necessary, but the core doesn't call
	 * disconnect() during the cleanup paths
	 */
	if (head->image)
		nouveau_bo_unpin(head->image);
	nouveau_bo_ref(NULL, &head->image);

	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	if (nv_crtc->cursor.nvbo)
		nouveau_bo_unpin(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	if (nv_crtc->lut.nvbo)
		nouveau_bo_unpin(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);

	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

static const struct drm_crtc_helper_funcs nv50_crtc_hfunc = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
	.disable = nv50_crtc_disable,
};

static const struct drm_crtc_funcs nv50_crtc_func = {
1348 .cursor_set = nv50_crtc_cursor_set, 1349 .cursor_move = nv50_crtc_cursor_move, 1350 .gamma_set = nv50_crtc_gamma_set, 1351 .set_config = nouveau_crtc_set_config, 1352 .destroy = nv50_crtc_destroy, 1353 .page_flip = nouveau_crtc_page_flip, 1354}; 1355 1356static void 1357nv50_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y) 1358{ 1359} 1360 1361static void 1362nv50_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset) 1363{ 1364} 1365 1366static int 1367nv50_crtc_create(struct drm_device *dev, int index) 1368{ 1369 struct nv50_disp *disp = nv50_disp(dev); 1370 struct nv50_head *head; 1371 struct drm_crtc *crtc; 1372 int ret, i; 1373 1374 head = kzalloc(sizeof(*head), GFP_KERNEL); 1375 if (!head) 1376 return -ENOMEM; 1377 1378 head->base.index = index; 1379 head->base.set_dither = nv50_crtc_set_dither; 1380 head->base.set_scale = nv50_crtc_set_scale; 1381 head->base.set_color_vibrance = nv50_crtc_set_color_vibrance; 1382 head->base.color_vibrance = 50; 1383 head->base.vibrant_hue = 0; 1384 head->base.cursor.set_offset = nv50_cursor_set_offset; 1385 head->base.cursor.set_pos = nv50_cursor_set_pos; 1386 for (i = 0; i < 256; i++) { 1387 head->base.lut.r[i] = i << 8; 1388 head->base.lut.g[i] = i << 8; 1389 head->base.lut.b[i] = i << 8; 1390 } 1391 1392 crtc = &head->base.base; 1393 drm_crtc_init(dev, crtc, &nv50_crtc_func); 1394 drm_crtc_helper_add(crtc, &nv50_crtc_hfunc); 1395 drm_mode_crtc_set_gamma_size(crtc, 256); 1396 1397 ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM, 1398 0, 0x0000, NULL, &head->base.lut.nvbo); 1399 if (!ret) { 1400 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM); 1401 if (!ret) { 1402 ret = nouveau_bo_map(head->base.lut.nvbo); 1403 if (ret) 1404 nouveau_bo_unpin(head->base.lut.nvbo); 1405 } 1406 if (ret) 1407 nouveau_bo_ref(NULL, &head->base.lut.nvbo); 1408 } 1409 1410 if (ret) 1411 goto out; 1412 1413 nv50_crtc_lut_load(crtc); 1414 1415 /* allocate cursor resources */ 1416 ret = 
nv50_curs_create(disp->disp, index, &head->curs); 1417 if (ret) 1418 goto out; 1419 1420 ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM, 1421 0, 0x0000, NULL, &head->base.cursor.nvbo); 1422 if (!ret) { 1423 ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM); 1424 if (!ret) { 1425 ret = nouveau_bo_map(head->base.cursor.nvbo); 1426 if (ret) 1427 nouveau_bo_unpin(head->base.lut.nvbo); 1428 } 1429 if (ret) 1430 nouveau_bo_ref(NULL, &head->base.cursor.nvbo); 1431 } 1432 1433 if (ret) 1434 goto out; 1435 1436 /* allocate page flip / sync resources */ 1437 ret = nv50_base_create(disp->disp, index, disp->sync->bo.offset, 1438 &head->sync); 1439 if (ret) 1440 goto out; 1441 1442 head->sync.addr = EVO_FLIP_SEM0(index); 1443 head->sync.data = 0x00000000; 1444 1445 /* allocate overlay resources */ 1446 ret = nv50_oimm_create(disp->disp, index, &head->oimm); 1447 if (ret) 1448 goto out; 1449 1450 ret = nv50_ovly_create(disp->disp, index, disp->sync->bo.offset, 1451 &head->ovly); 1452 if (ret) 1453 goto out; 1454 1455out: 1456 if (ret) 1457 nv50_crtc_destroy(crtc); 1458 return ret; 1459} 1460 1461/****************************************************************************** 1462 * DAC 1463 *****************************************************************************/ 1464static void 1465nv50_dac_dpms(struct drm_encoder *encoder, int mode) 1466{ 1467 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 1468 struct nv50_disp *disp = nv50_disp(encoder->dev); 1469 struct { 1470 struct nv50_disp_mthd_v1 base; 1471 struct nv50_disp_dac_pwr_v0 pwr; 1472 } args = { 1473 .base.version = 1, 1474 .base.method = NV50_DISP_MTHD_V1_DAC_PWR, 1475 .base.hasht = nv_encoder->dcb->hasht, 1476 .base.hashm = nv_encoder->dcb->hashm, 1477 .pwr.state = 1, 1478 .pwr.data = 1, 1479 .pwr.vsync = (mode != DRM_MODE_DPMS_SUSPEND && 1480 mode != DRM_MODE_DPMS_OFF), 1481 .pwr.hsync = (mode != DRM_MODE_DPMS_STANDBY && 1482 mode != DRM_MODE_DPMS_OFF), 1483 }; 1484 1485 
	nvif_mthd(disp->disp, 0, &args, sizeof(args));
}

/* Prefer the connector's native mode when scaling is enabled,
 * preserving the caller-visible mode id.
 */
static bool
nv50_dac_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

/* Nothing to do at commit; mode_set already programmed the hardware. */
static void
nv50_dac_commit(struct drm_encoder *encoder)
{
}

/* Route the DAC to its CRTC and program sync polarity. */
static void
nv50_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 *push;

	nv50_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(mast, 8);
	if (push) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			u32 syncs = 0x00000000;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000001;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000002;

			evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
			evo_data(push, 1 << nv_crtc->index);
			evo_data(push, syncs);
		} else {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs);
			evo_data(push, magic);
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
			evo_data(push, 1 << nv_crtc->index);
		}

		evo_kick(push, mast);
	}

	nv_encoder->crtc = encoder->crtc;
}

/* Detach the DAC from its CRTC (used for both prepare and disable). */
static void
nv50_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	const int or = nv_encoder->or;
	u32 *push;

	if (nv_encoder->crtc) {
		nv50_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(mast, 4);
		if (push) {
			if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
				evo_mthd(push, 0x0400 + (or * 0x080), 1);
				evo_data(push, 0x00000000);
			} else {
				evo_mthd(push, 0x0180 + (or * 0x020), 1);
				evo_data(push, 0x00000000);
			}
			evo_kick(push, mast);
		}
	}

	nv_encoder->crtc = NULL;
}

/* Analog load detection via the DAC_LOAD display method; the test
 * voltage comes from the VBIOS, with 340 as a fallback default.
 */
static enum drm_connector_status
nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_dac_load_v0 load;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
	};
	int ret;

	args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
	if (args.load.data == 0)
		args.load.data = 340;

	ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
	if (ret || !args.load.load)
		return connector_status_disconnected;

	return connector_status_connected;
}

static void
nv50_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_dac_hfunc =
{
	.dpms = nv50_dac_dpms,
	.mode_fixup = nv50_dac_mode_fixup,
	.prepare = nv50_dac_disconnect,
	.commit = nv50_dac_commit,
	.mode_set = nv50_dac_mode_set,
	.disable = nv50_dac_disconnect,
	.get_crtc = nv50_display_crtc_get,
	.detect = nv50_dac_detect
};

static const struct drm_encoder_funcs nv50_dac_func = {
	.destroy = nv50_dac_destroy,
};

/* Create a DRM encoder for a DCB DAC entry and attach it to the
 * connector.  Returns 0 on success or -ENOMEM.
 */
static int
nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type = DRM_MODE_ENCODER_DAC;

	nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
	if (!nv_encoder)
		return -ENOMEM;
	nv_encoder->dcb = dcbe;
	nv_encoder->or = ffs(dcbe->or) - 1;
	nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index);

	encoder = to_drm_encoder(nv_encoder);
	encoder->possible_crtcs = dcbe->heads;
	encoder->possible_clones = 0;
	drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type);
	drm_encoder_helper_add(encoder, &nv50_dac_hfunc);

	drm_mode_connector_attach_encoder(connector, encoder);
	return 0;
}

/******************************************************************************
 * Audio
 *****************************************************************************/
/* Push the connector's ELD to the SOR if the monitor reports audio
 * support.  eld[2] is the ELD baseline block size in 4-byte units.
 */
static void
nv50_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);

	nvif_exec(disp->disp, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
		  nv_connector->base.eld,
		  nv_connector->base.eld[2] * 4);
}

/* Clear the ELD, disabling audio on this SOR. */
static void
nv50_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);

	nvif_exec(disp->disp, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
}

/******************************************************************************
 * HDMI
 *****************************************************************************/
/* Enable HDMI infoframe/audio support on an HDMI-capable sink. */
static void
nv50_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;
	u32 data;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* audio packet budget derived from the hblank period */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	data = NV84_DISP_SOR_HDMI_PWR_STATE_ON | (max_ac_packet << 16) | rekey;
	nvif_exec(disp->disp, NV84_DISP_SOR_HDMI_PWR + moff, &data, sizeof(data));

	nv50_audio_mode_set(encoder, mode);
}

/* Power down HDMI (and audio) on this head/SOR pairing. */
static void
nv50_hdmi_disconnect(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
	u32 data = 0;

	nv50_audio_disconnect(encoder);

	nvif_exec(disp->disp,
		  NV84_DISP_SOR_HDMI_PWR + moff, &data, sizeof(data));
}

/******************************************************************************
 * SOR
 *****************************************************************************/
/* Set SOR power state.  If another TMDS encoder shares this OR and is
 * still on, leave the hardware alone.  DP uses a separate power method
 * and keeps the SOR itself powered.
 */
static void
nv50_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct {
		struct nv50_disp_mthd_v1 base;
		struct nv50_disp_sor_pwr_v0 pwr;
	} args = {
		.base.version = 1,
		.base.method = NV50_DISP_MTHD_V1_SOR_PWR,
		.base.hasht  = nv_encoder->dcb->hasht,
		.base.hashm  = nv_encoder->dcb->hashm,
		.pwr.state = mode == DRM_MODE_DPMS_ON,
	};
	struct drm_device *dev = encoder->dev;
	struct drm_encoder *partner;
	u32 mthd, data;

	nv_encoder->last_dpms = mode;

	/* don't power down an OR a still-active partner encoder shares */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	/* encode head/link/or into the method offset */
	mthd  = (ffs(nv_encoder->dcb->heads) - 1) << 3;
	mthd |= (ffs(nv_encoder->dcb->sorconf.link) - 1) << 2;
	mthd |= nv_encoder->or;

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		args.pwr.state = 1;
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
		data = (mode == DRM_MODE_DPMS_ON);
		mthd |= NV94_DISP_SOR_DP_PWR;
		nvif_exec(disp->disp, mthd, &data, sizeof(data));
	} else {
		nvif_mthd(disp->disp, 0, &args, sizeof(args));
	}
}

/* Prefer the connector's native mode when scaling is enabled,
 * preserving the caller-visible mode id.
 */
static bool
nv50_sor_mode_fixup(struct drm_encoder *encoder,
		    const struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (nv_connector && nv_connector->native_mode) {
		if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
			int id = adjusted_mode->base.id;
			*adjusted_mode = *nv_connector->native_mode;
			adjusted_mode->base.id = id;
		}
	}

	return true;
}

/* Read-modify-write the cached SOR control word and push it to the
 * core channel only when the value actually changes.
 */
static void
nv50_sor_ctrl(struct nouveau_encoder *nv_encoder, u32 mask, u32 data)
{
	struct nv50_mast *mast = nv50_mast(nv_encoder->base.base.dev);
	u32 temp = (nv_encoder->ctrl & ~mask) | (data & mask), *push;
	if (temp != nv_encoder->ctrl && (push = evo_wait(mast, 2))) {
		if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) {
			evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		} else {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, (nv_encoder->ctrl = temp));
		}
		evo_kick(push, mast);
	}
}

/* Detach the SOR from its CRTC and shut down HDMI on that head. */
static void
nv50_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);

	nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	nv_encoder->crtc = NULL;

	if (nv_crtc) {
		nv50_crtc_prepare(&nv_crtc->base);
		nv50_sor_ctrl(nv_encoder, 1 << nv_crtc->index, 0);
		nv50_hdmi_disconnect(&nv_encoder->base.base, nv_crtc);
	}
}

/* Nothing to do at commit; mode_set already programmed the hardware. */
static void
nv50_sor_commit(struct drm_encoder *encoder)
{
}

/* Program the SOR for the output type (TMDS/LVDS/DP): select protocol,
 * pixel depth and sync polarity, then route it to its CRTC.
 */
static void
nv50_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct nv50_disp *disp = nv50_disp(encoder->dev);
	struct nv50_mast *mast = nv50_mast(encoder->dev);
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct
	       nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 lvds = 0, mask, ctrl;
	u8 owner = 1 << nv_crtc->index;
	u8 proto = 0xf;
	u8 depth = 0x0;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	nv_encoder->crtc = encoder->crtc;

	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* single link below 165MHz, otherwise dual link */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				proto = 0x1;
			else
				proto = 0x5;
		} else {
			proto = 0x2;
		}

		nv50_hdmi_mode_set(&nv_encoder->base.base, mode);
		break;
	case DCB_OUTPUT_LVDS:
		proto = 0x0;

		/* build the LVDS script argument: bit 8 = dual link,
		 * bit 9 = 24-bit panel; sourced from VBIOS or EDID/SPWG
		 */
		if (bios->fp_no_ddc) {
			if (bios->fp.dual_link)
				lvds |= 0x0100;
			if (bios->fp.if_is_24bit)
				lvds |= 0x0200;
		} else {
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				/* SPWG EDID byte 121 == 2 means dual link */
				if (((u8 *)nv_connector->edid)[121] == 2)
					lvds |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				lvds |= 0x0100;
			}

			if (lvds & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					lvds |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					lvds |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				lvds |= 0x0200;
		}

		nvif_exec(disp->disp, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, &lvds, sizeof(lvds));
		break;
	case DCB_OUTPUT_DP:
		/* datarate = clock * bytes-per-pixel for link training */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			depth = 0x2;
		} else
		if (nv_connector->base.display_info.bpc == 8) {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			depth = 0x5;
		} else {
			nv_encoder->dp.datarate = mode->clock * 30 / 8;
			depth = 0x6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			proto = 0x8;
		else
			proto = 0x9;
		break;
	default:
		BUG_ON(1);
		break;
	}

	nv50_sor_dpms(&nv_encoder->base.base, DRM_MODE_DPMS_ON);

	if (nv50_vers(mast) >= NVD0_DISP_CLASS) {
		u32 *push = evo_wait(mast, 3);
		if (push) {
			u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
			u32 syncs = 0x00000001;

			if (mode->flags & DRM_MODE_FLAG_NHSYNC)
				syncs |= 0x00000008;
			if (mode->flags & DRM_MODE_FLAG_NVSYNC)
				syncs |= 0x00000010;

			if (mode->flags & DRM_MODE_FLAG_INTERLACE)
				magic |= 0x00000001;

			evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
			evo_data(push, syncs | (depth << 6));
			evo_data(push, magic);
			evo_kick(push, mast);
		}

		ctrl = proto << 8;
		mask = 0x00000f00;
	} else {
		/* pre-NVD0: depth/syncs live in the SOR control word */
		ctrl = (depth << 16) | (proto << 8);
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl |= 0x00001000;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl |= 0x00002000;
		mask = 0x000f3f00;
	}

	nv50_sor_ctrl(nv_encoder, mask | owner, ctrl | owner);
}

static void
nv50_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}

static const struct drm_encoder_helper_funcs nv50_sor_hfunc = {
	.dpms = nv50_sor_dpms,
	.mode_fixup = nv50_sor_mode_fixup,
	.prepare = nv50_sor_disconnect,
	.commit = nv50_sor_commit,
	.mode_set = nv50_sor_mode_set,
	.disable = nv50_sor_disconnect,
	.get_crtc = nv50_display_crtc_get,
};

static const struct drm_encoder_funcs nv50_sor_func = {
	.destroy = nv50_sor_destroy,
};

/* Create a DRM encoder for a DCB SOR entry (TMDS/LVDS/DP) and attach
 * it to the connector.  Returns 0 on success or -ENOMEM.
 */
static int
nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
{
	struct nouveau_drm *drm = nouveau_drm(connector->dev);
	struct nouveau_i2c *i2c = nvkm_i2c(&drm->device);
	struct nouveau_encoder *nv_encoder;
	struct drm_encoder *encoder;
	int type;

	switch (dcbe->type) {
	case DCB_OUTPUT_LVDS:
type = DRM_MODE_ENCODER_LVDS; break; 2003 case DCB_OUTPUT_TMDS: 2004 case DCB_OUTPUT_DP: 2005 default: 2006 type = DRM_MODE_ENCODER_TMDS; 2007 break; 2008 } 2009 2010 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL); 2011 if (!nv_encoder) 2012 return -ENOMEM; 2013 nv_encoder->dcb = dcbe; 2014 nv_encoder->or = ffs(dcbe->or) - 1; 2015 nv_encoder->i2c = i2c->find(i2c, dcbe->i2c_index); 2016 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF; 2017 2018 encoder = to_drm_encoder(nv_encoder); 2019 encoder->possible_crtcs = dcbe->heads; 2020 encoder->possible_clones = 0; 2021 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type); 2022 drm_encoder_helper_add(encoder, &nv50_sor_hfunc); 2023 2024 drm_mode_connector_attach_encoder(connector, encoder); 2025 return 0; 2026} 2027 2028/****************************************************************************** 2029 * PIOR 2030 *****************************************************************************/ 2031 2032static void 2033nv50_pior_dpms(struct drm_encoder *encoder, int mode) 2034{ 2035 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2036 struct nv50_disp *disp = nv50_disp(encoder->dev); 2037 u32 mthd = (nv_encoder->dcb->type << 12) | nv_encoder->or; 2038 u32 ctrl = (mode == DRM_MODE_DPMS_ON); 2039 nvif_exec(disp->disp, NV50_DISP_PIOR_PWR + mthd, &ctrl, sizeof(ctrl)); 2040} 2041 2042static bool 2043nv50_pior_mode_fixup(struct drm_encoder *encoder, 2044 const struct drm_display_mode *mode, 2045 struct drm_display_mode *adjusted_mode) 2046{ 2047 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2048 struct nouveau_connector *nv_connector; 2049 2050 nv_connector = nouveau_encoder_connector_get(nv_encoder); 2051 if (nv_connector && nv_connector->native_mode) { 2052 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) { 2053 int id = adjusted_mode->base.id; 2054 *adjusted_mode = *nv_connector->native_mode; 2055 adjusted_mode->base.id = id; 2056 } 2057 } 2058 2059 adjusted_mode->clock *= 2; 
2060 return true; 2061} 2062 2063static void 2064nv50_pior_commit(struct drm_encoder *encoder) 2065{ 2066} 2067 2068static void 2069nv50_pior_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 2070 struct drm_display_mode *adjusted_mode) 2071{ 2072 struct nv50_mast *mast = nv50_mast(encoder->dev); 2073 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2074 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc); 2075 struct nouveau_connector *nv_connector; 2076 u8 owner = 1 << nv_crtc->index; 2077 u8 proto, depth; 2078 u32 *push; 2079 2080 nv_connector = nouveau_encoder_connector_get(nv_encoder); 2081 switch (nv_connector->base.display_info.bpc) { 2082 case 10: depth = 0x6; break; 2083 case 8: depth = 0x5; break; 2084 case 6: depth = 0x2; break; 2085 default: depth = 0x0; break; 2086 } 2087 2088 switch (nv_encoder->dcb->type) { 2089 case DCB_OUTPUT_TMDS: 2090 case DCB_OUTPUT_DP: 2091 proto = 0x0; 2092 break; 2093 default: 2094 BUG_ON(1); 2095 break; 2096 } 2097 2098 nv50_pior_dpms(encoder, DRM_MODE_DPMS_ON); 2099 2100 push = evo_wait(mast, 8); 2101 if (push) { 2102 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) { 2103 u32 ctrl = (depth << 16) | (proto << 8) | owner; 2104 if (mode->flags & DRM_MODE_FLAG_NHSYNC) 2105 ctrl |= 0x00001000; 2106 if (mode->flags & DRM_MODE_FLAG_NVSYNC) 2107 ctrl |= 0x00002000; 2108 evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1); 2109 evo_data(push, ctrl); 2110 } 2111 2112 evo_kick(push, mast); 2113 } 2114 2115 nv_encoder->crtc = encoder->crtc; 2116} 2117 2118static void 2119nv50_pior_disconnect(struct drm_encoder *encoder) 2120{ 2121 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder); 2122 struct nv50_mast *mast = nv50_mast(encoder->dev); 2123 const int or = nv_encoder->or; 2124 u32 *push; 2125 2126 if (nv_encoder->crtc) { 2127 nv50_crtc_prepare(nv_encoder->crtc); 2128 2129 push = evo_wait(mast, 4); 2130 if (push) { 2131 if (nv50_vers(mast) < NVD0_DISP_MAST_CLASS) { 2132 evo_mthd(push, 
0x0700 + (or * 0x040), 1); 2133 evo_data(push, 0x00000000); 2134 } 2135 evo_kick(push, mast); 2136 } 2137 } 2138 2139 nv_encoder->crtc = NULL; 2140} 2141 2142static void 2143nv50_pior_destroy(struct drm_encoder *encoder) 2144{ 2145 drm_encoder_cleanup(encoder); 2146 kfree(encoder); 2147} 2148 2149static const struct drm_encoder_helper_funcs nv50_pior_hfunc = { 2150 .dpms = nv50_pior_dpms, 2151 .mode_fixup = nv50_pior_mode_fixup, 2152 .prepare = nv50_pior_disconnect, 2153 .commit = nv50_pior_commit, 2154 .mode_set = nv50_pior_mode_set, 2155 .disable = nv50_pior_disconnect, 2156 .get_crtc = nv50_display_crtc_get, 2157}; 2158 2159static const struct drm_encoder_funcs nv50_pior_func = { 2160 .destroy = nv50_pior_destroy, 2161}; 2162 2163static int 2164nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe) 2165{ 2166 struct nouveau_drm *drm = nouveau_drm(connector->dev); 2167 struct nouveau_i2c *i2c = nvkm_i2c(&drm->device); 2168 struct nouveau_i2c_port *ddc = NULL; 2169 struct nouveau_encoder *nv_encoder; 2170 struct drm_encoder *encoder; 2171 int type; 2172 2173 switch (dcbe->type) { 2174 case DCB_OUTPUT_TMDS: 2175 ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTDDC(dcbe->extdev)); 2176 type = DRM_MODE_ENCODER_TMDS; 2177 break; 2178 case DCB_OUTPUT_DP: 2179 ddc = i2c->find_type(i2c, NV_I2C_TYPE_EXTAUX(dcbe->extdev)); 2180 type = DRM_MODE_ENCODER_TMDS; 2181 break; 2182 default: 2183 return -ENODEV; 2184 } 2185 2186 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL); 2187 if (!nv_encoder) 2188 return -ENOMEM; 2189 nv_encoder->dcb = dcbe; 2190 nv_encoder->or = ffs(dcbe->or) - 1; 2191 nv_encoder->i2c = ddc; 2192 2193 encoder = to_drm_encoder(nv_encoder); 2194 encoder->possible_crtcs = dcbe->heads; 2195 encoder->possible_clones = 0; 2196 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type); 2197 drm_encoder_helper_add(encoder, &nv50_pior_hfunc); 2198 2199 drm_mode_connector_attach_encoder(connector, encoder); 2200 return 0; 2201} 2202 
2203/****************************************************************************** 2204 * Framebuffer 2205 *****************************************************************************/ 2206 2207static void 2208nv50_fbdma_fini(struct nv50_fbdma *fbdma) 2209{ 2210 int i; 2211 for (i = 0; i < ARRAY_SIZE(fbdma->base); i++) 2212 nvif_object_fini(&fbdma->base[i]); 2213 nvif_object_fini(&fbdma->core); 2214 list_del(&fbdma->head); 2215 kfree(fbdma); 2216} 2217 2218static int 2219nv50_fbdma_init(struct drm_device *dev, u32 name, u64 offset, u64 length, u8 kind) 2220{ 2221 struct nouveau_drm *drm = nouveau_drm(dev); 2222 struct nv50_disp *disp = nv50_disp(dev); 2223 struct nv50_mast *mast = nv50_mast(dev); 2224 struct __attribute__ ((packed)) { 2225 struct nv_dma_v0 base; 2226 union { 2227 struct nv50_dma_v0 nv50; 2228 struct gf100_dma_v0 gf100; 2229 struct gf110_dma_v0 gf110; 2230 }; 2231 } args = {}; 2232 struct nv50_fbdma *fbdma; 2233 struct drm_crtc *crtc; 2234 u32 size = sizeof(args.base); 2235 int ret; 2236 2237 list_for_each_entry(fbdma, &disp->fbdma, head) { 2238 if (fbdma->core.handle == name) 2239 return 0; 2240 } 2241 2242 fbdma = kzalloc(sizeof(*fbdma), GFP_KERNEL); 2243 if (!fbdma) 2244 return -ENOMEM; 2245 list_add(&fbdma->head, &disp->fbdma); 2246 2247 args.base.target = NV_DMA_V0_TARGET_VRAM; 2248 args.base.access = NV_DMA_V0_ACCESS_RDWR; 2249 args.base.start = offset; 2250 args.base.limit = offset + length - 1; 2251 2252 if (drm->device.info.chipset < 0x80) { 2253 args.nv50.part = NV50_DMA_V0_PART_256; 2254 size += sizeof(args.nv50); 2255 } else 2256 if (drm->device.info.chipset < 0xc0) { 2257 args.nv50.part = NV50_DMA_V0_PART_256; 2258 args.nv50.kind = kind; 2259 size += sizeof(args.nv50); 2260 } else 2261 if (drm->device.info.chipset < 0xd0) { 2262 args.gf100.kind = kind; 2263 size += sizeof(args.gf100); 2264 } else { 2265 args.gf110.page = GF110_DMA_V0_PAGE_LP; 2266 args.gf110.kind = kind; 2267 size += sizeof(args.gf110); 2268 } 2269 2270 
list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 2271 struct nv50_head *head = nv50_head(crtc); 2272 int ret = nvif_object_init(&head->sync.base.base.user, NULL, 2273 name, NV_DMA_IN_MEMORY, &args, size, 2274 &fbdma->base[head->base.index]); 2275 if (ret) { 2276 nv50_fbdma_fini(fbdma); 2277 return ret; 2278 } 2279 } 2280 2281 ret = nvif_object_init(&mast->base.base.user, NULL, name, 2282 NV_DMA_IN_MEMORY, &args, size, 2283 &fbdma->core); 2284 if (ret) { 2285 nv50_fbdma_fini(fbdma); 2286 return ret; 2287 } 2288 2289 return 0; 2290} 2291 2292static void 2293nv50_fb_dtor(struct drm_framebuffer *fb) 2294{ 2295} 2296 2297static int 2298nv50_fb_ctor(struct drm_framebuffer *fb) 2299{ 2300 struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb); 2301 struct nouveau_drm *drm = nouveau_drm(fb->dev); 2302 struct nouveau_bo *nvbo = nv_fb->nvbo; 2303 struct nv50_disp *disp = nv50_disp(fb->dev); 2304 struct nouveau_fb *pfb = nvkm_fb(&drm->device); 2305 u8 kind = nouveau_bo_tile_layout(nvbo) >> 8; 2306 u8 tile = nvbo->tile_mode; 2307 2308 if (nvbo->tile_flags & NOUVEAU_GEM_TILE_NONCONTIG) { 2309 NV_ERROR(drm, "framebuffer requires contiguous bo\n"); 2310 return -EINVAL; 2311 } 2312 2313 if (drm->device.info.chipset >= 0xc0) 2314 tile >>= 4; /* yep.. */ 2315 2316 switch (fb->depth) { 2317 case 8: nv_fb->r_format = 0x1e00; break; 2318 case 15: nv_fb->r_format = 0xe900; break; 2319 case 16: nv_fb->r_format = 0xe800; break; 2320 case 24: 2321 case 32: nv_fb->r_format = 0xcf00; break; 2322 case 30: nv_fb->r_format = 0xd100; break; 2323 default: 2324 NV_ERROR(drm, "unknown depth %d\n", fb->depth); 2325 return -EINVAL; 2326 } 2327 2328 if (disp->disp->oclass < NV84_DISP_CLASS) { 2329 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2330 (fb->pitches[0] | 0x00100000); 2331 nv_fb->r_format |= kind << 16; 2332 } else 2333 if (disp->disp->oclass < NVD0_DISP_CLASS) { 2334 nv_fb->r_pitch = kind ? 
(((fb->pitches[0] / 4) << 4) | tile) : 2335 (fb->pitches[0] | 0x00100000); 2336 } else { 2337 nv_fb->r_pitch = kind ? (((fb->pitches[0] / 4) << 4) | tile) : 2338 (fb->pitches[0] | 0x01000000); 2339 } 2340 nv_fb->r_handle = 0xffff0000 | kind; 2341 2342 return nv50_fbdma_init(fb->dev, nv_fb->r_handle, 0, pfb->ram->size, kind); 2343} 2344 2345/****************************************************************************** 2346 * Init 2347 *****************************************************************************/ 2348 2349void 2350nv50_display_fini(struct drm_device *dev) 2351{ 2352} 2353 2354int 2355nv50_display_init(struct drm_device *dev) 2356{ 2357 struct nv50_disp *disp = nv50_disp(dev); 2358 struct drm_crtc *crtc; 2359 u32 *push; 2360 2361 push = evo_wait(nv50_mast(dev), 32); 2362 if (!push) 2363 return -EBUSY; 2364 2365 list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) { 2366 struct nv50_sync *sync = nv50_sync(crtc); 2367 nouveau_bo_wr32(disp->sync, sync->addr / 4, sync->data); 2368 } 2369 2370 evo_mthd(push, 0x0088, 1); 2371 evo_data(push, nv50_mast(dev)->base.sync.handle); 2372 evo_kick(push, nv50_mast(dev)); 2373 return 0; 2374} 2375 2376void 2377nv50_display_destroy(struct drm_device *dev) 2378{ 2379 struct nv50_disp *disp = nv50_disp(dev); 2380 struct nv50_fbdma *fbdma, *fbtmp; 2381 2382 list_for_each_entry_safe(fbdma, fbtmp, &disp->fbdma, head) { 2383 nv50_fbdma_fini(fbdma); 2384 } 2385 2386 nv50_dmac_destroy(&disp->mast.base, disp->disp); 2387 2388 nouveau_bo_unmap(disp->sync); 2389 if (disp->sync) 2390 nouveau_bo_unpin(disp->sync); 2391 nouveau_bo_ref(NULL, &disp->sync); 2392 2393 nouveau_display(dev)->priv = NULL; 2394 kfree(disp); 2395} 2396 2397int 2398nv50_display_create(struct drm_device *dev) 2399{ 2400 struct nvif_device *device = &nouveau_drm(dev)->device; 2401 struct nouveau_drm *drm = nouveau_drm(dev); 2402 struct dcb_table *dcb = &drm->vbios.dcb; 2403 struct drm_connector *connector, *tmp; 2404 struct nv50_disp *disp; 2405 struct 
dcb_output *dcbe; 2406 int crtcs, ret, i; 2407 2408 disp = kzalloc(sizeof(*disp), GFP_KERNEL); 2409 if (!disp) 2410 return -ENOMEM; 2411 INIT_LIST_HEAD(&disp->fbdma); 2412 2413 nouveau_display(dev)->priv = disp; 2414 nouveau_display(dev)->dtor = nv50_display_destroy; 2415 nouveau_display(dev)->init = nv50_display_init; 2416 nouveau_display(dev)->fini = nv50_display_fini; 2417 nouveau_display(dev)->fb_ctor = nv50_fb_ctor; 2418 nouveau_display(dev)->fb_dtor = nv50_fb_dtor; 2419 disp->disp = &nouveau_display(dev)->disp; 2420 2421 /* small shared memory area we use for notifiers and semaphores */ 2422 ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM, 2423 0, 0x0000, NULL, &disp->sync); 2424 if (!ret) { 2425 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM); 2426 if (!ret) { 2427 ret = nouveau_bo_map(disp->sync); 2428 if (ret) 2429 nouveau_bo_unpin(disp->sync); 2430 } 2431 if (ret) 2432 nouveau_bo_ref(NULL, &disp->sync); 2433 } 2434 2435 if (ret) 2436 goto out; 2437 2438 /* allocate master evo channel */ 2439 ret = nv50_core_create(disp->disp, disp->sync->bo.offset, 2440 &disp->mast); 2441 if (ret) 2442 goto out; 2443 2444 /* create crtc objects to represent the hw heads */ 2445 if (disp->disp->oclass >= NVD0_DISP_CLASS) 2446 crtcs = nvif_rd32(device, 0x022448); 2447 else 2448 crtcs = 2; 2449 2450 for (i = 0; i < crtcs; i++) { 2451 ret = nv50_crtc_create(dev, i); 2452 if (ret) 2453 goto out; 2454 } 2455 2456 /* create encoder/connector objects based on VBIOS DCB table */ 2457 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) { 2458 connector = nouveau_connector_create(dev, dcbe->connector); 2459 if (IS_ERR(connector)) 2460 continue; 2461 2462 if (dcbe->location == DCB_LOC_ON_CHIP) { 2463 switch (dcbe->type) { 2464 case DCB_OUTPUT_TMDS: 2465 case DCB_OUTPUT_LVDS: 2466 case DCB_OUTPUT_DP: 2467 ret = nv50_sor_create(connector, dcbe); 2468 break; 2469 case DCB_OUTPUT_ANALOG: 2470 ret = nv50_dac_create(connector, dcbe); 2471 break; 2472 default: 2473 
ret = -ENODEV; 2474 break; 2475 } 2476 } else { 2477 ret = nv50_pior_create(connector, dcbe); 2478 } 2479 2480 if (ret) { 2481 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n", 2482 dcbe->location, dcbe->type, 2483 ffs(dcbe->or) - 1, ret); 2484 ret = 0; 2485 } 2486 } 2487 2488 /* cull any connectors we created that don't have an encoder */ 2489 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) { 2490 if (connector->encoder_ids[0]) 2491 continue; 2492 2493 NV_WARN(drm, "%s has no encoders, removing\n", 2494 connector->name); 2495 connector->funcs->destroy(connector); 2496 } 2497 2498out: 2499 if (ret) 2500 nv50_display_destroy(dev); 2501 return ret; 2502} 2503