intel_dp.c revision c898261c0dad617f0f1080bedc02d507a2fcfb92
/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 *
 */

#include <linux/i2c.h>
#include <linux/slab.h>
#include <linux/export.h>
#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "intel_drv.h"
#include "i915_drm.h"
#include "i915_drv.h"
#include "drm_dp_helper.h"

#define DP_RECEIVER_CAP_SIZE		0xf
#define DP_LINK_STATUS_SIZE		6
#define DP_LINK_CHECK_TIMEOUT		(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9

struct intel_dp {
	struct intel_encoder base;
	uint32_t output_reg;
	uint32_t DP;
	uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	int force_audio;
	uint32_t color_range;
	int dpms_mode;
	uint8_t link_bw;
	uint8_t lane_count;
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
	struct i2c_adapter adapter;
	struct i2c_algo_dp_aux_data algo;
	bool is_pch_edp;
	uint8_t train_set[4];
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct delayed_work panel_vdd_work;
	bool want_panel_vdd;
};

/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct intel_dp *intel_dp)
{
	return intel_dp->base.type == INTEL_OUTPUT_EDP;
}

/**
 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a PCH DP port attached
 * to an eDP panel, false otherwise.  Helpful for determining whether we
 * may need FDI resources for a given DP output or not.
 */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
	return intel_dp->is_pch_edp;
}
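/*
 * Quick reference for how the three predicates used throughout this file
 * relate (is_cpu_edp() is defined just below):
 *
 *                               is_edp()   is_pch_edp()   is_cpu_edp()
 *   DP on a PCH port               no          no             no
 *   eDP on a PCH port              yes         yes            no
 *   eDP on the CPU port (DP A)     yes         no             yes
 */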
/**
 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a CPU eDP port.
 */
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}

static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}

static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}

/**
 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
 * @encoder: DRM encoder
 *
 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
 * by intel_display.c.
 */
bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp;

	if (!encoder)
		return false;

	intel_dp = enc_to_intel_dp(encoder);

	return is_pch_edp(intel_dp);
}

static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);

void
intel_edp_link_config(struct intel_encoder *intel_encoder,
		      int *lane_num, int *link_bw)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	*lane_num = intel_dp->lane_count;
	if (intel_dp->link_bw == DP_LINK_BW_1_62)
		*link_bw = 162000;
	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
		*link_bw = 270000;
}

static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
	switch (max_lane_count) {
	case 1: case 2: case 4:
		break;
	default:
		max_lane_count = 4;
	}
	return max_lane_count;
}

static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

	switch (max_link_bw) {
	case DP_LINK_BW_1_62:
	case DP_LINK_BW_2_7:
		break;
	default:
		max_link_bw = DP_LINK_BW_1_62;
		break;
	}
	return max_link_bw;
}

static int
intel_dp_link_clock(uint8_t link_bw)
{
	if (link_bw == DP_LINK_BW_2_7)
		return 270000;
	else
		return 162000;
}

/*
 * The units on the numbers in the next two are... bizarre.  Examples will
 * make it clearer; this one parallels an example in the eDP spec.
 *
 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
 *
 *     270000 * 1 * 8 / 10 == 216000
 *
 * The actual data capacity of that configuration is 2.16Gbit/s, so the
 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
 * 119000.  At 18bpp that's 2142000 kilobits per second.
 *
 * Thus the strange-looking division by 10 in intel_dp_link_required, to
 * get the result in decakilobits instead of kilobits.
 */

static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	return (pixel_clock * bpp + 9) / 10;
}

static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 8) / 10;
}
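/*
 * Worked example of how intel_dp_mode_valid() below combines the two
 * helpers above: a 1920x1080@60 mode has ->clock == 148500, so at 24bpp
 * intel_dp_link_required() gives 148500 * 24 / 10 == 356400 decakilobits.
 * Two lanes at 2.7GHz give intel_dp_max_data_rate() == 270000 * 2 * 8 / 10
 * == 432000, so the mode fits; a single lane (216000) would trigger the
 * 18bpp fallback (267300) and then still be rejected with MODE_CLOCK_HIGH.
 */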
static int
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
	int max_lanes = intel_dp_max_lane_count(intel_dp);
	int max_rate, mode_rate;

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	mode_rate = intel_dp_link_required(mode->clock, 24);
	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);

	if (mode_rate > max_rate) {
		mode_rate = intel_dp_link_required(mode->clock, 18);
		if (mode_rate > max_rate)
			return MODE_CLOCK_HIGH;
		else
			mode->private_flags |= INTEL_MODE_DP_FORCE_6BPC;
	}

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	return MODE_OK;
}

static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;
	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}

/* hrawclock is 1/4 the FSB frequency */
static int
intel_hrawclk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t clkcfg;

	clkcfg = I915_READ(CLKCFG);
	switch (clkcfg & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_400:
		return 100;
	case CLKCFG_FSB_533:
		return 133;
	case CLKCFG_FSB_667:
		return 166;
	case CLKCFG_FSB_800:
		return 200;
	case CLKCFG_FSB_1067:
		return 266;
	case CLKCFG_FSB_1333:
		return 333;
	/* these two are just a guess; one of them might be right */
	case CLKCFG_FSB_1600:
	case CLKCFG_FSB_1600_ALT:
		return 400;
	default:
		return 133;
	}
}

static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}

static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}

static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp))
		return;
	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
			      I915_READ(PCH_PP_STATUS),
			      I915_READ(PCH_PP_CONTROL));
	}
}
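/*
 * All AUX traffic below funnels through intel_dp_aux_ch(): the request is
 * packed into the 32-bit data registers big-endian style by pack_aux()
 * above (e.g. packing { 0x10, 0x00, 0x00, 0x0e } yields 0x1000000e), the
 * channel is kicked with a clock divider aiming at roughly 2MHz, and the
 * code busy-waits for SEND_BUSY to clear before unpacking any reply with
 * unpack_aux().  The outer loop retries up to five times since the DP spec
 * requires at least three attempts.
 */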
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk,
	 * and would like to run at 2MHz. So, take the
	 * hrawclk value and divide by 2 and use that
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev) || IS_GEN7(dev))
			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 62; /* IRL input clock fixed at 125Mhz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;

	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		WARN(1, "dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
434 * Timeouts occur when the sink is not connected 435 */ 436 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) { 437 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status); 438 return -EIO; 439 } 440 441 /* Timeouts occur when the device isn't connected, so they're 442 * "normal" -- don't fill the kernel log with these */ 443 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) { 444 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status); 445 return -ETIMEDOUT; 446 } 447 448 /* Unload any bytes sent back from the other side */ 449 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >> 450 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT); 451 if (recv_bytes > recv_size) 452 recv_bytes = recv_size; 453 454 for (i = 0; i < recv_bytes; i += 4) 455 unpack_aux(I915_READ(ch_data + i), 456 recv + i, recv_bytes - i); 457 458 return recv_bytes; 459} 460 461/* Write data to the aux channel in native mode */ 462static int 463intel_dp_aux_native_write(struct intel_dp *intel_dp, 464 uint16_t address, uint8_t *send, int send_bytes) 465{ 466 int ret; 467 uint8_t msg[20]; 468 int msg_bytes; 469 uint8_t ack; 470 471 intel_dp_check_edp(intel_dp); 472 if (send_bytes > 16) 473 return -1; 474 msg[0] = AUX_NATIVE_WRITE << 4; 475 msg[1] = address >> 8; 476 msg[2] = address & 0xff; 477 msg[3] = send_bytes - 1; 478 memcpy(&msg[4], send, send_bytes); 479 msg_bytes = send_bytes + 4; 480 for (;;) { 481 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1); 482 if (ret < 0) 483 return ret; 484 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 485 break; 486 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 487 udelay(100); 488 else 489 return -EIO; 490 } 491 return send_bytes; 492} 493 494/* Write a single byte to the aux channel in native mode */ 495static int 496intel_dp_aux_native_write_1(struct intel_dp *intel_dp, 497 uint16_t address, uint8_t byte) 498{ 499 return intel_dp_aux_native_write(intel_dp, address, &byte, 1); 500} 501 502/* read bytes from a native aux channel */ 503static int 504intel_dp_aux_native_read(struct intel_dp *intel_dp, 505 uint16_t address, uint8_t *recv, int recv_bytes) 506{ 507 uint8_t msg[4]; 508 int msg_bytes; 509 uint8_t reply[20]; 510 int reply_bytes; 511 uint8_t ack; 512 int ret; 513 514 intel_dp_check_edp(intel_dp); 515 msg[0] = AUX_NATIVE_READ << 4; 516 msg[1] = address >> 8; 517 msg[2] = address & 0xff; 518 msg[3] = recv_bytes - 1; 519 520 msg_bytes = 4; 521 reply_bytes = recv_bytes + 1; 522 523 for (;;) { 524 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, 525 reply, reply_bytes); 526 if (ret == 0) 527 return -EPROTO; 528 if (ret < 0) 529 return ret; 530 ack = reply[0]; 531 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) { 532 memcpy(recv, reply + 1, ret - 1); 533 return ret - 1; 534 } 535 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 536 udelay(100); 537 else 538 return -EIO; 539 } 540} 541 542static int 543intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, 544 uint8_t write_byte, uint8_t *read_byte) 545{ 546 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 547 struct intel_dp *intel_dp = container_of(adapter, 548 struct intel_dp, 549 adapter); 550 uint16_t address = algo_data->address; 551 uint8_t msg[5]; 552 uint8_t reply[2]; 553 unsigned retry; 554 int msg_bytes; 555 int reply_bytes; 556 int ret; 557 558 intel_dp_check_edp(intel_dp); 559 /* Set up the command byte */ 560 if (mode & MODE_I2C_READ) 561 msg[0] = AUX_I2C_READ << 4; 562 else 563 msg[0] = AUX_I2C_WRITE << 4; 564 565 if (!(mode & MODE_I2C_STOP)) 566 msg[0] 
|= AUX_I2C_MOT << 4; 567 568 msg[1] = address >> 8; 569 msg[2] = address; 570 571 switch (mode) { 572 case MODE_I2C_WRITE: 573 msg[3] = 0; 574 msg[4] = write_byte; 575 msg_bytes = 5; 576 reply_bytes = 1; 577 break; 578 case MODE_I2C_READ: 579 msg[3] = 0; 580 msg_bytes = 4; 581 reply_bytes = 2; 582 break; 583 default: 584 msg_bytes = 3; 585 reply_bytes = 1; 586 break; 587 } 588 589 for (retry = 0; retry < 5; retry++) { 590 ret = intel_dp_aux_ch(intel_dp, 591 msg, msg_bytes, 592 reply, reply_bytes); 593 if (ret < 0) { 594 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 595 return ret; 596 } 597 598 switch (reply[0] & AUX_NATIVE_REPLY_MASK) { 599 case AUX_NATIVE_REPLY_ACK: 600 /* I2C-over-AUX Reply field is only valid 601 * when paired with AUX ACK. 602 */ 603 break; 604 case AUX_NATIVE_REPLY_NACK: 605 DRM_DEBUG_KMS("aux_ch native nack\n"); 606 return -EREMOTEIO; 607 case AUX_NATIVE_REPLY_DEFER: 608 udelay(100); 609 continue; 610 default: 611 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", 612 reply[0]); 613 return -EREMOTEIO; 614 } 615 616 switch (reply[0] & AUX_I2C_REPLY_MASK) { 617 case AUX_I2C_REPLY_ACK: 618 if (mode == MODE_I2C_READ) { 619 *read_byte = reply[1]; 620 } 621 return reply_bytes - 1; 622 case AUX_I2C_REPLY_NACK: 623 DRM_DEBUG_KMS("aux_i2c nack\n"); 624 return -EREMOTEIO; 625 case AUX_I2C_REPLY_DEFER: 626 DRM_DEBUG_KMS("aux_i2c defer\n"); 627 udelay(100); 628 break; 629 default: 630 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]); 631 return -EREMOTEIO; 632 } 633 } 634 635 DRM_ERROR("too many retries, giving up\n"); 636 return -EREMOTEIO; 637} 638 639static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp); 640static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync); 641 642static int 643intel_dp_i2c_init(struct intel_dp *intel_dp, 644 struct intel_connector *intel_connector, const char *name) 645{ 646 int ret; 647 648 DRM_DEBUG_KMS("i2c_init %s\n", name); 649 intel_dp->algo.running = false; 650 intel_dp->algo.address = 0; 651 intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch; 652 653 memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter)); 654 intel_dp->adapter.owner = THIS_MODULE; 655 intel_dp->adapter.class = I2C_CLASS_DDC; 656 strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1); 657 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0'; 658 intel_dp->adapter.algo_data = &intel_dp->algo; 659 intel_dp->adapter.dev.parent = &intel_connector->base.kdev; 660 661 ironlake_edp_panel_vdd_on(intel_dp); 662 ret = i2c_dp_aux_add_bus(&intel_dp->adapter); 663 ironlake_edp_panel_vdd_off(intel_dp, false); 664 return ret; 665} 666 667static bool 668intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode, 669 struct drm_display_mode *adjusted_mode) 670{ 671 struct drm_device *dev = encoder->dev; 672 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 673 int lane_count, clock; 674 int max_lane_count = intel_dp_max_lane_count(intel_dp); 675 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0; 676 int bpp = mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24; 677 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 678 679 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 680 intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); 681 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN, 682 mode, adjusted_mode); 683 /* 684 * the mode->clock is used to calculate the Data&Link M/N 685 * of the pipe. For the eDP the fixed clock should be used. 
686 */ 687 mode->clock = intel_dp->panel_fixed_mode->clock; 688 } 689 690 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) { 691 for (clock = 0; clock <= max_clock; clock++) { 692 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count); 693 694 if (intel_dp_link_required(mode->clock, bpp) 695 <= link_avail) { 696 intel_dp->link_bw = bws[clock]; 697 intel_dp->lane_count = lane_count; 698 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw); 699 DRM_DEBUG_KMS("Display port link bw %02x lane " 700 "count %d clock %d\n", 701 intel_dp->link_bw, intel_dp->lane_count, 702 adjusted_mode->clock); 703 return true; 704 } 705 } 706 } 707 708 return false; 709} 710 711struct intel_dp_m_n { 712 uint32_t tu; 713 uint32_t gmch_m; 714 uint32_t gmch_n; 715 uint32_t link_m; 716 uint32_t link_n; 717}; 718 719static void 720intel_reduce_ratio(uint32_t *num, uint32_t *den) 721{ 722 while (*num > 0xffffff || *den > 0xffffff) { 723 *num >>= 1; 724 *den >>= 1; 725 } 726} 727 728static void 729intel_dp_compute_m_n(int bpp, 730 int nlanes, 731 int pixel_clock, 732 int link_clock, 733 struct intel_dp_m_n *m_n) 734{ 735 m_n->tu = 64; 736 m_n->gmch_m = (pixel_clock * bpp) >> 3; 737 m_n->gmch_n = link_clock * nlanes; 738 intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); 739 m_n->link_m = pixel_clock; 740 m_n->link_n = link_clock; 741 intel_reduce_ratio(&m_n->link_m, &m_n->link_n); 742} 743 744void 745intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, 746 struct drm_display_mode *adjusted_mode) 747{ 748 struct drm_device *dev = crtc->dev; 749 struct drm_mode_config *mode_config = &dev->mode_config; 750 struct drm_encoder *encoder; 751 struct drm_i915_private *dev_priv = dev->dev_private; 752 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 753 int lane_count = 4; 754 struct intel_dp_m_n m_n; 755 int pipe = intel_crtc->pipe; 756 757 /* 758 * Find the lane count in the intel_encoder private 759 */ 760 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 761 struct intel_dp *intel_dp; 762 763 if (encoder->crtc != crtc) 764 continue; 765 766 intel_dp = enc_to_intel_dp(encoder); 767 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT || 768 intel_dp->base.type == INTEL_OUTPUT_EDP) 769 { 770 lane_count = intel_dp->lane_count; 771 break; 772 } 773 } 774 775 /* 776 * Compute the GMCH and Link ratios. The '3' here is 777 * the number of bytes_per_pixel post-LUT, which we always 778 * set up for 8-bits of R/G/B, or 3 bytes total. 
779 */ 780 intel_dp_compute_m_n(intel_crtc->bpp, lane_count, 781 mode->clock, adjusted_mode->clock, &m_n); 782 783 if (HAS_PCH_SPLIT(dev)) { 784 I915_WRITE(TRANSDATA_M1(pipe), 785 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 786 m_n.gmch_m); 787 I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n); 788 I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m); 789 I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n); 790 } else { 791 I915_WRITE(PIPE_GMCH_DATA_M(pipe), 792 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 793 m_n.gmch_m); 794 I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); 795 I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); 796 I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); 797 } 798} 799 800static void ironlake_edp_pll_on(struct drm_encoder *encoder); 801static void ironlake_edp_pll_off(struct drm_encoder *encoder); 802 803static void 804intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 805 struct drm_display_mode *adjusted_mode) 806{ 807 struct drm_device *dev = encoder->dev; 808 struct drm_i915_private *dev_priv = dev->dev_private; 809 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 810 struct drm_crtc *crtc = intel_dp->base.base.crtc; 811 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 812 813 /* Turn on the eDP PLL if needed */ 814 if (is_edp(intel_dp)) { 815 if (!is_pch_edp(intel_dp)) 816 ironlake_edp_pll_on(encoder); 817 else 818 ironlake_edp_pll_off(encoder); 819 } 820 821 /* 822 * There are four kinds of DP registers: 823 * 824 * IBX PCH 825 * SNB CPU 826 * IVB CPU 827 * CPT PCH 828 * 829 * IBX PCH and CPU are the same for almost everything, 830 * except that the CPU DP PLL is configured in this 831 * register 832 * 833 * CPT PCH is quite different, having many bits moved 834 * to the TRANS_DP_CTL register instead. That 835 * configuration happens (oddly) in ironlake_pch_enable 836 */ 837 838 /* Preserve the BIOS-computed detected bit. This is 839 * supposed to be read-only. 
840 */ 841 intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED; 842 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 843 844 /* Handle DP bits in common between all three register formats */ 845 846 intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 847 848 switch (intel_dp->lane_count) { 849 case 1: 850 intel_dp->DP |= DP_PORT_WIDTH_1; 851 break; 852 case 2: 853 intel_dp->DP |= DP_PORT_WIDTH_2; 854 break; 855 case 4: 856 intel_dp->DP |= DP_PORT_WIDTH_4; 857 break; 858 } 859 if (intel_dp->has_audio) { 860 DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n", 861 pipe_name(intel_crtc->pipe)); 862 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 863 intel_write_eld(encoder, adjusted_mode); 864 } 865 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 866 intel_dp->link_configuration[0] = intel_dp->link_bw; 867 intel_dp->link_configuration[1] = intel_dp->lane_count; 868 intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B; 869 /* 870 * Check for DPCD version > 1.1 and enhanced framing support 871 */ 872 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 873 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { 874 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 875 } 876 877 /* Split out the IBX/CPU vs CPT settings */ 878 879 if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) { 880 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 881 intel_dp->DP |= DP_SYNC_HS_HIGH; 882 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 883 intel_dp->DP |= DP_SYNC_VS_HIGH; 884 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 885 886 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN) 887 intel_dp->DP |= DP_ENHANCED_FRAMING; 888 889 intel_dp->DP |= intel_crtc->pipe << 29; 890 891 /* don't miss out required setting for eDP */ 892 intel_dp->DP |= DP_PLL_ENABLE; 893 if (adjusted_mode->clock < 200000) 894 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 895 else 896 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 897 } else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) { 898 intel_dp->DP |= intel_dp->color_range; 899 900 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 901 intel_dp->DP |= DP_SYNC_HS_HIGH; 902 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 903 intel_dp->DP |= DP_SYNC_VS_HIGH; 904 intel_dp->DP |= DP_LINK_TRAIN_OFF; 905 906 if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN) 907 intel_dp->DP |= DP_ENHANCED_FRAMING; 908 909 if (intel_crtc->pipe == 1) 910 intel_dp->DP |= DP_PIPEB_SELECT; 911 912 if (is_cpu_edp(intel_dp)) { 913 /* don't miss out required setting for eDP */ 914 intel_dp->DP |= DP_PLL_ENABLE; 915 if (adjusted_mode->clock < 200000) 916 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 917 else 918 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 919 } 920 } else { 921 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 922 } 923} 924 925#define IDLE_ON_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK) 926#define IDLE_ON_VALUE (PP_ON | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_ON_IDLE) 927 928#define IDLE_OFF_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | 0 | PP_SEQUENCE_STATE_MASK) 929#define IDLE_OFF_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE) 930 931#define IDLE_CYCLE_MASK (PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK) 932#define IDLE_CYCLE_VALUE (0 | 0 | PP_SEQUENCE_NONE | 0 | PP_SEQUENCE_STATE_OFF_IDLE) 933 934static void ironlake_wait_panel_status(struct intel_dp *intel_dp, 935 u32 mask, 936 u32 value) 937{ 938 struct drm_device *dev = intel_dp->base.base.dev; 939 struct drm_i915_private *dev_priv = dev->dev_private; 940 941 
DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n", 942 mask, value, 943 I915_READ(PCH_PP_STATUS), 944 I915_READ(PCH_PP_CONTROL)); 945 946 if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) { 947 DRM_ERROR("Panel status timeout: status %08x control %08x\n", 948 I915_READ(PCH_PP_STATUS), 949 I915_READ(PCH_PP_CONTROL)); 950 } 951} 952 953static void ironlake_wait_panel_on(struct intel_dp *intel_dp) 954{ 955 DRM_DEBUG_KMS("Wait for panel power on\n"); 956 ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE); 957} 958 959static void ironlake_wait_panel_off(struct intel_dp *intel_dp) 960{ 961 DRM_DEBUG_KMS("Wait for panel power off time\n"); 962 ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE); 963} 964 965static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp) 966{ 967 DRM_DEBUG_KMS("Wait for panel power cycle\n"); 968 ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE); 969} 970 971 972/* Read the current pp_control value, unlocking the register if it 973 * is locked 974 */ 975 976static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv) 977{ 978 u32 control = I915_READ(PCH_PP_CONTROL); 979 980 control &= ~PANEL_UNLOCK_MASK; 981 control |= PANEL_UNLOCK_REGS; 982 return control; 983} 984 985static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp) 986{ 987 struct drm_device *dev = intel_dp->base.base.dev; 988 struct drm_i915_private *dev_priv = dev->dev_private; 989 u32 pp; 990 991 if (!is_edp(intel_dp)) 992 return; 993 DRM_DEBUG_KMS("Turn eDP VDD on\n"); 994 995 WARN(intel_dp->want_panel_vdd, 996 "eDP VDD already requested on\n"); 997 998 intel_dp->want_panel_vdd = true; 999 1000 if (ironlake_edp_have_panel_vdd(intel_dp)) { 1001 DRM_DEBUG_KMS("eDP VDD already on\n"); 1002 return; 1003 } 1004 1005 if (!ironlake_edp_have_panel_power(intel_dp)) 1006 ironlake_wait_panel_power_cycle(intel_dp); 1007 1008 pp = ironlake_get_pp_control(dev_priv); 1009 pp |= EDP_FORCE_VDD; 1010 I915_WRITE(PCH_PP_CONTROL, pp); 1011 POSTING_READ(PCH_PP_CONTROL); 1012 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n", 1013 I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL)); 1014 1015 /* 1016 * If the panel wasn't on, delay before accessing aux channel 1017 */ 1018 if (!ironlake_edp_have_panel_power(intel_dp)) { 1019 DRM_DEBUG_KMS("eDP was not running\n"); 1020 msleep(intel_dp->panel_power_up_delay); 1021 } 1022} 1023 1024static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp) 1025{ 1026 struct drm_device *dev = intel_dp->base.base.dev; 1027 struct drm_i915_private *dev_priv = dev->dev_private; 1028 u32 pp; 1029 1030 if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) { 1031 pp = ironlake_get_pp_control(dev_priv); 1032 pp &= ~EDP_FORCE_VDD; 1033 I915_WRITE(PCH_PP_CONTROL, pp); 1034 POSTING_READ(PCH_PP_CONTROL); 1035 1036 /* Make sure sequencer is idle before allowing subsequent activity */ 1037 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n", 1038 I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL)); 1039 1040 msleep(intel_dp->panel_power_down_delay); 1041 } 1042} 1043 1044static void ironlake_panel_vdd_work(struct work_struct *__work) 1045{ 1046 struct intel_dp *intel_dp = container_of(to_delayed_work(__work), 1047 struct intel_dp, panel_vdd_work); 1048 struct drm_device *dev = intel_dp->base.base.dev; 1049 1050 mutex_lock(&dev->mode_config.mutex); 1051 ironlake_panel_vdd_off_sync(intel_dp); 1052 mutex_unlock(&dev->mode_config.mutex); 1053} 1054 
1055static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync) 1056{ 1057 if (!is_edp(intel_dp)) 1058 return; 1059 1060 DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd); 1061 WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on"); 1062 1063 intel_dp->want_panel_vdd = false; 1064 1065 if (sync) { 1066 ironlake_panel_vdd_off_sync(intel_dp); 1067 } else { 1068 /* 1069 * Queue the timer to fire a long 1070 * time from now (relative to the power down delay) 1071 * to keep the panel power up across a sequence of operations 1072 */ 1073 schedule_delayed_work(&intel_dp->panel_vdd_work, 1074 msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5)); 1075 } 1076} 1077 1078static void ironlake_edp_panel_on(struct intel_dp *intel_dp) 1079{ 1080 struct drm_device *dev = intel_dp->base.base.dev; 1081 struct drm_i915_private *dev_priv = dev->dev_private; 1082 u32 pp; 1083 1084 if (!is_edp(intel_dp)) 1085 return; 1086 1087 DRM_DEBUG_KMS("Turn eDP power on\n"); 1088 1089 if (ironlake_edp_have_panel_power(intel_dp)) { 1090 DRM_DEBUG_KMS("eDP power already on\n"); 1091 return; 1092 } 1093 1094 ironlake_wait_panel_power_cycle(intel_dp); 1095 1096 pp = ironlake_get_pp_control(dev_priv); 1097 if (IS_GEN5(dev)) { 1098 /* ILK workaround: disable reset around power sequence */ 1099 pp &= ~PANEL_POWER_RESET; 1100 I915_WRITE(PCH_PP_CONTROL, pp); 1101 POSTING_READ(PCH_PP_CONTROL); 1102 } 1103 1104 pp |= POWER_TARGET_ON; 1105 if (!IS_GEN5(dev)) 1106 pp |= PANEL_POWER_RESET; 1107 1108 I915_WRITE(PCH_PP_CONTROL, pp); 1109 POSTING_READ(PCH_PP_CONTROL); 1110 1111 ironlake_wait_panel_on(intel_dp); 1112 1113 if (IS_GEN5(dev)) { 1114 pp |= PANEL_POWER_RESET; /* restore panel reset bit */ 1115 I915_WRITE(PCH_PP_CONTROL, pp); 1116 POSTING_READ(PCH_PP_CONTROL); 1117 } 1118} 1119 1120static void ironlake_edp_panel_off(struct intel_dp *intel_dp) 1121{ 1122 struct drm_device *dev = intel_dp->base.base.dev; 1123 struct drm_i915_private *dev_priv = dev->dev_private; 1124 u32 pp; 1125 1126 if (!is_edp(intel_dp)) 1127 return; 1128 1129 DRM_DEBUG_KMS("Turn eDP power off\n"); 1130 1131 WARN(intel_dp->want_panel_vdd, "Cannot turn power off while VDD is on\n"); 1132 1133 pp = ironlake_get_pp_control(dev_priv); 1134 pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE); 1135 I915_WRITE(PCH_PP_CONTROL, pp); 1136 POSTING_READ(PCH_PP_CONTROL); 1137 1138 ironlake_wait_panel_off(intel_dp); 1139} 1140 1141static void ironlake_edp_backlight_on(struct intel_dp *intel_dp) 1142{ 1143 struct drm_device *dev = intel_dp->base.base.dev; 1144 struct drm_i915_private *dev_priv = dev->dev_private; 1145 u32 pp; 1146 1147 if (!is_edp(intel_dp)) 1148 return; 1149 1150 DRM_DEBUG_KMS("\n"); 1151 /* 1152 * If we enable the backlight right away following a panel power 1153 * on, we may see slight flicker as the panel syncs with the eDP 1154 * link. So delay a bit to make sure the image is solid before 1155 * allowing it to appear. 
1156 */ 1157 msleep(intel_dp->backlight_on_delay); 1158 pp = ironlake_get_pp_control(dev_priv); 1159 pp |= EDP_BLC_ENABLE; 1160 I915_WRITE(PCH_PP_CONTROL, pp); 1161 POSTING_READ(PCH_PP_CONTROL); 1162} 1163 1164static void ironlake_edp_backlight_off(struct intel_dp *intel_dp) 1165{ 1166 struct drm_device *dev = intel_dp->base.base.dev; 1167 struct drm_i915_private *dev_priv = dev->dev_private; 1168 u32 pp; 1169 1170 if (!is_edp(intel_dp)) 1171 return; 1172 1173 DRM_DEBUG_KMS("\n"); 1174 pp = ironlake_get_pp_control(dev_priv); 1175 pp &= ~EDP_BLC_ENABLE; 1176 I915_WRITE(PCH_PP_CONTROL, pp); 1177 POSTING_READ(PCH_PP_CONTROL); 1178 msleep(intel_dp->backlight_off_delay); 1179} 1180 1181static void ironlake_edp_pll_on(struct drm_encoder *encoder) 1182{ 1183 struct drm_device *dev = encoder->dev; 1184 struct drm_i915_private *dev_priv = dev->dev_private; 1185 u32 dpa_ctl; 1186 1187 DRM_DEBUG_KMS("\n"); 1188 dpa_ctl = I915_READ(DP_A); 1189 dpa_ctl |= DP_PLL_ENABLE; 1190 I915_WRITE(DP_A, dpa_ctl); 1191 POSTING_READ(DP_A); 1192 udelay(200); 1193} 1194 1195static void ironlake_edp_pll_off(struct drm_encoder *encoder) 1196{ 1197 struct drm_device *dev = encoder->dev; 1198 struct drm_i915_private *dev_priv = dev->dev_private; 1199 u32 dpa_ctl; 1200 1201 dpa_ctl = I915_READ(DP_A); 1202 dpa_ctl &= ~DP_PLL_ENABLE; 1203 I915_WRITE(DP_A, dpa_ctl); 1204 POSTING_READ(DP_A); 1205 udelay(200); 1206} 1207 1208/* If the sink supports it, try to set the power state appropriately */ 1209static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode) 1210{ 1211 int ret, i; 1212 1213 /* Should have a valid DPCD by this point */ 1214 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 1215 return; 1216 1217 if (mode != DRM_MODE_DPMS_ON) { 1218 ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER, 1219 DP_SET_POWER_D3); 1220 if (ret != 1) 1221 DRM_DEBUG_DRIVER("failed to write sink power state\n"); 1222 } else { 1223 /* 1224 * When turning on, we need to retry for 1ms to give the sink 1225 * time to wake up. 
1226 */ 1227 for (i = 0; i < 3; i++) { 1228 ret = intel_dp_aux_native_write_1(intel_dp, 1229 DP_SET_POWER, 1230 DP_SET_POWER_D0); 1231 if (ret == 1) 1232 break; 1233 msleep(1); 1234 } 1235 } 1236} 1237 1238static void intel_dp_prepare(struct drm_encoder *encoder) 1239{ 1240 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1241 1242 ironlake_edp_backlight_off(intel_dp); 1243 ironlake_edp_panel_off(intel_dp); 1244 1245 /* Wake up the sink first */ 1246 ironlake_edp_panel_vdd_on(intel_dp); 1247 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON); 1248 intel_dp_link_down(intel_dp); 1249 ironlake_edp_panel_vdd_off(intel_dp, false); 1250 1251 /* Make sure the panel is off before trying to 1252 * change the mode 1253 */ 1254} 1255 1256static void intel_dp_commit(struct drm_encoder *encoder) 1257{ 1258 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1259 struct drm_device *dev = encoder->dev; 1260 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc); 1261 1262 ironlake_edp_panel_vdd_on(intel_dp); 1263 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON); 1264 intel_dp_start_link_train(intel_dp); 1265 ironlake_edp_panel_on(intel_dp); 1266 ironlake_edp_panel_vdd_off(intel_dp, true); 1267 intel_dp_complete_link_train(intel_dp); 1268 ironlake_edp_backlight_on(intel_dp); 1269 1270 intel_dp->dpms_mode = DRM_MODE_DPMS_ON; 1271 1272 if (HAS_PCH_CPT(dev)) 1273 intel_cpt_verify_modeset(dev, intel_crtc->pipe); 1274} 1275 1276static void 1277intel_dp_dpms(struct drm_encoder *encoder, int mode) 1278{ 1279 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1280 struct drm_device *dev = encoder->dev; 1281 struct drm_i915_private *dev_priv = dev->dev_private; 1282 uint32_t dp_reg = I915_READ(intel_dp->output_reg); 1283 1284 if (mode != DRM_MODE_DPMS_ON) { 1285 ironlake_edp_backlight_off(intel_dp); 1286 ironlake_edp_panel_off(intel_dp); 1287 1288 ironlake_edp_panel_vdd_on(intel_dp); 1289 intel_dp_sink_dpms(intel_dp, mode); 1290 intel_dp_link_down(intel_dp); 1291 ironlake_edp_panel_vdd_off(intel_dp, false); 1292 1293 if (is_cpu_edp(intel_dp)) 1294 ironlake_edp_pll_off(encoder); 1295 } else { 1296 if (is_cpu_edp(intel_dp)) 1297 ironlake_edp_pll_on(encoder); 1298 1299 ironlake_edp_panel_vdd_on(intel_dp); 1300 intel_dp_sink_dpms(intel_dp, mode); 1301 if (!(dp_reg & DP_PORT_EN)) { 1302 intel_dp_start_link_train(intel_dp); 1303 ironlake_edp_panel_on(intel_dp); 1304 ironlake_edp_panel_vdd_off(intel_dp, true); 1305 intel_dp_complete_link_train(intel_dp); 1306 } else 1307 ironlake_edp_panel_vdd_off(intel_dp, false); 1308 ironlake_edp_backlight_on(intel_dp); 1309 } 1310 intel_dp->dpms_mode = mode; 1311} 1312 1313/* 1314 * Native read with retry for link status and receiver capability reads for 1315 * cases where the sink may still be asleep. 1316 */ 1317static bool 1318intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address, 1319 uint8_t *recv, int recv_bytes) 1320{ 1321 int ret, i; 1322 1323 /* 1324 * Sinks are *supposed* to come up within 1ms from an off state, 1325 * but we're also supposed to retry 3 times per the spec. 
1326 */ 1327 for (i = 0; i < 3; i++) { 1328 ret = intel_dp_aux_native_read(intel_dp, address, recv, 1329 recv_bytes); 1330 if (ret == recv_bytes) 1331 return true; 1332 msleep(1); 1333 } 1334 1335 return false; 1336} 1337 1338/* 1339 * Fetch AUX CH registers 0x202 - 0x207 which contain 1340 * link status information 1341 */ 1342static bool 1343intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1344{ 1345 return intel_dp_aux_native_read_retry(intel_dp, 1346 DP_LANE0_1_STATUS, 1347 link_status, 1348 DP_LINK_STATUS_SIZE); 1349} 1350 1351static uint8_t 1352intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1353 int r) 1354{ 1355 return link_status[r - DP_LANE0_1_STATUS]; 1356} 1357 1358static uint8_t 1359intel_get_adjust_request_voltage(uint8_t adjust_request[2], 1360 int lane) 1361{ 1362 int s = ((lane & 1) ? 1363 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 1364 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT); 1365 uint8_t l = adjust_request[lane>>1]; 1366 1367 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; 1368} 1369 1370static uint8_t 1371intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2], 1372 int lane) 1373{ 1374 int s = ((lane & 1) ? 1375 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT : 1376 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT); 1377 uint8_t l = adjust_request[lane>>1]; 1378 1379 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1380} 1381 1382 1383#if 0 1384static char *voltage_names[] = { 1385 "0.4V", "0.6V", "0.8V", "1.2V" 1386}; 1387static char *pre_emph_names[] = { 1388 "0dB", "3.5dB", "6dB", "9.5dB" 1389}; 1390static char *link_train_names[] = { 1391 "pattern 1", "pattern 2", "idle", "off" 1392}; 1393#endif 1394 1395/* 1396 * These are source-specific values; current Intel hardware supports 1397 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB 1398 */ 1399 1400static uint8_t 1401intel_dp_voltage_max(struct intel_dp *intel_dp) 1402{ 1403 struct drm_device *dev = intel_dp->base.base.dev; 1404 1405 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) 1406 return DP_TRAIN_VOLTAGE_SWING_800; 1407 else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp)) 1408 return DP_TRAIN_VOLTAGE_SWING_1200; 1409 else 1410 return DP_TRAIN_VOLTAGE_SWING_800; 1411} 1412 1413static uint8_t 1414intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing) 1415{ 1416 struct drm_device *dev = intel_dp->base.base.dev; 1417 1418 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { 1419 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { 1420 case DP_TRAIN_VOLTAGE_SWING_400: 1421 return DP_TRAIN_PRE_EMPHASIS_6; 1422 case DP_TRAIN_VOLTAGE_SWING_600: 1423 case DP_TRAIN_VOLTAGE_SWING_800: 1424 return DP_TRAIN_PRE_EMPHASIS_3_5; 1425 default: 1426 return DP_TRAIN_PRE_EMPHASIS_0; 1427 } 1428 } else { 1429 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { 1430 case DP_TRAIN_VOLTAGE_SWING_400: 1431 return DP_TRAIN_PRE_EMPHASIS_6; 1432 case DP_TRAIN_VOLTAGE_SWING_600: 1433 return DP_TRAIN_PRE_EMPHASIS_6; 1434 case DP_TRAIN_VOLTAGE_SWING_800: 1435 return DP_TRAIN_PRE_EMPHASIS_3_5; 1436 case DP_TRAIN_VOLTAGE_SWING_1200: 1437 default: 1438 return DP_TRAIN_PRE_EMPHASIS_0; 1439 } 1440 } 1441} 1442 1443static void 1444intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1445{ 1446 uint8_t v = 0; 1447 uint8_t p = 0; 1448 int lane; 1449 uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS); 1450 uint8_t voltage_max; 1451 uint8_t preemph_max; 1452 1453 for (lane = 0; lane < intel_dp->lane_count; 
lane++) { 1454 uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane); 1455 uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane); 1456 1457 if (this_v > v) 1458 v = this_v; 1459 if (this_p > p) 1460 p = this_p; 1461 } 1462 1463 voltage_max = intel_dp_voltage_max(intel_dp); 1464 if (v >= voltage_max) 1465 v = voltage_max | DP_TRAIN_MAX_SWING_REACHED; 1466 1467 preemph_max = intel_dp_pre_emphasis_max(intel_dp, v); 1468 if (p >= preemph_max) 1469 p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1470 1471 for (lane = 0; lane < 4; lane++) 1472 intel_dp->train_set[lane] = v | p; 1473} 1474 1475static uint32_t 1476intel_dp_signal_levels(uint8_t train_set) 1477{ 1478 uint32_t signal_levels = 0; 1479 1480 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 1481 case DP_TRAIN_VOLTAGE_SWING_400: 1482 default: 1483 signal_levels |= DP_VOLTAGE_0_4; 1484 break; 1485 case DP_TRAIN_VOLTAGE_SWING_600: 1486 signal_levels |= DP_VOLTAGE_0_6; 1487 break; 1488 case DP_TRAIN_VOLTAGE_SWING_800: 1489 signal_levels |= DP_VOLTAGE_0_8; 1490 break; 1491 case DP_TRAIN_VOLTAGE_SWING_1200: 1492 signal_levels |= DP_VOLTAGE_1_2; 1493 break; 1494 } 1495 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 1496 case DP_TRAIN_PRE_EMPHASIS_0: 1497 default: 1498 signal_levels |= DP_PRE_EMPHASIS_0; 1499 break; 1500 case DP_TRAIN_PRE_EMPHASIS_3_5: 1501 signal_levels |= DP_PRE_EMPHASIS_3_5; 1502 break; 1503 case DP_TRAIN_PRE_EMPHASIS_6: 1504 signal_levels |= DP_PRE_EMPHASIS_6; 1505 break; 1506 case DP_TRAIN_PRE_EMPHASIS_9_5: 1507 signal_levels |= DP_PRE_EMPHASIS_9_5; 1508 break; 1509 } 1510 return signal_levels; 1511} 1512 1513/* Gen6's DP voltage swing and pre-emphasis control */ 1514static uint32_t 1515intel_gen6_edp_signal_levels(uint8_t train_set) 1516{ 1517 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1518 DP_TRAIN_PRE_EMPHASIS_MASK); 1519 switch (signal_levels) { 1520 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1521 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: 1522 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1523 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1524 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B; 1525 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1526 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6: 1527 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B; 1528 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1529 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1530 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B; 1531 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1532 case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0: 1533 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B; 1534 default: 1535 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1536 "0x%x\n", signal_levels); 1537 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1538 } 1539} 1540 1541/* Gen7's DP voltage swing and pre-emphasis control */ 1542static uint32_t 1543intel_gen7_edp_signal_levels(uint8_t train_set) 1544{ 1545 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1546 DP_TRAIN_PRE_EMPHASIS_MASK); 1547 switch (signal_levels) { 1548 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1549 return EDP_LINK_TRAIN_400MV_0DB_IVB; 1550 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1551 return EDP_LINK_TRAIN_400MV_3_5DB_IVB; 1552 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1553 return EDP_LINK_TRAIN_400MV_6DB_IVB; 1554 1555 case DP_TRAIN_VOLTAGE_SWING_600 
| DP_TRAIN_PRE_EMPHASIS_0: 1556 return EDP_LINK_TRAIN_600MV_0DB_IVB; 1557 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1558 return EDP_LINK_TRAIN_600MV_3_5DB_IVB; 1559 1560 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1561 return EDP_LINK_TRAIN_800MV_0DB_IVB; 1562 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1563 return EDP_LINK_TRAIN_800MV_3_5DB_IVB; 1564 1565 default: 1566 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1567 "0x%x\n", signal_levels); 1568 return EDP_LINK_TRAIN_500MV_0DB_IVB; 1569 } 1570} 1571 1572static uint8_t 1573intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1574 int lane) 1575{ 1576 int s = (lane & 1) * 4; 1577 uint8_t l = link_status[lane>>1]; 1578 1579 return (l >> s) & 0xf; 1580} 1581 1582/* Check for clock recovery is done on all channels */ 1583static bool 1584intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) 1585{ 1586 int lane; 1587 uint8_t lane_status; 1588 1589 for (lane = 0; lane < lane_count; lane++) { 1590 lane_status = intel_get_lane_status(link_status, lane); 1591 if ((lane_status & DP_LANE_CR_DONE) == 0) 1592 return false; 1593 } 1594 return true; 1595} 1596 1597/* Check to see if channel eq is done on all channels */ 1598#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ 1599 DP_LANE_CHANNEL_EQ_DONE|\ 1600 DP_LANE_SYMBOL_LOCKED) 1601static bool 1602intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE]) 1603{ 1604 uint8_t lane_align; 1605 uint8_t lane_status; 1606 int lane; 1607 1608 lane_align = intel_dp_link_status(link_status, 1609 DP_LANE_ALIGN_STATUS_UPDATED); 1610 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) 1611 return false; 1612 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1613 lane_status = intel_get_lane_status(link_status, lane); 1614 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) 1615 return false; 1616 } 1617 return true; 1618} 1619 1620static bool 1621intel_dp_set_link_train(struct intel_dp *intel_dp, 1622 uint32_t dp_reg_value, 1623 uint8_t dp_train_pat) 1624{ 1625 struct drm_device *dev = intel_dp->base.base.dev; 1626 struct drm_i915_private *dev_priv = dev->dev_private; 1627 int ret; 1628 1629 I915_WRITE(intel_dp->output_reg, dp_reg_value); 1630 POSTING_READ(intel_dp->output_reg); 1631 1632 intel_dp_aux_native_write_1(intel_dp, 1633 DP_TRAINING_PATTERN_SET, 1634 dp_train_pat); 1635 1636 ret = intel_dp_aux_native_write(intel_dp, 1637 DP_TRAINING_LANE0_SET, 1638 intel_dp->train_set, 1639 intel_dp->lane_count); 1640 if (ret != intel_dp->lane_count) 1641 return false; 1642 1643 return true; 1644} 1645 1646/* Enable corresponding port and start training pattern 1 */ 1647static void 1648intel_dp_start_link_train(struct intel_dp *intel_dp) 1649{ 1650 struct drm_device *dev = intel_dp->base.base.dev; 1651 struct drm_i915_private *dev_priv = dev->dev_private; 1652 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc); 1653 int i; 1654 uint8_t voltage; 1655 bool clock_recovery = false; 1656 int voltage_tries, loop_tries; 1657 u32 reg; 1658 uint32_t DP = intel_dp->DP; 1659 1660 /* 1661 * On CPT we have to enable the port in training pattern 1, which 1662 * will happen below in intel_dp_set_link_train. Otherwise, enable 1663 * the port and wait for it to become active. 
1664 */ 1665 if (!HAS_PCH_CPT(dev)) { 1666 I915_WRITE(intel_dp->output_reg, intel_dp->DP); 1667 POSTING_READ(intel_dp->output_reg); 1668 intel_wait_for_vblank(dev, intel_crtc->pipe); 1669 } 1670 1671 /* Write the link configuration data */ 1672 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET, 1673 intel_dp->link_configuration, 1674 DP_LINK_CONFIGURATION_SIZE); 1675 1676 DP |= DP_PORT_EN; 1677 1678 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1679 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1680 else 1681 DP &= ~DP_LINK_TRAIN_MASK; 1682 memset(intel_dp->train_set, 0, 4); 1683 voltage = 0xff; 1684 voltage_tries = 0; 1685 loop_tries = 0; 1686 clock_recovery = false; 1687 for (;;) { 1688 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1689 uint8_t link_status[DP_LINK_STATUS_SIZE]; 1690 uint32_t signal_levels; 1691 1692 1693 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { 1694 signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); 1695 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; 1696 } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { 1697 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]); 1698 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1699 } else { 1700 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]); 1701 DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels); 1702 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1703 } 1704 1705 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1706 reg = DP | DP_LINK_TRAIN_PAT_1_CPT; 1707 else 1708 reg = DP | DP_LINK_TRAIN_PAT_1; 1709 1710 if (!intel_dp_set_link_train(intel_dp, reg, 1711 DP_TRAINING_PATTERN_1 | 1712 DP_LINK_SCRAMBLING_DISABLE)) 1713 break; 1714 /* Set training pattern 1 */ 1715 1716 udelay(100); 1717 if (!intel_dp_get_link_status(intel_dp, link_status)) { 1718 DRM_ERROR("failed to get link status\n"); 1719 break; 1720 } 1721 1722 if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) { 1723 DRM_DEBUG_KMS("clock recovery OK\n"); 1724 clock_recovery = true; 1725 break; 1726 } 1727 1728 /* Check to see if we've tried the max voltage */ 1729 for (i = 0; i < intel_dp->lane_count; i++) 1730 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 1731 break; 1732 if (i == intel_dp->lane_count) { 1733 ++loop_tries; 1734 if (loop_tries == 5) { 1735 DRM_DEBUG_KMS("too many full retries, give up\n"); 1736 break; 1737 } 1738 memset(intel_dp->train_set, 0, 4); 1739 voltage_tries = 0; 1740 continue; 1741 } 1742 1743 /* Check to see if we've tried the same voltage 5 times */ 1744 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 1745 ++voltage_tries; 1746 if (voltage_tries == 5) { 1747 DRM_DEBUG_KMS("too many voltage retries, give up\n"); 1748 break; 1749 } 1750 } else 1751 voltage_tries = 0; 1752 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 1753 1754 /* Compute new intel_dp->train_set as requested by target */ 1755 intel_get_adjust_train(intel_dp, link_status); 1756 } 1757 1758 intel_dp->DP = DP; 1759} 1760 1761static void 1762intel_dp_complete_link_train(struct intel_dp *intel_dp) 1763{ 1764 struct drm_device *dev = intel_dp->base.base.dev; 1765 struct drm_i915_private *dev_priv = dev->dev_private; 1766 bool channel_eq = false; 1767 int tries, cr_tries; 1768 u32 reg; 1769 uint32_t DP = intel_dp->DP; 1770 1771 /* channel equalization */ 1772 tries = 0; 1773 cr_tries = 0; 1774 channel_eq = false; 1775 for (;;) { 1776 /* Use 
intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1777 uint32_t signal_levels; 1778 uint8_t link_status[DP_LINK_STATUS_SIZE]; 1779 1780 if (cr_tries > 5) { 1781 DRM_ERROR("failed to train DP, aborting\n"); 1782 intel_dp_link_down(intel_dp); 1783 break; 1784 } 1785 1786 if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) { 1787 signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]); 1788 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels; 1789 } else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) { 1790 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]); 1791 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1792 } else { 1793 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]); 1794 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1795 } 1796 1797 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1798 reg = DP | DP_LINK_TRAIN_PAT_2_CPT; 1799 else 1800 reg = DP | DP_LINK_TRAIN_PAT_2; 1801 1802 /* channel eq pattern */ 1803 if (!intel_dp_set_link_train(intel_dp, reg, 1804 DP_TRAINING_PATTERN_2 | 1805 DP_LINK_SCRAMBLING_DISABLE)) 1806 break; 1807 1808 udelay(400); 1809 if (!intel_dp_get_link_status(intel_dp, link_status)) 1810 break; 1811 1812 /* Make sure clock is still ok */ 1813 if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) { 1814 intel_dp_start_link_train(intel_dp); 1815 cr_tries++; 1816 continue; 1817 } 1818 1819 if (intel_channel_eq_ok(intel_dp, link_status)) { 1820 channel_eq = true; 1821 break; 1822 } 1823 1824 /* Try 5 times, then try clock recovery if that fails */ 1825 if (tries > 5) { 1826 intel_dp_link_down(intel_dp); 1827 intel_dp_start_link_train(intel_dp); 1828 tries = 0; 1829 cr_tries++; 1830 continue; 1831 } 1832 1833 /* Compute new intel_dp->train_set as requested by target */ 1834 intel_get_adjust_train(intel_dp, link_status); 1835 ++tries; 1836 } 1837 1838 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1839 reg = DP | DP_LINK_TRAIN_OFF_CPT; 1840 else 1841 reg = DP | DP_LINK_TRAIN_OFF; 1842 1843 I915_WRITE(intel_dp->output_reg, reg); 1844 POSTING_READ(intel_dp->output_reg); 1845 intel_dp_aux_native_write_1(intel_dp, 1846 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE); 1847} 1848 1849static void 1850intel_dp_link_down(struct intel_dp *intel_dp) 1851{ 1852 struct drm_device *dev = intel_dp->base.base.dev; 1853 struct drm_i915_private *dev_priv = dev->dev_private; 1854 uint32_t DP = intel_dp->DP; 1855 1856 if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0) 1857 return; 1858 1859 DRM_DEBUG_KMS("\n"); 1860 1861 if (is_edp(intel_dp)) { 1862 DP &= ~DP_PLL_ENABLE; 1863 I915_WRITE(intel_dp->output_reg, DP); 1864 POSTING_READ(intel_dp->output_reg); 1865 udelay(100); 1866 } 1867 1868 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) { 1869 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1870 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT); 1871 } else { 1872 DP &= ~DP_LINK_TRAIN_MASK; 1873 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE); 1874 } 1875 POSTING_READ(intel_dp->output_reg); 1876 1877 msleep(17); 1878 1879 if (is_edp(intel_dp)) { 1880 if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) 1881 DP |= DP_LINK_TRAIN_OFF_CPT; 1882 else 1883 DP |= DP_LINK_TRAIN_OFF; 1884 } 1885 1886 if (!HAS_PCH_CPT(dev) && 1887 I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) { 1888 struct drm_crtc *crtc = intel_dp->base.base.crtc; 1889 1890 /* Hardware workaround: leaving our transcoder select 1891 * 
set to transcoder B while it's off will prevent the 1892 * corresponding HDMI output on transcoder A. 1893 * 1894 * Combine this with another hardware workaround: 1895 * transcoder select bit can only be cleared while the 1896 * port is enabled. 1897 */ 1898 DP &= ~DP_PIPEB_SELECT; 1899 I915_WRITE(intel_dp->output_reg, DP); 1900 1901 /* Changes to enable or select take place the vblank 1902 * after being written. 1903 */ 1904 if (crtc == NULL) { 1905 /* We can arrive here never having been attached 1906 * to a CRTC, for instance, due to inheriting 1907 * random state from the BIOS. 1908 * 1909 * If the pipe is not running, play safe and 1910 * wait for the clocks to stabilise before 1911 * continuing. 1912 */ 1913 POSTING_READ(intel_dp->output_reg); 1914 msleep(50); 1915 } else 1916 intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe); 1917 } 1918 1919 DP &= ~DP_AUDIO_OUTPUT_ENABLE; 1920 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN); 1921 POSTING_READ(intel_dp->output_reg); 1922 msleep(intel_dp->panel_power_down_delay); 1923} 1924 1925static bool 1926intel_dp_get_dpcd(struct intel_dp *intel_dp) 1927{ 1928 if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd, 1929 sizeof(intel_dp->dpcd)) && 1930 (intel_dp->dpcd[DP_DPCD_REV] != 0)) { 1931 return true; 1932 } 1933 1934 return false; 1935} 1936 1937static bool 1938intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector) 1939{ 1940 int ret; 1941 1942 ret = intel_dp_aux_native_read_retry(intel_dp, 1943 DP_DEVICE_SERVICE_IRQ_VECTOR, 1944 sink_irq_vector, 1); 1945 if (!ret) 1946 return false; 1947 1948 return true; 1949} 1950 1951static void 1952intel_dp_handle_test_request(struct intel_dp *intel_dp) 1953{ 1954 /* NAK by default; automated test requests are not handled yet */ 1955 intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_NAK); 1956} 1957 1958/* 1959 * According to DP spec 1960 * 5.1.2: 1961 * 1. Read DPCD 1962 * 2. Configure link according to Receiver Capabilities 1963 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3 1964 * 4.
Check link status on receipt of hot-plug interrupt 1965 */ 1966 1967static void 1968intel_dp_check_link_status(struct intel_dp *intel_dp) 1969{ 1970 u8 sink_irq_vector; 1971 u8 link_status[DP_LINK_STATUS_SIZE]; 1972 1973 if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON) 1974 return; 1975 1976 if (!intel_dp->base.base.crtc) 1977 return; 1978 1979 /* Try to read receiver status if the link appears to be up */ 1980 if (!intel_dp_get_link_status(intel_dp, link_status)) { 1981 intel_dp_link_down(intel_dp); 1982 return; 1983 } 1984 1985 /* Now read the DPCD to see if it's actually running */ 1986 if (!intel_dp_get_dpcd(intel_dp)) { 1987 intel_dp_link_down(intel_dp); 1988 return; 1989 } 1990 1991 /* Try to read the source of the interrupt */ 1992 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 1993 intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) { 1994 /* Clear interrupt source */ 1995 intel_dp_aux_native_write_1(intel_dp, 1996 DP_DEVICE_SERVICE_IRQ_VECTOR, 1997 sink_irq_vector); 1998 1999 if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST) 2000 intel_dp_handle_test_request(intel_dp); 2001 if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ)) 2002 DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n"); 2003 } 2004 2005 if (!intel_channel_eq_ok(intel_dp, link_status)) { 2006 DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n", 2007 drm_get_encoder_name(&intel_dp->base.base)); 2008 intel_dp_start_link_train(intel_dp); 2009 intel_dp_complete_link_train(intel_dp); 2010 } 2011} 2012 2013static enum drm_connector_status 2014intel_dp_detect_dpcd(struct intel_dp *intel_dp) 2015{ 2016 if (intel_dp_get_dpcd(intel_dp)) 2017 return connector_status_connected; 2018 return connector_status_disconnected; 2019} 2020 2021static enum drm_connector_status 2022ironlake_dp_detect(struct intel_dp *intel_dp) 2023{ 2024 enum drm_connector_status status; 2025 2026 /* Can't disconnect eDP, but you can close the lid... 
*/ 2027 if (is_edp(intel_dp)) { 2028 status = intel_panel_detect(intel_dp->base.base.dev); 2029 if (status == connector_status_unknown) 2030 status = connector_status_connected; 2031 return status; 2032 } 2033 2034 return intel_dp_detect_dpcd(intel_dp); 2035} 2036 2037static enum drm_connector_status 2038g4x_dp_detect(struct intel_dp *intel_dp) 2039{ 2040 struct drm_device *dev = intel_dp->base.base.dev; 2041 struct drm_i915_private *dev_priv = dev->dev_private; 2042 uint32_t temp, bit; 2043 2044 switch (intel_dp->output_reg) { 2045 case DP_B: 2046 bit = DPB_HOTPLUG_INT_STATUS; 2047 break; 2048 case DP_C: 2049 bit = DPC_HOTPLUG_INT_STATUS; 2050 break; 2051 case DP_D: 2052 bit = DPD_HOTPLUG_INT_STATUS; 2053 break; 2054 default: 2055 return connector_status_unknown; 2056 } 2057 2058 temp = I915_READ(PORT_HOTPLUG_STAT); 2059 2060 if ((temp & bit) == 0) 2061 return connector_status_disconnected; 2062 2063 return intel_dp_detect_dpcd(intel_dp); 2064} 2065 2066static struct edid * 2067intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter) 2068{ 2069 struct intel_dp *intel_dp = intel_attached_dp(connector); 2070 struct edid *edid; 2071 2072 ironlake_edp_panel_vdd_on(intel_dp); 2073 edid = drm_get_edid(connector, adapter); 2074 ironlake_edp_panel_vdd_off(intel_dp, false); 2075 return edid; 2076} 2077 2078static int 2079intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter) 2080{ 2081 struct intel_dp *intel_dp = intel_attached_dp(connector); 2082 int ret; 2083 2084 ironlake_edp_panel_vdd_on(intel_dp); 2085 ret = intel_ddc_get_modes(connector, adapter); 2086 ironlake_edp_panel_vdd_off(intel_dp, false); 2087 return ret; 2088} 2089 2090 2091/** 2092 * Detect whether a DP or eDP sink is present on this connector: eDP panels 2093 * use intel_panel_detect(), external DP checks PORT_HOTPLUG_STAT on g4x and 2094 * reads the DPCD on PCH platforms. 2095 * \return the resulting enum drm_connector_status.
2096 */ 2097static enum drm_connector_status 2098intel_dp_detect(struct drm_connector *connector, bool force) 2099{ 2100 struct intel_dp *intel_dp = intel_attached_dp(connector); 2101 struct drm_device *dev = intel_dp->base.base.dev; 2102 enum drm_connector_status status; 2103 struct edid *edid = NULL; 2104 2105 intel_dp->has_audio = false; 2106 2107 if (HAS_PCH_SPLIT(dev)) 2108 status = ironlake_dp_detect(intel_dp); 2109 else 2110 status = g4x_dp_detect(intel_dp); 2111 2112 DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n", 2113 intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2], 2114 intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5], 2115 intel_dp->dpcd[6], intel_dp->dpcd[7]); 2116 2117 if (status != connector_status_connected) 2118 return status; 2119 2120 if (intel_dp->force_audio) { 2121 intel_dp->has_audio = intel_dp->force_audio > 0; 2122 } else { 2123 edid = intel_dp_get_edid(connector, &intel_dp->adapter); 2124 if (edid) { 2125 intel_dp->has_audio = drm_detect_monitor_audio(edid); 2126 connector->display_info.raw_edid = NULL; 2127 kfree(edid); 2128 } 2129 } 2130 2131 return connector_status_connected; 2132} 2133 2134static int intel_dp_get_modes(struct drm_connector *connector) 2135{ 2136 struct intel_dp *intel_dp = intel_attached_dp(connector); 2137 struct drm_device *dev = intel_dp->base.base.dev; 2138 struct drm_i915_private *dev_priv = dev->dev_private; 2139 int ret; 2140 2141 /* We should parse the EDID data and find out if it has an audio sink 2142 */ 2143 2144 ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter); 2145 if (ret) { 2146 if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) { 2147 struct drm_display_mode *newmode; 2148 list_for_each_entry(newmode, &connector->probed_modes, 2149 head) { 2150 if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) { 2151 intel_dp->panel_fixed_mode = 2152 drm_mode_duplicate(dev, newmode); 2153 break; 2154 } 2155 } 2156 } 2157 return ret; 2158 } 2159 2160 /* if eDP has no EDID, try to use fixed panel mode from VBT */ 2161 if (is_edp(intel_dp)) { 2162 /* initialize panel mode from VBT if available for eDP */ 2163 if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) { 2164 intel_dp->panel_fixed_mode = 2165 drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode); 2166 if (intel_dp->panel_fixed_mode) { 2167 intel_dp->panel_fixed_mode->type |= 2168 DRM_MODE_TYPE_PREFERRED; 2169 } 2170 } 2171 if (intel_dp->panel_fixed_mode) { 2172 struct drm_display_mode *mode; 2173 mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode); 2174 drm_mode_probed_add(connector, mode); 2175 return 1; 2176 } 2177 } 2178 return 0; 2179} 2180 2181static bool 2182intel_dp_detect_audio(struct drm_connector *connector) 2183{ 2184 struct intel_dp *intel_dp = intel_attached_dp(connector); 2185 struct edid *edid; 2186 bool has_audio = false; 2187 2188 edid = intel_dp_get_edid(connector, &intel_dp->adapter); 2189 if (edid) { 2190 has_audio = drm_detect_monitor_audio(edid); 2191 2192 connector->display_info.raw_edid = NULL; 2193 kfree(edid); 2194 } 2195 2196 return has_audio; 2197} 2198 2199static int 2200intel_dp_set_property(struct drm_connector *connector, 2201 struct drm_property *property, 2202 uint64_t val) 2203{ 2204 struct drm_i915_private *dev_priv = connector->dev->dev_private; 2205 struct intel_dp *intel_dp = intel_attached_dp(connector); 2206 int ret; 2207 2208 ret = drm_connector_property_set_value(connector, property, val); 2209 if (ret) 2210 return ret; 2211 2212 if (property == 
dev_priv->force_audio_property) { 2213 int i = val; 2214 bool has_audio; 2215 2216 if (i == intel_dp->force_audio) 2217 return 0; 2218 2219 intel_dp->force_audio = i; 2220 2221 if (i == 0) 2222 has_audio = intel_dp_detect_audio(connector); 2223 else 2224 has_audio = i > 0; 2225 2226 if (has_audio == intel_dp->has_audio) 2227 return 0; 2228 2229 intel_dp->has_audio = has_audio; 2230 goto done; 2231 } 2232 2233 if (property == dev_priv->broadcast_rgb_property) { 2234 if (val == !!intel_dp->color_range) 2235 return 0; 2236 2237 intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0; 2238 goto done; 2239 } 2240 2241 return -EINVAL; 2242 2243done: 2244 if (intel_dp->base.base.crtc) { 2245 struct drm_crtc *crtc = intel_dp->base.base.crtc; 2246 drm_crtc_helper_set_mode(crtc, &crtc->mode, 2247 crtc->x, crtc->y, 2248 crtc->fb); 2249 } 2250 2251 return 0; 2252} 2253 2254static void 2255intel_dp_destroy(struct drm_connector *connector) 2256{ 2257 struct drm_device *dev = connector->dev; 2258 2259 if (intel_dpd_is_edp(dev)) 2260 intel_panel_destroy_backlight(dev); 2261 2262 drm_sysfs_connector_remove(connector); 2263 drm_connector_cleanup(connector); 2264 kfree(connector); 2265} 2266 2267static void intel_dp_encoder_destroy(struct drm_encoder *encoder) 2268{ 2269 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2270 2271 i2c_del_adapter(&intel_dp->adapter); 2272 drm_encoder_cleanup(encoder); 2273 if (is_edp(intel_dp)) { 2274 cancel_delayed_work_sync(&intel_dp->panel_vdd_work); 2275 ironlake_panel_vdd_off_sync(intel_dp); 2276 } 2277 kfree(intel_dp); 2278} 2279 2280static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = { 2281 .dpms = intel_dp_dpms, 2282 .mode_fixup = intel_dp_mode_fixup, 2283 .prepare = intel_dp_prepare, 2284 .mode_set = intel_dp_mode_set, 2285 .commit = intel_dp_commit, 2286}; 2287 2288static const struct drm_connector_funcs intel_dp_connector_funcs = { 2289 .dpms = drm_helper_connector_dpms, 2290 .detect = intel_dp_detect, 2291 .fill_modes = drm_helper_probe_single_connector_modes, 2292 .set_property = intel_dp_set_property, 2293 .destroy = intel_dp_destroy, 2294}; 2295 2296static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = { 2297 .get_modes = intel_dp_get_modes, 2298 .mode_valid = intel_dp_mode_valid, 2299 .best_encoder = intel_best_encoder, 2300}; 2301 2302static const struct drm_encoder_funcs intel_dp_enc_funcs = { 2303 .destroy = intel_dp_encoder_destroy, 2304}; 2305 2306static void 2307intel_dp_hot_plug(struct intel_encoder *intel_encoder) 2308{ 2309 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); 2310 2311 intel_dp_check_link_status(intel_dp); 2312} 2313 2314/* Return which DP Port should be selected for Transcoder DP control */ 2315int 2316intel_trans_dp_port_sel(struct drm_crtc *crtc) 2317{ 2318 struct drm_device *dev = crtc->dev; 2319 struct drm_mode_config *mode_config = &dev->mode_config; 2320 struct drm_encoder *encoder; 2321 2322 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 2323 struct intel_dp *intel_dp; 2324 2325 if (encoder->crtc != crtc) 2326 continue; 2327 2328 intel_dp = enc_to_intel_dp(encoder); 2329 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT || 2330 intel_dp->base.type == INTEL_OUTPUT_EDP) 2331 return intel_dp->output_reg; 2332 } 2333 2334 return -1; 2335} 2336 2337/* check the VBT to see whether the eDP is on DP-D port */ 2338bool intel_dpd_is_edp(struct drm_device *dev) 2339{ 2340 struct drm_i915_private *dev_priv = dev->dev_private; 2341 struct 
child_device_config *p_child; 2342 int i; 2343 2344 if (!dev_priv->child_dev_num) 2345 return false; 2346 2347 for (i = 0; i < dev_priv->child_dev_num; i++) { 2348 p_child = dev_priv->child_dev + i; 2349 2350 if (p_child->dvo_port == PORT_IDPD && 2351 p_child->device_type == DEVICE_TYPE_eDP) 2352 return true; 2353 } 2354 return false; 2355} 2356 2357static void 2358intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector) 2359{ 2360 intel_attach_force_audio_property(connector); 2361 intel_attach_broadcast_rgb_property(connector); 2362} 2363 2364void 2365intel_dp_init(struct drm_device *dev, int output_reg) 2366{ 2367 struct drm_i915_private *dev_priv = dev->dev_private; 2368 struct drm_connector *connector; 2369 struct intel_dp *intel_dp; 2370 struct intel_encoder *intel_encoder; 2371 struct intel_connector *intel_connector; 2372 const char *name = NULL; 2373 int type; 2374 2375 intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL); 2376 if (!intel_dp) 2377 return; 2378 2379 intel_dp->output_reg = output_reg; 2380 intel_dp->dpms_mode = -1; 2381 2382 intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL); 2383 if (!intel_connector) { 2384 kfree(intel_dp); 2385 return; 2386 } 2387 intel_encoder = &intel_dp->base; 2388 2389 if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D) 2390 if (intel_dpd_is_edp(dev)) 2391 intel_dp->is_pch_edp = true; 2392 2393 if (output_reg == DP_A || is_pch_edp(intel_dp)) { 2394 type = DRM_MODE_CONNECTOR_eDP; 2395 intel_encoder->type = INTEL_OUTPUT_EDP; 2396 } else { 2397 type = DRM_MODE_CONNECTOR_DisplayPort; 2398 intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT; 2399 } 2400 2401 connector = &intel_connector->base; 2402 drm_connector_init(dev, connector, &intel_dp_connector_funcs, type); 2403 drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs); 2404 2405 connector->polled = DRM_CONNECTOR_POLL_HPD; 2406 2407 if (output_reg == DP_B || output_reg == PCH_DP_B) 2408 intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT); 2409 else if (output_reg == DP_C || output_reg == PCH_DP_C) 2410 intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT); 2411 else if (output_reg == DP_D || output_reg == PCH_DP_D) 2412 intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT); 2413 2414 if (is_edp(intel_dp)) { 2415 intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT); 2416 INIT_DELAYED_WORK(&intel_dp->panel_vdd_work, 2417 ironlake_panel_vdd_work); 2418 } 2419 2420 intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2); 2421 connector->interlace_allowed = true; 2422 connector->doublescan_allowed = 0; 2423 2424 drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs, 2425 DRM_MODE_ENCODER_TMDS); 2426 drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs); 2427 2428 intel_connector_attach_encoder(intel_connector, intel_encoder); 2429 drm_sysfs_connector_add(connector); 2430 2431 /* Set up the DDC bus. 
*/ 2432 switch (output_reg) { 2433 case DP_A: 2434 name = "DPDDC-A"; 2435 break; 2436 case DP_B: 2437 case PCH_DP_B: 2438 dev_priv->hotplug_supported_mask |= 2439 HDMIB_HOTPLUG_INT_STATUS; 2440 name = "DPDDC-B"; 2441 break; 2442 case DP_C: 2443 case PCH_DP_C: 2444 dev_priv->hotplug_supported_mask |= 2445 HDMIC_HOTPLUG_INT_STATUS; 2446 name = "DPDDC-C"; 2447 break; 2448 case DP_D: 2449 case PCH_DP_D: 2450 dev_priv->hotplug_supported_mask |= 2451 HDMID_HOTPLUG_INT_STATUS; 2452 name = "DPDDC-D"; 2453 break; 2454 } 2455 2456 /* Cache some DPCD data in the eDP case */ 2457 if (is_edp(intel_dp)) { 2458 bool ret; 2459 struct edp_power_seq cur, vbt; 2460 u32 pp_on, pp_off, pp_div; 2461 2462 pp_on = I915_READ(PCH_PP_ON_DELAYS); 2463 pp_off = I915_READ(PCH_PP_OFF_DELAYS); 2464 pp_div = I915_READ(PCH_PP_DIVISOR); 2465 2466 /* Pull timing values out of registers */ 2467 cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >> 2468 PANEL_POWER_UP_DELAY_SHIFT; 2469 2470 cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >> 2471 PANEL_LIGHT_ON_DELAY_SHIFT; 2472 2473 cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >> 2474 PANEL_LIGHT_OFF_DELAY_SHIFT; 2475 2476 cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >> 2477 PANEL_POWER_DOWN_DELAY_SHIFT; 2478 2479 cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >> 2480 PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000; 2481 2482 DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", 2483 cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12); 2484 2485 vbt = dev_priv->edp.pps; 2486 2487 DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n", 2488 vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12); 2489 2490#define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10) 2491 2492 intel_dp->panel_power_up_delay = get_delay(t1_t3); 2493 intel_dp->backlight_on_delay = get_delay(t8); 2494 intel_dp->backlight_off_delay = get_delay(t9); 2495 intel_dp->panel_power_down_delay = get_delay(t10); 2496 intel_dp->panel_power_cycle_delay = get_delay(t11_t12); 2497 2498 DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n", 2499 intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay, 2500 intel_dp->panel_power_cycle_delay); 2501 2502 DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n", 2503 intel_dp->backlight_on_delay, intel_dp->backlight_off_delay); 2504 2505 ironlake_edp_panel_vdd_on(intel_dp); 2506 ret = intel_dp_get_dpcd(intel_dp); 2507 ironlake_edp_panel_vdd_off(intel_dp, false); 2508 2509 if (ret) { 2510 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) 2511 dev_priv->no_aux_handshake = 2512 intel_dp->dpcd[DP_MAX_DOWNSPREAD] & 2513 DP_NO_AUX_HANDSHAKE_LINK_TRAINING; 2514 } else { 2515 /* if this fails, presume the device is a ghost */ 2516 DRM_INFO("failed to retrieve link info, disabling eDP\n"); 2517 intel_dp_encoder_destroy(&intel_dp->base.base); 2518 intel_dp_destroy(&intel_connector->base); 2519 return; 2520 } 2521 } 2522 2523 intel_dp_i2c_init(intel_dp, intel_connector, name); 2524 2525 intel_encoder->hot_plug = intel_dp_hot_plug; 2526 2527 if (is_edp(intel_dp)) { 2528 dev_priv->int_edp_connector = connector; 2529 intel_panel_setup_backlight(dev); 2530 } 2531 2532 intel_dp_add_properties(intel_dp, connector); 2533 2534 /* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written 2535 * 0xd. Failure to do so will result in spurious interrupts being 2536 * generated on the port when a cable is not attached. 
2537 */ 2538 if (IS_G4X(dev) && !IS_GM45(dev)) { 2539 u32 temp = I915_READ(PEG_BAND_GAP_DATA); 2540 I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd); 2541 } 2542} 2543
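
/*
 * Editor's note: the following is an illustrative, stand-alone user-space
 * sketch (not driver code) of the retry policy used by the channel
 * equalization loop in intel_dp_complete_link_train() above: up to six
 * equalization attempts are made with the current clock settings, after
 * which the driver drops back to clock recovery, and more than five such
 * full restarts abort the training.  The stub_*() helpers below are
 * hypothetical stand-ins for intel_clock_recovery_ok() and
 * intel_channel_eq_ok(); the pass/fail pattern they return is made up.
 * Builds with a plain "cc -o eq_loop eq_loop.c".
 */
#include <stdbool.h>
#include <stdio.h>

/* Pretend clock recovery drops out once and EQ needs a few tries. */
static bool stub_clock_recovery_ok(int attempt) { return attempt != 3; }
static bool stub_channel_eq_ok(int attempt)     { return attempt >= 6; }

int main(void)
{
	int tries = 0;      /* EQ attempts with the current settings */
	int cr_tries = 0;   /* full clock-recovery restarts */
	int attempt = 0;
	bool channel_eq = false;

	for (;;) {
		attempt++;

		if (cr_tries > 5) {
			printf("too many clock recovery restarts, aborting\n");
			break;
		}

		if (!stub_clock_recovery_ok(attempt)) {
			/* Clock recovery lost: start link training over. */
			cr_tries++;
			printf("attempt %d: CR lost, restart %d\n",
			       attempt, cr_tries);
			continue;
		}

		if (stub_channel_eq_ok(attempt)) {
			channel_eq = true;
			printf("attempt %d: channel EQ ok\n", attempt);
			break;
		}

		if (tries > 5) {
			/* EQ keeps failing: drop back to clock recovery. */
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Otherwise adjust drive settings and retry EQ. */
		++tries;
	}

	return channel_eq ? 0 : 1;
}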
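
/*
 * Editor's note: a second stand-alone sketch, this time of the panel power
 * sequencing math in intel_dp_init() above: each delay is the maximum of the
 * value currently programmed in the PCH_PP_* registers and the value from the
 * VBT, rounded up to whole milliseconds.  This assumes the raw t1_t3/t8/t9/t10
 * values are in units of 100 us, which is what the "(x + 9) / 10" conversion
 * implies; the struct and macro names below are invented for the example and
 * the numbers are made up.
 */
#include <stdio.h>

struct pps_delays {            /* loosely modeled on struct edp_power_seq */
	unsigned int t1_t3;    /* panel power up   */
	unsigned int t8;       /* backlight on     */
	unsigned int t9;       /* backlight off    */
	unsigned int t10;      /* panel power down */
	unsigned int t11_t12;  /* power cycle      */
};

#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Same shape as the driver's get_delay(): max(cur, vbt) in 100 us units,
 * rounded up to milliseconds. */
#define GET_DELAY_MS(cur, vbt, field) \
	((MAX((cur).field, (vbt).field) + 9) / 10)

int main(void)
{
	struct pps_delays cur = { 2100, 500, 500, 500, 5000 };
	struct pps_delays vbt = { 2000,   1,   1, 500, 6000 };

	printf("panel power up delay    %u ms\n", GET_DELAY_MS(cur, vbt, t1_t3));
	printf("backlight on delay      %u ms\n", GET_DELAY_MS(cur, vbt, t8));
	printf("backlight off delay     %u ms\n", GET_DELAY_MS(cur, vbt, t9));
	printf("panel power down delay  %u ms\n", GET_DELAY_MS(cur, vbt, t10));
	printf("panel power cycle delay %u ms\n", GET_DELAY_MS(cur, vbt, t11_t12));
	return 0;
}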