/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 *
 */

#include <linux/i2c.h>
#include <linux/slab.h>
#include <linux/export.h>
#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "intel_drv.h"
#include "i915_drm.h"
#include "i915_drv.h"
#include "drm_dp_helper.h"

#define DP_RECEIVER_CAP_SIZE	0xf
#define DP_LINK_STATUS_SIZE	6
#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9

struct intel_dp {
	struct intel_encoder base;
	uint32_t output_reg;
	uint32_t DP;
	uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	enum hdmi_force_audio force_audio;
	uint32_t color_range;
	int dpms_mode;
	uint8_t link_bw;
	uint8_t lane_count;
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
	struct i2c_adapter adapter;
	struct i2c_algo_dp_aux_data algo;
	bool is_pch_edp;
	uint8_t train_set[4];
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct delayed_work panel_vdd_work;
	bool want_panel_vdd;
};

/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct intel_dp *intel_dp)
{
	return intel_dp->base.type == INTEL_OUTPUT_EDP;
}

/**
 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a PCH DP port attached
 * to an eDP panel, false otherwise.  Helpful for determining whether we
 * may need FDI resources for a given DP output or not.
 */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
	return intel_dp->is_pch_edp;
}
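
/*
 * In this driver "CPU eDP" means the panel is driven from the CPU DP_A
 * port, while "PCH eDP" means it hangs off a PCH DP port (typically DP-D)
 * and therefore needs FDI like any other PCH output.
 */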
/**
 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a CPU eDP port.
 */
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}

static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}

static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}

/**
 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
 * @encoder: DRM encoder
 *
 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
 * by intel_display.c.
 */
bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp;

	if (!encoder)
		return false;

	intel_dp = enc_to_intel_dp(encoder);

	return is_pch_edp(intel_dp);
}

static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);

void
intel_edp_link_config(struct intel_encoder *intel_encoder,
		      int *lane_num, int *link_bw)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	*lane_num = intel_dp->lane_count;
	if (intel_dp->link_bw == DP_LINK_BW_1_62)
		*link_bw = 162000;
	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
		*link_bw = 270000;
}

static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
	switch (max_lane_count) {
	case 1: case 2: case 4:
		break;
	default:
		max_lane_count = 4;
	}
	return max_lane_count;
}

static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

	switch (max_link_bw) {
	case DP_LINK_BW_1_62:
	case DP_LINK_BW_2_7:
		break;
	default:
		max_link_bw = DP_LINK_BW_1_62;
		break;
	}
	return max_link_bw;
}

static int
intel_dp_link_clock(uint8_t link_bw)
{
	if (link_bw == DP_LINK_BW_2_7)
		return 270000;
	else
		return 162000;
}

/*
 * The units on the numbers in the next two are... bizarre.  Examples will
 * make it clearer; this one parallels an example in the eDP spec.
 *
 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
 *
 *     270000 * 1 * 8 / 10 == 216000
 *
 * The actual data capacity of that configuration is 2.16Gbit/s, so the
 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
 * 119000.  At 18bpp that's 2142000 kilobits per second.
 *
 * Thus the strange-looking division by 10 in intel_dp_link_required, to
 * get the result in decakilobits instead of kilobits.
 */
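
/*
 * To finish that example at 18bpp: intel_dp_link_required(119000, 18)
 * below evaluates to (119000 * 18 + 9) / 10 = 214200 decakilobits, which
 * fits in the 216000 available on a single 2.7GHz lane.
 */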

static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	return (pixel_clock * bpp + 9) / 10;
}

static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 8) / 10;
}

static bool
intel_dp_adjust_dithering(struct intel_dp *intel_dp,
			  struct drm_display_mode *mode,
			  struct drm_display_mode *adjusted_mode)
{
	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
	int max_lanes = intel_dp_max_lane_count(intel_dp);
	int max_rate, mode_rate;

	mode_rate = intel_dp_link_required(mode->clock, 24);
	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);

	if (mode_rate > max_rate) {
		mode_rate = intel_dp_link_required(mode->clock, 18);
		if (mode_rate > max_rate)
			return false;

		if (adjusted_mode)
			adjusted_mode->private_flags
				|= INTEL_MODE_DP_FORCE_6BPC;

		return true;
	}

	return true;
}

static int
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	if (!intel_dp_adjust_dithering(intel_dp, mode, NULL))
		return MODE_CLOCK_HIGH;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	return MODE_OK;
}

static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;
	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}

/* hrawclock is 1/4 the FSB frequency */
static int
intel_hrawclk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t clkcfg;

	clkcfg = I915_READ(CLKCFG);
	switch (clkcfg & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_400:
		return 100;
	case CLKCFG_FSB_533:
		return 133;
	case CLKCFG_FSB_667:
		return 166;
	case CLKCFG_FSB_800:
		return 200;
	case CLKCFG_FSB_1067:
		return 266;
	case CLKCFG_FSB_1333:
		return 333;
	/* these two are just a guess; one of them might be right */
	case CLKCFG_FSB_1600:
	case CLKCFG_FSB_1600_ALT:
		return 400;
	default:
		return 133;
	}
}

static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}

static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}

static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp))
		return;
	if (!ironlake_edp_have_panel_power(intel_dp) &&
	    !ironlake_edp_have_panel_vdd(intel_dp)) {
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
			      I915_READ(PCH_PP_STATUS),
			      I915_READ(PCH_PP_CONTROL));
	}
}

static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk,
	 * and would like to run at 2MHz.  So, take the
	 * hrawclk value and divide by 2 and use that
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev) || IS_GEN7(dev))
			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400MHz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450MHz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 63; /* ILK input clock fixed at 125MHz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;

	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		WARN(1, "dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);

		if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR |
			      DP_AUX_CH_CTL_RECEIVE_ERROR))
			continue;
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(I915_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}

/* Write data to the aux channel in native mode */
static int
intel_dp_aux_native_write(struct intel_dp *intel_dp,
			  uint16_t address, uint8_t *send, int send_bytes)
{
	int ret;
	uint8_t msg[20];
	int msg_bytes;
	uint8_t ack;

	intel_dp_check_edp(intel_dp);
	if (send_bytes > 16)
		return -1;
	msg[0] = AUX_NATIVE_WRITE << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = send_bytes - 1;
	memcpy(&msg[4], send, send_bytes);
	msg_bytes = send_bytes + 4;
	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
	return send_bytes;
}

/* Write a single byte to the aux channel in native mode */
static int
intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
			    uint16_t address, uint8_t byte)
{
	return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
}

/* read bytes from a native aux channel */
static int
intel_dp_aux_native_read(struct intel_dp *intel_dp,
			 uint16_t address, uint8_t *recv, int recv_bytes)
{
	uint8_t msg[4];
	int msg_bytes;
	uint8_t reply[20];
	int reply_bytes;
	uint8_t ack;
	int ret;

	intel_dp_check_edp(intel_dp);
	msg[0] = AUX_NATIVE_READ << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = recv_bytes - 1;

	msg_bytes = 4;
	reply_bytes = recv_bytes + 1;

	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
				      reply, reply_bytes);
		if (ret == 0)
			return -EPROTO;
		if (ret < 0)
			return ret;
		ack = reply[0];
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
			memcpy(recv, reply + 1, ret - 1);
			return ret - 1;
		}
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
}

static int
intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
		    uint8_t write_byte, uint8_t *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct intel_dp *intel_dp = container_of(adapter,
						 struct intel_dp,
						 adapter);
	uint16_t address = algo_data->address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	intel_dp_check_edp(intel_dp);
	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = AUX_I2C_READ << 4;
	else
		msg[0] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[0] |= AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg[3] = 0;
		msg[4] = write_byte;
		msg_bytes = 5;
		reply_bytes = 1;
		break;
	case MODE_I2C_READ:
		msg[3] = 0;
		msg_bytes = 4;
		reply_bytes = 2;
		break;
	default:
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = intel_dp_aux_ch(intel_dp,
				      msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			udelay(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return -EREMOTEIO;
		}

		switch (reply[0] & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return reply_bytes - 1;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return -EREMOTEIO;
}

static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);

static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
		  struct intel_connector *intel_connector, const char *name)
{
	int ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);
	intel_dp->algo.running = false;
	intel_dp->algo.address = 0;
	intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;

	memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter));
	intel_dp->adapter.owner = THIS_MODULE;
	intel_dp->adapter.class = I2C_CLASS_DDC;
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
	intel_dp->adapter.algo_data = &intel_dp->algo;
	intel_dp->adapter.dev.parent = &intel_connector->base.kdev;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}

static bool
intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int lane_count, clock;
	int max_lane_count = intel_dp_max_lane_count(intel_dp);
	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
	int bpp;
	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
					mode, adjusted_mode);
		/*
		 * the mode->clock is used to calculate the Data&Link M/N
		 * of the pipe. For the eDP the fixed clock should be used.
		 */
		mode->clock = intel_dp->panel_fixed_mode->clock;
	}

	if (!intel_dp_adjust_dithering(intel_dp, mode, adjusted_mode))
		return false;

	bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24;

	for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
		for (clock = 0; clock <= max_clock; clock++) {
			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);

			if (intel_dp_link_required(mode->clock, bpp)
			    <= link_avail) {
				intel_dp->link_bw = bws[clock];
				intel_dp->lane_count = lane_count;
				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
				DRM_DEBUG_KMS("Display port link bw %02x lane "
					      "count %d clock %d\n",
					      intel_dp->link_bw, intel_dp->lane_count,
					      adjusted_mode->clock);
				return true;
			}
		}
	}

	return false;
}

struct intel_dp_m_n {
	uint32_t tu;
	uint32_t gmch_m;
	uint32_t gmch_n;
	uint32_t link_m;
	uint32_t link_n;
};

static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}
}

static void
intel_dp_compute_m_n(int bpp,
		     int nlanes,
		     int pixel_clock,
		     int link_clock,
		     struct intel_dp_m_n *m_n)
{
	m_n->tu = 64;
	m_n->gmch_m = (pixel_clock * bpp) >> 3;
	m_n->gmch_n = link_clock * nlanes;
	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}

void
intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		 struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int lane_count = 4;
	struct intel_dp_m_n m_n;
	int pipe = intel_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
		{
			lane_count = intel_dp->lane_count;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
			     mode->clock, adjusted_mode->clock, &m_n);

	if (HAS_PCH_SPLIT(dev)) {
		I915_WRITE(TRANSDATA_M1(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
	} else {
		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}

static void ironlake_edp_pll_on(struct drm_encoder *encoder);
static void ironlake_edp_pll_off(struct drm_encoder *encoder);

static void
intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_crtc *crtc = intel_dp->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	/* Turn on the eDP PLL if needed */
	if (is_edp(intel_dp)) {
		if (!is_pch_edp(intel_dp))
			ironlake_edp_pll_on(encoder);
		else
			ironlake_edp_pll_off(encoder);
	}

	/*
	 * There are four kinds of DP registers:
	 *
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead. That
	 * configuration happens (oddly) in ironlake_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;

	/* Handle DP bits in common between all three register formats */

	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;

	switch (intel_dp->lane_count) {
	case 1:
		intel_dp->DP |= DP_PORT_WIDTH_1;
		break;
	case 2:
		intel_dp->DP |= DP_PORT_WIDTH_2;
		break;
	case 4:
		intel_dp->DP |= DP_PORT_WIDTH_4;
		break;
	}
	if (intel_dp->has_audio) {
		DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
				 pipe_name(intel_crtc->pipe));
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
		intel_write_eld(encoder, adjusted_mode);
	}
	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
	intel_dp->link_configuration[0] = intel_dp->link_bw;
	intel_dp->link_configuration[1] = intel_dp->lane_count;
	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
	/*
	 * Check for DPCD version > 1.1 and enhanced framing support
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	}

	/* Split out the IBX/CPU vs CPT settings */

	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= intel_crtc->pipe << 29;

		/* don't miss out required setting for eDP */
		intel_dp->DP |= DP_PLL_ENABLE;
		if (adjusted_mode->clock < 200000)
			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
		else
			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
		intel_dp->DP |= intel_dp->color_range;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (intel_crtc->pipe == 1)
			intel_dp->DP |= DP_PIPEB_SELECT;

		if (is_cpu_edp(intel_dp)) {
			/* don't miss out required setting for eDP */
			intel_dp->DP |= DP_PLL_ENABLE;
			if (adjusted_mode->clock < 200000)
				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
			else
				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
		}
	} else {
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
	}
}

#define IDLE_ON_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE		(PP_ON | 0 | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_OFF_VALUE		(0     | 0 | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)

#define IDLE_CYCLE_MASK		(PP_ON | 0 | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE	(0     | 0 | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)

static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
				       u32 mask,
				       u32 value)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
		      mask, value,
		      I915_READ(PCH_PP_STATUS),
		      I915_READ(PCH_PP_CONTROL));

	if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
			  I915_READ(PCH_PP_STATUS),
			  I915_READ(PCH_PP_CONTROL));
	}
}

static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}

static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}

static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power cycle\n");
	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}


/* Read the current pp_control value, unlocking the register if it
 * is locked
 */

static u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
{
	u32 control = I915_READ(PCH_PP_CONTROL);

	control &= ~PANEL_UNLOCK_MASK;
	control |= PANEL_UNLOCK_REGS;
	return control;
}

static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	WARN(intel_dp->want_panel_vdd,
	     "eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;

	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	if (!ironlake_edp_have_panel_power(intel_dp))
		ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		msleep(intel_dp->panel_power_up_delay);
	}
}

static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = ironlake_get_pp_control(dev_priv);
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

		msleep(intel_dp->panel_power_down_delay);
	}
}

static void ironlake_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
						 struct intel_dp, panel_vdd_work);
	struct drm_device *dev = intel_dp->base.base.dev;

	mutex_lock(&dev->mode_config.mutex);
	ironlake_panel_vdd_off_sync(intel_dp);
	mutex_unlock(&dev->mode_config.mutex);
}

static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");

	intel_dp->want_panel_vdd = false;

	if (sync) {
		ironlake_panel_vdd_off_sync(intel_dp);
	} else {
		/*
		 * Queue the timer to fire a long
		 * time from now (relative to the power down delay)
		 * to keep the panel power up across a sequence of operations
		 */
		schedule_delayed_work(&intel_dp->panel_vdd_work,
				      msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
	}
}

static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power on\n");

	if (ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP power already on\n");
		return;
	}

	ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_on(intel_dp);

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}

static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power off\n");

	WARN(!intel_dp->want_panel_vdd, "Need VDD to turn off panel\n");

	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~(POWER_TARGET_ON | PANEL_POWER_RESET | EDP_BLC_ENABLE);
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_off(intel_dp);
}

static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	msleep(intel_dp->backlight_on_delay);
	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}

static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	msleep(intel_dp->backlight_off_delay);
}

static void ironlake_edp_pll_on(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	DRM_DEBUG_KMS("\n");
	dpa_ctl = I915_READ(DP_A);
	dpa_ctl |= DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}

static void ironlake_edp_pll_off(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	dpa_ctl = I915_READ(DP_A);
	dpa_ctl &= ~DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}

/* If the sink supports it, try to set the power state appropriately */
static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
						  DP_SET_POWER_D3);
		if (ret != 1)
			DRM_DEBUG_DRIVER("failed to write sink power state\n");
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = intel_dp_aux_native_write_1(intel_dp,
							  DP_SET_POWER,
							  DP_SET_POWER_D0);
			if (ret == 1)
				break;
			msleep(1);
		}
	}
}

static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);


	/* Make sure the panel is off before trying to change the mode. But also
	 * ensure that we have vdd while we switch off the panel.
	 */
	ironlake_edp_panel_vdd_on(intel_dp);
	ironlake_edp_backlight_off(intel_dp);
	ironlake_edp_panel_off(intel_dp);

	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_link_down(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, false);
}

static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);
	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}

static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode != DRM_MODE_DPMS_ON) {
		/* Switching the panel off requires vdd. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ironlake_edp_backlight_off(intel_dp);
		ironlake_edp_panel_off(intel_dp);

		intel_dp_sink_dpms(intel_dp, mode);
		intel_dp_link_down(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
	} else {
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_on(encoder);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}

/*
 * Native read with retry for link status and receiver capability reads for
 * cases where the sink may still be asleep.
 */
static bool
intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
			       uint8_t *recv, int recv_bytes)
{
	int ret, i;

	/*
	 * Sinks are *supposed* to come up within 1ms from an off state,
	 * but we're also supposed to retry 3 times per the spec.
	 */
	for (i = 0; i < 3; i++) {
		ret = intel_dp_aux_native_read(intel_dp, address, recv,
					       recv_bytes);
		if (ret == recv_bytes)
			return true;
		msleep(1);
	}

	return false;
}

/*
 * Fetch AUX CH registers 0x202 - 0x207 which contain
 * link status information
 */
static bool
intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	return intel_dp_aux_native_read_retry(intel_dp,
					      DP_LANE0_1_STATUS,
					      link_status,
					      DP_LINK_STATUS_SIZE);
}

static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		     int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static uint8_t
intel_get_adjust_request_voltage(uint8_t adjust_request[2],
				 int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
		 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
				      int lane)
{
	int s = ((lane & 1) ?
		 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
		 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	uint8_t l = adjust_request[lane>>1];

	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}


#if 0
static char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char *link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};
#endif

/*
 * These are source-specific values; current Intel hardware supports
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
 */

static uint8_t
intel_dp_voltage_max(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_800;
	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		return DP_TRAIN_VOLTAGE_SWING_1200;
	else
		return DP_TRAIN_VOLTAGE_SWING_800;
}

static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	} else {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		case DP_TRAIN_VOLTAGE_SWING_1200:
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	}
}

static void
intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t v = 0;
	uint8_t p = 0;
	int lane;
	uint8_t *adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
	uint8_t voltage_max;
	uint8_t preemph_max;

	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	voltage_max = intel_dp_voltage_max(intel_dp);
	if (v >= voltage_max)
		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;

	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
	if (p >= preemph_max)
		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] = v | p;
}

static uint32_t
intel_dp_signal_levels(uint8_t train_set)
{
	uint32_t signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_600:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_800:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_1200:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPHASIS_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPHASIS_3_5:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPHASIS_6:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPHASIS_9_5:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

/* Gen6's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen6_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

/* Gen7's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen7_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static uint8_t
intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		      int lane)
{
	int s = (lane & 1) * 4;
	uint8_t l = link_status[lane>>1];

	return (l >> s) & 0xf;
}
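
/*
 * Note on the layout intel_get_lane_status() decodes: the link status
 * block read from DPCD 0x202 - 0x207 packs two lanes per byte, four
 * status bits per lane, so e.g. lane 2 is the low nibble and lane 3 the
 * high nibble of link_status[1].
 */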
/* Check for clock recovery is done on all channels */
static bool
intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
{
	int lane;
	uint8_t lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
			 DP_LANE_CHANNEL_EQ_DONE|\
			 DP_LANE_SYMBOL_LOCKED)
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t lane_align;
	uint8_t lane_status;
	int lane;

	lane_align = intel_dp_link_status(link_status,
					  DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			uint32_t dp_reg_value,
			uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set,
					intel_dp->lane_count);
	if (ret != intel_dp->lane_count)
		return false;

	return true;
}

/* Enable corresponding port and start training pattern 1 */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int voltage_tries, loop_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/*
	 * On CPT we have to enable the port in training pattern 1, which
	 * will happen below in intel_dp_set_link_train.  Otherwise, enable
	 * the port and wait for it to become active.
	 */
	if (!HAS_PCH_CPT(dev)) {
		I915_WRITE(intel_dp->output_reg, intel_dp->DP);
		POSTING_READ(intel_dp->output_reg);
		intel_wait_for_vblank(dev, intel_crtc->pipe);
	}

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;
	voltage_tries = 0;
	loop_tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint8_t link_status[DP_LINK_STATUS_SIZE];
		uint32_t signal_levels;


		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;
		/* Set training pattern 1 */

		udelay(100);
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
			DRM_ERROR("failed to get link status\n");
			break;
		}

		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("clock recovery OK\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count) {
			++loop_tries;
			if (loop_tries == 5) {
				DRM_DEBUG_KMS("too many full retries, give up\n");
				break;
			}
			memset(intel_dp->train_set, 0, 4);
			voltage_tries = 0;
			continue;
		}

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++voltage_tries;
			if (voltage_tries == 5) {
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
				break;
			}
		} else
			voltage_tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
	}

	intel_dp->DP = DP;
}

static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint32_t signal_levels;
		uint8_t link_status[DP_LINK_STATUS_SIZE];

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			break;
		}

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;

		udelay(400);
		if (!intel_dp_get_link_status(intel_dp, link_status))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp, link_status)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
		++tries;
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}

static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		udelay(100);
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	msleep(17);

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}

	if (!HAS_PCH_CPT(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		udelay(100);
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	msleep(17);

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}

	if (!HAS_PCH_CPT(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
		 * set to transcoder B while it's off will prevent the
		 * corresponding HDMI output on transcoder A.
		 *
		 * Combine this with another hardware workaround:
		 * transcoder select bit can only be cleared while the
		 * port is enabled.
		 */
		DP &= ~DP_PIPEB_SELECT;
		I915_WRITE(intel_dp->output_reg, DP);

		/* Changes to enable or select take place the vblank
		 * after being written.
		 */
		if (crtc == NULL) {
			/* We can arrive here never having been attached
			 * to a CRTC, for instance, due to inheriting
			 * random state from the BIOS.
			 *
			 * If the pipe is not running, play safe and
			 * wait for the clocks to stabilise before
			 * continuing.
			 */
			POSTING_READ(intel_dp->output_reg);
			msleep(50);
		} else
			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
	}

	DP &= ~DP_AUDIO_OUTPUT_ENABLE;
	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);
	msleep(intel_dp->panel_power_down_delay);
}

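/*
 * Read the first DP_RECEIVER_CAP_SIZE bytes of the DPCD, starting at
 * DP_DPCD_REV (address 0x000).  A revision of zero is taken to mean that
 * the read failed or that no sink is present.
 */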
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
					   sizeof(intel_dp->dpcd)) &&
	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
		return true;
	}

	return false;
}

static bool
intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
{
	int ret;

	ret = intel_dp_aux_native_read_retry(intel_dp,
					     DP_DEVICE_SERVICE_IRQ_VECTOR,
					     sink_irq_vector, 1);
	if (!ret)
		return false;

	return true;
}

static void
intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	/* NAK by default */
	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
}

/*
 * According to DP spec
 * 5.1.2:
 *  1. Read DPCD
 *  2. Configure link according to Receiver Capabilities
 *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
 *  4. Check link status on receipt of hot-plug interrupt
 */

static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	u8 sink_irq_vector;
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (!intel_dp->base.base.crtc)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Try to read the source of the interrupt */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Clear interrupt source */
		intel_dp_aux_native_write_1(intel_dp,
					    DP_DEVICE_SERVICE_IRQ_VECTOR,
					    sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	if (!intel_channel_eq_ok(intel_dp, link_status)) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      drm_get_encoder_name(&intel_dp->base.base));
		intel_dp_start_link_train(intel_dp);
		intel_dp_complete_link_train(intel_dp);
	}
}

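/*
 * Connection probing is split per platform below: ironlake_dp_detect()
 * treats eDP as connected unless the panel-detect code says otherwise and
 * falls back to DPCD probing for external ports, while g4x_dp_detect()
 * checks the port's hot-plug status bit before touching the AUX channel.
 */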
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_get_dpcd(intel_dp))
		return connector_status_connected;
	return connector_status_disconnected;
}

static enum drm_connector_status
ironlake_dp_detect(struct intel_dp *intel_dp)
{
	enum drm_connector_status status;

	/* Can't disconnect eDP, but you can close the lid... */
	if (is_edp(intel_dp)) {
		status = intel_panel_detect(intel_dp->base.base.dev);
		if (status == connector_status_unknown)
			status = connector_status_connected;
		return status;
	}

	return intel_dp_detect_dpcd(intel_dp);
}

static enum drm_connector_status
g4x_dp_detect(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t temp, bit;

	switch (intel_dp->output_reg) {
	case DP_B:
		bit = DPB_HOTPLUG_INT_STATUS;
		break;
	case DP_C:
		bit = DPC_HOTPLUG_INT_STATUS;
		break;
	case DP_D:
		bit = DPD_HOTPLUG_INT_STATUS;
		break;
	default:
		return connector_status_unknown;
	}

	temp = I915_READ(PORT_HOTPLUG_STAT);

	if ((temp & bit) == 0)
		return connector_status_disconnected;

	return intel_dp_detect_dpcd(intel_dp);
}

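/*
 * EDID is fetched over the sink's DDC/AUX channel, which on eDP needs
 * panel VDD forced on, so both helpers below bracket the read with
 * ironlake_edp_panel_vdd_on()/_off().
 */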
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;

	ironlake_edp_panel_vdd_on(intel_dp);
	edid = drm_get_edid(connector, adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return edid;
}

static int
intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = intel_ddc_get_modes(connector, adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}

/**
 * intel_dp_detect - probe for a connected DP/eDP sink.
 *
 * Returns connector_status_connected if a sink responds to the
 * platform-specific hot-plug/DPCD probes above, and
 * connector_status_disconnected otherwise.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	enum drm_connector_status status;
	struct edid *edid = NULL;

	intel_dp->has_audio = false;

	if (HAS_PCH_SPLIT(dev))
		status = ironlake_dp_detect(intel_dp);
	else
		status = g4x_dp_detect(intel_dp);

	DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
		      intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2],
		      intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5],
		      intel_dp->dpcd[6], intel_dp->dpcd[7]);

	if (status != connector_status_connected)
		return status;

	if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
		intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
	} else {
		edid = intel_dp_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			connector->display_info.raw_edid = NULL;
			kfree(edid);
		}
	}

	return connector_status_connected;
}

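/*
 * Mode probing: prefer EDID modes (caching the preferred one as the eDP
 * fixed panel mode the first time it is seen); if an eDP panel has no
 * EDID, fall back to the fixed mode from the VBT.
 */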
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
	if (ret) {
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}

static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid *edid;
	bool has_audio = false;

	edid = intel_dp_get_edid(connector, &intel_dp->adapter);
	if (edid) {
		has_audio = drm_detect_monitor_audio(edid);

		connector->display_info.raw_edid = NULL;
		kfree(edid);
	}

	return has_audio;
}

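/*
 * Connector property changes (force audio, broadcast RGB) only take
 * effect by forcing a full modeset on the attached CRTC; see the done:
 * label below.
 */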
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}

static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

	drm_sysfs_connector_remove(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	i2c_del_adapter(&intel_dp->adapter);
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	kfree(intel_dp);
}

static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};

static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	intel_dp_check_link_status(intel_dp);
}

/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;

	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}

/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct child_device_config *p_child;
	int i;

	if (!dev_priv->child_dev_num)
		return false;

	for (i = 0; i < dev_priv->child_dev_num; i++) {
		p_child = dev_priv->child_dev + i;

		if (p_child->dvo_port == PORT_IDPD &&
		    p_child->device_type == DEVICE_TYPE_eDP)
			return true;
	}
	return false;
}

static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}

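/*
 * intel_dp_init - register a DP/eDP port
 *
 * Sets up the encoder and connector, picks the DDC/AUX bus name and
 * hot-plug bits for the port, and for eDP additionally reads the panel
 * power sequencing delays (from the PP registers and the VBT) and caches
 * the DPCD before the port is first used.
 */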
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
	if (!intel_dp)
		return;

	intel_dp->output_reg = output_reg;
	intel_dp->dpms_mode = -1;

	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
	if (!intel_connector) {
		kfree(intel_dp);
		return;
	}
	intel_encoder = &intel_dp->base;

	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
				  ironlake_panel_vdd_work);
	}

	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
	drm_sysfs_connector_add(connector);

	/* Set up the DDC bus. */
	switch (output_reg) {
	case DP_A:
		name = "DPDDC-A";
		break;
	case DP_B:
	case PCH_DP_B:
		dev_priv->hotplug_supported_mask |=
			HDMIB_HOTPLUG_INT_STATUS;
		name = "DPDDC-B";
		break;
	case DP_C:
	case PCH_DP_C:
		dev_priv->hotplug_supported_mask |=
			HDMIC_HOTPLUG_INT_STATUS;
		name = "DPDDC-C";
		break;
	case DP_D:
	case PCH_DP_D:
		dev_priv->hotplug_supported_mask |=
			HDMID_HOTPLUG_INT_STATUS;
		name = "DPDDC-D";
		break;
	}

	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

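		/*
		 * The delay fields appear to be in units of 100us (the
		 * power-cycle value is scaled by the * 1000 above to match),
		 * so get_delay() below takes the larger of the register and
		 * VBT values and rounds up to whole milliseconds, e.g. a raw
		 * value of 2100 becomes (2100 + 9) / 10 = 210ms.
		 */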
#define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}
	}

	intel_dp_i2c_init(intel_dp, intel_connector, name);

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd. Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}