intel_dp.c revision a60f0e38d72a5e24085d6e7e27a4cadc20ae268a
1/* 2 * Copyright © 2008 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 */ 27 28#include <linux/i2c.h> 29#include <linux/slab.h> 30#include "drmP.h" 31#include "drm.h" 32#include "drm_crtc.h" 33#include "drm_crtc_helper.h" 34#include "intel_drv.h" 35#include "i915_drm.h" 36#include "i915_drv.h" 37#include "drm_dp_helper.h" 38 39#define DP_RECEIVER_CAP_SIZE 0xf 40#define DP_LINK_STATUS_SIZE 6 41#define DP_LINK_CHECK_TIMEOUT (10 * 1000) 42 43#define DP_LINK_CONFIGURATION_SIZE 9 44 45struct intel_dp { 46 struct intel_encoder base; 47 uint32_t output_reg; 48 uint32_t DP; 49 uint8_t link_configuration[DP_LINK_CONFIGURATION_SIZE]; 50 bool has_audio; 51 int force_audio; 52 uint32_t color_range; 53 int dpms_mode; 54 uint8_t link_bw; 55 uint8_t lane_count; 56 uint8_t dpcd[DP_RECEIVER_CAP_SIZE]; 57 struct i2c_adapter adapter; 58 struct i2c_algo_dp_aux_data algo; 59 bool is_pch_edp; 60 uint8_t train_set[4]; 61 uint8_t link_status[DP_LINK_STATUS_SIZE]; 62 int panel_power_up_delay; 63 int panel_power_down_delay; 64 int panel_power_cycle_delay; 65 int backlight_on_delay; 66 int backlight_off_delay; 67 struct drm_display_mode *panel_fixed_mode; /* for eDP */ 68 struct delayed_work panel_vdd_work; 69 bool want_panel_vdd; 70 unsigned long panel_off_jiffies; 71}; 72 73/** 74 * is_edp - is the given port attached to an eDP panel (either CPU or PCH) 75 * @intel_dp: DP struct 76 * 77 * If a CPU or PCH DP output is attached to an eDP panel, this function 78 * will return true, and false otherwise. 79 */ 80static bool is_edp(struct intel_dp *intel_dp) 81{ 82 return intel_dp->base.type == INTEL_OUTPUT_EDP; 83} 84 85/** 86 * is_pch_edp - is the port on the PCH and attached to an eDP panel? 87 * @intel_dp: DP struct 88 * 89 * Returns true if the given DP struct corresponds to a PCH DP port attached 90 * to an eDP panel, false otherwise. Helpful for determining whether we 91 * may need FDI resources for a given DP output or not. 
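 * (A CPU eDP port is driven directly by the display engine, while DP and
 * eDP ports on the PCH sit behind the FDI link, so only the PCH-attached
 * case consumes FDI lanes.)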
92 */ 93static bool is_pch_edp(struct intel_dp *intel_dp) 94{ 95 return intel_dp->is_pch_edp; 96} 97 98static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder) 99{ 100 return container_of(encoder, struct intel_dp, base.base); 101} 102 103static struct intel_dp *intel_attached_dp(struct drm_connector *connector) 104{ 105 return container_of(intel_attached_encoder(connector), 106 struct intel_dp, base); 107} 108 109/** 110 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP? 111 * @encoder: DRM encoder 112 * 113 * Return true if @encoder corresponds to a PCH attached eDP panel. Needed 114 * by intel_display.c. 115 */ 116bool intel_encoder_is_pch_edp(struct drm_encoder *encoder) 117{ 118 struct intel_dp *intel_dp; 119 120 if (!encoder) 121 return false; 122 123 intel_dp = enc_to_intel_dp(encoder); 124 125 return is_pch_edp(intel_dp); 126} 127 128static void intel_dp_start_link_train(struct intel_dp *intel_dp); 129static void intel_dp_complete_link_train(struct intel_dp *intel_dp); 130static void intel_dp_link_down(struct intel_dp *intel_dp); 131 132void 133intel_edp_link_config(struct intel_encoder *intel_encoder, 134 int *lane_num, int *link_bw) 135{ 136 struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base); 137 138 *lane_num = intel_dp->lane_count; 139 if (intel_dp->link_bw == DP_LINK_BW_1_62) 140 *link_bw = 162000; 141 else if (intel_dp->link_bw == DP_LINK_BW_2_7) 142 *link_bw = 270000; 143} 144 145static int 146intel_dp_max_lane_count(struct intel_dp *intel_dp) 147{ 148 int max_lane_count = 4; 149 150 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) { 151 max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f; 152 switch (max_lane_count) { 153 case 1: case 2: case 4: 154 break; 155 default: 156 max_lane_count = 4; 157 } 158 } 159 return max_lane_count; 160} 161 162static int 163intel_dp_max_link_bw(struct intel_dp *intel_dp) 164{ 165 int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE]; 166 167 switch (max_link_bw) { 168 case DP_LINK_BW_1_62: 169 case DP_LINK_BW_2_7: 170 break; 171 default: 172 max_link_bw = DP_LINK_BW_1_62; 173 break; 174 } 175 return max_link_bw; 176} 177 178static int 179intel_dp_link_clock(uint8_t link_bw) 180{ 181 if (link_bw == DP_LINK_BW_2_7) 182 return 270000; 183 else 184 return 162000; 185} 186 187/* 188 * The units on the numbers in the next two are... bizarre. Examples will 189 * make it clearer; this one parallels an example in the eDP spec. 190 * 191 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as: 192 * 193 * 270000 * 1 * 8 / 10 == 216000 194 * 195 * The actual data capacity of that configuration is 2.16Gbit/s, so the 196 * units are decakilobits. ->clock in a drm_display_mode is in kilohertz - 197 * or equivalently, kilopixels per second - so for 1680x1050R it'd be 198 * 119000. At 18bpp that's 2142000 kilobits per second. 199 * 200 * Thus the strange-looking division by 10 in intel_dp_link_required, to 201 * get the result in decakilobits instead of kilobits. 
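 *
 * Another worked example, using the 24bpp default assumed below: a
 * 1920x1080@60 mode has a ->clock of 148500, so intel_dp_link_required()
 * evaluates to (148500 * 24 + 9) / 10 == 356400 decakilobits.  That fits
 * in two 2.7GHz lanes (2 * 216000 == 432000) but not in two 1.62GHz
 * lanes (2 * 129600 == 259200).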
202 */ 203 204static int 205intel_dp_link_required(struct intel_dp *intel_dp, int pixel_clock) 206{ 207 struct drm_crtc *crtc = intel_dp->base.base.crtc; 208 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 209 int bpp = 24; 210 211 if (intel_crtc) 212 bpp = intel_crtc->bpp; 213 214 return (pixel_clock * bpp + 9) / 10; 215} 216 217static int 218intel_dp_max_data_rate(int max_link_clock, int max_lanes) 219{ 220 return (max_link_clock * max_lanes * 8) / 10; 221} 222 223static int 224intel_dp_mode_valid(struct drm_connector *connector, 225 struct drm_display_mode *mode) 226{ 227 struct intel_dp *intel_dp = intel_attached_dp(connector); 228 int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp)); 229 int max_lanes = intel_dp_max_lane_count(intel_dp); 230 231 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 232 if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay) 233 return MODE_PANEL; 234 235 if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay) 236 return MODE_PANEL; 237 } 238 239 if (intel_dp_link_required(intel_dp, mode->clock) 240 > intel_dp_max_data_rate(max_link_clock, max_lanes)) 241 return MODE_CLOCK_HIGH; 242 243 if (mode->clock < 10000) 244 return MODE_CLOCK_LOW; 245 246 return MODE_OK; 247} 248 249static uint32_t 250pack_aux(uint8_t *src, int src_bytes) 251{ 252 int i; 253 uint32_t v = 0; 254 255 if (src_bytes > 4) 256 src_bytes = 4; 257 for (i = 0; i < src_bytes; i++) 258 v |= ((uint32_t) src[i]) << ((3-i) * 8); 259 return v; 260} 261 262static void 263unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes) 264{ 265 int i; 266 if (dst_bytes > 4) 267 dst_bytes = 4; 268 for (i = 0; i < dst_bytes; i++) 269 dst[i] = src >> ((3-i) * 8); 270} 271 272/* hrawclock is 1/4 the FSB frequency */ 273static int 274intel_hrawclk(struct drm_device *dev) 275{ 276 struct drm_i915_private *dev_priv = dev->dev_private; 277 uint32_t clkcfg; 278 279 clkcfg = I915_READ(CLKCFG); 280 switch (clkcfg & CLKCFG_FSB_MASK) { 281 case CLKCFG_FSB_400: 282 return 100; 283 case CLKCFG_FSB_533: 284 return 133; 285 case CLKCFG_FSB_667: 286 return 166; 287 case CLKCFG_FSB_800: 288 return 200; 289 case CLKCFG_FSB_1067: 290 return 266; 291 case CLKCFG_FSB_1333: 292 return 333; 293 /* these two are just a guess; one of them might be right */ 294 case CLKCFG_FSB_1600: 295 case CLKCFG_FSB_1600_ALT: 296 return 400; 297 default: 298 return 133; 299 } 300} 301 302static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp) 303{ 304 struct drm_device *dev = intel_dp->base.base.dev; 305 struct drm_i915_private *dev_priv = dev->dev_private; 306 307 return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0; 308} 309 310static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp) 311{ 312 struct drm_device *dev = intel_dp->base.base.dev; 313 struct drm_i915_private *dev_priv = dev->dev_private; 314 315 return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0; 316} 317 318static void 319intel_dp_check_edp(struct intel_dp *intel_dp) 320{ 321 struct drm_device *dev = intel_dp->base.base.dev; 322 struct drm_i915_private *dev_priv = dev->dev_private; 323 324 if (!is_edp(intel_dp)) 325 return; 326 if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) { 327 WARN(1, "eDP powered off while attempting aux channel communication.\n"); 328 DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n", 329 I915_READ(PCH_PP_STATUS), 330 I915_READ(PCH_PP_CONTROL)); 331 } 332} 333 334static int 335intel_dp_aux_ch(struct intel_dp *intel_dp, 336 uint8_t *send, int send_bytes, 337 uint8_t *recv, 
int recv_size) 338{ 339 uint32_t output_reg = intel_dp->output_reg; 340 struct drm_device *dev = intel_dp->base.base.dev; 341 struct drm_i915_private *dev_priv = dev->dev_private; 342 uint32_t ch_ctl = output_reg + 0x10; 343 uint32_t ch_data = ch_ctl + 4; 344 int i; 345 int recv_bytes; 346 uint32_t status; 347 uint32_t aux_clock_divider; 348 int try, precharge; 349 350 intel_dp_check_edp(intel_dp); 351 /* The clock divider is based off the hrawclk, 352 * and would like to run at 2MHz. So, take the 353 * hrawclk value and divide by 2 and use that 354 * 355 * Note that PCH attached eDP panels should use a 125MHz input 356 * clock divider. 357 */ 358 if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) { 359 if (IS_GEN6(dev)) 360 aux_clock_divider = 200; /* SNB eDP input clock at 400Mhz */ 361 else 362 aux_clock_divider = 225; /* eDP input clock at 450Mhz */ 363 } else if (HAS_PCH_SPLIT(dev)) 364 aux_clock_divider = 62; /* IRL input clock fixed at 125Mhz */ 365 else 366 aux_clock_divider = intel_hrawclk(dev) / 2; 367 368 if (IS_GEN6(dev)) 369 precharge = 3; 370 else 371 precharge = 5; 372 373 /* Try to wait for any previous AUX channel activity */ 374 for (try = 0; try < 3; try++) { 375 status = I915_READ(ch_ctl); 376 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 377 break; 378 msleep(1); 379 } 380 381 if (try == 3) { 382 WARN(1, "dp_aux_ch not started status 0x%08x\n", 383 I915_READ(ch_ctl)); 384 return -EBUSY; 385 } 386 387 /* Must try at least 3 times according to DP spec */ 388 for (try = 0; try < 5; try++) { 389 /* Load the send data into the aux channel data registers */ 390 for (i = 0; i < send_bytes; i += 4) 391 I915_WRITE(ch_data + i, 392 pack_aux(send + i, send_bytes - i)); 393 394 /* Send the command and wait for it to complete */ 395 I915_WRITE(ch_ctl, 396 DP_AUX_CH_CTL_SEND_BUSY | 397 DP_AUX_CH_CTL_TIME_OUT_400us | 398 (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) | 399 (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) | 400 (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) | 401 DP_AUX_CH_CTL_DONE | 402 DP_AUX_CH_CTL_TIME_OUT_ERROR | 403 DP_AUX_CH_CTL_RECEIVE_ERROR); 404 for (;;) { 405 status = I915_READ(ch_ctl); 406 if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0) 407 break; 408 udelay(100); 409 } 410 411 /* Clear done status and any errors */ 412 I915_WRITE(ch_ctl, 413 status | 414 DP_AUX_CH_CTL_DONE | 415 DP_AUX_CH_CTL_TIME_OUT_ERROR | 416 DP_AUX_CH_CTL_RECEIVE_ERROR); 417 if (status & DP_AUX_CH_CTL_DONE) 418 break; 419 } 420 421 if ((status & DP_AUX_CH_CTL_DONE) == 0) { 422 DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status); 423 return -EBUSY; 424 } 425 426 /* Check for timeout or receive error. 
427 * Timeouts occur when the sink is not connected 428 */ 429 if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) { 430 DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status); 431 return -EIO; 432 } 433 434 /* Timeouts occur when the device isn't connected, so they're 435 * "normal" -- don't fill the kernel log with these */ 436 if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) { 437 DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status); 438 return -ETIMEDOUT; 439 } 440 441 /* Unload any bytes sent back from the other side */ 442 recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >> 443 DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT); 444 if (recv_bytes > recv_size) 445 recv_bytes = recv_size; 446 447 for (i = 0; i < recv_bytes; i += 4) 448 unpack_aux(I915_READ(ch_data + i), 449 recv + i, recv_bytes - i); 450 451 return recv_bytes; 452} 453 454/* Write data to the aux channel in native mode */ 455static int 456intel_dp_aux_native_write(struct intel_dp *intel_dp, 457 uint16_t address, uint8_t *send, int send_bytes) 458{ 459 int ret; 460 uint8_t msg[20]; 461 int msg_bytes; 462 uint8_t ack; 463 464 intel_dp_check_edp(intel_dp); 465 if (send_bytes > 16) 466 return -1; 467 msg[0] = AUX_NATIVE_WRITE << 4; 468 msg[1] = address >> 8; 469 msg[2] = address & 0xff; 470 msg[3] = send_bytes - 1; 471 memcpy(&msg[4], send, send_bytes); 472 msg_bytes = send_bytes + 4; 473 for (;;) { 474 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1); 475 if (ret < 0) 476 return ret; 477 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) 478 break; 479 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 480 udelay(100); 481 else 482 return -EIO; 483 } 484 return send_bytes; 485} 486 487/* Write a single byte to the aux channel in native mode */ 488static int 489intel_dp_aux_native_write_1(struct intel_dp *intel_dp, 490 uint16_t address, uint8_t byte) 491{ 492 return intel_dp_aux_native_write(intel_dp, address, &byte, 1); 493} 494 495/* read bytes from a native aux channel */ 496static int 497intel_dp_aux_native_read(struct intel_dp *intel_dp, 498 uint16_t address, uint8_t *recv, int recv_bytes) 499{ 500 uint8_t msg[4]; 501 int msg_bytes; 502 uint8_t reply[20]; 503 int reply_bytes; 504 uint8_t ack; 505 int ret; 506 507 intel_dp_check_edp(intel_dp); 508 msg[0] = AUX_NATIVE_READ << 4; 509 msg[1] = address >> 8; 510 msg[2] = address & 0xff; 511 msg[3] = recv_bytes - 1; 512 513 msg_bytes = 4; 514 reply_bytes = recv_bytes + 1; 515 516 for (;;) { 517 ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, 518 reply, reply_bytes); 519 if (ret == 0) 520 return -EPROTO; 521 if (ret < 0) 522 return ret; 523 ack = reply[0]; 524 if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) { 525 memcpy(recv, reply + 1, ret - 1); 526 return ret - 1; 527 } 528 else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER) 529 udelay(100); 530 else 531 return -EIO; 532 } 533} 534 535static int 536intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode, 537 uint8_t write_byte, uint8_t *read_byte) 538{ 539 struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data; 540 struct intel_dp *intel_dp = container_of(adapter, 541 struct intel_dp, 542 adapter); 543 uint16_t address = algo_data->address; 544 uint8_t msg[5]; 545 uint8_t reply[2]; 546 unsigned retry; 547 int msg_bytes; 548 int reply_bytes; 549 int ret; 550 551 intel_dp_check_edp(intel_dp); 552 /* Set up the command byte */ 553 if (mode & MODE_I2C_READ) 554 msg[0] = AUX_I2C_READ << 4; 555 else 556 msg[0] = AUX_I2C_WRITE << 4; 557 558 if (!(mode & MODE_I2C_STOP)) 559 msg[0] 
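		/* MOT ("middle of transaction") keeps the I2C-over-AUX
		 * transaction open between AUX messages; it is set on every
		 * message except the one carrying the I2C STOP. */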
|= AUX_I2C_MOT << 4; 560 561 msg[1] = address >> 8; 562 msg[2] = address; 563 564 switch (mode) { 565 case MODE_I2C_WRITE: 566 msg[3] = 0; 567 msg[4] = write_byte; 568 msg_bytes = 5; 569 reply_bytes = 1; 570 break; 571 case MODE_I2C_READ: 572 msg[3] = 0; 573 msg_bytes = 4; 574 reply_bytes = 2; 575 break; 576 default: 577 msg_bytes = 3; 578 reply_bytes = 1; 579 break; 580 } 581 582 for (retry = 0; retry < 5; retry++) { 583 ret = intel_dp_aux_ch(intel_dp, 584 msg, msg_bytes, 585 reply, reply_bytes); 586 if (ret < 0) { 587 DRM_DEBUG_KMS("aux_ch failed %d\n", ret); 588 return ret; 589 } 590 591 switch (reply[0] & AUX_NATIVE_REPLY_MASK) { 592 case AUX_NATIVE_REPLY_ACK: 593 /* I2C-over-AUX Reply field is only valid 594 * when paired with AUX ACK. 595 */ 596 break; 597 case AUX_NATIVE_REPLY_NACK: 598 DRM_DEBUG_KMS("aux_ch native nack\n"); 599 return -EREMOTEIO; 600 case AUX_NATIVE_REPLY_DEFER: 601 udelay(100); 602 continue; 603 default: 604 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", 605 reply[0]); 606 return -EREMOTEIO; 607 } 608 609 switch (reply[0] & AUX_I2C_REPLY_MASK) { 610 case AUX_I2C_REPLY_ACK: 611 if (mode == MODE_I2C_READ) { 612 *read_byte = reply[1]; 613 } 614 return reply_bytes - 1; 615 case AUX_I2C_REPLY_NACK: 616 DRM_DEBUG_KMS("aux_i2c nack\n"); 617 return -EREMOTEIO; 618 case AUX_I2C_REPLY_DEFER: 619 DRM_DEBUG_KMS("aux_i2c defer\n"); 620 udelay(100); 621 break; 622 default: 623 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]); 624 return -EREMOTEIO; 625 } 626 } 627 628 DRM_ERROR("too many retries, giving up\n"); 629 return -EREMOTEIO; 630} 631 632static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp); 633static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync); 634 635static int 636intel_dp_i2c_init(struct intel_dp *intel_dp, 637 struct intel_connector *intel_connector, const char *name) 638{ 639 int ret; 640 641 DRM_DEBUG_KMS("i2c_init %s\n", name); 642 intel_dp->algo.running = false; 643 intel_dp->algo.address = 0; 644 intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch; 645 646 memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter)); 647 intel_dp->adapter.owner = THIS_MODULE; 648 intel_dp->adapter.class = I2C_CLASS_DDC; 649 strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1); 650 intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0'; 651 intel_dp->adapter.algo_data = &intel_dp->algo; 652 intel_dp->adapter.dev.parent = &intel_connector->base.kdev; 653 654 ironlake_edp_panel_vdd_on(intel_dp); 655 ret = i2c_dp_aux_add_bus(&intel_dp->adapter); 656 ironlake_edp_panel_vdd_off(intel_dp, false); 657 return ret; 658} 659 660static bool 661intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode, 662 struct drm_display_mode *adjusted_mode) 663{ 664 struct drm_device *dev = encoder->dev; 665 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 666 int lane_count, clock; 667 int max_lane_count = intel_dp_max_lane_count(intel_dp); 668 int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0; 669 static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 }; 670 671 if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) { 672 intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode); 673 intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN, 674 mode, adjusted_mode); 675 /* 676 * the mode->clock is used to calculate the Data&Link M/N 677 * of the pipe. For the eDP the fixed clock should be used. 
678 */ 679 mode->clock = intel_dp->panel_fixed_mode->clock; 680 } 681 682 for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) { 683 for (clock = 0; clock <= max_clock; clock++) { 684 int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count); 685 686 if (intel_dp_link_required(intel_dp, mode->clock) 687 <= link_avail) { 688 intel_dp->link_bw = bws[clock]; 689 intel_dp->lane_count = lane_count; 690 adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw); 691 DRM_DEBUG_KMS("Display port link bw %02x lane " 692 "count %d clock %d\n", 693 intel_dp->link_bw, intel_dp->lane_count, 694 adjusted_mode->clock); 695 return true; 696 } 697 } 698 } 699 700 return false; 701} 702 703struct intel_dp_m_n { 704 uint32_t tu; 705 uint32_t gmch_m; 706 uint32_t gmch_n; 707 uint32_t link_m; 708 uint32_t link_n; 709}; 710 711static void 712intel_reduce_ratio(uint32_t *num, uint32_t *den) 713{ 714 while (*num > 0xffffff || *den > 0xffffff) { 715 *num >>= 1; 716 *den >>= 1; 717 } 718} 719 720static void 721intel_dp_compute_m_n(int bpp, 722 int nlanes, 723 int pixel_clock, 724 int link_clock, 725 struct intel_dp_m_n *m_n) 726{ 727 m_n->tu = 64; 728 m_n->gmch_m = (pixel_clock * bpp) >> 3; 729 m_n->gmch_n = link_clock * nlanes; 730 intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n); 731 m_n->link_m = pixel_clock; 732 m_n->link_n = link_clock; 733 intel_reduce_ratio(&m_n->link_m, &m_n->link_n); 734} 735 736void 737intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode, 738 struct drm_display_mode *adjusted_mode) 739{ 740 struct drm_device *dev = crtc->dev; 741 struct drm_mode_config *mode_config = &dev->mode_config; 742 struct drm_encoder *encoder; 743 struct drm_i915_private *dev_priv = dev->dev_private; 744 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 745 int lane_count = 4; 746 struct intel_dp_m_n m_n; 747 int pipe = intel_crtc->pipe; 748 749 /* 750 * Find the lane count in the intel_encoder private 751 */ 752 list_for_each_entry(encoder, &mode_config->encoder_list, head) { 753 struct intel_dp *intel_dp; 754 755 if (encoder->crtc != crtc) 756 continue; 757 758 intel_dp = enc_to_intel_dp(encoder); 759 if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) { 760 lane_count = intel_dp->lane_count; 761 break; 762 } else if (is_edp(intel_dp)) { 763 lane_count = dev_priv->edp.lanes; 764 break; 765 } 766 } 767 768 /* 769 * Compute the GMCH and Link ratios. The '3' here is 770 * the number of bytes_per_pixel post-LUT, which we always 771 * set up for 8-bits of R/G/B, or 3 bytes total. 
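 *
 * For example (illustrative numbers): a 148500 kHz pixel clock at 24bpp
 * over two 270000 kHz lanes gives gmch_m = 148500 * 24 / 8 == 445500 and
 * gmch_n = 270000 * 2 == 540000, with link_m/link_n = 148500/270000.
 * Both ratios are already below the 24-bit register limit, so
 * intel_reduce_ratio() leaves them untouched.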
772 */ 773 intel_dp_compute_m_n(intel_crtc->bpp, lane_count, 774 mode->clock, adjusted_mode->clock, &m_n); 775 776 if (HAS_PCH_SPLIT(dev)) { 777 I915_WRITE(TRANSDATA_M1(pipe), 778 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 779 m_n.gmch_m); 780 I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n); 781 I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m); 782 I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n); 783 } else { 784 I915_WRITE(PIPE_GMCH_DATA_M(pipe), 785 ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) | 786 m_n.gmch_m); 787 I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n); 788 I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m); 789 I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n); 790 } 791} 792 793static void ironlake_edp_pll_on(struct drm_encoder *encoder); 794static void ironlake_edp_pll_off(struct drm_encoder *encoder); 795 796static void 797intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode, 798 struct drm_display_mode *adjusted_mode) 799{ 800 struct drm_device *dev = encoder->dev; 801 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 802 struct drm_crtc *crtc = intel_dp->base.base.crtc; 803 struct intel_crtc *intel_crtc = to_intel_crtc(crtc); 804 805 /* Turn on the eDP PLL if needed */ 806 if (is_edp(intel_dp)) { 807 if (!is_pch_edp(intel_dp)) 808 ironlake_edp_pll_on(encoder); 809 else 810 ironlake_edp_pll_off(encoder); 811 } 812 813 intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0; 814 intel_dp->DP |= intel_dp->color_range; 815 816 if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC) 817 intel_dp->DP |= DP_SYNC_HS_HIGH; 818 if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC) 819 intel_dp->DP |= DP_SYNC_VS_HIGH; 820 821 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) 822 intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT; 823 else 824 intel_dp->DP |= DP_LINK_TRAIN_OFF; 825 826 switch (intel_dp->lane_count) { 827 case 1: 828 intel_dp->DP |= DP_PORT_WIDTH_1; 829 break; 830 case 2: 831 intel_dp->DP |= DP_PORT_WIDTH_2; 832 break; 833 case 4: 834 intel_dp->DP |= DP_PORT_WIDTH_4; 835 break; 836 } 837 if (intel_dp->has_audio) { 838 DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n", 839 pipe_name(intel_crtc->pipe)); 840 intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE; 841 intel_write_eld(encoder, adjusted_mode); 842 } 843 844 memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE); 845 intel_dp->link_configuration[0] = intel_dp->link_bw; 846 intel_dp->link_configuration[1] = intel_dp->lane_count; 847 intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B; 848 849 /* 850 * Check for DPCD version > 1.1 and enhanced framing support 851 */ 852 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 && 853 (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) { 854 intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN; 855 intel_dp->DP |= DP_ENHANCED_FRAMING; 856 } 857 858 /* CPT DP's pipe select is decided in TRANS_DP_CTL */ 859 if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev)) 860 intel_dp->DP |= DP_PIPEB_SELECT; 861 862 if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) { 863 /* don't miss out required setting for eDP */ 864 intel_dp->DP |= DP_PLL_ENABLE; 865 if (adjusted_mode->clock < 200000) 866 intel_dp->DP |= DP_PLL_FREQ_160MHZ; 867 else 868 intel_dp->DP |= DP_PLL_FREQ_270MHZ; 869 } 870} 871 872static void ironlake_wait_panel_off(struct intel_dp *intel_dp) 873{ 874 unsigned long off_time; 875 unsigned long delay; 876 877 DRM_DEBUG_KMS("Wait for panel power off time\n"); 878 879 if (ironlake_edp_have_panel_power(intel_dp) || 880 ironlake_edp_have_panel_vdd(intel_dp)) 881 { 882 DRM_DEBUG_KMS("Panel still 
on, no delay needed\n"); 883 return; 884 } 885 886 off_time = intel_dp->panel_off_jiffies + msecs_to_jiffies(intel_dp->panel_power_down_delay); 887 if (time_after(jiffies, off_time)) { 888 DRM_DEBUG_KMS("Time already passed"); 889 return; 890 } 891 delay = jiffies_to_msecs(off_time - jiffies); 892 if (delay > intel_dp->panel_power_down_delay) 893 delay = intel_dp->panel_power_down_delay; 894 DRM_DEBUG_KMS("Waiting an additional %ld ms\n", delay); 895 msleep(delay); 896} 897 898static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp) 899{ 900 struct drm_device *dev = intel_dp->base.base.dev; 901 struct drm_i915_private *dev_priv = dev->dev_private; 902 u32 pp; 903 904 if (!is_edp(intel_dp)) 905 return; 906 DRM_DEBUG_KMS("Turn eDP VDD on\n"); 907 908 WARN(intel_dp->want_panel_vdd, 909 "eDP VDD already requested on\n"); 910 911 intel_dp->want_panel_vdd = true; 912 if (ironlake_edp_have_panel_vdd(intel_dp)) { 913 DRM_DEBUG_KMS("eDP VDD already on\n"); 914 return; 915 } 916 917 ironlake_wait_panel_off(intel_dp); 918 pp = I915_READ(PCH_PP_CONTROL); 919 pp &= ~PANEL_UNLOCK_MASK; 920 pp |= PANEL_UNLOCK_REGS; 921 pp |= EDP_FORCE_VDD; 922 I915_WRITE(PCH_PP_CONTROL, pp); 923 POSTING_READ(PCH_PP_CONTROL); 924 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n", 925 I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL)); 926 927 /* 928 * If the panel wasn't on, delay before accessing aux channel 929 */ 930 if (!ironlake_edp_have_panel_power(intel_dp)) { 931 DRM_DEBUG_KMS("eDP was not running\n"); 932 msleep(intel_dp->panel_power_up_delay); 933 } 934} 935 936static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp) 937{ 938 struct drm_device *dev = intel_dp->base.base.dev; 939 struct drm_i915_private *dev_priv = dev->dev_private; 940 u32 pp; 941 942 if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) { 943 pp = I915_READ(PCH_PP_CONTROL); 944 pp &= ~PANEL_UNLOCK_MASK; 945 pp |= PANEL_UNLOCK_REGS; 946 pp &= ~EDP_FORCE_VDD; 947 I915_WRITE(PCH_PP_CONTROL, pp); 948 POSTING_READ(PCH_PP_CONTROL); 949 950 /* Make sure sequencer is idle before allowing subsequent activity */ 951 DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n", 952 I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL)); 953 intel_dp->panel_off_jiffies = jiffies; 954 } 955} 956 957static void ironlake_panel_vdd_work(struct work_struct *__work) 958{ 959 struct intel_dp *intel_dp = container_of(to_delayed_work(__work), 960 struct intel_dp, panel_vdd_work); 961 struct drm_device *dev = intel_dp->base.base.dev; 962 963 mutex_lock(&dev->struct_mutex); 964 ironlake_panel_vdd_off_sync(intel_dp); 965 mutex_unlock(&dev->struct_mutex); 966} 967 968static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync) 969{ 970 if (!is_edp(intel_dp)) 971 return; 972 973 DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd); 974 WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on"); 975 976 intel_dp->want_panel_vdd = false; 977 978 if (sync) { 979 ironlake_panel_vdd_off_sync(intel_dp); 980 } else { 981 /* 982 * Queue the timer to fire a long 983 * time from now (relative to the power down delay) 984 * to keep the panel power up across a sequence of operations 985 */ 986 schedule_delayed_work(&intel_dp->panel_vdd_work, 987 msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5)); 988 } 989} 990 991/* Returns true if the panel was already on when called */ 992static void ironlake_edp_panel_on(struct intel_dp *intel_dp) 993{ 994 struct drm_device *dev = intel_dp->base.base.dev; 995 
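	/* Power-on sequence: wait out any pending power-down delay, unlock
	 * the panel power control register, temporarily clear
	 * PANEL_POWER_RESET on Ironlake as a workaround, set
	 * POWER_TARGET_ON, then poll PCH_PP_STATUS (up to 5 seconds) for
	 * PP_ON and an idle sequencer state before restoring the reset
	 * bit. */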
struct drm_i915_private *dev_priv = dev->dev_private; 996 u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_STATE_ON_IDLE; 997 998 if (!is_edp(intel_dp)) 999 return; 1000 if (ironlake_edp_have_panel_power(intel_dp)) 1001 return; 1002 1003 ironlake_wait_panel_off(intel_dp); 1004 pp = I915_READ(PCH_PP_CONTROL); 1005 pp &= ~PANEL_UNLOCK_MASK; 1006 pp |= PANEL_UNLOCK_REGS; 1007 1008 if (IS_GEN5(dev)) { 1009 /* ILK workaround: disable reset around power sequence */ 1010 pp &= ~PANEL_POWER_RESET; 1011 I915_WRITE(PCH_PP_CONTROL, pp); 1012 POSTING_READ(PCH_PP_CONTROL); 1013 } 1014 1015 pp |= POWER_TARGET_ON; 1016 I915_WRITE(PCH_PP_CONTROL, pp); 1017 POSTING_READ(PCH_PP_CONTROL); 1018 1019 if (wait_for((I915_READ(PCH_PP_STATUS) & idle_on_mask) == idle_on_mask, 1020 5000)) 1021 DRM_ERROR("panel on wait timed out: 0x%08x\n", 1022 I915_READ(PCH_PP_STATUS)); 1023 1024 if (IS_GEN5(dev)) { 1025 pp |= PANEL_POWER_RESET; /* restore panel reset bit */ 1026 I915_WRITE(PCH_PP_CONTROL, pp); 1027 POSTING_READ(PCH_PP_CONTROL); 1028 } 1029} 1030 1031static void ironlake_edp_panel_off(struct drm_encoder *encoder) 1032{ 1033 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1034 struct drm_device *dev = encoder->dev; 1035 struct drm_i915_private *dev_priv = dev->dev_private; 1036 u32 pp, idle_off_mask = PP_ON | PP_SEQUENCE_MASK | 1037 PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK; 1038 1039 if (!is_edp(intel_dp)) 1040 return; 1041 pp = I915_READ(PCH_PP_CONTROL); 1042 pp &= ~PANEL_UNLOCK_MASK; 1043 pp |= PANEL_UNLOCK_REGS; 1044 1045 if (IS_GEN5(dev)) { 1046 /* ILK workaround: disable reset around power sequence */ 1047 pp &= ~PANEL_POWER_RESET; 1048 I915_WRITE(PCH_PP_CONTROL, pp); 1049 POSTING_READ(PCH_PP_CONTROL); 1050 } 1051 1052 intel_dp->panel_off_jiffies = jiffies; 1053 1054 if (IS_GEN5(dev)) { 1055 pp &= ~POWER_TARGET_ON; 1056 I915_WRITE(PCH_PP_CONTROL, pp); 1057 POSTING_READ(PCH_PP_CONTROL); 1058 pp &= ~POWER_TARGET_ON; 1059 I915_WRITE(PCH_PP_CONTROL, pp); 1060 POSTING_READ(PCH_PP_CONTROL); 1061 msleep(intel_dp->panel_power_cycle_delay); 1062 1063 if (wait_for((I915_READ(PCH_PP_STATUS) & idle_off_mask) == 0, 5000)) 1064 DRM_ERROR("panel off wait timed out: 0x%08x\n", 1065 I915_READ(PCH_PP_STATUS)); 1066 1067 pp |= PANEL_POWER_RESET; /* restore panel reset bit */ 1068 I915_WRITE(PCH_PP_CONTROL, pp); 1069 POSTING_READ(PCH_PP_CONTROL); 1070 } 1071} 1072 1073static void ironlake_edp_backlight_on(struct intel_dp *intel_dp) 1074{ 1075 struct drm_device *dev = intel_dp->base.base.dev; 1076 struct drm_i915_private *dev_priv = dev->dev_private; 1077 u32 pp; 1078 1079 if (!is_edp(intel_dp)) 1080 return; 1081 1082 DRM_DEBUG_KMS("\n"); 1083 /* 1084 * If we enable the backlight right away following a panel power 1085 * on, we may see slight flicker as the panel syncs with the eDP 1086 * link. So delay a bit to make sure the image is solid before 1087 * allowing it to appear. 
1088 */ 1089 msleep(intel_dp->backlight_on_delay); 1090 pp = I915_READ(PCH_PP_CONTROL); 1091 pp &= ~PANEL_UNLOCK_MASK; 1092 pp |= PANEL_UNLOCK_REGS; 1093 pp |= EDP_BLC_ENABLE; 1094 I915_WRITE(PCH_PP_CONTROL, pp); 1095 POSTING_READ(PCH_PP_CONTROL); 1096} 1097 1098static void ironlake_edp_backlight_off(struct intel_dp *intel_dp) 1099{ 1100 struct drm_device *dev = intel_dp->base.base.dev; 1101 struct drm_i915_private *dev_priv = dev->dev_private; 1102 u32 pp; 1103 1104 if (!is_edp(intel_dp)) 1105 return; 1106 1107 DRM_DEBUG_KMS("\n"); 1108 pp = I915_READ(PCH_PP_CONTROL); 1109 pp &= ~PANEL_UNLOCK_MASK; 1110 pp |= PANEL_UNLOCK_REGS; 1111 pp &= ~EDP_BLC_ENABLE; 1112 I915_WRITE(PCH_PP_CONTROL, pp); 1113 POSTING_READ(PCH_PP_CONTROL); 1114 msleep(intel_dp->backlight_off_delay); 1115} 1116 1117static void ironlake_edp_pll_on(struct drm_encoder *encoder) 1118{ 1119 struct drm_device *dev = encoder->dev; 1120 struct drm_i915_private *dev_priv = dev->dev_private; 1121 u32 dpa_ctl; 1122 1123 DRM_DEBUG_KMS("\n"); 1124 dpa_ctl = I915_READ(DP_A); 1125 dpa_ctl |= DP_PLL_ENABLE; 1126 I915_WRITE(DP_A, dpa_ctl); 1127 POSTING_READ(DP_A); 1128 udelay(200); 1129} 1130 1131static void ironlake_edp_pll_off(struct drm_encoder *encoder) 1132{ 1133 struct drm_device *dev = encoder->dev; 1134 struct drm_i915_private *dev_priv = dev->dev_private; 1135 u32 dpa_ctl; 1136 1137 dpa_ctl = I915_READ(DP_A); 1138 dpa_ctl &= ~DP_PLL_ENABLE; 1139 I915_WRITE(DP_A, dpa_ctl); 1140 POSTING_READ(DP_A); 1141 udelay(200); 1142} 1143 1144/* If the sink supports it, try to set the power state appropriately */ 1145static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode) 1146{ 1147 int ret, i; 1148 1149 /* Should have a valid DPCD by this point */ 1150 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 1151 return; 1152 1153 if (mode != DRM_MODE_DPMS_ON) { 1154 ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER, 1155 DP_SET_POWER_D3); 1156 if (ret != 1) 1157 DRM_DEBUG_DRIVER("failed to write sink power state\n"); 1158 } else { 1159 /* 1160 * When turning on, we need to retry for 1ms to give the sink 1161 * time to wake up. 
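 * (DP_SET_POWER is DPCD register 0x600; writing D0 wakes the sink and D3
 * puts it back to sleep, and a freshly woken sink may not ack the very
 * first write, hence the short retry loop below.)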
1162 */ 1163 for (i = 0; i < 3; i++) { 1164 ret = intel_dp_aux_native_write_1(intel_dp, 1165 DP_SET_POWER, 1166 DP_SET_POWER_D0); 1167 if (ret == 1) 1168 break; 1169 msleep(1); 1170 } 1171 } 1172} 1173 1174static void intel_dp_prepare(struct drm_encoder *encoder) 1175{ 1176 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1177 1178 /* Wake up the sink first */ 1179 ironlake_edp_panel_vdd_on(intel_dp); 1180 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON); 1181 ironlake_edp_panel_vdd_off(intel_dp, false); 1182 1183 /* Make sure the panel is off before trying to 1184 * change the mode 1185 */ 1186 ironlake_edp_backlight_off(intel_dp); 1187 intel_dp_link_down(intel_dp); 1188 ironlake_edp_panel_off(encoder); 1189} 1190 1191static void intel_dp_commit(struct drm_encoder *encoder) 1192{ 1193 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1194 struct drm_device *dev = encoder->dev; 1195 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc); 1196 1197 ironlake_edp_panel_vdd_on(intel_dp); 1198 intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON); 1199 intel_dp_start_link_train(intel_dp); 1200 ironlake_edp_panel_on(intel_dp); 1201 ironlake_edp_panel_vdd_off(intel_dp, true); 1202 1203 intel_dp_complete_link_train(intel_dp); 1204 ironlake_edp_backlight_on(intel_dp); 1205 1206 intel_dp->dpms_mode = DRM_MODE_DPMS_ON; 1207 1208 if (HAS_PCH_CPT(dev)) 1209 intel_cpt_verify_modeset(dev, intel_crtc->pipe); 1210} 1211 1212static void 1213intel_dp_dpms(struct drm_encoder *encoder, int mode) 1214{ 1215 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1216 struct drm_device *dev = encoder->dev; 1217 struct drm_i915_private *dev_priv = dev->dev_private; 1218 uint32_t dp_reg = I915_READ(intel_dp->output_reg); 1219 1220 if (mode != DRM_MODE_DPMS_ON) { 1221 ironlake_edp_panel_vdd_on(intel_dp); 1222 if (is_edp(intel_dp)) 1223 ironlake_edp_backlight_off(intel_dp); 1224 intel_dp_sink_dpms(intel_dp, mode); 1225 intel_dp_link_down(intel_dp); 1226 ironlake_edp_panel_off(encoder); 1227 if (is_edp(intel_dp) && !is_pch_edp(intel_dp)) 1228 ironlake_edp_pll_off(encoder); 1229 ironlake_edp_panel_vdd_off(intel_dp, false); 1230 } else { 1231 ironlake_edp_panel_vdd_on(intel_dp); 1232 intel_dp_sink_dpms(intel_dp, mode); 1233 if (!(dp_reg & DP_PORT_EN)) { 1234 intel_dp_start_link_train(intel_dp); 1235 ironlake_edp_panel_on(intel_dp); 1236 ironlake_edp_panel_vdd_off(intel_dp, true); 1237 intel_dp_complete_link_train(intel_dp); 1238 ironlake_edp_backlight_on(intel_dp); 1239 } else 1240 ironlake_edp_panel_vdd_off(intel_dp, false); 1241 ironlake_edp_backlight_on(intel_dp); 1242 } 1243 intel_dp->dpms_mode = mode; 1244} 1245 1246/* 1247 * Native read with retry for link status and receiver capability reads for 1248 * cases where the sink may still be asleep. 1249 */ 1250static bool 1251intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address, 1252 uint8_t *recv, int recv_bytes) 1253{ 1254 int ret, i; 1255 1256 /* 1257 * Sinks are *supposed* to come up within 1ms from an off state, 1258 * but we're also supposed to retry 3 times per the spec. 
1259 */ 1260 for (i = 0; i < 3; i++) { 1261 ret = intel_dp_aux_native_read(intel_dp, address, recv, 1262 recv_bytes); 1263 if (ret == recv_bytes) 1264 return true; 1265 msleep(1); 1266 } 1267 1268 return false; 1269} 1270 1271/* 1272 * Fetch AUX CH registers 0x202 - 0x207 which contain 1273 * link status information 1274 */ 1275static bool 1276intel_dp_get_link_status(struct intel_dp *intel_dp) 1277{ 1278 return intel_dp_aux_native_read_retry(intel_dp, 1279 DP_LANE0_1_STATUS, 1280 intel_dp->link_status, 1281 DP_LINK_STATUS_SIZE); 1282} 1283 1284static uint8_t 1285intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1286 int r) 1287{ 1288 return link_status[r - DP_LANE0_1_STATUS]; 1289} 1290 1291static uint8_t 1292intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE], 1293 int lane) 1294{ 1295 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 1296 int s = ((lane & 1) ? 1297 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT : 1298 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT); 1299 uint8_t l = intel_dp_link_status(link_status, i); 1300 1301 return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT; 1302} 1303 1304static uint8_t 1305intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE], 1306 int lane) 1307{ 1308 int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1); 1309 int s = ((lane & 1) ? 1310 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT : 1311 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT); 1312 uint8_t l = intel_dp_link_status(link_status, i); 1313 1314 return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT; 1315} 1316 1317 1318#if 0 1319static char *voltage_names[] = { 1320 "0.4V", "0.6V", "0.8V", "1.2V" 1321}; 1322static char *pre_emph_names[] = { 1323 "0dB", "3.5dB", "6dB", "9.5dB" 1324}; 1325static char *link_train_names[] = { 1326 "pattern 1", "pattern 2", "idle", "off" 1327}; 1328#endif 1329 1330/* 1331 * These are source-specific values; current Intel hardware supports 1332 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB 1333 */ 1334#define I830_DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_800 1335 1336static uint8_t 1337intel_dp_pre_emphasis_max(uint8_t voltage_swing) 1338{ 1339 switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) { 1340 case DP_TRAIN_VOLTAGE_SWING_400: 1341 return DP_TRAIN_PRE_EMPHASIS_6; 1342 case DP_TRAIN_VOLTAGE_SWING_600: 1343 return DP_TRAIN_PRE_EMPHASIS_6; 1344 case DP_TRAIN_VOLTAGE_SWING_800: 1345 return DP_TRAIN_PRE_EMPHASIS_3_5; 1346 case DP_TRAIN_VOLTAGE_SWING_1200: 1347 default: 1348 return DP_TRAIN_PRE_EMPHASIS_0; 1349 } 1350} 1351 1352static void 1353intel_get_adjust_train(struct intel_dp *intel_dp) 1354{ 1355 uint8_t v = 0; 1356 uint8_t p = 0; 1357 int lane; 1358 1359 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1360 uint8_t this_v = intel_get_adjust_request_voltage(intel_dp->link_status, lane); 1361 uint8_t this_p = intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane); 1362 1363 if (this_v > v) 1364 v = this_v; 1365 if (this_p > p) 1366 p = this_p; 1367 } 1368 1369 if (v >= I830_DP_VOLTAGE_MAX) 1370 v = I830_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED; 1371 1372 if (p >= intel_dp_pre_emphasis_max(v)) 1373 p = intel_dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED; 1374 1375 for (lane = 0; lane < 4; lane++) 1376 intel_dp->train_set[lane] = v | p; 1377} 1378 1379static uint32_t 1380intel_dp_signal_levels(uint8_t train_set, int lane_count) 1381{ 1382 uint32_t signal_levels = 0; 1383 1384 switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) { 1385 case DP_TRAIN_VOLTAGE_SWING_400: 1386 default: 1387 signal_levels |= 
DP_VOLTAGE_0_4; 1388 break; 1389 case DP_TRAIN_VOLTAGE_SWING_600: 1390 signal_levels |= DP_VOLTAGE_0_6; 1391 break; 1392 case DP_TRAIN_VOLTAGE_SWING_800: 1393 signal_levels |= DP_VOLTAGE_0_8; 1394 break; 1395 case DP_TRAIN_VOLTAGE_SWING_1200: 1396 signal_levels |= DP_VOLTAGE_1_2; 1397 break; 1398 } 1399 switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) { 1400 case DP_TRAIN_PRE_EMPHASIS_0: 1401 default: 1402 signal_levels |= DP_PRE_EMPHASIS_0; 1403 break; 1404 case DP_TRAIN_PRE_EMPHASIS_3_5: 1405 signal_levels |= DP_PRE_EMPHASIS_3_5; 1406 break; 1407 case DP_TRAIN_PRE_EMPHASIS_6: 1408 signal_levels |= DP_PRE_EMPHASIS_6; 1409 break; 1410 case DP_TRAIN_PRE_EMPHASIS_9_5: 1411 signal_levels |= DP_PRE_EMPHASIS_9_5; 1412 break; 1413 } 1414 return signal_levels; 1415} 1416 1417/* Gen6's DP voltage swing and pre-emphasis control */ 1418static uint32_t 1419intel_gen6_edp_signal_levels(uint8_t train_set) 1420{ 1421 int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 1422 DP_TRAIN_PRE_EMPHASIS_MASK); 1423 switch (signal_levels) { 1424 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0: 1425 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0: 1426 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1427 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5: 1428 return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B; 1429 case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6: 1430 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6: 1431 return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B; 1432 case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5: 1433 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5: 1434 return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B; 1435 case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0: 1436 case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0: 1437 return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B; 1438 default: 1439 DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:" 1440 "0x%x\n", signal_levels); 1441 return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B; 1442 } 1443} 1444 1445static uint8_t 1446intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE], 1447 int lane) 1448{ 1449 int i = DP_LANE0_1_STATUS + (lane >> 1); 1450 int s = (lane & 1) * 4; 1451 uint8_t l = intel_dp_link_status(link_status, i); 1452 1453 return (l >> s) & 0xf; 1454} 1455 1456/* Check for clock recovery is done on all channels */ 1457static bool 1458intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count) 1459{ 1460 int lane; 1461 uint8_t lane_status; 1462 1463 for (lane = 0; lane < lane_count; lane++) { 1464 lane_status = intel_get_lane_status(link_status, lane); 1465 if ((lane_status & DP_LANE_CR_DONE) == 0) 1466 return false; 1467 } 1468 return true; 1469} 1470 1471/* Check to see if channel eq is done on all channels */ 1472#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\ 1473 DP_LANE_CHANNEL_EQ_DONE|\ 1474 DP_LANE_SYMBOL_LOCKED) 1475static bool 1476intel_channel_eq_ok(struct intel_dp *intel_dp) 1477{ 1478 uint8_t lane_align; 1479 uint8_t lane_status; 1480 int lane; 1481 1482 lane_align = intel_dp_link_status(intel_dp->link_status, 1483 DP_LANE_ALIGN_STATUS_UPDATED); 1484 if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0) 1485 return false; 1486 for (lane = 0; lane < intel_dp->lane_count; lane++) { 1487 lane_status = intel_get_lane_status(intel_dp->link_status, lane); 1488 if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS) 1489 return false; 1490 } 1491 return true; 1492} 1493 1494static bool 1495intel_dp_set_link_train(struct intel_dp *intel_dp, 
1496 uint32_t dp_reg_value, 1497 uint8_t dp_train_pat) 1498{ 1499 struct drm_device *dev = intel_dp->base.base.dev; 1500 struct drm_i915_private *dev_priv = dev->dev_private; 1501 int ret; 1502 1503 I915_WRITE(intel_dp->output_reg, dp_reg_value); 1504 POSTING_READ(intel_dp->output_reg); 1505 1506 intel_dp_aux_native_write_1(intel_dp, 1507 DP_TRAINING_PATTERN_SET, 1508 dp_train_pat); 1509 1510 ret = intel_dp_aux_native_write(intel_dp, 1511 DP_TRAINING_LANE0_SET, 1512 intel_dp->train_set, 4); 1513 if (ret != 4) 1514 return false; 1515 1516 return true; 1517} 1518 1519/* Enable corresponding port and start training pattern 1 */ 1520static void 1521intel_dp_start_link_train(struct intel_dp *intel_dp) 1522{ 1523 struct drm_device *dev = intel_dp->base.base.dev; 1524 struct drm_i915_private *dev_priv = dev->dev_private; 1525 struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc); 1526 int i; 1527 uint8_t voltage; 1528 bool clock_recovery = false; 1529 int tries; 1530 u32 reg; 1531 uint32_t DP = intel_dp->DP; 1532 1533 /* 1534 * On CPT we have to enable the port in training pattern 1, which 1535 * will happen below in intel_dp_set_link_train. Otherwise, enable 1536 * the port and wait for it to become active. 1537 */ 1538 if (!HAS_PCH_CPT(dev)) { 1539 I915_WRITE(intel_dp->output_reg, intel_dp->DP); 1540 POSTING_READ(intel_dp->output_reg); 1541 intel_wait_for_vblank(dev, intel_crtc->pipe); 1542 } 1543 1544 /* Write the link configuration data */ 1545 intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET, 1546 intel_dp->link_configuration, 1547 DP_LINK_CONFIGURATION_SIZE); 1548 1549 DP |= DP_PORT_EN; 1550 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) 1551 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1552 else 1553 DP &= ~DP_LINK_TRAIN_MASK; 1554 memset(intel_dp->train_set, 0, 4); 1555 voltage = 0xff; 1556 tries = 0; 1557 clock_recovery = false; 1558 for (;;) { 1559 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1560 uint32_t signal_levels; 1561 if (IS_GEN6(dev) && is_edp(intel_dp)) { 1562 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]); 1563 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1564 } else { 1565 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count); 1566 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1567 } 1568 1569 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) 1570 reg = DP | DP_LINK_TRAIN_PAT_1_CPT; 1571 else 1572 reg = DP | DP_LINK_TRAIN_PAT_1; 1573 1574 if (!intel_dp_set_link_train(intel_dp, reg, 1575 DP_TRAINING_PATTERN_1 | 1576 DP_LINK_SCRAMBLING_DISABLE)) 1577 break; 1578 /* Set training pattern 1 */ 1579 1580 udelay(100); 1581 if (!intel_dp_get_link_status(intel_dp)) 1582 break; 1583 1584 if (intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) { 1585 clock_recovery = true; 1586 break; 1587 } 1588 1589 /* Check to see if we've tried the max voltage */ 1590 for (i = 0; i < intel_dp->lane_count; i++) 1591 if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0) 1592 break; 1593 if (i == intel_dp->lane_count) 1594 break; 1595 1596 /* Check to see if we've tried the same voltage 5 times */ 1597 if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) { 1598 ++tries; 1599 if (tries == 5) 1600 break; 1601 } else 1602 tries = 0; 1603 voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK; 1604 1605 /* Compute new intel_dp->train_set as requested by target */ 1606 intel_get_adjust_train(intel_dp); 1607 } 1608 1609 intel_dp->DP = 
DP; 1610} 1611 1612static void 1613intel_dp_complete_link_train(struct intel_dp *intel_dp) 1614{ 1615 struct drm_device *dev = intel_dp->base.base.dev; 1616 struct drm_i915_private *dev_priv = dev->dev_private; 1617 bool channel_eq = false; 1618 int tries, cr_tries; 1619 u32 reg; 1620 uint32_t DP = intel_dp->DP; 1621 1622 /* channel equalization */ 1623 tries = 0; 1624 cr_tries = 0; 1625 channel_eq = false; 1626 for (;;) { 1627 /* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */ 1628 uint32_t signal_levels; 1629 1630 if (cr_tries > 5) { 1631 DRM_ERROR("failed to train DP, aborting\n"); 1632 intel_dp_link_down(intel_dp); 1633 break; 1634 } 1635 1636 if (IS_GEN6(dev) && is_edp(intel_dp)) { 1637 signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]); 1638 DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels; 1639 } else { 1640 signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count); 1641 DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels; 1642 } 1643 1644 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) 1645 reg = DP | DP_LINK_TRAIN_PAT_2_CPT; 1646 else 1647 reg = DP | DP_LINK_TRAIN_PAT_2; 1648 1649 /* channel eq pattern */ 1650 if (!intel_dp_set_link_train(intel_dp, reg, 1651 DP_TRAINING_PATTERN_2 | 1652 DP_LINK_SCRAMBLING_DISABLE)) 1653 break; 1654 1655 udelay(400); 1656 if (!intel_dp_get_link_status(intel_dp)) 1657 break; 1658 1659 /* Make sure clock is still ok */ 1660 if (!intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) { 1661 intel_dp_start_link_train(intel_dp); 1662 cr_tries++; 1663 continue; 1664 } 1665 1666 if (intel_channel_eq_ok(intel_dp)) { 1667 channel_eq = true; 1668 break; 1669 } 1670 1671 /* Try 5 times, then try clock recovery if that fails */ 1672 if (tries > 5) { 1673 intel_dp_link_down(intel_dp); 1674 intel_dp_start_link_train(intel_dp); 1675 tries = 0; 1676 cr_tries++; 1677 continue; 1678 } 1679 1680 /* Compute new intel_dp->train_set as requested by target */ 1681 intel_get_adjust_train(intel_dp); 1682 ++tries; 1683 } 1684 1685 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) 1686 reg = DP | DP_LINK_TRAIN_OFF_CPT; 1687 else 1688 reg = DP | DP_LINK_TRAIN_OFF; 1689 1690 I915_WRITE(intel_dp->output_reg, reg); 1691 POSTING_READ(intel_dp->output_reg); 1692 intel_dp_aux_native_write_1(intel_dp, 1693 DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE); 1694} 1695 1696static void 1697intel_dp_link_down(struct intel_dp *intel_dp) 1698{ 1699 struct drm_device *dev = intel_dp->base.base.dev; 1700 struct drm_i915_private *dev_priv = dev->dev_private; 1701 uint32_t DP = intel_dp->DP; 1702 1703 if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0) 1704 return; 1705 1706 DRM_DEBUG_KMS("\n"); 1707 1708 if (is_edp(intel_dp)) { 1709 DP &= ~DP_PLL_ENABLE; 1710 I915_WRITE(intel_dp->output_reg, DP); 1711 POSTING_READ(intel_dp->output_reg); 1712 udelay(100); 1713 } 1714 1715 if (HAS_PCH_CPT(dev) && !is_edp(intel_dp)) { 1716 DP &= ~DP_LINK_TRAIN_MASK_CPT; 1717 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT); 1718 } else { 1719 DP &= ~DP_LINK_TRAIN_MASK; 1720 I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE); 1721 } 1722 POSTING_READ(intel_dp->output_reg); 1723 1724 msleep(17); 1725 1726 if (is_edp(intel_dp)) 1727 DP |= DP_LINK_TRAIN_OFF; 1728 1729 if (!HAS_PCH_CPT(dev) && 1730 I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) { 1731 struct drm_crtc *crtc = intel_dp->base.base.crtc; 1732 1733 /* Hardware workaround: leaving our transcoder select 1734 * set to 
transcoder B while it's off will prevent the 1735 * corresponding HDMI output on transcoder A. 1736 * 1737 * Combine this with another hardware workaround: 1738 * transcoder select bit can only be cleared while the 1739 * port is enabled. 1740 */ 1741 DP &= ~DP_PIPEB_SELECT; 1742 I915_WRITE(intel_dp->output_reg, DP); 1743 1744 /* Changes to enable or select take place the vblank 1745 * after being written. 1746 */ 1747 if (crtc == NULL) { 1748 /* We can arrive here never having been attached 1749 * to a CRTC, for instance, due to inheriting 1750 * random state from the BIOS. 1751 * 1752 * If the pipe is not running, play safe and 1753 * wait for the clocks to stabilise before 1754 * continuing. 1755 */ 1756 POSTING_READ(intel_dp->output_reg); 1757 msleep(50); 1758 } else 1759 intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe); 1760 } 1761 1762 I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN); 1763 POSTING_READ(intel_dp->output_reg); 1764 msleep(intel_dp->panel_power_down_delay); 1765} 1766 1767static bool 1768intel_dp_get_dpcd(struct intel_dp *intel_dp) 1769{ 1770 if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd, 1771 sizeof(intel_dp->dpcd)) && 1772 (intel_dp->dpcd[DP_DPCD_REV] != 0)) { 1773 return true; 1774 } 1775 1776 return false; 1777} 1778 1779static bool 1780intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector) 1781{ 1782 int ret; 1783 1784 ret = intel_dp_aux_native_read_retry(intel_dp, 1785 DP_DEVICE_SERVICE_IRQ_VECTOR, 1786 sink_irq_vector, 1); 1787 if (!ret) 1788 return false; 1789 1790 return true; 1791} 1792 1793static void 1794intel_dp_handle_test_request(struct intel_dp *intel_dp) 1795{ 1796 /* NAK by default */ 1797 intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK); 1798} 1799 1800/* 1801 * According to DP spec 1802 * 5.1.2: 1803 * 1. Read DPCD 1804 * 2. Configure link according to Receiver Capabilities 1805 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3 1806 * 4. 
Check link status on receipt of hot-plug interrupt
 */

static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	u8 sink_irq_vector;

	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (!intel_dp->base.base.crtc)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Try to read the source of the interrupt */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Clear interrupt source */
		intel_dp_aux_native_write_1(intel_dp,
					    DP_DEVICE_SERVICE_IRQ_VECTOR,
					    sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	if (!intel_channel_eq_ok(intel_dp)) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      drm_get_encoder_name(&intel_dp->base.base));
		intel_dp_start_link_train(intel_dp);
		intel_dp_complete_link_train(intel_dp);
	}
}

static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	if (intel_dp_get_dpcd(intel_dp))
		return connector_status_connected;
	return connector_status_disconnected;
}

static enum drm_connector_status
ironlake_dp_detect(struct intel_dp *intel_dp)
{
	enum drm_connector_status status;

	/* Can't disconnect eDP, but you can close the lid...
*/ 1868 if (is_edp(intel_dp)) { 1869 status = intel_panel_detect(intel_dp->base.base.dev); 1870 if (status == connector_status_unknown) 1871 status = connector_status_connected; 1872 return status; 1873 } 1874 1875 return intel_dp_detect_dpcd(intel_dp); 1876} 1877 1878static enum drm_connector_status 1879g4x_dp_detect(struct intel_dp *intel_dp) 1880{ 1881 struct drm_device *dev = intel_dp->base.base.dev; 1882 struct drm_i915_private *dev_priv = dev->dev_private; 1883 uint32_t temp, bit; 1884 1885 switch (intel_dp->output_reg) { 1886 case DP_B: 1887 bit = DPB_HOTPLUG_INT_STATUS; 1888 break; 1889 case DP_C: 1890 bit = DPC_HOTPLUG_INT_STATUS; 1891 break; 1892 case DP_D: 1893 bit = DPD_HOTPLUG_INT_STATUS; 1894 break; 1895 default: 1896 return connector_status_unknown; 1897 } 1898 1899 temp = I915_READ(PORT_HOTPLUG_STAT); 1900 1901 if ((temp & bit) == 0) 1902 return connector_status_disconnected; 1903 1904 return intel_dp_detect_dpcd(intel_dp); 1905} 1906 1907static struct edid * 1908intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter) 1909{ 1910 struct intel_dp *intel_dp = intel_attached_dp(connector); 1911 struct edid *edid; 1912 1913 ironlake_edp_panel_vdd_on(intel_dp); 1914 edid = drm_get_edid(connector, adapter); 1915 ironlake_edp_panel_vdd_off(intel_dp, false); 1916 return edid; 1917} 1918 1919static int 1920intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter) 1921{ 1922 struct intel_dp *intel_dp = intel_attached_dp(connector); 1923 int ret; 1924 1925 ironlake_edp_panel_vdd_on(intel_dp); 1926 ret = intel_ddc_get_modes(connector, adapter); 1927 ironlake_edp_panel_vdd_off(intel_dp, false); 1928 return ret; 1929} 1930 1931 1932/** 1933 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection. 1934 * 1935 * \return true if DP port is connected. 1936 * \return false if DP port is disconnected. 
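 * (The probe below relies on PORT_HOTPLUG_STAT on G4x parts and on the
 * DPCD / panel state on PCH platforms; the CRT hotplug registers are not
 * actually involved for DP.)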
/**
 * intel_dp_detect - probe for the presence of a DP sink
 * @connector: DRM connector to probe
 * @force: unused; a detection cycle is always performed
 *
 * On PCH platforms this relies on a DPCD probe over AUX (eDP panels are
 * assumed present unless the lid is closed); on G4X the port's hot-plug bit
 * in PORT_HOTPLUG_STAT is checked before probing the DPCD.
 *
 * Returns connector_status_connected if a sink is detected,
 * connector_status_disconnected otherwise.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
        struct intel_dp *intel_dp = intel_attached_dp(connector);
        struct drm_device *dev = intel_dp->base.base.dev;
        enum drm_connector_status status;
        struct edid *edid = NULL;

        intel_dp->has_audio = false;

        if (HAS_PCH_SPLIT(dev))
                status = ironlake_dp_detect(intel_dp);
        else
                status = g4x_dp_detect(intel_dp);

        DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
                      intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2],
                      intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5],
                      intel_dp->dpcd[6], intel_dp->dpcd[7]);

        if (status != connector_status_connected)
                return status;

        if (intel_dp->force_audio) {
                intel_dp->has_audio = intel_dp->force_audio > 0;
        } else {
                edid = intel_dp_get_edid(connector, &intel_dp->adapter);
                if (edid) {
                        intel_dp->has_audio = drm_detect_monitor_audio(edid);
                        connector->display_info.raw_edid = NULL;
                        kfree(edid);
                }
        }

        return connector_status_connected;
}

static int intel_dp_get_modes(struct drm_connector *connector)
{
        struct intel_dp *intel_dp = intel_attached_dp(connector);
        struct drm_device *dev = intel_dp->base.base.dev;
        struct drm_i915_private *dev_priv = dev->dev_private;
        int ret;

        /* We should parse the EDID data and find out if it has an audio sink */

        ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
        if (ret) {
                if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
                        struct drm_display_mode *newmode;
                        list_for_each_entry(newmode, &connector->probed_modes,
                                            head) {
                                if (newmode->type & DRM_MODE_TYPE_PREFERRED) {
                                        intel_dp->panel_fixed_mode =
                                                drm_mode_duplicate(dev, newmode);
                                        break;
                                }
                        }
                }
                return ret;
        }

        /* if eDP has no EDID, try to use fixed panel mode from VBT */
        if (is_edp(intel_dp)) {
                /* initialize panel mode from VBT if available for eDP */
                if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
                        intel_dp->panel_fixed_mode =
                                drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
                        if (intel_dp->panel_fixed_mode) {
                                intel_dp->panel_fixed_mode->type |=
                                        DRM_MODE_TYPE_PREFERRED;
                        }
                }
                if (intel_dp->panel_fixed_mode) {
                        struct drm_display_mode *mode;
                        mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
                        drm_mode_probed_add(connector, mode);
                        return 1;
                }
        }
        return 0;
}

static bool
intel_dp_detect_audio(struct drm_connector *connector)
{
        struct intel_dp *intel_dp = intel_attached_dp(connector);
        struct edid *edid;
        bool has_audio = false;

        edid = intel_dp_get_edid(connector, &intel_dp->adapter);
        if (edid) {
                has_audio = drm_detect_monitor_audio(edid);

                connector->display_info.raw_edid = NULL;
                kfree(edid);
        }

        return has_audio;
}
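/*
 * A hedged sketch of the userspace side, for context only: the force-audio
 * and "Broadcast RGB" connector properties attached by
 * intel_dp_add_properties() are set through the standard KMS property
 * interface, which ends up in intel_dp_set_property() below.  The libdrm
 * call is the usual one; "fd", "connector_id" and "audio_prop_id" are
 * placeholders, and the property name/value assumed here are illustrative.
 */
#if 0
        /* userspace, using libdrm: force audio on for this connector */
        drmModeConnectorSetProperty(fd, connector_id, audio_prop_id, 1);
#endif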
static int
intel_dp_set_property(struct drm_connector *connector,
                      struct drm_property *property,
                      uint64_t val)
{
        struct drm_i915_private *dev_priv = connector->dev->dev_private;
        struct intel_dp *intel_dp = intel_attached_dp(connector);
        int ret;

        ret = drm_connector_property_set_value(connector, property, val);
        if (ret)
                return ret;

        if (property == dev_priv->force_audio_property) {
                int i = val;
                bool has_audio;

                if (i == intel_dp->force_audio)
                        return 0;

                intel_dp->force_audio = i;

                if (i == 0)
                        has_audio = intel_dp_detect_audio(connector);
                else
                        has_audio = i > 0;

                if (has_audio == intel_dp->has_audio)
                        return 0;

                intel_dp->has_audio = has_audio;
                goto done;
        }

        if (property == dev_priv->broadcast_rgb_property) {
                if (val == !!intel_dp->color_range)
                        return 0;

                intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
                goto done;
        }

        return -EINVAL;

done:
        if (intel_dp->base.base.crtc) {
                struct drm_crtc *crtc = intel_dp->base.base.crtc;
                drm_crtc_helper_set_mode(crtc, &crtc->mode,
                                         crtc->x, crtc->y,
                                         crtc->fb);
        }

        return 0;
}

static void
intel_dp_destroy(struct drm_connector *connector)
{
        struct drm_device *dev = connector->dev;

        if (intel_dpd_is_edp(dev))
                intel_panel_destroy_backlight(dev);

        drm_sysfs_connector_remove(connector);
        drm_connector_cleanup(connector);
        kfree(connector);
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
        struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

        i2c_del_adapter(&intel_dp->adapter);
        drm_encoder_cleanup(encoder);
        if (is_edp(intel_dp)) {
                cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
                ironlake_panel_vdd_off_sync(intel_dp);
        }
        kfree(intel_dp);
}

static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
        .dpms = intel_dp_dpms,
        .mode_fixup = intel_dp_mode_fixup,
        .prepare = intel_dp_prepare,
        .mode_set = intel_dp_mode_set,
        .commit = intel_dp_commit,
};

static const struct drm_connector_funcs intel_dp_connector_funcs = {
        .dpms = drm_helper_connector_dpms,
        .detect = intel_dp_detect,
        .fill_modes = drm_helper_probe_single_connector_modes,
        .set_property = intel_dp_set_property,
        .destroy = intel_dp_destroy,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
        .get_modes = intel_dp_get_modes,
        .mode_valid = intel_dp_mode_valid,
        .best_encoder = intel_best_encoder,
};

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
        .destroy = intel_dp_encoder_destroy,
};

static void
intel_dp_hot_plug(struct intel_encoder *intel_encoder)
{
        struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

        intel_dp_check_link_status(intel_dp);
}

/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct drm_mode_config *mode_config = &dev->mode_config;
        struct drm_encoder *encoder;

        list_for_each_entry(encoder, &mode_config->encoder_list, head) {
                struct intel_dp *intel_dp;

                if (encoder->crtc != crtc)
                        continue;

                intel_dp = enc_to_intel_dp(encoder);
                if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
                        return intel_dp->output_reg;
        }

        return -1;
}
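/*
 * A hedged sketch of the caller side (intel_display.c), for context only:
 * the register offset returned by intel_trans_dp_port_sel() is used to pick
 * the matching port-select field when TRANS_DP_CTL is programmed for the PCH
 * transcoder.  Roughly, with "reg" and "temp" coming from that caller:
 */
#if 0
        switch (intel_trans_dp_port_sel(crtc)) {
        case PCH_DP_B:
                temp |= TRANS_DP_PORT_SEL_B;
                break;
        case PCH_DP_C:
                temp |= TRANS_DP_PORT_SEL_C;
                break;
        case PCH_DP_D:
                temp |= TRANS_DP_PORT_SEL_D;
                break;
        }
        I915_WRITE(reg, temp);
#endif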
/* check the VBT to see whether the eDP is on DP-D port */
bool intel_dpd_is_edp(struct drm_device *dev)
{
        struct drm_i915_private *dev_priv = dev->dev_private;
        struct child_device_config *p_child;
        int i;

        if (!dev_priv->child_dev_num)
                return false;

        for (i = 0; i < dev_priv->child_dev_num; i++) {
                p_child = dev_priv->child_dev + i;

                if (p_child->dvo_port == PORT_IDPD &&
                    p_child->device_type == DEVICE_TYPE_eDP)
                        return true;
        }
        return false;
}

static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
        intel_attach_force_audio_property(connector);
        intel_attach_broadcast_rgb_property(connector);
}

void
intel_dp_init(struct drm_device *dev, int output_reg)
{
        struct drm_i915_private *dev_priv = dev->dev_private;
        struct drm_connector *connector;
        struct intel_dp *intel_dp;
        struct intel_encoder *intel_encoder;
        struct intel_connector *intel_connector;
        const char *name = NULL;
        int type;

        intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
        if (!intel_dp)
                return;

        intel_dp->output_reg = output_reg;
        intel_dp->dpms_mode = -1;

        intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
        if (!intel_connector) {
                kfree(intel_dp);
                return;
        }
        intel_encoder = &intel_dp->base;

        if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
                if (intel_dpd_is_edp(dev))
                        intel_dp->is_pch_edp = true;

        if (output_reg == DP_A || is_pch_edp(intel_dp)) {
                type = DRM_MODE_CONNECTOR_eDP;
                intel_encoder->type = INTEL_OUTPUT_EDP;
        } else {
                type = DRM_MODE_CONNECTOR_DisplayPort;
                intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
        }

        connector = &intel_connector->base;
        drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
        drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

        connector->polled = DRM_CONNECTOR_POLL_HPD;

        if (output_reg == DP_B || output_reg == PCH_DP_B)
                intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
        else if (output_reg == DP_C || output_reg == PCH_DP_C)
                intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
        else if (output_reg == DP_D || output_reg == PCH_DP_D)
                intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

        if (is_edp(intel_dp)) {
                intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
                INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
                                  ironlake_panel_vdd_work);
        }

        intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
        connector->interlace_allowed = true;
        connector->doublescan_allowed = 0;

        drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
                         DRM_MODE_ENCODER_TMDS);
        drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

        intel_connector_attach_encoder(intel_connector, intel_encoder);
        drm_sysfs_connector_add(connector);
        /* Set up the DDC bus. */
        switch (output_reg) {
        case DP_A:
                name = "DPDDC-A";
                break;
        case DP_B:
        case PCH_DP_B:
                dev_priv->hotplug_supported_mask |=
                        HDMIB_HOTPLUG_INT_STATUS;
                name = "DPDDC-B";
                break;
        case DP_C:
        case PCH_DP_C:
                dev_priv->hotplug_supported_mask |=
                        HDMIC_HOTPLUG_INT_STATUS;
                name = "DPDDC-C";
                break;
        case DP_D:
        case PCH_DP_D:
                dev_priv->hotplug_supported_mask |=
                        HDMID_HOTPLUG_INT_STATUS;
                name = "DPDDC-D";
                break;
        }

        /* Cache some DPCD data in the eDP case */
        if (is_edp(intel_dp)) {
                bool ret;
                struct edp_power_seq cur, vbt;
                u32 pp_on, pp_off, pp_div;

                pp_on = I915_READ(PCH_PP_ON_DELAYS);
                pp_off = I915_READ(PCH_PP_OFF_DELAYS);
                pp_div = I915_READ(PCH_PP_DIVISOR);

                /* Pull timing values out of registers */
                cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
                        PANEL_POWER_UP_DELAY_SHIFT;

                cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
                        PANEL_LIGHT_ON_DELAY_SHIFT;

                cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
                        PANEL_LIGHT_OFF_DELAY_SHIFT;

                cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
                        PANEL_POWER_DOWN_DELAY_SHIFT;

                /* the cycle delay field is in 100ms units; scale it to the
                 * same 100us scale as the other fields */
                cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
                               PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

                DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
                              cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

                vbt = dev_priv->edp.pps;

                DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
                              vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

                /* Take the longer of the register and VBT values (both in
                 * 100us units) and round up when converting to ms. */
#define get_delay(field) ((max(cur.field, vbt.field) + 9) / 10)

                intel_dp->panel_power_up_delay = get_delay(t1_t3);
                intel_dp->backlight_on_delay = get_delay(t8);
                intel_dp->backlight_off_delay = get_delay(t9);
                intel_dp->panel_power_down_delay = get_delay(t10);
                intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

                DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
                              intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
                              intel_dp->panel_power_cycle_delay);

                DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
                              intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

                intel_dp->panel_off_jiffies = jiffies - intel_dp->panel_power_down_delay;

                ironlake_edp_panel_vdd_on(intel_dp);
                ret = intel_dp_get_dpcd(intel_dp);
                ironlake_edp_panel_vdd_off(intel_dp, false);
                if (ret) {
                        if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
                                dev_priv->no_aux_handshake =
                                        intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
                                        DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
                } else {
                        /* if this fails, presume the device is a ghost */
                        DRM_INFO("failed to retrieve link info, disabling eDP\n");
                        intel_dp_encoder_destroy(&intel_dp->base.base);
                        intel_dp_destroy(&intel_connector->base);
                        return;
                }
        }

        intel_dp_i2c_init(intel_dp, intel_connector, name);

        intel_encoder->hot_plug = intel_dp_hot_plug;

        if (is_edp(intel_dp)) {
                dev_priv->int_edp_connector = connector;
                intel_panel_setup_backlight(dev);
        }

        intel_dp_add_properties(intel_dp, connector);
        /* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
         * 0xd.  Failure to do so will result in spurious interrupts being
         * generated on the port when a cable is not attached.
         */
        if (IS_G4X(dev) && !IS_GM45(dev)) {
                u32 temp = I915_READ(PEG_BAND_GAP_DATA);
                I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
        }
}
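/*
 * A hedged sketch of how intel_dp_init() is reached, for context only: the
 * output setup path in intel_display.c calls it once per candidate DP port,
 * passing that port's control register as @output_reg, roughly along these
 * lines (the exact guards live in intel_display.c, not here):
 */
#if 0
        if (HAS_PCH_SPLIT(dev)) {
                if (I915_READ(DP_A) & DP_DETECTED)
                        intel_dp_init(dev, DP_A);       /* CPU eDP */
                if (I915_READ(PCH_DP_B) & DP_DETECTED)
                        intel_dp_init(dev, PCH_DP_B);
                if (I915_READ(PCH_DP_C) & DP_DETECTED)
                        intel_dp_init(dev, PCH_DP_C);
                if (I915_READ(PCH_DP_D) & DP_DETECTED)
                        intel_dp_init(dev, PCH_DP_D);
        } else if (SUPPORTS_INTEGRATED_DP(dev)) {
                if (I915_READ(DP_B) & DP_DETECTED)
                        intel_dp_init(dev, DP_B);
                if (I915_READ(DP_C) & DP_DETECTED)
                        intel_dp_init(dev, DP_C);
                if (I915_READ(DP_D) & DP_DETECTED)
                        intel_dp_init(dev, DP_D);
        }
#endif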