intel_dp.c revision 82d165557ef094d4b4dfc05871aee618ec7102b0
/*
 * Copyright © 2008 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Keith Packard <keithp@keithp.com>
 *
 */

#include <linux/i2c.h>
#include <linux/slab.h>
#include "drmP.h"
#include "drm.h"
#include "drm_crtc.h"
#include "drm_crtc_helper.h"
#include "intel_drv.h"
#include "i915_drm.h"
#include "i915_drv.h"
#include "drm_dp_helper.h"

#define DP_RECEIVER_CAP_SIZE	0xf
#define DP_LINK_STATUS_SIZE	6
#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)

#define DP_LINK_CONFIGURATION_SIZE	9

struct intel_dp {
	struct intel_encoder base;
	uint32_t output_reg;
	uint32_t DP;
	uint8_t  link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	int force_audio;
	uint32_t color_range;
	int dpms_mode;
	uint8_t link_bw;
	uint8_t lane_count;
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];
	struct i2c_adapter adapter;
	struct i2c_algo_dp_aux_data algo;
	bool is_pch_edp;
	uint8_t	train_set[4];
	uint8_t link_status[DP_LINK_STATUS_SIZE];
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct delayed_work panel_vdd_work;
	bool want_panel_vdd;
	unsigned long panel_off_jiffies;
};

/**
 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 */
static bool is_edp(struct intel_dp *intel_dp)
{
	return intel_dp->base.type == INTEL_OUTPUT_EDP;
}

/**
 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a PCH DP port attached
 * to an eDP panel, false otherwise.  Helpful for determining whether we
 * may need FDI resources for a given DP output or not.
 */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
	return intel_dp->is_pch_edp;
}

/**
 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
 * @intel_dp: DP struct
 *
 * Returns true if the given DP struct corresponds to a CPU eDP port.
 */
static bool is_cpu_edp(struct intel_dp *intel_dp)
{
	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
}

static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}

static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}

/**
 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
 * @encoder: DRM encoder
 *
 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
 * by intel_display.c.
 */
bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp;

	if (!encoder)
		return false;

	intel_dp = enc_to_intel_dp(encoder);

	return is_pch_edp(intel_dp);
}

static void intel_dp_start_link_train(struct intel_dp *intel_dp);
static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
static void intel_dp_link_down(struct intel_dp *intel_dp);

void
intel_edp_link_config(struct intel_encoder *intel_encoder,
		       int *lane_num, int *link_bw)
{
	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);

	*lane_num = intel_dp->lane_count;
	if (intel_dp->link_bw == DP_LINK_BW_1_62)
		*link_bw = 162000;
	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
		*link_bw = 270000;
}

static int
intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int max_lane_count = 4;

	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
		switch (max_lane_count) {
		case 1: case 2: case 4:
			break;
		default:
			max_lane_count = 4;
		}
	}
	return max_lane_count;
}

static int
intel_dp_max_link_bw(struct intel_dp *intel_dp)
{
	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];

	switch (max_link_bw) {
	case DP_LINK_BW_1_62:
	case DP_LINK_BW_2_7:
		break;
	default:
		max_link_bw = DP_LINK_BW_1_62;
		break;
	}
	return max_link_bw;
}

static int
intel_dp_link_clock(uint8_t link_bw)
{
	if (link_bw == DP_LINK_BW_2_7)
		return 270000;
	else
		return 162000;
}

/*
 * The units on the numbers in the next two are... bizarre.  Examples will
 * make it clearer; this one parallels an example in the eDP spec.
 *
 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
 *
 *     270000 * 1 * 8 / 10 == 216000
 *
 * The actual data capacity of that configuration is 2.16Gbit/s, so the
 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
 * 119000.  At 18bpp that's 2142000 kilobits per second.
 *
 * Thus the strange-looking division by 10 in intel_dp_link_required, to
 * get the result in decakilobits instead of kilobits.
 */

static int
intel_dp_link_required(struct intel_dp *intel_dp, int pixel_clock)
{
	struct drm_crtc *crtc = intel_dp->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int bpp = 24;

	if (intel_crtc)
		bpp = intel_crtc->bpp;

	return (pixel_clock * bpp + 9) / 10;
}

static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	return (max_link_clock * max_lanes * 8) / 10;
}

static int
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
	int max_lanes = intel_dp_max_lane_count(intel_dp);

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	if (intel_dp_link_required(intel_dp, mode->clock)
	    > intel_dp_max_data_rate(max_link_clock, max_lanes))
		return MODE_CLOCK_HIGH;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	return MODE_OK;
}

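/*
 * The AUX channel data registers hold up to four message bytes each,
 * packed MSB-first, so byte 0 of the message lands in bits 31:24.
 */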
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	int	i;
	uint32_t v = 0;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		v |= ((uint32_t) src[i]) << ((3-i) * 8);
	return v;
}

static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;
	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = src >> ((3-i) * 8);
}

/* hrawclock is 1/4 the FSB frequency */
static int
intel_hrawclk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t clkcfg;

	clkcfg = I915_READ(CLKCFG);
	switch (clkcfg & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_400:
		return 100;
	case CLKCFG_FSB_533:
		return 133;
	case CLKCFG_FSB_667:
		return 166;
	case CLKCFG_FSB_800:
		return 200;
	case CLKCFG_FSB_1067:
		return 266;
	case CLKCFG_FSB_1333:
		return 333;
	/* these two are just a guess; one of them might be right */
	case CLKCFG_FSB_1600:
	case CLKCFG_FSB_1600_ALT:
		return 400;
	default:
		return 133;
	}
}

static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}

static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}

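/*
 * AUX transfers to an eDP panel only work while the panel has power or
 * VDD is forced on; warn if neither is the case.
 */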
static void
intel_dp_check_edp(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	if (!is_edp(intel_dp))
		return;
	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
		WARN(1, "eDP powered off while attempting aux channel communication.\n");
		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
			      I915_READ(PCH_PP_STATUS),
			      I915_READ(PCH_PP_CONTROL));
	}
}

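/*
 * Perform one AUX channel transaction: load the request into the data
 * registers, start the transfer and copy back any reply bytes.  Returns
 * the number of bytes received, or a negative error code.
 */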
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;
	uint32_t ch_data = ch_ctl + 4;
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk, and would like to
	 * run at 2MHz.  So, take the hrawclk value and divide by 2 and
	 * use that.
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev))
			aux_clock_divider = 200; /* SNB eDP input clock at 400MHz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450MHz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 62; /* ILK input clock fixed at 125MHz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;

	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		WARN(1, "dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(I915_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}

/* Write data to the aux channel in native mode */
static int
intel_dp_aux_native_write(struct intel_dp *intel_dp,
			  uint16_t address, uint8_t *send, int send_bytes)
{
	int ret;
	uint8_t	msg[20];
	int msg_bytes;
	uint8_t	ack;

	intel_dp_check_edp(intel_dp);
	if (send_bytes > 16)
		return -1;
	msg[0] = AUX_NATIVE_WRITE << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = send_bytes - 1;
	memcpy(&msg[4], send, send_bytes);
	msg_bytes = send_bytes + 4;
	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
	return send_bytes;
}

/* Write a single byte to the aux channel in native mode */
static int
intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
			    uint16_t address, uint8_t byte)
{
	return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
}

/* read bytes from a native aux channel */
static int
intel_dp_aux_native_read(struct intel_dp *intel_dp,
			 uint16_t address, uint8_t *recv, int recv_bytes)
{
	uint8_t msg[4];
	int msg_bytes;
	uint8_t reply[20];
	int reply_bytes;
	uint8_t ack;
	int ret;

	intel_dp_check_edp(intel_dp);
	msg[0] = AUX_NATIVE_READ << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = recv_bytes - 1;

	msg_bytes = 4;
	reply_bytes = recv_bytes + 1;

	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
				      reply, reply_bytes);
		if (ret == 0)
			return -EPROTO;
		if (ret < 0)
			return ret;
		ack = reply[0];
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
			memcpy(recv, reply + 1, ret - 1);
			return ret - 1;
		}
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			udelay(100);
		else
			return -EIO;
	}
}

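/*
 * I2C-over-AUX transfer function for the DP i2c adapter: wraps a single
 * byte read or write in the corresponding AUX request and retries when
 * the sink replies with DEFER.
 */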
static int
intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
		    uint8_t write_byte, uint8_t *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct intel_dp *intel_dp = container_of(adapter,
						struct intel_dp,
						adapter);
	uint16_t address = algo_data->address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	intel_dp_check_edp(intel_dp);
	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = AUX_I2C_READ << 4;
	else
		msg[0] = AUX_I2C_WRITE << 4;

	if (!(mode & MODE_I2C_STOP))
		msg[0] |= AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg[3] = 0;
		msg[4] = write_byte;
		msg_bytes = 5;
		reply_bytes = 1;
		break;
	case MODE_I2C_READ:
		msg[3] = 0;
		msg_bytes = 4;
		reply_bytes = 2;
		break;
	default:
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = intel_dp_aux_ch(intel_dp,
				      msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			udelay(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return -EREMOTEIO;
		}

		switch (reply[0] & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return reply_bytes - 1;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return -EREMOTEIO;
}

static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);

static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
		  struct intel_connector *intel_connector, const char *name)
{
	int	ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);
	intel_dp->algo.running = false;
	intel_dp->algo.address = 0;
	intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;

	memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter));
	intel_dp->adapter.owner = THIS_MODULE;
	intel_dp->adapter.class = I2C_CLASS_DDC;
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
	intel_dp->adapter.algo_data = &intel_dp->algo;
	intel_dp->adapter.dev.parent = &intel_connector->base.kdev;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}

static bool
intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int lane_count, clock;
	int max_lane_count = intel_dp_max_lane_count(intel_dp);
	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };

	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
					mode, adjusted_mode);
		/*
		 * the mode->clock is used to calculate the Data&Link M/N
		 * of the pipe. For the eDP the fixed clock should be used.
		 */
		mode->clock = intel_dp->panel_fixed_mode->clock;
	}

	for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
		for (clock = 0; clock <= max_clock; clock++) {
			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);

			if (intel_dp_link_required(intel_dp, mode->clock)
					<= link_avail) {
				intel_dp->link_bw = bws[clock];
				intel_dp->lane_count = lane_count;
				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
				DRM_DEBUG_KMS("Display port link bw %02x lane "
						"count %d clock %d\n",
				       intel_dp->link_bw, intel_dp->lane_count,
				       adjusted_mode->clock);
				return true;
			}
		}
	}

	return false;
}

struct intel_dp_m_n {
	uint32_t	tu;
	uint32_t	gmch_m;
	uint32_t	gmch_n;
	uint32_t	link_m;
	uint32_t	link_n;
};

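/* Shift both terms down until they fit in the 24-bit M/N registers */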
static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	while (*num > 0xffffff || *den > 0xffffff) {
		*num >>= 1;
		*den >>= 1;
	}
}

static void
intel_dp_compute_m_n(int bpp,
		     int nlanes,
		     int pixel_clock,
		     int link_clock,
		     struct intel_dp_m_n *m_n)
{
	m_n->tu = 64;
	m_n->gmch_m = (pixel_clock * bpp) >> 3;
	m_n->gmch_n = link_clock * nlanes;
	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}

void
intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		 struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int lane_count = 4;
	struct intel_dp_m_n m_n;
	int pipe = intel_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT) {
			lane_count = intel_dp->lane_count;
			break;
		} else if (is_edp(intel_dp)) {
			lane_count = dev_priv->edp.lanes;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
			     mode->clock, adjusted_mode->clock, &m_n);

	if (HAS_PCH_SPLIT(dev)) {
		I915_WRITE(TRANSDATA_M1(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
	} else {
		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}

static void ironlake_edp_pll_on(struct drm_encoder *encoder);
static void ironlake_edp_pll_off(struct drm_encoder *encoder);

static void
intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_crtc *crtc = intel_dp->base.base.crtc;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);

	/* Turn on the eDP PLL if needed */
	if (is_edp(intel_dp)) {
		if (!is_pch_edp(intel_dp))
			ironlake_edp_pll_on(encoder);
		else
			ironlake_edp_pll_off(encoder);
	}

	intel_dp->DP = DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= intel_dp->color_range;

	if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
		intel_dp->DP |= DP_SYNC_HS_HIGH;
	if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
		intel_dp->DP |= DP_SYNC_VS_HIGH;

	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
	else
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

	switch (intel_dp->lane_count) {
	case 1:
		intel_dp->DP |= DP_PORT_WIDTH_1;
		break;
	case 2:
		intel_dp->DP |= DP_PORT_WIDTH_2;
		break;
	case 4:
		intel_dp->DP |= DP_PORT_WIDTH_4;
		break;
	}
	if (intel_dp->has_audio) {
		DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
				 pipe_name(intel_crtc->pipe));
		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
		intel_write_eld(encoder, adjusted_mode);
	}

	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
	intel_dp->link_configuration[0] = intel_dp->link_bw;
	intel_dp->link_configuration[1] = intel_dp->lane_count;
	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;

	/*
	 * Check for DPCD version >= 1.1 and enhanced framing support
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
		intel_dp->DP |= DP_ENHANCED_FRAMING;
	}

	/* CPT DP's pipe select is decided in TRANS_DP_CTL */
	if (intel_crtc->pipe == 1 && !HAS_PCH_CPT(dev))
		intel_dp->DP |= DP_PIPEB_SELECT;

	if (is_cpu_edp(intel_dp)) {
		/* CPU eDP also needs the PLL enable and PLL frequency bits set here */
		intel_dp->DP |= DP_PLL_ENABLE;
		if (adjusted_mode->clock < 200000)
			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
		else
			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
	}
}

static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	unsigned long	off_time;
	unsigned long	delay;

	DRM_DEBUG_KMS("Wait for panel power off time\n");

	if (ironlake_edp_have_panel_power(intel_dp) ||
	    ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("Panel still on, no delay needed\n");
		return;
	}

	off_time = intel_dp->panel_off_jiffies + msecs_to_jiffies(intel_dp->panel_power_down_delay);
	if (time_after(jiffies, off_time)) {
		DRM_DEBUG_KMS("Panel power down delay already elapsed\n");
		return;
	}
	delay = jiffies_to_msecs(off_time - jiffies);
	if (delay > intel_dp->panel_power_down_delay)
		delay = intel_dp->panel_power_down_delay;
	DRM_DEBUG_KMS("Waiting an additional %ld ms\n", delay);
	msleep(delay);
}

static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	WARN(intel_dp->want_panel_vdd,
	     "eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;
	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	ironlake_wait_panel_off(intel_dp);
	pp = I915_READ(PCH_PP_CONTROL);
	pp &= ~PANEL_UNLOCK_MASK;
	pp |= PANEL_UNLOCK_REGS;
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		msleep(intel_dp->panel_power_up_delay);
	}
}

static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = I915_READ(PCH_PP_CONTROL);
		pp &= ~PANEL_UNLOCK_MASK;
		pp |= PANEL_UNLOCK_REGS;
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));
		intel_dp->panel_off_jiffies = jiffies;
	}
}

static void ironlake_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
						 struct intel_dp, panel_vdd_work);
	struct drm_device *dev = intel_dp->base.base.dev;

	mutex_lock(&dev->struct_mutex);
	ironlake_panel_vdd_off_sync(intel_dp);
	mutex_unlock(&dev->struct_mutex);
}

static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");

	intel_dp->want_panel_vdd = false;

	if (sync) {
		ironlake_panel_vdd_off_sync(intel_dp);
	} else {
		/*
		 * Queue the timer to fire a long
		 * time from now (relative to the power down delay)
		 * to keep the panel power up across a sequence of operations
		 */
		schedule_delayed_work(&intel_dp->panel_vdd_work,
				      msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
	}
}

/* Turn the eDP panel power on; returns immediately if it is already powered */
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp, idle_on_mask = PP_ON | PP_SEQUENCE_STATE_ON_IDLE;

	if (!is_edp(intel_dp))
		return;
	if (ironlake_edp_have_panel_power(intel_dp))
		return;

	ironlake_wait_panel_off(intel_dp);
	pp = I915_READ(PCH_PP_CONTROL);
	pp &= ~PANEL_UNLOCK_MASK;
	pp |= PANEL_UNLOCK_REGS;

	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	if (wait_for((I915_READ(PCH_PP_STATUS) & idle_on_mask) == idle_on_mask,
		     5000))
		DRM_ERROR("panel on wait timed out: 0x%08x\n",
			  I915_READ(PCH_PP_STATUS));

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}

static void ironlake_edp_panel_off(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp, idle_off_mask = PP_ON | PP_SEQUENCE_MASK |
		PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK;

	if (!is_edp(intel_dp))
		return;
	pp = I915_READ(PCH_PP_CONTROL);
	pp &= ~PANEL_UNLOCK_MASK;
	pp |= PANEL_UNLOCK_REGS;

	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	intel_dp->panel_off_jiffies = jiffies;

	if (IS_GEN5(dev)) {
		pp &= ~POWER_TARGET_ON;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
		pp &= ~POWER_TARGET_ON;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
		msleep(intel_dp->panel_power_cycle_delay);

		if (wait_for((I915_READ(PCH_PP_STATUS) & idle_off_mask) == 0, 5000))
			DRM_ERROR("panel off wait timed out: 0x%08x\n",
				  I915_READ(PCH_PP_STATUS));

		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}

static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	msleep(intel_dp->backlight_on_delay);
	pp = I915_READ(PCH_PP_CONTROL);
	pp &= ~PANEL_UNLOCK_MASK;
	pp |= PANEL_UNLOCK_REGS;
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}

static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = I915_READ(PCH_PP_CONTROL);
	pp &= ~PANEL_UNLOCK_MASK;
	pp |= PANEL_UNLOCK_REGS;
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	msleep(intel_dp->backlight_off_delay);
}

static void ironlake_edp_pll_on(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	DRM_DEBUG_KMS("\n");
	dpa_ctl = I915_READ(DP_A);
	dpa_ctl |= DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}

static void ironlake_edp_pll_off(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 dpa_ctl;

	dpa_ctl = I915_READ(DP_A);
	dpa_ctl &= ~DP_PLL_ENABLE;
	I915_WRITE(DP_A, dpa_ctl);
	POSTING_READ(DP_A);
	udelay(200);
}

/* If the sink supports it, try to set the power state appropriately */
static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
{
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DRM_MODE_DPMS_ON) {
		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
						  DP_SET_POWER_D3);
		if (ret != 1)
			DRM_DEBUG_DRIVER("failed to write sink power state\n");
	} else {
		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = intel_dp_aux_native_write_1(intel_dp,
							  DP_SET_POWER,
							  DP_SET_POWER_D0);
			if (ret == 1)
				break;
			msleep(1);
		}
	}
}

static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	/* Wake up the sink first */
	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	ironlake_edp_panel_vdd_off(intel_dp, false);

	/* Make sure the panel is off before trying to
	 * change the mode
	 */
	ironlake_edp_backlight_off(intel_dp);
	intel_dp_link_down(intel_dp);
	ironlake_edp_panel_off(encoder);
}

static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);

	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}

static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode != DRM_MODE_DPMS_ON) {
		ironlake_edp_panel_vdd_on(intel_dp);
		if (is_edp(intel_dp))
			ironlake_edp_backlight_off(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		intel_dp_link_down(intel_dp);
		ironlake_edp_panel_off(encoder);
		if (is_edp(intel_dp) && !is_pch_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
		ironlake_edp_panel_vdd_off(intel_dp, false);
	} else {
		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
			ironlake_edp_backlight_on(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}

/*
 * Native read with retry for link status and receiver capability reads for
 * cases where the sink may still be asleep.
 */
static bool
intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
			       uint8_t *recv, int recv_bytes)
{
	int ret, i;

	/*
	 * Sinks are *supposed* to come up within 1ms from an off state,
	 * but we're also supposed to retry 3 times per the spec.
	 */
	for (i = 0; i < 3; i++) {
		ret = intel_dp_aux_native_read(intel_dp, address, recv,
					       recv_bytes);
		if (ret == recv_bytes)
			return true;
		msleep(1);
	}

	return false;
}

/*
 * Fetch AUX CH registers 0x202 - 0x207 which contain
 * link status information
 */
static bool
intel_dp_get_link_status(struct intel_dp *intel_dp)
{
	return intel_dp_aux_native_read_retry(intel_dp,
					      DP_LANE0_1_STATUS,
					      intel_dp->link_status,
					      DP_LINK_STATUS_SIZE);
}

static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		     int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}

static uint8_t
intel_get_adjust_request_voltage(uint8_t link_status[DP_LINK_STATUS_SIZE],
				 int lane)
{
	int	    i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int	    s = ((lane & 1) ?
			 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
			 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
	uint8_t l = intel_dp_link_status(link_status, i);

	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static uint8_t
intel_get_adjust_request_pre_emphasis(uint8_t link_status[DP_LINK_STATUS_SIZE],
				      int lane)
{
	int	    i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
	int	    s = ((lane & 1) ?
			 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
			 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
	uint8_t l = intel_dp_link_status(link_status, i);

	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}


#if 0
static char	*voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char	*pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char	*link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};
#endif

/*
 * These are source-specific values; current Intel hardware supports
 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
 */
#define I830_DP_VOLTAGE_MAX	    DP_TRAIN_VOLTAGE_SWING_800

static uint8_t
intel_dp_pre_emphasis_max(uint8_t voltage_swing)
{
	switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
		return DP_TRAIN_PRE_EMPHASIS_6;
	case DP_TRAIN_VOLTAGE_SWING_600:
		return DP_TRAIN_PRE_EMPHASIS_6;
	case DP_TRAIN_VOLTAGE_SWING_800:
		return DP_TRAIN_PRE_EMPHASIS_3_5;
	case DP_TRAIN_VOLTAGE_SWING_1200:
	default:
		return DP_TRAIN_PRE_EMPHASIS_0;
	}
}

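/*
 * Pick the highest voltage swing and pre-emphasis requested by any lane,
 * clamp them to what the hardware supports and apply the result to all
 * lanes in train_set.
 */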
static void
intel_get_adjust_train(struct intel_dp *intel_dp)
{
	uint8_t v = 0;
	uint8_t p = 0;
	int lane;

	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		uint8_t this_v = intel_get_adjust_request_voltage(intel_dp->link_status, lane);
		uint8_t this_p = intel_get_adjust_request_pre_emphasis(intel_dp->link_status, lane);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	if (v >= I830_DP_VOLTAGE_MAX)
		v = I830_DP_VOLTAGE_MAX | DP_TRAIN_MAX_SWING_REACHED;

	if (p >= intel_dp_pre_emphasis_max(v))
		p = intel_dp_pre_emphasis_max(v) | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] = v | p;
}

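/* Translate the requested training values into DP port register voltage swing/pre-emphasis bits */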
static uint32_t
intel_dp_signal_levels(uint8_t train_set, int lane_count)
{
	uint32_t	signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_600:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_800:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_1200:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPHASIS_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPHASIS_3_5:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPHASIS_6:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPHASIS_9_5:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

/* Gen6's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen6_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

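/*
 * Each DP_LANE0_1_STATUS byte carries two lanes; extract the four status
 * bits for the requested lane.
 */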
static uint8_t
intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		      int lane)
{
	int i = DP_LANE0_1_STATUS + (lane >> 1);
	int s = (lane & 1) * 4;
	uint8_t l = intel_dp_link_status(link_status, i);

	return (l >> s) & 0xf;
}

/* Check that clock recovery is done on all channels */
static bool
intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
{
	int lane;
	uint8_t lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return false;
	}
	return true;
}

/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
			 DP_LANE_CHANNEL_EQ_DONE|\
			 DP_LANE_SYMBOL_LOCKED)
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp)
{
	uint8_t lane_align;
	uint8_t lane_status;
	int lane;

	lane_align = intel_dp_link_status(intel_dp->link_status,
					  DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		lane_status = intel_get_lane_status(intel_dp->link_status, lane);
		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}

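/*
 * Write the new port register value, then hand the training pattern and
 * per-lane drive settings to the sink via DPCD.
 */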
static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			uint32_t dp_reg_value,
			uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set, 4);
	if (ret != 4)
		return false;

	return true;
}

/* Enable corresponding port and start training pattern 1 */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/*
	 * On CPT we have to enable the port in training pattern 1, which
	 * will happen below in intel_dp_set_link_train.  Otherwise, enable
	 * the port and wait for it to become active.
	 */
	if (!HAS_PCH_CPT(dev)) {
		I915_WRITE(intel_dp->output_reg, intel_dp->DP);
		POSTING_READ(intel_dp->output_reg);
		intel_wait_for_vblank(dev, intel_crtc->pipe);
	}

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;
	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;
	tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre-emphasis values */
		uint32_t    signal_levels;
		if (IS_GEN6(dev) && is_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;
		/* Set training pattern 1 */

		udelay(100);
		if (!intel_dp_get_link_status(intel_dp))
			break;

		if (intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count)
			break;

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++tries;
			if (tries == 5)
				break;
		} else
			tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp);
	}

	intel_dp->DP = DP;
}

static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre-emphasis values */
		uint32_t    signal_levels;

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			break;
		}

		if (IS_GEN6(dev) && is_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0], intel_dp->lane_count);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;

		udelay(400);
		if (!intel_dp_get_link_status(intel_dp))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(intel_dp->link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp);
		++tries;
	}

	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}

static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		udelay(100);
	}

	if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp)) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	msleep(17);

	if (is_edp(intel_dp))
		DP |= DP_LINK_TRAIN_OFF;

	if (!HAS_PCH_CPT(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
		 * set to transcoder B while it's off will prevent the
		 * corresponding HDMI output on transcoder A.
		 *
		 * Combine this with another hardware workaround:
		 * transcoder select bit can only be cleared while the
		 * port is enabled.
		 */
		DP &= ~DP_PIPEB_SELECT;
		I915_WRITE(intel_dp->output_reg, DP);

		/* Changes to enable or select take place the vblank
		 * after being written.
		 */
		if (crtc == NULL) {
			/* We can arrive here never having been attached
			 * to a CRTC, for instance, due to inheriting
			 * random state from the BIOS.
			 *
			 * If the pipe is not running, play safe and
			 * wait for the clocks to stabilise before
			 * continuing.
			 */
			POSTING_READ(intel_dp->output_reg);
			msleep(50);
		} else
			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
	}

	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);
	msleep(intel_dp->panel_power_down_delay);
}

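/*
 * Read the receiver capability block starting at DPCD address 0; a zero
 * DPCD revision is treated as a failed read.
 */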
1778static bool
1779intel_dp_get_dpcd(struct intel_dp *intel_dp)
1780{
1781	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
1782					   sizeof(intel_dp->dpcd)) &&
1783	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
1784		return true;
1785	}
1786
1787	return false;
1788}
1789
1790static bool
1791intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
1792{
1793	int ret;
1794
1795	ret = intel_dp_aux_native_read_retry(intel_dp,
1796					     DP_DEVICE_SERVICE_IRQ_VECTOR,
1797					     sink_irq_vector, 1);
1798	if (!ret)
1799		return false;
1800
1801	return true;
1802}
1803
1804static void
1805intel_dp_handle_test_request(struct intel_dp *intel_dp)
1806{
1807	/* NAK by default */
1808	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
1809}
1810
1811/*
1812 * According to DP spec
1813 * 5.1.2:
1814 *  1. Read DPCD
1815 *  2. Configure link according to Receiver Capabilities
1816 *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
1817 *  4. Check link status on receipt of hot-plug interrupt
1818 */
1819
1820static void
1821intel_dp_check_link_status(struct intel_dp *intel_dp)
1822{
1823	u8 sink_irq_vector;
1824
1825	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
1826		return;
1827
1828	if (!intel_dp->base.base.crtc)
1829		return;
1830
1831	/* Try to read receiver status if the link appears to be up */
1832	if (!intel_dp_get_link_status(intel_dp)) {
1833		intel_dp_link_down(intel_dp);
1834		return;
1835	}
1836
1837	/* Now read the DPCD to see if it's actually running */
1838	if (!intel_dp_get_dpcd(intel_dp)) {
1839		intel_dp_link_down(intel_dp);
1840		return;
1841	}
1842
1843	/* Try to read the source of the interrupt */
1844	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
1845	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
1846		/* Clear interrupt source */
1847		intel_dp_aux_native_write_1(intel_dp,
1848					    DP_DEVICE_SERVICE_IRQ_VECTOR,
1849					    sink_irq_vector);
1850
1851		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
1852			intel_dp_handle_test_request(intel_dp);
1853		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
1854			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
1855	}
1856
1857	if (!intel_channel_eq_ok(intel_dp)) {
1858		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
1859			      drm_get_encoder_name(&intel_dp->base.base));
1860		intel_dp_start_link_train(intel_dp);
1861		intel_dp_complete_link_train(intel_dp);
1862	}
1863}
1864
1865static enum drm_connector_status
1866intel_dp_detect_dpcd(struct intel_dp *intel_dp)
1867{
1868	if (intel_dp_get_dpcd(intel_dp))
1869		return connector_status_connected;
1870	return connector_status_disconnected;
1871}
1872
1873static enum drm_connector_status
1874ironlake_dp_detect(struct intel_dp *intel_dp)
1875{
1876	enum drm_connector_status status;
1877
1878	/* Can't disconnect eDP, but you can close the lid... */
1879	if (is_edp(intel_dp)) {
1880		status = intel_panel_detect(intel_dp->base.base.dev);
1881		if (status == connector_status_unknown)
1882			status = connector_status_connected;
1883		return status;
1884	}
1885
1886	return intel_dp_detect_dpcd(intel_dp);
1887}
1888
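/*
 * On G4x the per-port bits in PORT_HOTPLUG_STAT indicate whether anything
 * is attached; only probe the DPCD over AUX when one of them is set.
 */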
1889static enum drm_connector_status
1890g4x_dp_detect(struct intel_dp *intel_dp)
1891{
1892	struct drm_device *dev = intel_dp->base.base.dev;
1893	struct drm_i915_private *dev_priv = dev->dev_private;
1894	uint32_t temp, bit;
1895
1896	switch (intel_dp->output_reg) {
1897	case DP_B:
1898		bit = DPB_HOTPLUG_INT_STATUS;
1899		break;
1900	case DP_C:
1901		bit = DPC_HOTPLUG_INT_STATUS;
1902		break;
1903	case DP_D:
1904		bit = DPD_HOTPLUG_INT_STATUS;
1905		break;
1906	default:
1907		return connector_status_unknown;
1908	}
1909
1910	temp = I915_READ(PORT_HOTPLUG_STAT);
1911
1912	if ((temp & bit) == 0)
1913		return connector_status_disconnected;
1914
1915	return intel_dp_detect_dpcd(intel_dp);
1916}
1917
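/*
 * eDP panels only answer DDC/AUX transactions while VDD is applied, so
 * keep it forced on for the duration of the EDID read.
 */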
1918static struct edid *
1919intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
1920{
1921	struct intel_dp *intel_dp = intel_attached_dp(connector);
1922	struct edid	*edid;
1923
1924	ironlake_edp_panel_vdd_on(intel_dp);
1925	edid = drm_get_edid(connector, adapter);
1926	ironlake_edp_panel_vdd_off(intel_dp, false);
1927	return edid;
1928}
1929
1930static int
1931intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
1932{
1933	struct intel_dp *intel_dp = intel_attached_dp(connector);
1934	int	ret;
1935
1936	ironlake_edp_panel_vdd_on(intel_dp);
1937	ret = intel_ddc_get_modes(connector, adapter);
1938	ironlake_edp_panel_vdd_off(intel_dp, false);
1939	return ret;
1940}
1941
1942
1943/**
1944 * Detect whether a DP/eDP sink is attached and return the connector status.
1945 *
1946 * Uses the platform detect path (a DPCD probe on PCH platforms, the port
1947 * hotplug status plus a DPCD probe on G4x), then reads the EDID to find out
1948 * whether the sink supports audio.
 */
1949static enum drm_connector_status
1950intel_dp_detect(struct drm_connector *connector, bool force)
1951{
1952	struct intel_dp *intel_dp = intel_attached_dp(connector);
1953	struct drm_device *dev = intel_dp->base.base.dev;
1954	enum drm_connector_status status;
1955	struct edid *edid = NULL;
1956
1957	intel_dp->has_audio = false;
1958
1959	if (HAS_PCH_SPLIT(dev))
1960		status = ironlake_dp_detect(intel_dp);
1961	else
1962		status = g4x_dp_detect(intel_dp);
1963
1964	DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
1965		      intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2],
1966		      intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5],
1967		      intel_dp->dpcd[6], intel_dp->dpcd[7]);
1968
1969	if (status != connector_status_connected)
1970		return status;
1971
1972	if (intel_dp->force_audio) {
1973		intel_dp->has_audio = intel_dp->force_audio > 0;
1974	} else {
1975		edid = intel_dp_get_edid(connector, &intel_dp->adapter);
1976		if (edid) {
1977			intel_dp->has_audio = drm_detect_monitor_audio(edid);
1978			connector->display_info.raw_edid = NULL;
1979			kfree(edid);
1980		}
1981	}
1982
1983	return connector_status_connected;
1984}
1985
1986static int intel_dp_get_modes(struct drm_connector *connector)
1987{
1988	struct intel_dp *intel_dp = intel_attached_dp(connector);
1989	struct drm_device *dev = intel_dp->base.base.dev;
1990	struct drm_i915_private *dev_priv = dev->dev_private;
1991	int ret;
1992
1993	/* TODO: parse the EDID data and note whether the sink supports
1994	 * audio. */
1995
1996	ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
1997	if (ret) {
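		/* Remember the panel's preferred EDID mode as the eDP fixed
		 * mode so it can be reused if later probes fail.
		 */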
1998		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
1999			struct drm_display_mode *newmode;
2000			list_for_each_entry(newmode, &connector->probed_modes,
2001					    head) {
2002				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
2003					intel_dp->panel_fixed_mode =
2004						drm_mode_duplicate(dev, newmode);
2005					break;
2006				}
2007			}
2008		}
2009		return ret;
2010	}
2011
2012	/* if eDP has no EDID, try to use fixed panel mode from VBT */
2013	if (is_edp(intel_dp)) {
2014		/* initialize panel mode from VBT if available for eDP */
2015		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
2016			intel_dp->panel_fixed_mode =
2017				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
2018			if (intel_dp->panel_fixed_mode) {
2019				intel_dp->panel_fixed_mode->type |=
2020					DRM_MODE_TYPE_PREFERRED;
2021			}
2022		}
2023		if (intel_dp->panel_fixed_mode) {
2024			struct drm_display_mode *mode;
2025			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
2026			drm_mode_probed_add(connector, mode);
2027			return 1;
2028		}
2029	}
2030	return 0;
2031}
2032
2033static bool
2034intel_dp_detect_audio(struct drm_connector *connector)
2035{
2036	struct intel_dp *intel_dp = intel_attached_dp(connector);
2037	struct edid *edid;
2038	bool has_audio = false;
2039
2040	edid = intel_dp_get_edid(connector, &intel_dp->adapter);
2041	if (edid) {
2042		has_audio = drm_detect_monitor_audio(edid);
2043
2044		connector->display_info.raw_edid = NULL;
2045		kfree(edid);
2046	}
2047
2048	return has_audio;
2049}
2050
2051static int
2052intel_dp_set_property(struct drm_connector *connector,
2053		      struct drm_property *property,
2054		      uint64_t val)
2055{
2056	struct drm_i915_private *dev_priv = connector->dev->dev_private;
2057	struct intel_dp *intel_dp = intel_attached_dp(connector);
2058	int ret;
2059
2060	ret = drm_connector_property_set_value(connector, property, val);
2061	if (ret)
2062		return ret;
2063
2064	if (property == dev_priv->force_audio_property) {
2065		int i = val;
2066		bool has_audio;
2067
2068		if (i == intel_dp->force_audio)
2069			return 0;
2070
2071		intel_dp->force_audio = i;
2072
2073		if (i == 0)
2074			has_audio = intel_dp_detect_audio(connector);
2075		else
2076			has_audio = i > 0;
2077
2078		if (has_audio == intel_dp->has_audio)
2079			return 0;
2080
2081		intel_dp->has_audio = has_audio;
2082		goto done;
2083	}
2084
2085	if (property == dev_priv->broadcast_rgb_property) {
2086		if (val == !!intel_dp->color_range)
2087			return 0;
2088
2089		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
2090		goto done;
2091	}
2092
2093	return -EINVAL;
2094
2095done:
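	/* Force a modeset on the attached CRTC so the new audio or color
	 * range setting actually reaches the hardware.
	 */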
2096	if (intel_dp->base.base.crtc) {
2097		struct drm_crtc *crtc = intel_dp->base.base.crtc;
2098		drm_crtc_helper_set_mode(crtc, &crtc->mode,
2099					 crtc->x, crtc->y,
2100					 crtc->fb);
2101	}
2102
2103	return 0;
2104}
2105
2106static void
2107intel_dp_destroy(struct drm_connector *connector)
2108{
2109	struct drm_device *dev = connector->dev;
2110
2111	if (intel_dpd_is_edp(dev))
2112		intel_panel_destroy_backlight(dev);
2113
2114	drm_sysfs_connector_remove(connector);
2115	drm_connector_cleanup(connector);
2116	kfree(connector);
2117}
2118
2119static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
2120{
2121	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2122
2123	i2c_del_adapter(&intel_dp->adapter);
2124	drm_encoder_cleanup(encoder);
2125	if (is_edp(intel_dp)) {
2126		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
2127		ironlake_panel_vdd_off_sync(intel_dp);
2128	}
2129	kfree(intel_dp);
2130}
2131
2132static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
2133	.dpms = intel_dp_dpms,
2134	.mode_fixup = intel_dp_mode_fixup,
2135	.prepare = intel_dp_prepare,
2136	.mode_set = intel_dp_mode_set,
2137	.commit = intel_dp_commit,
2138};
2139
2140static const struct drm_connector_funcs intel_dp_connector_funcs = {
2141	.dpms = drm_helper_connector_dpms,
2142	.detect = intel_dp_detect,
2143	.fill_modes = drm_helper_probe_single_connector_modes,
2144	.set_property = intel_dp_set_property,
2145	.destroy = intel_dp_destroy,
2146};
2147
2148static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
2149	.get_modes = intel_dp_get_modes,
2150	.mode_valid = intel_dp_mode_valid,
2151	.best_encoder = intel_best_encoder,
2152};
2153
2154static const struct drm_encoder_funcs intel_dp_enc_funcs = {
2155	.destroy = intel_dp_encoder_destroy,
2156};
2157
2158static void
2159intel_dp_hot_plug(struct intel_encoder *intel_encoder)
2160{
2161	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
2162
2163	intel_dp_check_link_status(intel_dp);
2164}
2165
2166/* Return which DP Port should be selected for Transcoder DP control */
2167int
2168intel_trans_dp_port_sel(struct drm_crtc *crtc)
2169{
2170	struct drm_device *dev = crtc->dev;
2171	struct drm_mode_config *mode_config = &dev->mode_config;
2172	struct drm_encoder *encoder;
2173
2174	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
2175		struct intel_dp *intel_dp;
2176
2177		if (encoder->crtc != crtc)
2178			continue;
2179
2180		intel_dp = enc_to_intel_dp(encoder);
2181		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT)
2182			return intel_dp->output_reg;
2183	}
2184
2185	return -1;
2186}
2187
2188/* check the VBT to see whether the eDP is on DP-D port */
2189bool intel_dpd_is_edp(struct drm_device *dev)
2190{
2191	struct drm_i915_private *dev_priv = dev->dev_private;
2192	struct child_device_config *p_child;
2193	int i;
2194
2195	if (!dev_priv->child_dev_num)
2196		return false;
2197
2198	for (i = 0; i < dev_priv->child_dev_num; i++) {
2199		p_child = dev_priv->child_dev + i;
2200
2201		if (p_child->dvo_port == PORT_IDPD &&
2202		    p_child->device_type == DEVICE_TYPE_eDP)
2203			return true;
2204	}
2205	return false;
2206}
2207
2208static void
2209intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
2210{
2211	intel_attach_force_audio_property(connector);
2212	intel_attach_broadcast_rgb_property(connector);
2213}
2214
2215void
2216intel_dp_init(struct drm_device *dev, int output_reg)
2217{
2218	struct drm_i915_private *dev_priv = dev->dev_private;
2219	struct drm_connector *connector;
2220	struct intel_dp *intel_dp;
2221	struct intel_encoder *intel_encoder;
2222	struct intel_connector *intel_connector;
2223	const char *name = NULL;
2224	int type;
2225
2226	intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
2227	if (!intel_dp)
2228		return;
2229
2230	intel_dp->output_reg = output_reg;
2231	intel_dp->dpms_mode = -1;
2232
2233	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
2234	if (!intel_connector) {
2235		kfree(intel_dp);
2236		return;
2237	}
2238	intel_encoder = &intel_dp->base;
2239
2240	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
2241		if (intel_dpd_is_edp(dev))
2242			intel_dp->is_pch_edp = true;
2243
2244	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
2245		type = DRM_MODE_CONNECTOR_eDP;
2246		intel_encoder->type = INTEL_OUTPUT_EDP;
2247	} else {
2248		type = DRM_MODE_CONNECTOR_DisplayPort;
2249		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
2250	}
2251
2252	connector = &intel_connector->base;
2253	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
2254	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);
2255
2256	connector->polled = DRM_CONNECTOR_POLL_HPD;
2257
2258	if (output_reg == DP_B || output_reg == PCH_DP_B)
2259		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
2260	else if (output_reg == DP_C || output_reg == PCH_DP_C)
2261		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
2262	else if (output_reg == DP_D || output_reg == PCH_DP_D)
2263		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);
2264
2265	if (is_edp(intel_dp)) {
2266		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
2267		INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
2268				  ironlake_panel_vdd_work);
2269	}
2270
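	/* Any of the (up to three) pipes may drive a DP port. */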
2271	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
2272	connector->interlace_allowed = true;
2273	connector->doublescan_allowed = 0;
2274
2275	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
2276			 DRM_MODE_ENCODER_TMDS);
2277	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);
2278
2279	intel_connector_attach_encoder(intel_connector, intel_encoder);
2280	drm_sysfs_connector_add(connector);
2281
2282	/* Set up the DDC bus. */
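	/* DP_A is the always-connected eDP port on the CPU; only the external
	 * ports get hotplug interrupt bits.
	 */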
2283	switch (output_reg) {
2284	case DP_A:
2285		name = "DPDDC-A";
2286		break;
2287	case DP_B:
2288	case PCH_DP_B:
2289		dev_priv->hotplug_supported_mask |=
2290			HDMIB_HOTPLUG_INT_STATUS;
2291		name = "DPDDC-B";
2292		break;
2293	case DP_C:
2294	case PCH_DP_C:
2295		dev_priv->hotplug_supported_mask |=
2296			HDMIC_HOTPLUG_INT_STATUS;
2297		name = "DPDDC-C";
2298		break;
2299	case DP_D:
2300	case PCH_DP_D:
2301		dev_priv->hotplug_supported_mask |=
2302			HDMID_HOTPLUG_INT_STATUS;
2303		name = "DPDDC-D";
2304		break;
2305	}
2306
2307	/* For eDP, read out the panel power sequencing delays and cache the DPCD */
2308	if (is_edp(intel_dp)) {
2309		bool ret;
2310		struct edp_power_seq	cur, vbt;
2311		u32 pp_on, pp_off, pp_div;
2312
2313		pp_on = I915_READ(PCH_PP_ON_DELAYS);
2314		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
2315		pp_div = I915_READ(PCH_PP_DIVISOR);
2316
2317		/* Pull timing values out of registers */
2318		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
2319			PANEL_POWER_UP_DELAY_SHIFT;
2320
2321		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
2322			PANEL_LIGHT_ON_DELAY_SHIFT;
2323
2324		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
2325			PANEL_LIGHT_OFF_DELAY_SHIFT;
2326
2327		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
2328			PANEL_POWER_DOWN_DELAY_SHIFT;
2329
2330		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
2331			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;
2332
2333		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2334			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);
2335
2336		vbt = dev_priv->edp.pps;
2337
2338		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
2339			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);
2340
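		/* The register and VBT delays are in units of 100us; take the
		 * larger (safer) of the two and round up to milliseconds for
		 * the msleep()-based waits.
		 */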
2341#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)
2342
2343		intel_dp->panel_power_up_delay = get_delay(t1_t3);
2344		intel_dp->backlight_on_delay = get_delay(t8);
2345		intel_dp->backlight_off_delay = get_delay(t9);
2346		intel_dp->panel_power_down_delay = get_delay(t10);
2347		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);
2348
2349		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
2350			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
2351			      intel_dp->panel_power_cycle_delay);
2352
2353		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
2354			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);
2355
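		/* Pretend the panel finished powering down a full delay ago so
		 * the very first power-up does not have to wait.
		 */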
2356		intel_dp->panel_off_jiffies = jiffies - intel_dp->panel_power_down_delay;
2357
2358		ironlake_edp_panel_vdd_on(intel_dp);
2359		ret = intel_dp_get_dpcd(intel_dp);
2360		ironlake_edp_panel_vdd_off(intel_dp, false);
2361		if (ret) {
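			/* Sinks reporting DPCD 1.1 or later may allow link
			 * training without the AUX handshake.
			 */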
2362			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
2363				dev_priv->no_aux_handshake =
2364					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
2365					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
2366		} else {
2367			/* if this fails, presume the device is a ghost */
2368			DRM_INFO("failed to retrieve link info, disabling eDP\n");
2369			intel_dp_encoder_destroy(&intel_dp->base.base);
2370			intel_dp_destroy(&intel_connector->base);
2371			return;
2372		}
2373	}
2374
2375	intel_dp_i2c_init(intel_dp, intel_connector, name);
2376
2377	intel_encoder->hot_plug = intel_dp_hot_plug;
2378
2379	if (is_edp(intel_dp)) {
2380		dev_priv->int_edp_connector = connector;
2381		intel_panel_setup_backlight(dev);
2382	}
2383
2384	intel_dp_add_properties(intel_dp, connector);
2385
2386	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
2387	 * 0xd.  Failure to do so will result in spurious interrupts being
2388	 * generated on the port when a cable is not attached.
2389	 */
2390	if (IS_G4X(dev) && !IS_GM45(dev)) {
2391		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
2392		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
2393	}
2394}
2395