intel_dp.c revision 6c2b7c1208b762abc0df318ae53d18d9e5414e1b
1/*
2 * Copyright © 2008 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors:
24 *    Keith Packard <keithp@keithp.com>
25 *
26 */
27
28#include <linux/i2c.h>
29#include <linux/slab.h>
30#include <linux/export.h>
31#include "drmP.h"
32#include "drm.h"
33#include "drm_crtc.h"
34#include "drm_crtc_helper.h"
35#include "drm_edid.h"
36#include "intel_drv.h"
37#include "i915_drm.h"
38#include "i915_drv.h"
39#include "drm_dp_helper.h"
40
41#define DP_RECEIVER_CAP_SIZE	0xf
42#define DP_LINK_STATUS_SIZE	6
43#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)
44
45#define DP_LINK_CONFIGURATION_SIZE	9
46
/* Per-port DisplayPort state, embedding the common intel_encoder. */
struct intel_dp {
	struct intel_encoder base;
	uint32_t output_reg;	/* MMIO offset of this port's DP register */
	uint32_t DP;		/* cached value to program into output_reg */
	/* DPCD link-configuration block mirrored to the sink (see
	 * intel_dp_mode_set): [0]=link_bw, [1]=lane_count(+flags), [8]=coding. */
	uint8_t  link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	enum hdmi_force_audio force_audio;	/* user override for audio */
	uint32_t color_range;
	int dpms_mode;
	uint8_t link_bw;	/* chosen link rate (DP_LINK_BW_*) */
	uint8_t lane_count;	/* chosen lane count (1, 2 or 4) */
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];	/* cached sink capability bytes */
	struct i2c_adapter adapter;	/* I2C-over-AUX adapter (EDID etc.) */
	struct i2c_algo_dp_aux_data algo;
	bool is_pch_edp;	/* eDP port lives on the PCH rather than the CPU */
	uint8_t	train_set[4];	/* per-lane link-training values — see link training code */
	/* eDP panel power sequencing delays, in ms (see VDD/panel code below) */
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct delayed_work panel_vdd_work;	/* deferred VDD-off work */
	bool want_panel_vdd;	/* a VDD force-on has been requested */
	struct edid *edid; /* cached EDID for eDP */
	int edid_mode_count;
};
74
75/**
76 * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
77 * @intel_dp: DP struct
78 *
79 * If a CPU or PCH DP output is attached to an eDP panel, this function
80 * will return true, and false otherwise.
81 */
82static bool is_edp(struct intel_dp *intel_dp)
83{
84	return intel_dp->base.type == INTEL_OUTPUT_EDP;
85}
86
87/**
88 * is_pch_edp - is the port on the PCH and attached to an eDP panel?
89 * @intel_dp: DP struct
90 *
91 * Returns true if the given DP struct corresponds to a PCH DP port attached
92 * to an eDP panel, false otherwise.  Helpful for determining whether we
93 * may need FDI resources for a given DP output or not.
94 */
95static bool is_pch_edp(struct intel_dp *intel_dp)
96{
97	return intel_dp->is_pch_edp;
98}
99
100/**
101 * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
102 * @intel_dp: DP struct
103 *
104 * Returns true if the given DP struct corresponds to a CPU eDP port.
105 */
106static bool is_cpu_edp(struct intel_dp *intel_dp)
107{
108	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
109}
110
/* Map a drm_encoder back to its containing intel_dp (encoder is
 * embedded as base.base). */
static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}
115
/* Map a connector to the intel_dp of the encoder attached to it. */
static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}
121
122/**
123 * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
124 * @encoder: DRM encoder
125 *
126 * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
127 * by intel_display.c.
128 */
129bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
130{
131	struct intel_dp *intel_dp;
132
133	if (!encoder)
134		return false;
135
136	intel_dp = enc_to_intel_dp(encoder);
137
138	return is_pch_edp(intel_dp);
139}
140
141static void intel_dp_start_link_train(struct intel_dp *intel_dp);
142static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
143static void intel_dp_link_down(struct intel_dp *intel_dp);
144
145void
146intel_edp_link_config(struct intel_encoder *intel_encoder,
147		       int *lane_num, int *link_bw)
148{
149	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
150
151	*lane_num = intel_dp->lane_count;
152	if (intel_dp->link_bw == DP_LINK_BW_1_62)
153		*link_bw = 162000;
154	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
155		*link_bw = 270000;
156}
157
158int
159intel_edp_target_clock(struct intel_encoder *intel_encoder,
160		       struct drm_display_mode *mode)
161{
162	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
163
164	if (intel_dp->panel_fixed_mode)
165		return intel_dp->panel_fixed_mode->clock;
166	else
167		return mode->clock;
168}
169
170static int
171intel_dp_max_lane_count(struct intel_dp *intel_dp)
172{
173	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
174	switch (max_lane_count) {
175	case 1: case 2: case 4:
176		break;
177	default:
178		max_lane_count = 4;
179	}
180	return max_lane_count;
181}
182
183static int
184intel_dp_max_link_bw(struct intel_dp *intel_dp)
185{
186	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
187
188	switch (max_link_bw) {
189	case DP_LINK_BW_1_62:
190	case DP_LINK_BW_2_7:
191		break;
192	default:
193		max_link_bw = DP_LINK_BW_1_62;
194		break;
195	}
196	return max_link_bw;
197}
198
199static int
200intel_dp_link_clock(uint8_t link_bw)
201{
202	if (link_bw == DP_LINK_BW_2_7)
203		return 270000;
204	else
205		return 162000;
206}
207
208/*
209 * The units on the numbers in the next two are... bizarre.  Examples will
210 * make it clearer; this one parallels an example in the eDP spec.
211 *
212 * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
213 *
214 *     270000 * 1 * 8 / 10 == 216000
215 *
216 * The actual data capacity of that configuration is 2.16Gbit/s, so the
217 * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
218 * or equivalently, kilopixels per second - so for 1680x1050R it'd be
219 * 119000.  At 18bpp that's 2142000 kilobits per second.
220 *
221 * Thus the strange-looking division by 10 in intel_dp_link_required, to
222 * get the result in decakilobits instead of kilobits.
223 */
224
/*
 * Bandwidth needed by a mode, in decakilobits/s (see the comment block
 * above): pixel kHz times bits per pixel, divided by 10 and rounded up.
 */
static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	int kbps = pixel_clock * bpp;

	return (kbps + 9) / 10;	/* ceiling division by 10 */
}
230
/*
 * Usable data rate of a link, in decakilobits/s: 8b/10b coding means only
 * 8 of every 10 link bits carry payload.
 */
static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	int raw = max_link_clock * max_lanes;

	return raw * 8 / 10;
}
236
237static bool
238intel_dp_adjust_dithering(struct intel_dp *intel_dp,
239			  struct drm_display_mode *mode,
240			  bool adjust_mode)
241{
242	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
243	int max_lanes = intel_dp_max_lane_count(intel_dp);
244	int max_rate, mode_rate;
245
246	mode_rate = intel_dp_link_required(mode->clock, 24);
247	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
248
249	if (mode_rate > max_rate) {
250		mode_rate = intel_dp_link_required(mode->clock, 18);
251		if (mode_rate > max_rate)
252			return false;
253
254		if (adjust_mode)
255			mode->private_flags
256				|= INTEL_MODE_DP_FORCE_6BPC;
257
258		return true;
259	}
260
261	return true;
262}
263
264static int
265intel_dp_mode_valid(struct drm_connector *connector,
266		    struct drm_display_mode *mode)
267{
268	struct intel_dp *intel_dp = intel_attached_dp(connector);
269
270	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
271		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
272			return MODE_PANEL;
273
274		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
275			return MODE_PANEL;
276	}
277
278	if (!intel_dp_adjust_dithering(intel_dp, mode, false))
279		return MODE_CLOCK_HIGH;
280
281	if (mode->clock < 10000)
282		return MODE_CLOCK_LOW;
283
284	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
285		return MODE_H_ILLEGAL;
286
287	return MODE_OK;
288}
289
/*
 * Pack up to 4 bytes into a big-endian 32-bit word, the layout the AUX
 * channel data registers expect.  Extra bytes beyond 4 are ignored.
 */
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	uint32_t word = 0;
	int n = src_bytes > 4 ? 4 : src_bytes;
	int i;

	for (i = 0; i < n; i++)
		word |= (uint32_t)src[i] << (24 - i * 8);

	return word;
}
302
/*
 * Unpack a big-endian 32-bit AUX data word into at most 4 destination
 * bytes; the inverse of pack_aux().
 */
static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int n = dst_bytes > 4 ? 4 : dst_bytes;
	int i;

	for (i = 0; i < n; i++)
		dst[i] = (uint8_t)(src >> (24 - i * 8));
}
312
313/* hrawclock is 1/4 the FSB frequency */
/*
 * Decode CLKCFG's FSB field into the hrawclk frequency in MHz.
 * Used to derive the AUX channel clock divider on pre-PCH platforms.
 */
static int
intel_hrawclk(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t clkcfg;

	clkcfg = I915_READ(CLKCFG);
	switch (clkcfg & CLKCFG_FSB_MASK) {
	case CLKCFG_FSB_400:
		return 100;
	case CLKCFG_FSB_533:
		return 133;
	case CLKCFG_FSB_667:
		return 166;
	case CLKCFG_FSB_800:
		return 200;
	case CLKCFG_FSB_1067:
		return 266;
	case CLKCFG_FSB_1333:
		return 333;
	/* these two are just a guess; one of them might be right */
	case CLKCFG_FSB_1600:
	case CLKCFG_FSB_1600_ALT:
		return 400;
	default:
		/* unknown encoding: assume the common 533MHz FSB */
		return 133;
	}
}
342
343static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
344{
345	struct drm_device *dev = intel_dp->base.base.dev;
346	struct drm_i915_private *dev_priv = dev->dev_private;
347
348	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
349}
350
351static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
352{
353	struct drm_device *dev = intel_dp->base.base.dev;
354	struct drm_i915_private *dev_priv = dev->dev_private;
355
356	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
357}
358
359static void
360intel_dp_check_edp(struct intel_dp *intel_dp)
361{
362	struct drm_device *dev = intel_dp->base.base.dev;
363	struct drm_i915_private *dev_priv = dev->dev_private;
364
365	if (!is_edp(intel_dp))
366		return;
367	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
368		WARN(1, "eDP powered off while attempting aux channel communication.\n");
369		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
370			      I915_READ(PCH_PP_STATUS),
371			      I915_READ(PCH_PP_CONTROL));
372	}
373}
374
/*
 * Perform one raw DP AUX channel transaction: program the clock divider,
 * load @send into the AUX data registers, kick off the transfer, busy-wait
 * for completion and unpack any reply into @recv.
 *
 * Returns the number of bytes received, or a negative errno:
 * -EBUSY if the channel never goes idle/done, -EIO on a receive error,
 * -ETIMEDOUT when the sink does not respond (e.g. nothing connected).
 */
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;	/* AUX control reg follows the port reg */
	uint32_t ch_data = ch_ctl + 4;		/* AUX data regs follow the control reg */
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk,
	 * and would like to run at 2MHz. So, take the
	 * hrawclk value and divide by 2 and use that
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev) || IS_GEN7(dev))
			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;

	/* Precharge time, in units of 2us, differs on gen6. */
	if (IS_GEN6(dev))
		precharge = 3;
	else
		precharge = 5;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		msleep(1);
	}

	if (try == 3) {
		WARN(1, "dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		/* Busy-wait for the hardware to clear SEND_BUSY. */
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			udelay(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);

		/* On timeout or receive error, retry the whole transfer. */
		if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR |
			      DP_AUX_CH_CTL_RECEIVE_ERROR))
			continue;
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(I915_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}
498
499/* Write data to the aux channel in native mode */
500static int
501intel_dp_aux_native_write(struct intel_dp *intel_dp,
502			  uint16_t address, uint8_t *send, int send_bytes)
503{
504	int ret;
505	uint8_t	msg[20];
506	int msg_bytes;
507	uint8_t	ack;
508
509	intel_dp_check_edp(intel_dp);
510	if (send_bytes > 16)
511		return -1;
512	msg[0] = AUX_NATIVE_WRITE << 4;
513	msg[1] = address >> 8;
514	msg[2] = address & 0xff;
515	msg[3] = send_bytes - 1;
516	memcpy(&msg[4], send, send_bytes);
517	msg_bytes = send_bytes + 4;
518	for (;;) {
519		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
520		if (ret < 0)
521			return ret;
522		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
523			break;
524		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
525			udelay(100);
526		else
527			return -EIO;
528	}
529	return send_bytes;
530}
531
532/* Write a single byte to the aux channel in native mode */
/* Convenience wrapper: native AUX write of a single byte. */
static int
intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
			    uint16_t address, uint8_t byte)
{
	uint8_t buf = byte;

	return intel_dp_aux_native_write(intel_dp, address, &buf, 1);
}
539
540/* read bytes from a native aux channel */
541static int
542intel_dp_aux_native_read(struct intel_dp *intel_dp,
543			 uint16_t address, uint8_t *recv, int recv_bytes)
544{
545	uint8_t msg[4];
546	int msg_bytes;
547	uint8_t reply[20];
548	int reply_bytes;
549	uint8_t ack;
550	int ret;
551
552	intel_dp_check_edp(intel_dp);
553	msg[0] = AUX_NATIVE_READ << 4;
554	msg[1] = address >> 8;
555	msg[2] = address & 0xff;
556	msg[3] = recv_bytes - 1;
557
558	msg_bytes = 4;
559	reply_bytes = recv_bytes + 1;
560
561	for (;;) {
562		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
563				      reply, reply_bytes);
564		if (ret == 0)
565			return -EPROTO;
566		if (ret < 0)
567			return ret;
568		ack = reply[0];
569		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
570			memcpy(recv, reply + 1, ret - 1);
571			return ret - 1;
572		}
573		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
574			udelay(100);
575		else
576			return -EIO;
577	}
578}
579
/*
 * I2C-over-AUX transfer callback for the i2c_algo_dp_aux adapter: carry a
 * single I2C byte (read or write, per @mode) over the AUX channel.
 *
 * Each attempt produces two nested replies that are checked in order: the
 * native AUX reply, then the I2C reply embedded in it.  Native or I2C
 * DEFER is retried (up to 5 native-ACK'd attempts); NACK or an invalid
 * reply aborts with -EREMOTEIO.
 */
static int
intel_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
		    uint8_t write_byte, uint8_t *read_byte)
{
	struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
	struct intel_dp *intel_dp = container_of(adapter,
						struct intel_dp,
						adapter);
	uint16_t address = algo_data->address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	intel_dp_check_edp(intel_dp);
	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = AUX_I2C_READ << 4;
	else
		msg[0] = AUX_I2C_WRITE << 4;

	/* MOT (middle-of-transaction) stays set until the I2C stop. */
	if (!(mode & MODE_I2C_STOP))
		msg[0] |= AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	/* Message/reply sizes differ for write, read and address-only. */
	switch (mode) {
	case MODE_I2C_WRITE:
		msg[3] = 0;
		msg[4] = write_byte;
		msg_bytes = 5;
		reply_bytes = 1;
		break;
	case MODE_I2C_READ:
		msg[3] = 0;
		msg_bytes = 4;
		reply_bytes = 2;
		break;
	default:
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = intel_dp_aux_ch(intel_dp,
				      msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return ret;
		}

		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return -EREMOTEIO;
		case AUX_NATIVE_REPLY_DEFER:
			udelay(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return -EREMOTEIO;
		}

		switch (reply[0] & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return reply_bytes - 1;
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return -EREMOTEIO;
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			udelay(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return -EREMOTEIO;
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return -EREMOTEIO;
}
676
677static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
678static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
679
/*
 * Register the I2C-over-AUX adapter used for EDID reads on this port.
 * VDD is forced on around i2c_dp_aux_add_bus() because bus registration
 * probes the channel, which on eDP requires panel power.
 */
static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
		  struct intel_connector *intel_connector, const char *name)
{
	int	ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);
	intel_dp->algo.running = false;
	intel_dp->algo.address = 0;
	intel_dp->algo.aux_ch = intel_dp_i2c_aux_ch;

	memset(&intel_dp->adapter, '\0', sizeof(intel_dp->adapter));
	intel_dp->adapter.owner = THIS_MODULE;
	intel_dp->adapter.class = I2C_CLASS_DDC;
	/* strncpy does not guarantee termination; the next line adds it. */
	strncpy(intel_dp->adapter.name, name, sizeof(intel_dp->adapter.name) - 1);
	intel_dp->adapter.name[sizeof(intel_dp->adapter.name) - 1] = '\0';
	intel_dp->adapter.algo_data = &intel_dp->algo;
	intel_dp->adapter.dev.parent = &intel_connector->base.kdev;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = i2c_dp_aux_add_bus(&intel_dp->adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}
704
/*
 * Encoder mode_fixup hook: pick the cheapest (link rate, lane count)
 * combination that can carry the adjusted mode, storing the choice in
 * intel_dp->link_bw / lane_count and overriding adjusted_mode->clock with
 * the link clock.  Returns false if no combination fits.
 */
static bool
intel_dp_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
		    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = encoder->dev;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int lane_count, clock;
	int max_lane_count = intel_dp_max_lane_count(intel_dp);
	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
	int bpp, mode_rate;
	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };

	/* eDP: force the panel's fixed timing and apply the panel fitter. */
	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
					mode, adjusted_mode);
	}

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return false;

	DRM_DEBUG_KMS("DP link computation with max lane count %i "
		      "max bw %02x pixel clock %iKHz\n",
		      max_lane_count, bws[max_clock], adjusted_mode->clock);

	/* May set INTEL_MODE_DP_FORCE_6BPC if only 18bpp fits. */
	if (!intel_dp_adjust_dithering(intel_dp, adjusted_mode, true))
		return false;

	bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24;
	mode_rate = intel_dp_link_required(adjusted_mode->clock, bpp);

	/* Walk rates slow-to-fast, lanes few-to-many: first fit is cheapest. */
	for (clock = 0; clock <= max_clock; clock++) {
		for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);

			if (mode_rate <= link_avail) {
				intel_dp->link_bw = bws[clock];
				intel_dp->lane_count = lane_count;
				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
				DRM_DEBUG_KMS("DP link bw %02x lane "
						"count %d clock %d bpp %d\n",
				       intel_dp->link_bw, intel_dp->lane_count,
				       adjusted_mode->clock, bpp);
				DRM_DEBUG_KMS("DP link bw required %i available %i\n",
					      mode_rate, link_avail);
				return true;
			}
		}
	}

	return false;
}
757
/* M/N ratio values programmed into the data and link M/N registers
 * (see intel_dp_set_m_n); tu is the transfer unit size. */
struct intel_dp_m_n {
	uint32_t	tu;
	uint32_t	gmch_m;
	uint32_t	gmch_n;
	uint32_t	link_m;
	uint32_t	link_n;
};
765
/*
 * Halve both terms of a ratio until each fits in the 24-bit M/N register
 * fields.  Shifting both sides keeps the ratio (approximately) intact.
 */
static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	for (;;) {
		if (*num <= 0xffffff && *den <= 0xffffff)
			break;
		*num >>= 1;
		*den >>= 1;
	}
}
774
/*
 * Compute the data and link M/N ratios for the given configuration.
 * Data M/N: pixel bytes per second vs. total link symbol rate.
 * Link M/N: pixel clock vs. link clock.
 */
static void
intel_dp_compute_m_n(int bpp,
		     int nlanes,
		     int pixel_clock,
		     int link_clock,
		     struct intel_dp_m_n *m_n)
{
	m_n->tu = 64;	/* fixed transfer unit size */
	m_n->gmch_m = (pixel_clock * bpp) >> 3;	/* bits -> bytes */
	m_n->gmch_n = link_clock * nlanes;
	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}
790
/*
 * Program the pipe/transcoder data and link M/N registers for a DP output
 * on @crtc, using the lane count negotiated by the DP encoder on the crtc.
 */
void
intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		 struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct intel_encoder *encoder;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int lane_count = 4;	/* fallback if no DP encoder is found */
	struct intel_dp_m_n m_n;
	int pipe = intel_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	for_each_encoder_on_crtc(dev, crtc, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
		{
			lane_count = intel_dp->lane_count;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
			     mode->clock, adjusted_mode->clock, &m_n);

	/* PCH platforms use the transcoder registers, others the pipe ones. */
	if (HAS_PCH_SPLIT(dev)) {
		I915_WRITE(TRANSDATA_M1(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
	} else {
		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}
841
842static void ironlake_edp_pll_on(struct drm_encoder *encoder);
843static void ironlake_edp_pll_off(struct drm_encoder *encoder);
844
845static void
846intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
847		  struct drm_display_mode *adjusted_mode)
848{
849	struct drm_device *dev = encoder->dev;
850	struct drm_i915_private *dev_priv = dev->dev_private;
851	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
852	struct drm_crtc *crtc = intel_dp->base.base.crtc;
853	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
854
855	/* Turn on the eDP PLL if needed */
856	if (is_edp(intel_dp)) {
857		if (!is_pch_edp(intel_dp))
858			ironlake_edp_pll_on(encoder);
859		else
860			ironlake_edp_pll_off(encoder);
861	}
862
863	/*
864	 * There are four kinds of DP registers:
865	 *
866	 * 	IBX PCH
867	 * 	SNB CPU
868	 *	IVB CPU
869	 * 	CPT PCH
870	 *
871	 * IBX PCH and CPU are the same for almost everything,
872	 * except that the CPU DP PLL is configured in this
873	 * register
874	 *
875	 * CPT PCH is quite different, having many bits moved
876	 * to the TRANS_DP_CTL register instead. That
877	 * configuration happens (oddly) in ironlake_pch_enable
878	 */
879
880	/* Preserve the BIOS-computed detected bit. This is
881	 * supposed to be read-only.
882	 */
883	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
884	intel_dp->DP |=  DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
885
886	/* Handle DP bits in common between all three register formats */
887
888	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
889
890	switch (intel_dp->lane_count) {
891	case 1:
892		intel_dp->DP |= DP_PORT_WIDTH_1;
893		break;
894	case 2:
895		intel_dp->DP |= DP_PORT_WIDTH_2;
896		break;
897	case 4:
898		intel_dp->DP |= DP_PORT_WIDTH_4;
899		break;
900	}
901	if (intel_dp->has_audio) {
902		DRM_DEBUG_DRIVER("Enabling DP audio on pipe %c\n",
903				 pipe_name(intel_crtc->pipe));
904		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
905		intel_write_eld(encoder, adjusted_mode);
906	}
907	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
908	intel_dp->link_configuration[0] = intel_dp->link_bw;
909	intel_dp->link_configuration[1] = intel_dp->lane_count;
910	intel_dp->link_configuration[8] = DP_SET_ANSI_8B10B;
911	/*
912	 * Check for DPCD version > 1.1 and enhanced framing support
913	 */
914	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
915	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
916		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
917	}
918
919	/* Split out the IBX/CPU vs CPT settings */
920
921	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
922		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
923			intel_dp->DP |= DP_SYNC_HS_HIGH;
924		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
925			intel_dp->DP |= DP_SYNC_VS_HIGH;
926		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
927
928		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
929			intel_dp->DP |= DP_ENHANCED_FRAMING;
930
931		intel_dp->DP |= intel_crtc->pipe << 29;
932
933		/* don't miss out required setting for eDP */
934		intel_dp->DP |= DP_PLL_ENABLE;
935		if (adjusted_mode->clock < 200000)
936			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
937		else
938			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
939	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
940		intel_dp->DP |= intel_dp->color_range;
941
942		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
943			intel_dp->DP |= DP_SYNC_HS_HIGH;
944		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
945			intel_dp->DP |= DP_SYNC_VS_HIGH;
946		intel_dp->DP |= DP_LINK_TRAIN_OFF;
947
948		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
949			intel_dp->DP |= DP_ENHANCED_FRAMING;
950
951		if (intel_crtc->pipe == 1)
952			intel_dp->DP |= DP_PIPEB_SELECT;
953
954		if (is_cpu_edp(intel_dp)) {
955			/* don't miss out required setting for eDP */
956			intel_dp->DP |= DP_PLL_ENABLE;
957			if (adjusted_mode->clock < 200000)
958				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
959			else
960				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
961		}
962	} else {
963		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
964	}
965}
966
/* Panel power sequencer status mask/value pairs for
 * ironlake_wait_panel_status(): what PCH_PP_STATUS should read once the
 * panel is fully on, fully off, or done with its power-cycle delay. */
#define IDLE_ON_MASK		(PP_ON | 0 	  | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE   	(PP_ON | 0 	  | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK		(PP_ON | 0        | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_OFF_VALUE		(0     | 0        | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)

#define IDLE_CYCLE_MASK		(PP_ON | 0        | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE	(0     | 0        | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)
975
/*
 * Poll PCH_PP_STATUS until (status & mask) == value, giving the panel
 * power sequencer up to 5 seconds; log an error on timeout rather than
 * failing, since callers have no recovery path.
 */
static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
				       u32 mask,
				       u32 value)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
		      mask, value,
		      I915_READ(PCH_PP_STATUS),
		      I915_READ(PCH_PP_CONTROL));

	/* 5000ms total, polling every 10ms */
	if (_wait_for((I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10)) {
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
			  I915_READ(PCH_PP_STATUS),
			  I915_READ(PCH_PP_CONTROL));
	}
}
994
/* Block until the power sequencer reports the panel fully on and idle. */
static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}
1000
/* Block until the power sequencer reports the panel fully off and idle. */
static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}
1006
/* Block until the power sequencer has finished its power-cycle delay,
 * so the panel may be powered up again. */
static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power cycle\n");
	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}
1012
1013
1014/* Read the current pp_control value, unlocking the register if it
1015 * is locked
1016 */
1017
1018static  u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
1019{
1020	u32	control = I915_READ(PCH_PP_CONTROL);
1021
1022	control &= ~PANEL_UNLOCK_MASK;
1023	control |= PANEL_UNLOCK_REGS;
1024	return control;
1025}
1026
/*
 * Force eDP panel VDD on so AUX transfers work without full panel power.
 * Sets want_panel_vdd (paired with ironlake_edp_panel_vdd_off) and, when
 * the panel had no power at all, waits through the power-cycle and
 * power-up delays before returning.
 */
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	/* Calls must be balanced: a second "on" without an "off" is a bug. */
	WARN(intel_dp->want_panel_vdd,
	     "eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;

	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	/* Respect the mandatory off-time before re-powering the panel. */
	if (!ironlake_edp_have_panel_power(intel_dp))
		ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		msleep(intel_dp->panel_power_up_delay);
	}
}
1065
/*
 * Synchronously drop the forced VDD rail, but only if nobody still
 * wants it (want_panel_vdd clear) and it is actually on.  Sleeps for
 * the panel power-down delay so the sequencer is idle on return.
 */
static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = ironlake_get_pp_control(dev_priv);
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

		msleep(intel_dp->panel_power_down_delay);
	}
}
1085
/*
 * Delayed-work callback: drop the forced VDD once the hold-off period
 * scheduled by ironlake_edp_panel_vdd_off(sync=false) expires.  Takes
 * mode_config.mutex to serialize against concurrent modeset paths.
 */
static void ironlake_panel_vdd_work(struct work_struct *__work)
{
	struct intel_dp *intel_dp = container_of(to_delayed_work(__work),
						 struct intel_dp, panel_vdd_work);
	struct drm_device *dev = intel_dp->base.base.dev;

	mutex_lock(&dev->mode_config.mutex);
	ironlake_panel_vdd_off_sync(intel_dp);
	mutex_unlock(&dev->mode_config.mutex);
}
1096
1097static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
1098{
1099	if (!is_edp(intel_dp))
1100		return;
1101
1102	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
1103	WARN(!intel_dp->want_panel_vdd, "eDP VDD not forced on");
1104
1105	intel_dp->want_panel_vdd = false;
1106
1107	if (sync) {
1108		ironlake_panel_vdd_off_sync(intel_dp);
1109	} else {
1110		/*
1111		 * Queue the timer to fire a long
1112		 * time from now (relative to the power down delay)
1113		 * to keep the panel power up across a sequence of operations
1114		 */
1115		schedule_delayed_work(&intel_dp->panel_vdd_work,
1116				      msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
1117	}
1118}
1119
/*
 * Power the eDP panel on via the PCH power sequencer and wait until it
 * reports fully on.  Contains an Ironlake (GEN5) workaround that drops
 * PANEL_POWER_RESET around the power-up and restores it afterwards.
 */
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power on\n");

	if (ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP power already on\n");
		return;
	}

	/* Honour the panel's minimum off->on cycle time */
	ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_on(intel_dp);

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}
1161
/*
 * Power the eDP panel off (also clears backlight enable and the reset
 * bit) and wait for the sequencer to report it fully off.  Callers
 * must hold a VDD reference — hence the WARN below.
 */
static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power off\n");

	WARN(!intel_dp->want_panel_vdd, "Need VDD to turn off panel\n");

	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~(POWER_TARGET_ON | PANEL_POWER_RESET | EDP_BLC_ENABLE);
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_off(intel_dp);
}
1182
/* Enable the eDP backlight after the panel-mandated turn-on delay. */
static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	msleep(intel_dp->backlight_on_delay);
	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}
1205
/* Disable the eDP backlight, then sleep for the panel's backlight-off
 * delay so subsequent panel-power changes respect the panel timings. */
static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	msleep(intel_dp->backlight_off_delay);
}
1222
1223static void ironlake_edp_pll_on(struct drm_encoder *encoder)
1224{
1225	struct drm_device *dev = encoder->dev;
1226	struct drm_i915_private *dev_priv = dev->dev_private;
1227	u32 dpa_ctl;
1228
1229	DRM_DEBUG_KMS("\n");
1230	dpa_ctl = I915_READ(DP_A);
1231	dpa_ctl |= DP_PLL_ENABLE;
1232	I915_WRITE(DP_A, dpa_ctl);
1233	POSTING_READ(DP_A);
1234	udelay(200);
1235}
1236
1237static void ironlake_edp_pll_off(struct drm_encoder *encoder)
1238{
1239	struct drm_device *dev = encoder->dev;
1240	struct drm_i915_private *dev_priv = dev->dev_private;
1241	u32 dpa_ctl;
1242
1243	dpa_ctl = I915_READ(DP_A);
1244	dpa_ctl &= ~DP_PLL_ENABLE;
1245	I915_WRITE(DP_A, dpa_ctl);
1246	POSTING_READ(DP_A);
1247	udelay(200);
1248}
1249
1250/* If the sink supports it, try to set the power state appropriately */
1251static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
1252{
1253	int ret, i;
1254
1255	/* Should have a valid DPCD by this point */
1256	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1257		return;
1258
1259	if (mode != DRM_MODE_DPMS_ON) {
1260		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
1261						  DP_SET_POWER_D3);
1262		if (ret != 1)
1263			DRM_DEBUG_DRIVER("failed to write sink power state\n");
1264	} else {
1265		/*
1266		 * When turning on, we need to retry for 1ms to give the sink
1267		 * time to wake up.
1268		 */
1269		for (i = 0; i < 3; i++) {
1270			ret = intel_dp_aux_native_write_1(intel_dp,
1271							  DP_SET_POWER,
1272							  DP_SET_POWER_D0);
1273			if (ret == 1)
1274				break;
1275			msleep(1);
1276		}
1277	}
1278}
1279
/*
 * DRM encoder ->prepare hook: shut the output down ahead of a mode
 * change.  Backlight and panel power go off first (under a temporary
 * VDD reference), then the link itself is taken down.
 */
static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);


	/* Make sure the panel is off before trying to change the mode. But also
	 * ensure that we have vdd while we switch off the panel. */
	ironlake_edp_panel_vdd_on(intel_dp);
	ironlake_edp_backlight_off(intel_dp);
	ironlake_edp_panel_off(intel_dp);

	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_link_down(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, false);
}
1295
/*
 * DRM encoder ->commit hook: bring the output up after a mode set —
 * wake the sink, train the link, power the panel and finally the
 * backlight.  VDD is held across training so AUX works before the
 * panel is on.
 */
static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);
	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	/* CPT may need the FDI/transcoder state double-checked after a modeset */
	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}
1315
/*
 * DRM encoder ->dpms hook.  Anything other than DPMS_ON powers the
 * output down; DPMS_ON brings it back up, doing a full link training
 * only if the port is not already enabled.
 */
static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode != DRM_MODE_DPMS_ON) {
		/* Switching the panel off requires vdd. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ironlake_edp_backlight_off(intel_dp);
		ironlake_edp_panel_off(intel_dp);

		intel_dp_sink_dpms(intel_dp, mode);
		intel_dp_link_down(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		/* CPU eDP also drives the eDP PLL; stop it last */
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
	} else {
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_on(encoder);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			/* port was down: retrain before releasing VDD */
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}
1353
1354/*
1355 * Native read with retry for link status and receiver capability reads for
1356 * cases where the sink may still be asleep.
1357 */
1358static bool
1359intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
1360			       uint8_t *recv, int recv_bytes)
1361{
1362	int ret, i;
1363
1364	/*
1365	 * Sinks are *supposed* to come up within 1ms from an off state,
1366	 * but we're also supposed to retry 3 times per the spec.
1367	 */
1368	for (i = 0; i < 3; i++) {
1369		ret = intel_dp_aux_native_read(intel_dp, address, recv,
1370					       recv_bytes);
1371		if (ret == recv_bytes)
1372			return true;
1373		msleep(1);
1374	}
1375
1376	return false;
1377}
1378
1379/*
1380 * Fetch AUX CH registers 0x202 - 0x207 which contain
1381 * link status information
1382 */
1383static bool
1384intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1385{
1386	return intel_dp_aux_native_read_retry(intel_dp,
1387					      DP_LANE0_1_STATUS,
1388					      link_status,
1389					      DP_LINK_STATUS_SIZE);
1390}
1391
/* Return the cached status byte for DPCD address @r from a link_status
 * block that starts at DP_LANE0_1_STATUS (0x202). */
static uint8_t
intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
		     int r)
{
	return link_status[r - DP_LANE0_1_STATUS];
}
1398
1399static uint8_t
1400intel_get_adjust_request_voltage(uint8_t adjust_request[2],
1401				 int lane)
1402{
1403	int	    s = ((lane & 1) ?
1404			 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1405			 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1406	uint8_t l = adjust_request[lane>>1];
1407
1408	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1409}
1410
1411static uint8_t
1412intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
1413				      int lane)
1414{
1415	int	    s = ((lane & 1) ?
1416			 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1417			 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1418	uint8_t l = adjust_request[lane>>1];
1419
1420	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1421}
1422
1423
1424#if 0
1425static char	*voltage_names[] = {
1426	"0.4V", "0.6V", "0.8V", "1.2V"
1427};
1428static char	*pre_emph_names[] = {
1429	"0dB", "3.5dB", "6dB", "9.5dB"
1430};
1431static char	*link_train_names[] = {
1432	"pattern 1", "pattern 2", "idle", "off"
1433};
1434#endif
1435
1436/*
1437 * These are source-specific values; current Intel hardware supports
1438 * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
1439 */
1440
1441static uint8_t
1442intel_dp_voltage_max(struct intel_dp *intel_dp)
1443{
1444	struct drm_device *dev = intel_dp->base.base.dev;
1445
1446	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
1447		return DP_TRAIN_VOLTAGE_SWING_800;
1448	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
1449		return DP_TRAIN_VOLTAGE_SWING_1200;
1450	else
1451		return DP_TRAIN_VOLTAGE_SWING_800;
1452}
1453
1454static uint8_t
1455intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
1456{
1457	struct drm_device *dev = intel_dp->base.base.dev;
1458
1459	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
1460		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1461		case DP_TRAIN_VOLTAGE_SWING_400:
1462			return DP_TRAIN_PRE_EMPHASIS_6;
1463		case DP_TRAIN_VOLTAGE_SWING_600:
1464		case DP_TRAIN_VOLTAGE_SWING_800:
1465			return DP_TRAIN_PRE_EMPHASIS_3_5;
1466		default:
1467			return DP_TRAIN_PRE_EMPHASIS_0;
1468		}
1469	} else {
1470		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
1471		case DP_TRAIN_VOLTAGE_SWING_400:
1472			return DP_TRAIN_PRE_EMPHASIS_6;
1473		case DP_TRAIN_VOLTAGE_SWING_600:
1474			return DP_TRAIN_PRE_EMPHASIS_6;
1475		case DP_TRAIN_VOLTAGE_SWING_800:
1476			return DP_TRAIN_PRE_EMPHASIS_3_5;
1477		case DP_TRAIN_VOLTAGE_SWING_1200:
1478		default:
1479			return DP_TRAIN_PRE_EMPHASIS_0;
1480		}
1481	}
1482}
1483
1484static void
1485intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1486{
1487	uint8_t v = 0;
1488	uint8_t p = 0;
1489	int lane;
1490	uint8_t	*adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
1491	uint8_t voltage_max;
1492	uint8_t preemph_max;
1493
1494	for (lane = 0; lane < intel_dp->lane_count; lane++) {
1495		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
1496		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);
1497
1498		if (this_v > v)
1499			v = this_v;
1500		if (this_p > p)
1501			p = this_p;
1502	}
1503
1504	voltage_max = intel_dp_voltage_max(intel_dp);
1505	if (v >= voltage_max)
1506		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
1507
1508	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
1509	if (p >= preemph_max)
1510		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
1511
1512	for (lane = 0; lane < 4; lane++)
1513		intel_dp->train_set[lane] = v | p;
1514}
1515
1516static uint32_t
1517intel_dp_signal_levels(uint8_t train_set)
1518{
1519	uint32_t	signal_levels = 0;
1520
1521	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
1522	case DP_TRAIN_VOLTAGE_SWING_400:
1523	default:
1524		signal_levels |= DP_VOLTAGE_0_4;
1525		break;
1526	case DP_TRAIN_VOLTAGE_SWING_600:
1527		signal_levels |= DP_VOLTAGE_0_6;
1528		break;
1529	case DP_TRAIN_VOLTAGE_SWING_800:
1530		signal_levels |= DP_VOLTAGE_0_8;
1531		break;
1532	case DP_TRAIN_VOLTAGE_SWING_1200:
1533		signal_levels |= DP_VOLTAGE_1_2;
1534		break;
1535	}
1536	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
1537	case DP_TRAIN_PRE_EMPHASIS_0:
1538	default:
1539		signal_levels |= DP_PRE_EMPHASIS_0;
1540		break;
1541	case DP_TRAIN_PRE_EMPHASIS_3_5:
1542		signal_levels |= DP_PRE_EMPHASIS_3_5;
1543		break;
1544	case DP_TRAIN_PRE_EMPHASIS_6:
1545		signal_levels |= DP_PRE_EMPHASIS_6;
1546		break;
1547	case DP_TRAIN_PRE_EMPHASIS_9_5:
1548		signal_levels |= DP_PRE_EMPHASIS_9_5;
1549		break;
1550	}
1551	return signal_levels;
1552}
1553
/* Gen6's DP voltage swing and pre-emphasis control.
 * SNB only exposes a handful of combined swing/pre-emphasis settings,
 * so several requested combinations map to the same register value;
 * anything unsupported falls back to 400/600mV 0dB. */
static uint32_t
intel_gen6_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}
1581
/* Gen7's DP voltage swing and pre-emphasis control.
 * IVB CPU eDP has distinct register encodings per swing/pre-emphasis
 * pair; unsupported combinations fall back to 500mV 0dB. */
static uint32_t
intel_gen7_edp_signal_levels(uint8_t train_set)
{
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}
1612
1613static uint8_t
1614intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1615		      int lane)
1616{
1617	int s = (lane & 1) * 4;
1618	uint8_t l = link_status[lane>>1];
1619
1620	return (l >> s) & 0xf;
1621}
1622
1623/* Check for clock recovery is done on all channels */
1624static bool
1625intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1626{
1627	int lane;
1628	uint8_t lane_status;
1629
1630	for (lane = 0; lane < lane_count; lane++) {
1631		lane_status = intel_get_lane_status(link_status, lane);
1632		if ((lane_status & DP_LANE_CR_DONE) == 0)
1633			return false;
1634	}
1635	return true;
1636}
1637
1638/* Check to see if channel eq is done on all channels */
1639#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
1640			 DP_LANE_CHANNEL_EQ_DONE|\
1641			 DP_LANE_SYMBOL_LOCKED)
1642static bool
1643intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1644{
1645	uint8_t lane_align;
1646	uint8_t lane_status;
1647	int lane;
1648
1649	lane_align = intel_dp_link_status(link_status,
1650					  DP_LANE_ALIGN_STATUS_UPDATED);
1651	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
1652		return false;
1653	for (lane = 0; lane < intel_dp->lane_count; lane++) {
1654		lane_status = intel_get_lane_status(link_status, lane);
1655		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
1656			return false;
1657	}
1658	return true;
1659}
1660
/*
 * Program @dp_reg_value into the port register, then tell the sink
 * (via DPCD TRAINING_PATTERN_SET) which pattern to expect and push the
 * current per-lane drive settings.  Returns false if the lane-set
 * write did not transfer all lane_count bytes.
 */
static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			uint32_t dp_reg_value,
			uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set,
					intel_dp->lane_count);
	if (ret != intel_dp->lane_count)
		return false;

	return true;
}
1686
/* Enable corresponding port and start training pattern 1 (the clock
 * recovery phase of DP link training).  Loops adjusting voltage swing
 * and pre-emphasis per the sink's requests until clock recovery
 * succeeds or the retry limits from the DP spec are exhausted.  The
 * final port register value is cached in intel_dp->DP for
 * intel_dp_complete_link_train(). */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int voltage_tries, loop_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/*
	 * On CPT we have to enable the port in training pattern 1, which
	 * will happen below in intel_dp_set_link_train.  Otherwise, enable
	 * the port and wait for it to become active.
	 */
	if (!HAS_PCH_CPT(dev)) {
		I915_WRITE(intel_dp->output_reg, intel_dp->DP);
		POSTING_READ(intel_dp->output_reg);
		intel_wait_for_vblank(dev, intel_crtc->pipe);
	}

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;

	/* CPT ports (and GEN7 CPU eDP) use the CPT training-pattern field */
	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;
	voltage_tries = 0;
	loop_tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
		uint32_t    signal_levels;


		/* translate train_set[0] into this platform's register encoding */
		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;
		/* Set training pattern 1 */

		udelay(100);
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
			DRM_ERROR("failed to get link status\n");
			break;
		}

		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("clock recovery OK\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count) {
			/* all lanes at max swing: restart from zero, bounded retries */
			++loop_tries;
			if (loop_tries == 5) {
				DRM_DEBUG_KMS("too many full retries, give up\n");
				break;
			}
			memset(intel_dp->train_set, 0, 4);
			voltage_tries = 0;
			continue;
		}

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++voltage_tries;
			if (voltage_tries == 5) {
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
				break;
			}
		} else
			voltage_tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
	}

	intel_dp->DP = DP;
}
1801
/*
 * Channel equalization: the second phase of DP link training, run with
 * training pattern 2.  Falls back to restarting clock recovery when CR
 * is lost or EQ repeatedly fails; on exit (success or not) the training
 * pattern is switched off on both the port and the sink.
 */
static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint32_t    signal_levels;
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];

		/* give up after too many clock-recovery restarts */
		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			break;
		}

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2 |
					     DP_LINK_SCRAMBLING_DISABLE))
			break;

		udelay(400);
		if (!intel_dp_get_link_status(intel_dp, link_status))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp, link_status)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
		++tries;
	}

	/* training done: switch the port and the sink back to normal output */
	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}
1889
/*
 * Take the DP link down and disable the port.  The sequence embeds two
 * documented hardware workarounds (idle pattern before disable, and
 * the IBX transcoder-B select issue), so the statement order matters.
 */
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	/* already disabled: nothing to do */
	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		udelay(100);
	}

	/* switch the port to the idle training pattern before disabling */
	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	msleep(17);

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}

	if (HAS_PCH_IBX(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
		 * set to transcoder B while it's off will prevent the
		 * corresponding HDMI output on transcoder A.
		 *
		 * Combine this with another hardware workaround:
		 * transcoder select bit can only be cleared while the
		 * port is enabled.
		 */
		DP &= ~DP_PIPEB_SELECT;
		I915_WRITE(intel_dp->output_reg, DP);

		/* Changes to enable or select take place the vblank
		 * after being written.
		 */
		if (crtc == NULL) {
			/* We can arrive here never having been attached
			 * to a CRTC, for instance, due to inheriting
			 * random state from the BIOS.
			 *
			 * If the pipe is not running, play safe and
			 * wait for the clocks to stabilise before
			 * continuing.
			 */
			POSTING_READ(intel_dp->output_reg);
			msleep(50);
		} else
			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
	}

	DP &= ~DP_AUDIO_OUTPUT_ENABLE;
	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);
	msleep(intel_dp->panel_power_down_delay);
}
1965
1966static bool
1967intel_dp_get_dpcd(struct intel_dp *intel_dp)
1968{
1969	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
1970					   sizeof(intel_dp->dpcd)) &&
1971	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
1972		return true;
1973	}
1974
1975	return false;
1976}
1977
/*
 * If the sink advertises OUI support, read and log the sink and branch
 * OUIs (debug aid only).  Holds a VDD reference around the AUX reads.
 */
static void
intel_dp_probe_oui(struct intel_dp *intel_dp)
{
	u8 buf[3];

	if (!(intel_dp->dpcd[DP_DOWN_STREAM_PORT_COUNT] & DP_OUI_SUPPORT))
		return;

	ironlake_edp_panel_vdd_on(intel_dp);

	if (intel_dp_aux_native_read_retry(intel_dp, DP_SINK_OUI, buf, 3))
		DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);

	if (intel_dp_aux_native_read_retry(intel_dp, DP_BRANCH_OUI, buf, 3))
		DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
			      buf[0], buf[1], buf[2]);

	ironlake_edp_panel_vdd_off(intel_dp, false);
}
1998
1999static bool
2000intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
2001{
2002	int ret;
2003
2004	ret = intel_dp_aux_native_read_retry(intel_dp,
2005					     DP_DEVICE_SERVICE_IRQ_VECTOR,
2006					     sink_irq_vector, 1);
2007	if (!ret)
2008		return false;
2009
2010	return true;
2011}
2012
static void
intel_dp_handle_test_request(struct intel_dp *intel_dp)
{
	/* No automated test handling implemented; reply with a fixed
	 * response.  NOTE(review): the original comment said "NAK by
	 * default" but the value written is DP_TEST_ACK — confirm
	 * whether DP_TEST_NAK was intended here. */
	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
}
2019
2020/*
2021 * According to DP spec
2022 * 5.1.2:
2023 *  1. Read DPCD
2024 *  2. Configure link according to Receiver Capabilities
2025 *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
2026 *  4. Check link status on receipt of hot-plug interrupt
2027 */
2028
/*
 * Hot-plug/IRQ handler: verify the link is still healthy.  Reads the
 * link status and DPCD (taking the link down on failure), services any
 * sink IRQ vector, and retrains if channel equalization has been lost.
 */
static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	u8 sink_irq_vector;
	u8 link_status[DP_LINK_STATUS_SIZE];

	/* nothing to check while the output is off or unattached */
	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (!intel_dp->base.base.crtc)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Try to read the source of the interrupt */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Clear interrupt source */
		intel_dp_aux_native_write_1(intel_dp,
					    DP_DEVICE_SERVICE_IRQ_VECTOR,
					    sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_DRIVER("CP or sink specific irq unhandled\n");
	}

	if (!intel_channel_eq_ok(intel_dp, link_status)) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      drm_get_encoder_name(&intel_dp->base.base));
		intel_dp_start_link_train(intel_dp);
		intel_dp_complete_link_train(intel_dp);
	}
}
2074
2075static enum drm_connector_status
2076intel_dp_detect_dpcd(struct intel_dp *intel_dp)
2077{
2078	if (intel_dp_get_dpcd(intel_dp))
2079		return connector_status_connected;
2080	return connector_status_disconnected;
2081}
2082
2083static enum drm_connector_status
2084ironlake_dp_detect(struct intel_dp *intel_dp)
2085{
2086	enum drm_connector_status status;
2087
2088	/* Can't disconnect eDP, but you can close the lid... */
2089	if (is_edp(intel_dp)) {
2090		status = intel_panel_detect(intel_dp->base.base.dev);
2091		if (status == connector_status_unknown)
2092			status = connector_status_connected;
2093		return status;
2094	}
2095
2096	return intel_dp_detect_dpcd(intel_dp);
2097}
2098
2099static enum drm_connector_status
2100g4x_dp_detect(struct intel_dp *intel_dp)
2101{
2102	struct drm_device *dev = intel_dp->base.base.dev;
2103	struct drm_i915_private *dev_priv = dev->dev_private;
2104	uint32_t bit;
2105
2106	switch (intel_dp->output_reg) {
2107	case DP_B:
2108		bit = DPB_HOTPLUG_LIVE_STATUS;
2109		break;
2110	case DP_C:
2111		bit = DPC_HOTPLUG_LIVE_STATUS;
2112		break;
2113	case DP_D:
2114		bit = DPD_HOTPLUG_LIVE_STATUS;
2115		break;
2116	default:
2117		return connector_status_unknown;
2118	}
2119
2120	if ((I915_READ(PORT_HOTPLUG_STAT) & bit) == 0)
2121		return connector_status_disconnected;
2122
2123	return intel_dp_detect_dpcd(intel_dp);
2124}
2125
2126static struct edid *
2127intel_dp_get_edid(struct drm_connector *connector, struct i2c_adapter *adapter)
2128{
2129	struct intel_dp *intel_dp = intel_attached_dp(connector);
2130	struct edid	*edid;
2131	int size;
2132
2133	if (is_edp(intel_dp)) {
2134		if (!intel_dp->edid)
2135			return NULL;
2136
2137		size = (intel_dp->edid->extensions + 1) * EDID_LENGTH;
2138		edid = kmalloc(size, GFP_KERNEL);
2139		if (!edid)
2140			return NULL;
2141
2142		memcpy(edid, intel_dp->edid, size);
2143		return edid;
2144	}
2145
2146	edid = drm_get_edid(connector, adapter);
2147	return edid;
2148}
2149
static int
intel_dp_get_edid_modes(struct drm_connector *connector, struct i2c_adapter *adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int	ret;

	if (is_edp(intel_dp)) {
		/* eDP: re-parse the EDID cached at init time (the panel may
		 * be powered down now) and report the mode count recorded
		 * when it was first parsed.
		 * NOTE(review): drm_add_edid_modes()'s return value is
		 * discarded in favor of edid_mode_count — presumably the
		 * two always match for the cached EDID; verify. */
		drm_mode_connector_update_edid_property(connector,
							intel_dp->edid);
		ret = drm_add_edid_modes(connector, intel_dp->edid);
		drm_edid_to_eld(connector,
				intel_dp->edid);
		/* Don't leave display_info pointing at the cached EDID. */
		connector->display_info.raw_edid = NULL;
		return intel_dp->edid_mode_count;
	}

	ret = intel_ddc_get_modes(connector, adapter);
	return ret;
}
2169
2170
2171/**
2172 * Uses CRT_HOTPLUG_EN and CRT_HOTPLUG_STAT to detect DP connection.
2173 *
2174 * \return true if DP port is connected.
2175 * \return false if DP port is disconnected.
2176 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	enum drm_connector_status status;
	struct edid *edid = NULL;

	intel_dp->has_audio = false;

	/* PCH and G4x platforms use different detection schemes. */
	if (HAS_PCH_SPLIT(dev))
		status = ironlake_dp_detect(intel_dp);
	else
		status = g4x_dp_detect(intel_dp);

	DRM_DEBUG_KMS("DPCD: %02hx%02hx%02hx%02hx%02hx%02hx%02hx%02hx\n",
		      intel_dp->dpcd[0], intel_dp->dpcd[1], intel_dp->dpcd[2],
		      intel_dp->dpcd[3], intel_dp->dpcd[4], intel_dp->dpcd[5],
		      intel_dp->dpcd[6], intel_dp->dpcd[7]);

	if (status != connector_status_connected)
		return status;

	intel_dp_probe_oui(intel_dp);

	/* Audio: honor the user's force-audio property if set; otherwise
	 * probe the sink's EDID for audio capability. */
	if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
		intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
	} else {
		edid = intel_dp_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			connector->display_info.raw_edid = NULL;
			kfree(edid);
		}
	}

	return connector_status_connected;
}
2215
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, &intel_dp->adapter);
	if (ret) {
		/* EDID gave us modes; for eDP, remember the preferred one as
		 * the fixed panel mode if we don't have one yet. */
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		/* Report the fixed mode as the panel's one and only mode. */
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}
2262
2263static bool
2264intel_dp_detect_audio(struct drm_connector *connector)
2265{
2266	struct intel_dp *intel_dp = intel_attached_dp(connector);
2267	struct edid *edid;
2268	bool has_audio = false;
2269
2270	edid = intel_dp_get_edid(connector, &intel_dp->adapter);
2271	if (edid) {
2272		has_audio = drm_detect_monitor_audio(edid);
2273
2274		connector->display_info.raw_edid = NULL;
2275		kfree(edid);
2276	}
2277
2278	return has_audio;
2279}
2280
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	/* force-audio: override (or re-detect) the sink's audio capability. */
	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		/* No effective change — skip the modeset below. */
		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	/* broadcast-rgb: toggle limited (16-235) vs. full range output. */
	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	/* Re-run the modeset so the new setting takes effect immediately. */
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}
2335
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	/* Tear down the backlight if this device has an eDP panel on DP-D. */
	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

	drm_sysfs_connector_remove(connector);
	drm_connector_cleanup(connector);
	kfree(connector);
}
2348
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	i2c_del_adapter(&intel_dp->adapter);
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		/* Free the cached EDID and make sure any pending VDD-off
		 * work has run before we turn VDD off synchronously. */
		kfree(intel_dp->edid);
		cancel_delayed_work_sync(&intel_dp->panel_vdd_work);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	kfree(intel_dp);
}
2362
/* Encoder helper vtable: modeset entry points called by the CRTC helper. */
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

/* Connector vtable: detection, property handling and teardown. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

/* Connector helper vtable: mode enumeration and validation. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

/* Encoder vtable: only teardown is needed here. */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};
2388
2389static void
2390intel_dp_hot_plug(struct intel_encoder *intel_encoder)
2391{
2392	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
2393
2394	intel_dp_check_link_status(intel_dp);
2395}
2396
/* Return which DP Port should be selected for Transcoder DP control */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct intel_encoder *encoder;

	/* Find the first DP/eDP encoder driven by this CRTC and return its
	 * port register; -1 if none is attached. */
	for_each_encoder_on_crtc(dev, crtc, encoder) {
		struct intel_dp *intel_dp = enc_to_intel_dp(&encoder->base);

		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}
2414
2415/* check the VBT to see whether the eDP is on DP-D port */
2416bool intel_dpd_is_edp(struct drm_device *dev)
2417{
2418	struct drm_i915_private *dev_priv = dev->dev_private;
2419	struct child_device_config *p_child;
2420	int i;
2421
2422	if (!dev_priv->child_dev_num)
2423		return false;
2424
2425	for (i = 0; i < dev_priv->child_dev_num; i++) {
2426		p_child = dev_priv->child_dev + i;
2427
2428		if (p_child->dvo_port == PORT_IDPD &&
2429		    p_child->device_type == DEVICE_TYPE_eDP)
2430			return true;
2431	}
2432	return false;
2433}
2434
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	/* Expose the user-tweakable connector properties: force-audio and
	 * broadcast RGB (quantization range). */
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}
2441
/* Register a DP (or eDP) encoder/connector pair for the given port
 * register. For eDP, also reads the panel power sequencing timings,
 * probes the DPCD and caches the panel's EDID while VDD is forced on. */
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kzalloc(sizeof(struct intel_dp), GFP_KERNEL);
	if (!intel_dp)
		return;

	intel_dp->output_reg = output_reg;
	/* -1: dpms state unknown until the first dpms call */
	intel_dp->dpms_mode = -1;

	intel_connector = kzalloc(sizeof(struct intel_connector), GFP_KERNEL);
	if (!intel_connector) {
		kfree(intel_dp);
		return;
	}
	intel_encoder = &intel_dp->base;

	/* On PCH platforms, the VBT may route the eDP panel to DP-D. */
	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		/* Deferred VDD-off so back-to-back AUX transactions don't
		 * thrash the panel power sequencer. */
		INIT_DELAYED_WORK(&intel_dp->panel_vdd_work,
				  ironlake_panel_vdd_work);
	}

	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);

	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
	drm_sysfs_connector_add(connector);

	/* Set up the DDC bus. */
	switch (output_reg) {
		case DP_A:
			name = "DPDDC-A";
			break;
		case DP_B:
		case PCH_DP_B:
			dev_priv->hotplug_supported_mask |=
				DPB_HOTPLUG_INT_STATUS;
			name = "DPDDC-B";
			break;
		case DP_C:
		case PCH_DP_C:
			dev_priv->hotplug_supported_mask |=
				DPC_HOTPLUG_INT_STATUS;
			name = "DPDDC-C";
			break;
		case DP_D:
		case PCH_DP_D:
			dev_priv->hotplug_supported_mask |=
				DPD_HOTPLUG_INT_STATUS;
			name = "DPDDC-D";
			break;
	}

	intel_dp_i2c_init(intel_dp, intel_connector, name);

	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq	cur, vbt;
		u32 pp_on, pp_off, pp_div;
		struct edid *edid;

		/* Read the BIOS-programmed panel power sequencer state. */
		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		if (!pp_on || !pp_off || !pp_div) {
			DRM_INFO("bad panel power sequencing delays, disabling panel\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

		/* Use the stricter (larger) of the hardware and VBT delays,
		 * rounded up from 100us units to ms. */
#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		/* AUX requires panel VDD; force it on for the DPCD probe. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}

		/* Cache the panel EDID now, while we can power it up, so
		 * later probes don't need the panel awake. */
		ironlake_edp_panel_vdd_on(intel_dp);
		edid = drm_get_edid(connector, &intel_dp->adapter);
		if (edid) {
			drm_mode_connector_update_edid_property(connector,
								edid);
			intel_dp->edid_mode_count =
				drm_add_edid_modes(connector, edid);
			drm_edid_to_eld(connector, edid);
			intel_dp->edid = edid;
		}
		ironlake_edp_panel_vdd_off(intel_dp, false);
	}

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd.  Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}
2642