cachepc-linux

Fork of AMDESE/linux with modifications for CachePC side-channel attack
git clone https://git.sinitax.com/sinitax/cachepc-linux
Log | Files | Refs | README | LICENSE | sfeed.txt

intel_dp_link_training.c (46427B)


      1/*
      2 * Copyright © 2008-2015 Intel Corporation
      3 *
      4 * Permission is hereby granted, free of charge, to any person obtaining a
      5 * copy of this software and associated documentation files (the "Software"),
      6 * to deal in the Software without restriction, including without limitation
      7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
      8 * and/or sell copies of the Software, and to permit persons to whom the
      9 * Software is furnished to do so, subject to the following conditions:
     10 *
     11 * The above copyright notice and this permission notice (including the next
     12 * paragraph) shall be included in all copies or substantial portions of the
     13 * Software.
     14 *
     15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
     18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
     19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
     20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
     21 * IN THE SOFTWARE.
     22 */
     23
     24#include "i915_drv.h"
     25#include "intel_display_types.h"
     26#include "intel_dp.h"
     27#include "intel_dp_link_training.h"
     28
     29static void intel_dp_reset_lttpr_common_caps(struct intel_dp *intel_dp)
     30{
     31	memset(intel_dp->lttpr_common_caps, 0, sizeof(intel_dp->lttpr_common_caps));
     32}
     33
     34static void intel_dp_reset_lttpr_count(struct intel_dp *intel_dp)
     35{
     36	intel_dp->lttpr_common_caps[DP_PHY_REPEATER_CNT -
     37				    DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] = 0;
     38}
     39
     40static const char *intel_dp_phy_name(enum drm_dp_phy dp_phy,
     41				     char *buf, size_t buf_size)
     42{
     43	if (dp_phy == DP_PHY_DPRX)
     44		snprintf(buf, buf_size, "DPRX");
     45	else
     46		snprintf(buf, buf_size, "LTTPR %d", dp_phy - DP_PHY_LTTPR1 + 1);
     47
     48	return buf;
     49}
     50
     51static u8 *intel_dp_lttpr_phy_caps(struct intel_dp *intel_dp,
     52				   enum drm_dp_phy dp_phy)
     53{
     54	return intel_dp->lttpr_phy_caps[dp_phy - DP_PHY_LTTPR1];
     55}
     56
     57static void intel_dp_read_lttpr_phy_caps(struct intel_dp *intel_dp,
     58					 const u8 dpcd[DP_RECEIVER_CAP_SIZE],
     59					 enum drm_dp_phy dp_phy)
     60{
     61	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
     62	u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
     63	char phy_name[10];
     64
     65	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));
     66
     67	if (drm_dp_read_lttpr_phy_caps(&intel_dp->aux, dpcd, dp_phy, phy_caps) < 0) {
     68		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
     69			    "[ENCODER:%d:%s][%s] failed to read the PHY caps\n",
     70			    encoder->base.base.id, encoder->base.name, phy_name);
     71		return;
     72	}
     73
     74	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
     75		    "[ENCODER:%d:%s][%s] PHY capabilities: %*ph\n",
     76		    encoder->base.base.id, encoder->base.name, phy_name,
     77		    (int)sizeof(intel_dp->lttpr_phy_caps[0]),
     78		    phy_caps);
     79}
     80
     81static bool intel_dp_read_lttpr_common_caps(struct intel_dp *intel_dp,
     82					    const u8 dpcd[DP_RECEIVER_CAP_SIZE])
     83{
     84	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
     85	int ret;
     86
     87	ret = drm_dp_read_lttpr_common_caps(&intel_dp->aux, dpcd,
     88					    intel_dp->lttpr_common_caps);
     89	if (ret < 0)
     90		goto reset_caps;
     91
     92	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
     93		    "[ENCODER:%d:%s] LTTPR common capabilities: %*ph\n",
     94		    encoder->base.base.id, encoder->base.name,
     95		    (int)sizeof(intel_dp->lttpr_common_caps),
     96		    intel_dp->lttpr_common_caps);
     97
     98	/* The minimum value of LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV is 1.4 */
     99	if (intel_dp->lttpr_common_caps[0] < 0x14)
    100		goto reset_caps;
    101
    102	return true;
    103
    104reset_caps:
    105	intel_dp_reset_lttpr_common_caps(intel_dp);
    106	return false;
    107}
    108
    109static bool
    110intel_dp_set_lttpr_transparent_mode(struct intel_dp *intel_dp, bool enable)
    111{
    112	u8 val = enable ? DP_PHY_REPEATER_MODE_TRANSPARENT :
    113			  DP_PHY_REPEATER_MODE_NON_TRANSPARENT;
    114
    115	return drm_dp_dpcd_write(&intel_dp->aux, DP_PHY_REPEATER_MODE, &val, 1) == 1;
    116}
    117
/*
 * Detect LTTPRs and try to switch them to non-transparent link training
 * mode, caching the per-PHY capabilities of each detected LTTPR.
 *
 * Returns the number of detected LTTPRs in non-transparent mode, or 0 if
 * none were detected / non-transparent mode could not be set (in which
 * case transparent mode stays in effect).
 */
static int intel_dp_init_lttpr(struct intel_dp *intel_dp, const u8 dpcd[DP_RECEIVER_CAP_SIZE])
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	int lttpr_count;
	int i;

	if (!intel_dp_read_lttpr_common_caps(intel_dp, dpcd))
		return 0;

	/* Negative count indicates an unsupported number of LTTPRs, see below. */
	lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
	/*
	 * Prevent setting LTTPR transparent mode explicitly if no LTTPRs are
	 * detected as this breaks link training at least on the Dell WD19TB
	 * dock.
	 */
	if (lttpr_count == 0)
		return 0;

	/*
	 * See DP Standard v2.0 3.6.6.1. about the explicit disabling of
	 * non-transparent mode and the disable->enable non-transparent mode
	 * sequence.
	 */
	intel_dp_set_lttpr_transparent_mode(intel_dp, true);

	/*
	 * In case of unsupported number of LTTPRs or failing to switch to
	 * non-transparent mode fall-back to transparent link training mode,
	 * still taking into account any LTTPR common lane- rate/count limits.
	 */
	if (lttpr_count < 0)
		return 0;

	if (!intel_dp_set_lttpr_transparent_mode(intel_dp, false)) {
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Switching to LTTPR non-transparent LT mode failed, fall-back to transparent mode\n",
			    encoder->base.base.id, encoder->base.name);

		intel_dp_set_lttpr_transparent_mode(intel_dp, true);
		intel_dp_reset_lttpr_count(intel_dp);

		return 0;
	}

	/* Cache the PHY caps of every LTTPR we will train individually. */
	for (i = 0; i < lttpr_count; i++)
		intel_dp_read_lttpr_phy_caps(intel_dp, dpcd, DP_PHY_LTTPR(i));

	return lttpr_count;
}
    168
/**
 * intel_dp_init_lttpr_and_dprx_caps - detect LTTPR and DPRX caps, init the LTTPR link training mode
 * @intel_dp: Intel DP struct
 *
 * Read the LTTPR common and DPRX capabilities and switch to non-transparent
 * link training mode if any is detected and read the PHY capabilities for all
 * detected LTTPRs. In case of an LTTPR detection error or if the number of
 * LTTPRs is more than is supported (8), fall back to the no-LTTPR,
 * transparent mode link training mode.
 *
 * Returns:
 *   >0  if LTTPRs were detected and the non-transparent LT mode was set. The
 *       DPRX capabilities are read out.
 *    0  if no LTTPRs or more than 8 LTTPRs were detected or in case of a
 *       detection failure and the transparent LT mode was set. The DPRX
 *       capabilities are read out.
 *   <0  Reading out the DPRX capabilities failed.
 */
int intel_dp_init_lttpr_and_dprx_caps(struct intel_dp *intel_dp)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int lttpr_count = 0;

	/*
	 * Detecting LTTPRs must be avoided on platforms with an AUX timeout
	 * period < 3.2ms. (see DP Standard v2.0, 2.11.2, 3.6.6.1).
	 */
	if (!intel_dp_is_edp(intel_dp) &&
	    (DISPLAY_VER(i915) >= 10 && !IS_GEMINILAKE(i915))) {
		u8 dpcd[DP_RECEIVER_CAP_SIZE];

		/*
		 * NOTE(review): the probe of the LTTPR cap address appears to
		 * verify the sink responds on AUX before the capability reads
		 * below -- confirm against drm_dp_dpcd_probe() semantics.
		 */
		if (drm_dp_dpcd_probe(&intel_dp->aux, DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV))
			return -EIO;

		if (drm_dp_read_dpcd_caps(&intel_dp->aux, dpcd))
			return -EIO;

		lttpr_count = intel_dp_init_lttpr(intel_dp, dpcd);
	}

	/*
	 * The DPTX shall read the DPRX caps after LTTPR detection, so re-read
	 * it here.
	 */
	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd)) {
		/* Don't keep stale LTTPR caps around if the DPRX read failed. */
		intel_dp_reset_lttpr_common_caps(intel_dp);
		return -EIO;
	}

	return lttpr_count;
}
    220
    221static u8 dp_voltage_max(u8 preemph)
    222{
    223	switch (preemph & DP_TRAIN_PRE_EMPHASIS_MASK) {
    224	case DP_TRAIN_PRE_EMPH_LEVEL_0:
    225		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
    226	case DP_TRAIN_PRE_EMPH_LEVEL_1:
    227		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
    228	case DP_TRAIN_PRE_EMPH_LEVEL_2:
    229		return DP_TRAIN_VOLTAGE_SWING_LEVEL_1;
    230	case DP_TRAIN_PRE_EMPH_LEVEL_3:
    231	default:
    232		return DP_TRAIN_VOLTAGE_SWING_LEVEL_0;
    233	}
    234}
    235
    236static u8 intel_dp_lttpr_voltage_max(struct intel_dp *intel_dp,
    237				     enum drm_dp_phy dp_phy)
    238{
    239	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
    240
    241	if (drm_dp_lttpr_voltage_swing_level_3_supported(phy_caps))
    242		return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
    243	else
    244		return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
    245}
    246
    247static u8 intel_dp_lttpr_preemph_max(struct intel_dp *intel_dp,
    248				     enum drm_dp_phy dp_phy)
    249{
    250	const u8 *phy_caps = intel_dp_lttpr_phy_caps(intel_dp, dp_phy);
    251
    252	if (drm_dp_lttpr_pre_emphasis_level_3_supported(phy_caps))
    253		return DP_TRAIN_PRE_EMPH_LEVEL_3;
    254	else
    255		return DP_TRAIN_PRE_EMPH_LEVEL_2;
    256}
    257
    258static bool
    259intel_dp_phy_is_downstream_of_source(struct intel_dp *intel_dp,
    260				     enum drm_dp_phy dp_phy)
    261{
    262	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
    263	int lttpr_count = drm_dp_lttpr_count(intel_dp->lttpr_common_caps);
    264
    265	drm_WARN_ON_ONCE(&i915->drm, lttpr_count <= 0 && dp_phy != DP_PHY_DPRX);
    266
    267	return lttpr_count <= 0 || dp_phy == DP_PHY_LTTPR(lttpr_count - 1);
    268}
    269
    270static u8 intel_dp_phy_voltage_max(struct intel_dp *intel_dp,
    271				   const struct intel_crtc_state *crtc_state,
    272				   enum drm_dp_phy dp_phy)
    273{
    274	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
    275	u8 voltage_max;
    276
    277	/*
    278	 * Get voltage_max from the DPTX_PHY (source or LTTPR) upstream from
    279	 * the DPRX_PHY we train.
    280	 */
    281	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
    282		voltage_max = intel_dp->voltage_max(intel_dp, crtc_state);
    283	else
    284		voltage_max = intel_dp_lttpr_voltage_max(intel_dp, dp_phy + 1);
    285
    286	drm_WARN_ON_ONCE(&i915->drm,
    287			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_2 &&
    288			 voltage_max != DP_TRAIN_VOLTAGE_SWING_LEVEL_3);
    289
    290	return voltage_max;
    291}
    292
    293static u8 intel_dp_phy_preemph_max(struct intel_dp *intel_dp,
    294				   enum drm_dp_phy dp_phy)
    295{
    296	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
    297	u8 preemph_max;
    298
    299	/*
    300	 * Get preemph_max from the DPTX_PHY (source or LTTPR) upstream from
    301	 * the DPRX_PHY we train.
    302	 */
    303	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
    304		preemph_max = intel_dp->preemph_max(intel_dp);
    305	else
    306		preemph_max = intel_dp_lttpr_preemph_max(intel_dp, dp_phy + 1);
    307
    308	drm_WARN_ON_ONCE(&i915->drm,
    309			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_2 &&
    310			 preemph_max != DP_TRAIN_PRE_EMPH_LEVEL_3);
    311
    312	return preemph_max;
    313}
    314
    315static bool has_per_lane_signal_levels(struct intel_dp *intel_dp,
    316				       enum drm_dp_phy dp_phy)
    317{
    318	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
    319
    320	return !intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy) ||
    321		DISPLAY_VER(i915) >= 11;
    322}
    323
    324/* 128b/132b */
    325static u8 intel_dp_get_lane_adjust_tx_ffe_preset(struct intel_dp *intel_dp,
    326						 const struct intel_crtc_state *crtc_state,
    327						 enum drm_dp_phy dp_phy,
    328						 const u8 link_status[DP_LINK_STATUS_SIZE],
    329						 int lane)
    330{
    331	u8 tx_ffe = 0;
    332
    333	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
    334		lane = min(lane, crtc_state->lane_count - 1);
    335		tx_ffe = drm_dp_get_adjust_tx_ffe_preset(link_status, lane);
    336	} else {
    337		for (lane = 0; lane < crtc_state->lane_count; lane++)
    338			tx_ffe = max(tx_ffe, drm_dp_get_adjust_tx_ffe_preset(link_status, lane));
    339	}
    340
    341	return tx_ffe;
    342}
    343
    344/* 8b/10b */
    345static u8 intel_dp_get_lane_adjust_vswing_preemph(struct intel_dp *intel_dp,
    346						  const struct intel_crtc_state *crtc_state,
    347						  enum drm_dp_phy dp_phy,
    348						  const u8 link_status[DP_LINK_STATUS_SIZE],
    349						  int lane)
    350{
    351	u8 v = 0;
    352	u8 p = 0;
    353	u8 voltage_max;
    354	u8 preemph_max;
    355
    356	if (has_per_lane_signal_levels(intel_dp, dp_phy)) {
    357		lane = min(lane, crtc_state->lane_count - 1);
    358
    359		v = drm_dp_get_adjust_request_voltage(link_status, lane);
    360		p = drm_dp_get_adjust_request_pre_emphasis(link_status, lane);
    361	} else {
    362		for (lane = 0; lane < crtc_state->lane_count; lane++) {
    363			v = max(v, drm_dp_get_adjust_request_voltage(link_status, lane));
    364			p = max(p, drm_dp_get_adjust_request_pre_emphasis(link_status, lane));
    365		}
    366	}
    367
    368	preemph_max = intel_dp_phy_preemph_max(intel_dp, dp_phy);
    369	if (p >= preemph_max)
    370		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;
    371
    372	v = min(v, dp_voltage_max(p));
    373
    374	voltage_max = intel_dp_phy_voltage_max(intel_dp, crtc_state, dp_phy);
    375	if (v >= voltage_max)
    376		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;
    377
    378	return v | p;
    379}
    380
    381static u8 intel_dp_get_lane_adjust_train(struct intel_dp *intel_dp,
    382					 const struct intel_crtc_state *crtc_state,
    383					 enum drm_dp_phy dp_phy,
    384					 const u8 link_status[DP_LINK_STATUS_SIZE],
    385					 int lane)
    386{
    387	if (intel_dp_is_uhbr(crtc_state))
    388		return intel_dp_get_lane_adjust_tx_ffe_preset(intel_dp, crtc_state,
    389							      dp_phy, link_status, lane);
    390	else
    391		return intel_dp_get_lane_adjust_vswing_preemph(intel_dp, crtc_state,
    392							       dp_phy, link_status, lane);
    393}
    394
/*
 * Debug-log helpers for the per-lane adjustment requests parsed out of the
 * DPCD link status: TRAIN_REQ_FMT prints one value per lane of a 4 lane
 * link and each *_ARGS macro expands to the 4 matching arguments.
 */
#define TRAIN_REQ_FMT "%d/%d/%d/%d"
/* Requested vswing level for one lane, shifted down to a plain number. */
#define _TRAIN_REQ_VSWING_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_voltage((link_status), (lane)) >> DP_TRAIN_VOLTAGE_SWING_SHIFT)
#define TRAIN_REQ_VSWING_ARGS(link_status) \
	_TRAIN_REQ_VSWING_ARGS(link_status, 0), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 1), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 2), \
	_TRAIN_REQ_VSWING_ARGS(link_status, 3)
/* Requested pre-emphasis level for one lane, shifted down to a plain number. */
#define _TRAIN_REQ_PREEMPH_ARGS(link_status, lane) \
	(drm_dp_get_adjust_request_pre_emphasis((link_status), (lane)) >> DP_TRAIN_PRE_EMPHASIS_SHIFT)
#define TRAIN_REQ_PREEMPH_ARGS(link_status) \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 0), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 1), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 2), \
	_TRAIN_REQ_PREEMPH_ARGS(link_status, 3)
/* Requested TX FFE preset for one lane (128b/132b links). */
#define _TRAIN_REQ_TX_FFE_ARGS(link_status, lane) \
	drm_dp_get_adjust_tx_ffe_preset((link_status), (lane))
#define TRAIN_REQ_TX_FFE_ARGS(link_status) \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 0), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 1), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 2), \
	_TRAIN_REQ_TX_FFE_ARGS(link_status, 3)
    417
    418void
    419intel_dp_get_adjust_train(struct intel_dp *intel_dp,
    420			  const struct intel_crtc_state *crtc_state,
    421			  enum drm_dp_phy dp_phy,
    422			  const u8 link_status[DP_LINK_STATUS_SIZE])
    423{
    424	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
    425	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
    426	char phy_name[10];
    427	int lane;
    428
    429	if (intel_dp_is_uhbr(crtc_state)) {
    430		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
    431			    "TX FFE request: " TRAIN_REQ_FMT "\n",
    432			    encoder->base.base.id, encoder->base.name,
    433			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
    434			    crtc_state->lane_count,
    435			    TRAIN_REQ_TX_FFE_ARGS(link_status));
    436	} else {
    437		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
    438			    "vswing request: " TRAIN_REQ_FMT ", "
    439			    "pre-emphasis request: " TRAIN_REQ_FMT "\n",
    440			    encoder->base.base.id, encoder->base.name,
    441			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
    442			    crtc_state->lane_count,
    443			    TRAIN_REQ_VSWING_ARGS(link_status),
    444			    TRAIN_REQ_PREEMPH_ARGS(link_status));
    445	}
    446
    447	for (lane = 0; lane < 4; lane++)
    448		intel_dp->train_set[lane] =
    449			intel_dp_get_lane_adjust_train(intel_dp, crtc_state,
    450						       dp_phy, link_status, lane);
    451}
    452
    453static int intel_dp_training_pattern_set_reg(struct intel_dp *intel_dp,
    454					     enum drm_dp_phy dp_phy)
    455{
    456	return dp_phy == DP_PHY_DPRX ?
    457		DP_TRAINING_PATTERN_SET :
    458		DP_TRAINING_PATTERN_SET_PHY_REPEATER(dp_phy);
    459}
    460
    461static bool
    462intel_dp_set_link_train(struct intel_dp *intel_dp,
    463			const struct intel_crtc_state *crtc_state,
    464			enum drm_dp_phy dp_phy,
    465			u8 dp_train_pat)
    466{
    467	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
    468	u8 buf[sizeof(intel_dp->train_set) + 1];
    469	int len;
    470
    471	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
    472					       dp_phy, dp_train_pat);
    473
    474	buf[0] = dp_train_pat;
    475	/* DP_TRAINING_LANEx_SET follow DP_TRAINING_PATTERN_SET */
    476	memcpy(buf + 1, intel_dp->train_set, crtc_state->lane_count);
    477	len = crtc_state->lane_count + 1;
    478
    479	return drm_dp_dpcd_write(&intel_dp->aux, reg, buf, len) == len;
    480}
    481
    482static char dp_training_pattern_name(u8 train_pat)
    483{
    484	switch (train_pat) {
    485	case DP_TRAINING_PATTERN_1:
    486	case DP_TRAINING_PATTERN_2:
    487	case DP_TRAINING_PATTERN_3:
    488		return '0' + train_pat;
    489	case DP_TRAINING_PATTERN_4:
    490		return '4';
    491	default:
    492		MISSING_CASE(train_pat);
    493		return '?';
    494	}
    495}
    496
    497void
    498intel_dp_program_link_training_pattern(struct intel_dp *intel_dp,
    499				       const struct intel_crtc_state *crtc_state,
    500				       enum drm_dp_phy dp_phy,
    501				       u8 dp_train_pat)
    502{
    503	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
    504	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
    505	u8 train_pat = intel_dp_training_pattern_symbol(dp_train_pat);
    506	char phy_name[10];
    507
    508	if (train_pat != DP_TRAINING_PATTERN_DISABLE)
    509		drm_dbg_kms(&i915->drm,
    510			    "[ENCODER:%d:%s][%s] Using DP training pattern TPS%c\n",
    511			    encoder->base.base.id, encoder->base.name,
    512			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
    513			    dp_training_pattern_name(train_pat));
    514
    515	intel_dp->set_link_train(intel_dp, crtc_state, dp_train_pat);
    516}
    517
/*
 * Debug-log helpers for the drive settings stored in intel_dp->train_set:
 * TRAIN_SET_FMT prints one "level(max)" entry per lane of a 4 lane link
 * and each *_ARGS macro expands to the 8 matching arguments (value plus
 * "(max)"/"" suffix per lane).
 */
#define TRAIN_SET_FMT "%d%s/%d%s/%d%s/%d%s"
/* Programmed vswing level plus a "(max)" marker when the cap was hit. */
#define _TRAIN_SET_VSWING_ARGS(train_set) \
	((train_set) & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT, \
	(train_set) & DP_TRAIN_MAX_SWING_REACHED ? "(max)" : ""
#define TRAIN_SET_VSWING_ARGS(train_set) \
	_TRAIN_SET_VSWING_ARGS((train_set)[0]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[1]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[2]), \
	_TRAIN_SET_VSWING_ARGS((train_set)[3])
/* Programmed pre-emphasis level plus a "(max)" marker when the cap was hit. */
#define _TRAIN_SET_PREEMPH_ARGS(train_set) \
	((train_set) & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT, \
	(train_set) & DP_TRAIN_MAX_PRE_EMPHASIS_REACHED ? "(max)" : ""
#define TRAIN_SET_PREEMPH_ARGS(train_set) \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[0]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[1]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[2]), \
	_TRAIN_SET_PREEMPH_ARGS((train_set)[3])
/* Programmed TX FFE preset (128b/132b); no max marker is tracked here. */
#define _TRAIN_SET_TX_FFE_ARGS(train_set) \
	((train_set) & DP_TX_FFE_PRESET_VALUE_MASK), ""
#define TRAIN_SET_TX_FFE_ARGS(train_set) \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[0]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[1]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[2]), \
	_TRAIN_SET_TX_FFE_ARGS((train_set)[3])
    542
    543void intel_dp_set_signal_levels(struct intel_dp *intel_dp,
    544				const struct intel_crtc_state *crtc_state,
    545				enum drm_dp_phy dp_phy)
    546{
    547	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
    548	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
    549	char phy_name[10];
    550
    551	if (intel_dp_is_uhbr(crtc_state)) {
    552		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 128b/132b, lanes: %d, "
    553			    "TX FFE presets: " TRAIN_SET_FMT "\n",
    554			    encoder->base.base.id, encoder->base.name,
    555			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
    556			    crtc_state->lane_count,
    557			    TRAIN_SET_TX_FFE_ARGS(intel_dp->train_set));
    558	} else {
    559		drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s][%s] 8b/10b, lanes: %d, "
    560			    "vswing levels: " TRAIN_SET_FMT ", "
    561			    "pre-emphasis levels: " TRAIN_SET_FMT "\n",
    562			    encoder->base.base.id, encoder->base.name,
    563			    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
    564			    crtc_state->lane_count,
    565			    TRAIN_SET_VSWING_ARGS(intel_dp->train_set),
    566			    TRAIN_SET_PREEMPH_ARGS(intel_dp->train_set));
    567	}
    568
    569	if (intel_dp_phy_is_downstream_of_source(intel_dp, dp_phy))
    570		encoder->set_signal_levels(encoder, crtc_state);
    571}
    572
    573static bool
    574intel_dp_reset_link_train(struct intel_dp *intel_dp,
    575			  const struct intel_crtc_state *crtc_state,
    576			  enum drm_dp_phy dp_phy,
    577			  u8 dp_train_pat)
    578{
    579	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
    580	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
    581	return intel_dp_set_link_train(intel_dp, crtc_state, dp_phy, dp_train_pat);
    582}
    583
    584static bool
    585intel_dp_update_link_train(struct intel_dp *intel_dp,
    586			   const struct intel_crtc_state *crtc_state,
    587			   enum drm_dp_phy dp_phy)
    588{
    589	int reg = dp_phy == DP_PHY_DPRX ?
    590			    DP_TRAINING_LANE0_SET :
    591			    DP_TRAINING_LANE0_SET_PHY_REPEATER(dp_phy);
    592	int ret;
    593
    594	intel_dp_set_signal_levels(intel_dp, crtc_state, dp_phy);
    595
    596	ret = drm_dp_dpcd_write(&intel_dp->aux, reg,
    597				intel_dp->train_set, crtc_state->lane_count);
    598
    599	return ret == crtc_state->lane_count;
    600}
    601
    602/* 128b/132b */
    603static bool intel_dp_lane_max_tx_ffe_reached(u8 train_set_lane)
    604{
    605	return (train_set_lane & DP_TX_FFE_PRESET_VALUE_MASK) ==
    606		DP_TX_FFE_PRESET_VALUE_MASK;
    607}
    608
    609/*
    610 * 8b/10b
    611 *
    612 * FIXME: The DP spec is very confusing here, also the Link CTS spec seems to
    613 * have self contradicting tests around this area.
    614 *
    615 * In lieu of better ideas let's just stop when we've reached the max supported
    616 * vswing with its max pre-emphasis, which is either 2+1 or 3+0 depending on
    617 * whether vswing level 3 is supported or not.
    618 */
    619static bool intel_dp_lane_max_vswing_reached(u8 train_set_lane)
    620{
    621	u8 v = (train_set_lane & DP_TRAIN_VOLTAGE_SWING_MASK) >>
    622		DP_TRAIN_VOLTAGE_SWING_SHIFT;
    623	u8 p = (train_set_lane & DP_TRAIN_PRE_EMPHASIS_MASK) >>
    624		DP_TRAIN_PRE_EMPHASIS_SHIFT;
    625
    626	if ((train_set_lane & DP_TRAIN_MAX_SWING_REACHED) == 0)
    627		return false;
    628
    629	if (v + p != 3)
    630		return false;
    631
    632	return true;
    633}
    634
    635static bool intel_dp_link_max_vswing_reached(struct intel_dp *intel_dp,
    636					     const struct intel_crtc_state *crtc_state)
    637{
    638	int lane;
    639
    640	for (lane = 0; lane < crtc_state->lane_count; lane++) {
    641		u8 train_set_lane = intel_dp->train_set[lane];
    642
    643		if (intel_dp_is_uhbr(crtc_state)) {
    644			if (!intel_dp_lane_max_tx_ffe_reached(train_set_lane))
    645				return false;
    646		} else {
    647			if (!intel_dp_lane_max_vswing_reached(train_set_lane))
    648				return false;
    649		}
    650	}
    651
    652	return true;
    653}
    654
/*
 * Prepare link training by configuring the link parameters. On DDI platforms
 * also enable the port here.
 */
static bool
intel_dp_prepare_link_train(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 link_config[2];
	u8 link_bw, rate_select;

	/* Platform hook; optional, hence the NULL check. */
	if (intel_dp->prepare_link_retrain)
		intel_dp->prepare_link_retrain(intel_dp, crtc_state);

	/* Either link_bw or rate_select is non-zero, never both. */
	intel_dp_compute_rate(intel_dp, crtc_state->port_clock,
			      &link_bw, &rate_select);

	if (link_bw)
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Using LINK_BW_SET value %02x\n",
			    encoder->base.base.id, encoder->base.name, link_bw);
	else
		drm_dbg_kms(&i915->drm,
			    "[ENCODER:%d:%s] Using LINK_RATE_SET value %02x\n",
			    encoder->base.base.id, encoder->base.name, rate_select);

	/* Write the link configuration data */
	link_config[0] = link_bw;
	link_config[1] = crtc_state->lane_count;
	if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
		link_config[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
	drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_BW_SET, link_config, 2);

	/* eDP 1.4 rate select method. */
	if (!link_bw)
		drm_dp_dpcd_write(&intel_dp->aux, DP_LINK_RATE_SET,
				  &rate_select, 1);

	/* Downspread control plus the 8b/10b vs 128b/132b channel coding. */
	link_config[0] = crtc_state->vrr.enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0;
	link_config[1] = intel_dp_is_uhbr(crtc_state) ?
		DP_SET_ANSI_128B132B : DP_SET_ANSI_8B10B;
	drm_dp_dpcd_write(&intel_dp->aux, DP_DOWNSPREAD_CTRL, link_config, 2);

	return true;
}
    702
    703static bool intel_dp_adjust_request_changed(const struct intel_crtc_state *crtc_state,
    704					    const u8 old_link_status[DP_LINK_STATUS_SIZE],
    705					    const u8 new_link_status[DP_LINK_STATUS_SIZE])
    706{
    707	int lane;
    708
    709	for (lane = 0; lane < crtc_state->lane_count; lane++) {
    710		u8 old, new;
    711
    712		if (intel_dp_is_uhbr(crtc_state)) {
    713			old = drm_dp_get_adjust_tx_ffe_preset(old_link_status, lane);
    714			new = drm_dp_get_adjust_tx_ffe_preset(new_link_status, lane);
    715		} else {
    716			old = drm_dp_get_adjust_request_voltage(old_link_status, lane) |
    717				drm_dp_get_adjust_request_pre_emphasis(old_link_status, lane);
    718			new = drm_dp_get_adjust_request_voltage(new_link_status, lane) |
    719				drm_dp_get_adjust_request_pre_emphasis(new_link_status, lane);
    720		}
    721
    722		if (old != new)
    723			return true;
    724	}
    725
    726	return false;
    727}
    728
    729void
    730intel_dp_dump_link_status(struct intel_dp *intel_dp, enum drm_dp_phy dp_phy,
    731			  const u8 link_status[DP_LINK_STATUS_SIZE])
    732{
    733	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
    734	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
    735	char phy_name[10];
    736
    737	drm_dbg_kms(&i915->drm,
    738		    "[ENCODER:%d:%s][%s] ln0_1:0x%x ln2_3:0x%x align:0x%x sink:0x%x adj_req0_1:0x%x adj_req2_3:0x%x\n",
    739		    encoder->base.base.id, encoder->base.name,
    740		    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
    741		    link_status[0], link_status[1], link_status[2],
    742		    link_status[3], link_status[4], link_status[5]);
    743}
    744
/*
 * Perform the link training clock recovery phase on the given DP PHY using
 * training pattern 1.
 */
static bool
intel_dp_link_training_clock_recovery(struct intel_dp *intel_dp,
				      const struct intel_crtc_state *crtc_state,
				      enum drm_dp_phy dp_phy)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	u8 old_link_status[DP_LINK_STATUS_SIZE] = {};
	int voltage_tries, cr_tries, max_cr_tries;
	u8 link_status[DP_LINK_STATUS_SIZE];
	bool max_vswing_reached = false;
	char phy_name[10];
	int delay_us;

	/* Interval to wait between programming the settings and the readback. */
	delay_us = drm_dp_read_clock_recovery_delay(&intel_dp->aux,
						    intel_dp->dpcd, dp_phy,
						    intel_dp_is_uhbr(crtc_state));

	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));

	/* clock recovery */
	if (!intel_dp_reset_link_train(intel_dp, crtc_state, dp_phy,
				       DP_TRAINING_PATTERN_1 |
				       DP_LINK_SCRAMBLING_DISABLE)) {
		drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to enable link training\n",
			encoder->base.base.id, encoder->base.name, phy_name);
		return false;
	}

	/*
	 * The DP 1.4 spec defines the max clock recovery retries value
	 * as 10 but for pre-DP 1.4 devices we set a very tolerant
	 * retry limit of 80 (4 voltage levels x 4 preemphasis levels x
	 * x 5 identical voltage retries). Since the previous specs didn't
	 * define a limit and created the possibility of an infinite loop
	 * we want to prevent any sync from triggering that corner case.
	 */
	if (intel_dp->dpcd[DP_DPCD_REV] >= DP_DPCD_REV_14)
		max_cr_tries = 10;
	else
		max_cr_tries = 80;

	voltage_tries = 1;
	for (cr_tries = 0; cr_tries < max_cr_tries; ++cr_tries) {
		/* Give the sink time to lock before sampling its status. */
		usleep_range(delay_us, 2 * delay_us);

		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
						     link_status) < 0) {
			drm_err(&i915->drm, "[ENCODER:%d:%s][%s] Failed to get link status\n",
				encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		if (drm_dp_clock_recovery_ok(link_status, crtc_state->lane_count)) {
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Clock recovery OK\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			return true;
		}

		/* Give up after 5 tries with an unchanged adjustment request. */
		if (voltage_tries == 5) {
			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Same voltage tried 5 times\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		/* Checked one iteration late so the max settings get one try. */
		if (max_vswing_reached) {
			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
			drm_dbg_kms(&i915->drm,
				    "[ENCODER:%d:%s][%s] Max Voltage Swing reached\n",
				    encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		/* Update training set as requested by target */
		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
					  link_status);
		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
			drm_err(&i915->drm,
				"[ENCODER:%d:%s][%s] Failed to update link training\n",
				encoder->base.base.id, encoder->base.name, phy_name);
			return false;
		}

		/* Count consecutive rounds with an identical adjustment request. */
		if (!intel_dp_adjust_request_changed(crtc_state, old_link_status, link_status))
			++voltage_tries;
		else
			voltage_tries = 1;

		memcpy(old_link_status, link_status, sizeof(link_status));

		if (intel_dp_link_max_vswing_reached(intel_dp, crtc_state))
			max_vswing_reached = true;
	}

	intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
	drm_err(&i915->drm,
		"[ENCODER:%d:%s][%s] Failed clock recovery %d times, giving up!\n",
		encoder->base.base.id, encoder->base.name, phy_name, max_cr_tries);

	return false;
}
    853
    854/*
    855 * Pick Training Pattern Sequence (TPS) for channel equalization. 128b/132b TPS2
    856 * for UHBR+, TPS4 for HBR3 or for 1.4 devices that support it, TPS3 for HBR2 or
    857 * 1.2 devices that support it, TPS2 otherwise.
    858 */
    859static u32 intel_dp_training_pattern(struct intel_dp *intel_dp,
    860				     const struct intel_crtc_state *crtc_state,
    861				     enum drm_dp_phy dp_phy)
    862{
    863	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
    864	bool source_tps3, sink_tps3, source_tps4, sink_tps4;
    865
    866	/* UHBR+ use separate 128b/132b TPS2 */
    867	if (intel_dp_is_uhbr(crtc_state))
    868		return DP_TRAINING_PATTERN_2;
    869
    870	/*
    871	 * TPS4 support is mandatory for all downstream devices that
    872	 * support HBR3. There are no known eDP panels that support
    873	 * TPS4 as of Feb 2018 as per VESA eDP_v1.4b_E1 specification.
    874	 * LTTPRs must support TPS4.
    875	 */
    876	source_tps4 = intel_dp_source_supports_tps4(i915);
    877	sink_tps4 = dp_phy != DP_PHY_DPRX ||
    878		    drm_dp_tps4_supported(intel_dp->dpcd);
    879	if (source_tps4 && sink_tps4) {
    880		return DP_TRAINING_PATTERN_4;
    881	} else if (crtc_state->port_clock == 810000) {
    882		if (!source_tps4)
    883			drm_dbg_kms(&i915->drm,
    884				    "8.1 Gbps link rate without source TPS4 support\n");
    885		if (!sink_tps4)
    886			drm_dbg_kms(&i915->drm,
    887				    "8.1 Gbps link rate without sink TPS4 support\n");
    888	}
    889
    890	/*
    891	 * TPS3 support is mandatory for downstream devices that
    892	 * support HBR2. However, not all sinks follow the spec.
    893	 */
    894	source_tps3 = intel_dp_source_supports_tps3(i915);
    895	sink_tps3 = dp_phy != DP_PHY_DPRX ||
    896		    drm_dp_tps3_supported(intel_dp->dpcd);
    897	if (source_tps3 && sink_tps3) {
    898		return  DP_TRAINING_PATTERN_3;
    899	} else if (crtc_state->port_clock >= 540000) {
    900		if (!source_tps3)
    901			drm_dbg_kms(&i915->drm,
    902				    ">=5.4/6.48 Gbps link rate without source TPS3 support\n");
    903		if (!sink_tps3)
    904			drm_dbg_kms(&i915->drm,
    905				    ">=5.4/6.48 Gbps link rate without sink TPS3 support\n");
    906	}
    907
    908	return DP_TRAINING_PATTERN_2;
    909}
    910
    911/*
    912 * Perform the link training channel equalization phase on the given DP PHY
    913 * using one of training pattern 2, 3 or 4 depending on the source and
    914 * sink capabilities.
    915 */
    916static bool
    917intel_dp_link_training_channel_equalization(struct intel_dp *intel_dp,
    918					    const struct intel_crtc_state *crtc_state,
    919					    enum drm_dp_phy dp_phy)
    920{
    921	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
    922	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
    923	int tries;
    924	u32 training_pattern;
    925	u8 link_status[DP_LINK_STATUS_SIZE];
    926	bool channel_eq = false;
    927	char phy_name[10];
    928	int delay_us;
    929
    930	delay_us = drm_dp_read_channel_eq_delay(&intel_dp->aux,
    931						intel_dp->dpcd, dp_phy,
    932						intel_dp_is_uhbr(crtc_state));
    933
    934	intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name));
    935
    936	training_pattern = intel_dp_training_pattern(intel_dp, crtc_state, dp_phy);
    937	/* Scrambling is disabled for TPS2/3 and enabled for TPS4 */
    938	if (training_pattern != DP_TRAINING_PATTERN_4)
    939		training_pattern |= DP_LINK_SCRAMBLING_DISABLE;
    940
    941	/* channel equalization */
    942	if (!intel_dp_set_link_train(intel_dp, crtc_state, dp_phy,
    943				     training_pattern)) {
    944		drm_err(&i915->drm,
    945			"[ENCODER:%d:%s][%s] Failed to start channel equalization\n",
    946			encoder->base.base.id, encoder->base.name,
    947			phy_name);
    948		return false;
    949	}
    950
    951	for (tries = 0; tries < 5; tries++) {
    952		usleep_range(delay_us, 2 * delay_us);
    953
    954		if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, dp_phy,
    955						     link_status) < 0) {
    956			drm_err(&i915->drm,
    957				"[ENCODER:%d:%s][%s] Failed to get link status\n",
    958				encoder->base.base.id, encoder->base.name, phy_name);
    959			break;
    960		}
    961
    962		/* Make sure clock is still ok */
    963		if (!drm_dp_clock_recovery_ok(link_status,
    964					      crtc_state->lane_count)) {
    965			intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
    966			drm_dbg_kms(&i915->drm,
    967				    "[ENCODER:%d:%s][%s] Clock recovery check failed, cannot "
    968				    "continue channel equalization\n",
    969				    encoder->base.base.id, encoder->base.name, phy_name);
    970			break;
    971		}
    972
    973		if (drm_dp_channel_eq_ok(link_status,
    974					 crtc_state->lane_count)) {
    975			channel_eq = true;
    976			drm_dbg_kms(&i915->drm,
    977				    "[ENCODER:%d:%s][%s] Channel EQ done. DP Training successful\n",
    978				    encoder->base.base.id, encoder->base.name, phy_name);
    979			break;
    980		}
    981
    982		/* Update training set as requested by target */
    983		intel_dp_get_adjust_train(intel_dp, crtc_state, dp_phy,
    984					  link_status);
    985		if (!intel_dp_update_link_train(intel_dp, crtc_state, dp_phy)) {
    986			drm_err(&i915->drm,
    987				"[ENCODER:%d:%s][%s] Failed to update link training\n",
    988				encoder->base.base.id, encoder->base.name, phy_name);
    989			break;
    990		}
    991	}
    992
    993	/* Try 5 times, else fail and try at lower BW */
    994	if (tries == 5) {
    995		intel_dp_dump_link_status(intel_dp, dp_phy, link_status);
    996		drm_dbg_kms(&i915->drm,
    997			    "[ENCODER:%d:%s][%s] Channel equalization failed 5 times\n",
    998			    encoder->base.base.id, encoder->base.name, phy_name);
    999	}
   1000
   1001	return channel_eq;
   1002}
   1003
   1004static bool intel_dp_disable_dpcd_training_pattern(struct intel_dp *intel_dp,
   1005						   enum drm_dp_phy dp_phy)
   1006{
   1007	int reg = intel_dp_training_pattern_set_reg(intel_dp, dp_phy);
   1008	u8 val = DP_TRAINING_PATTERN_DISABLE;
   1009
   1010	return drm_dp_dpcd_write(&intel_dp->aux, reg, &val, 1) == 1;
   1011}
   1012
   1013static int
   1014intel_dp_128b132b_intra_hop(struct intel_dp *intel_dp,
   1015			    const struct intel_crtc_state *crtc_state)
   1016{
   1017	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
   1018	u8 sink_status;
   1019	int ret;
   1020
   1021	ret = drm_dp_dpcd_readb(&intel_dp->aux, DP_SINK_STATUS, &sink_status);
   1022	if (ret != 1) {
   1023		drm_dbg_kms(&i915->drm, "Failed to read sink status\n");
   1024		return ret < 0 ? ret : -EIO;
   1025	}
   1026
   1027	return sink_status & DP_INTRA_HOP_AUX_REPLY_INDICATION ? 1 : 0;
   1028}
   1029
   1030/**
   1031 * intel_dp_stop_link_train - stop link training
   1032 * @intel_dp: DP struct
   1033 * @crtc_state: state for CRTC attached to the encoder
   1034 *
   1035 * Stop the link training of the @intel_dp port, disabling the training
   1036 * pattern in the sink's DPCD, and disabling the test pattern symbol
   1037 * generation on the port.
   1038 *
   1039 * What symbols are output on the port after this point is
   1040 * platform specific: On DDI/VLV/CHV platforms it will be the idle pattern
   1041 * with the pipe being disabled, on older platforms it's HW specific if/how an
   1042 * idle pattern is generated, as the pipe is already enabled here for those.
   1043 *
   1044 * This function must be called after intel_dp_start_link_train().
   1045 */
   1046void intel_dp_stop_link_train(struct intel_dp *intel_dp,
   1047			      const struct intel_crtc_state *crtc_state)
   1048{
   1049	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
   1050	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
   1051
   1052	intel_dp->link_trained = true;
   1053
   1054	intel_dp_disable_dpcd_training_pattern(intel_dp, DP_PHY_DPRX);
   1055	intel_dp_program_link_training_pattern(intel_dp, crtc_state, DP_PHY_DPRX,
   1056					       DP_TRAINING_PATTERN_DISABLE);
   1057
   1058	if (intel_dp_is_uhbr(crtc_state) &&
   1059	    wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
   1060		drm_dbg_kms(&i915->drm,
   1061			    "[ENCODER:%d:%s] 128b/132b intra-hop not clearing\n",
   1062			    encoder->base.base.id, encoder->base.name);
   1063	}
   1064}
   1065
   1066static bool
   1067intel_dp_link_train_phy(struct intel_dp *intel_dp,
   1068			const struct intel_crtc_state *crtc_state,
   1069			enum drm_dp_phy dp_phy)
   1070{
   1071	struct intel_connector *connector = intel_dp->attached_connector;
   1072	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
   1073	char phy_name[10];
   1074	bool ret = false;
   1075
   1076	if (!intel_dp_link_training_clock_recovery(intel_dp, crtc_state, dp_phy))
   1077		goto out;
   1078
   1079	if (!intel_dp_link_training_channel_equalization(intel_dp, crtc_state, dp_phy))
   1080		goto out;
   1081
   1082	ret = true;
   1083
   1084out:
   1085	drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
   1086		    "[CONNECTOR:%d:%s][ENCODER:%d:%s][%s] Link Training %s at link rate = %d, lane count = %d\n",
   1087		    connector->base.base.id, connector->base.name,
   1088		    encoder->base.base.id, encoder->base.name,
   1089		    intel_dp_phy_name(dp_phy, phy_name, sizeof(phy_name)),
   1090		    ret ? "passed" : "failed",
   1091		    crtc_state->port_clock, crtc_state->lane_count);
   1092
   1093	return ret;
   1094}
   1095
   1096static void intel_dp_schedule_fallback_link_training(struct intel_dp *intel_dp,
   1097						     const struct intel_crtc_state *crtc_state)
   1098{
   1099	struct intel_connector *intel_connector = intel_dp->attached_connector;
   1100	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
   1101
   1102	if (intel_dp->hobl_active) {
   1103		drm_dbg_kms(&dp_to_i915(intel_dp)->drm,
   1104			    "[ENCODER:%d:%s] Link Training failed with HOBL active, "
   1105			    "not enabling it from now on",
   1106			    encoder->base.base.id, encoder->base.name);
   1107		intel_dp->hobl_failed = true;
   1108	} else if (intel_dp_get_link_train_fallback_values(intel_dp,
   1109							   crtc_state->port_clock,
   1110							   crtc_state->lane_count)) {
   1111		return;
   1112	}
   1113
   1114	/* Schedule a Hotplug Uevent to userspace to start modeset */
   1115	schedule_work(&intel_connector->modeset_retry_work);
   1116}
   1117
   1118/* Perform the link training on all LTTPRs and the DPRX on a link. */
   1119static bool
   1120intel_dp_link_train_all_phys(struct intel_dp *intel_dp,
   1121			     const struct intel_crtc_state *crtc_state,
   1122			     int lttpr_count)
   1123{
   1124	bool ret = true;
   1125	int i;
   1126
   1127	for (i = lttpr_count - 1; i >= 0; i--) {
   1128		enum drm_dp_phy dp_phy = DP_PHY_LTTPR(i);
   1129
   1130		ret = intel_dp_link_train_phy(intel_dp, crtc_state, dp_phy);
   1131		intel_dp_disable_dpcd_training_pattern(intel_dp, dp_phy);
   1132
   1133		if (!ret)
   1134			break;
   1135	}
   1136
   1137	if (ret)
   1138		ret = intel_dp_link_train_phy(intel_dp, crtc_state, DP_PHY_DPRX);
   1139
   1140	if (intel_dp->set_idle_link_train)
   1141		intel_dp->set_idle_link_train(intel_dp, crtc_state);
   1142
   1143	return ret;
   1144}
   1145
   1146/*
   1147 * 128b/132b DP LANEx_EQ_DONE Sequence (DP 2.0 E11 3.5.2.16.1)
   1148 */
   1149static bool
   1150intel_dp_128b132b_lane_eq(struct intel_dp *intel_dp,
   1151			  const struct intel_crtc_state *crtc_state)
   1152{
   1153	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
   1154	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
   1155	u8 link_status[DP_LINK_STATUS_SIZE];
   1156	int delay_us;
   1157	int try, max_tries = 20;
   1158	unsigned long deadline;
   1159	bool timeout = false;
   1160
   1161	/*
   1162	 * Reset signal levels. Start transmitting 128b/132b TPS1.
   1163	 *
   1164	 * Put DPRX and LTTPRs (if any) into intra-hop AUX mode by writing TPS1
   1165	 * in DP_TRAINING_PATTERN_SET.
   1166	 */
   1167	if (!intel_dp_reset_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
   1168				       DP_TRAINING_PATTERN_1)) {
   1169		drm_err(&i915->drm,
   1170			"[ENCODER:%d:%s] Failed to start 128b/132b TPS1\n",
   1171			encoder->base.base.id, encoder->base.name);
   1172		return false;
   1173	}
   1174
   1175	delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
   1176
   1177	/* Read the initial TX FFE settings. */
   1178	if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
   1179		drm_err(&i915->drm,
   1180			"[ENCODER:%d:%s] Failed to read TX FFE presets\n",
   1181			encoder->base.base.id, encoder->base.name);
   1182		return false;
   1183	}
   1184
   1185	/* Update signal levels and training set as requested. */
   1186	intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
   1187	if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
   1188		drm_err(&i915->drm,
   1189			"[ENCODER:%d:%s] Failed to set initial TX FFE settings\n",
   1190			encoder->base.base.id, encoder->base.name);
   1191		return false;
   1192	}
   1193
   1194	/* Start transmitting 128b/132b TPS2. */
   1195	if (!intel_dp_set_link_train(intel_dp, crtc_state, DP_PHY_DPRX,
   1196				     DP_TRAINING_PATTERN_2)) {
   1197		drm_err(&i915->drm,
   1198			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2\n",
   1199			encoder->base.base.id, encoder->base.name);
   1200		return false;
   1201	}
   1202
   1203	/* Time budget for the LANEx_EQ_DONE Sequence */
   1204	deadline = jiffies + msecs_to_jiffies_timeout(400);
   1205
   1206	for (try = 0; try < max_tries; try++) {
   1207		usleep_range(delay_us, 2 * delay_us);
   1208
   1209		/*
   1210		 * The delay may get updated. The transmitter shall read the
   1211		 * delay before link status during link training.
   1212		 */
   1213		delay_us = drm_dp_128b132b_read_aux_rd_interval(&intel_dp->aux);
   1214
   1215		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
   1216			drm_err(&i915->drm,
   1217				"[ENCODER:%d:%s] Failed to read link status\n",
   1218				encoder->base.base.id, encoder->base.name);
   1219			return false;
   1220		}
   1221
   1222		if (drm_dp_128b132b_link_training_failed(link_status)) {
   1223			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1224			drm_err(&i915->drm,
   1225				"[ENCODER:%d:%s] Downstream link training failure\n",
   1226				encoder->base.base.id, encoder->base.name);
   1227			return false;
   1228		}
   1229
   1230		if (drm_dp_128b132b_lane_channel_eq_done(link_status, crtc_state->lane_count)) {
   1231			drm_dbg_kms(&i915->drm,
   1232				    "[ENCODER:%d:%s] Lane channel eq done\n",
   1233				    encoder->base.base.id, encoder->base.name);
   1234			break;
   1235		}
   1236
   1237		if (timeout) {
   1238			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1239			drm_err(&i915->drm,
   1240				"[ENCODER:%d:%s] Lane channel eq timeout\n",
   1241				encoder->base.base.id, encoder->base.name);
   1242			return false;
   1243		}
   1244
   1245		if (time_after(jiffies, deadline))
   1246			timeout = true; /* try one last time after deadline */
   1247
   1248		/* Update signal levels and training set as requested. */
   1249		intel_dp_get_adjust_train(intel_dp, crtc_state, DP_PHY_DPRX, link_status);
   1250		if (!intel_dp_update_link_train(intel_dp, crtc_state, DP_PHY_DPRX)) {
   1251			drm_err(&i915->drm,
   1252				"[ENCODER:%d:%s] Failed to update TX FFE settings\n",
   1253				encoder->base.base.id, encoder->base.name);
   1254			return false;
   1255		}
   1256	}
   1257
   1258	if (try == max_tries) {
   1259		intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1260		drm_err(&i915->drm,
   1261			"[ENCODER:%d:%s] Max loop count reached\n",
   1262			encoder->base.base.id, encoder->base.name);
   1263		return false;
   1264	}
   1265
   1266	for (;;) {
   1267		if (time_after(jiffies, deadline))
   1268			timeout = true; /* try one last time after deadline */
   1269
   1270		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
   1271			drm_err(&i915->drm,
   1272				"[ENCODER:%d:%s] Failed to read link status\n",
   1273				encoder->base.base.id, encoder->base.name);
   1274			return false;
   1275		}
   1276
   1277		if (drm_dp_128b132b_link_training_failed(link_status)) {
   1278			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1279			drm_err(&i915->drm,
   1280				"[ENCODER:%d:%s] Downstream link training failure\n",
   1281				encoder->base.base.id, encoder->base.name);
   1282			return false;
   1283		}
   1284
   1285		if (drm_dp_128b132b_eq_interlane_align_done(link_status)) {
   1286			drm_dbg_kms(&i915->drm,
   1287				    "[ENCODER:%d:%s] Interlane align done\n",
   1288				    encoder->base.base.id, encoder->base.name);
   1289			break;
   1290		}
   1291
   1292		if (timeout) {
   1293			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1294			drm_err(&i915->drm,
   1295				"[ENCODER:%d:%s] Interlane align timeout\n",
   1296				encoder->base.base.id, encoder->base.name);
   1297			return false;
   1298		}
   1299
   1300		usleep_range(2000, 3000);
   1301	}
   1302
   1303	return true;
   1304}
   1305
   1306/*
   1307 * 128b/132b DP LANEx_CDS_DONE Sequence (DP 2.0 E11 3.5.2.16.2)
   1308 */
   1309static bool
   1310intel_dp_128b132b_lane_cds(struct intel_dp *intel_dp,
   1311			   const struct intel_crtc_state *crtc_state,
   1312			   int lttpr_count)
   1313{
   1314	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
   1315	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
   1316	u8 link_status[DP_LINK_STATUS_SIZE];
   1317	unsigned long deadline;
   1318
   1319	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_TRAINING_PATTERN_SET,
   1320			       DP_TRAINING_PATTERN_2_CDS) != 1) {
   1321		drm_err(&i915->drm,
   1322			"[ENCODER:%d:%s] Failed to start 128b/132b TPS2 CDS\n",
   1323			encoder->base.base.id, encoder->base.name);
   1324		return false;
   1325	}
   1326
   1327	/* Time budget for the LANEx_CDS_DONE Sequence */
   1328	deadline = jiffies + msecs_to_jiffies_timeout((lttpr_count + 1) * 20);
   1329
   1330	for (;;) {
   1331		bool timeout = false;
   1332
   1333		if (time_after(jiffies, deadline))
   1334			timeout = true; /* try one last time after deadline */
   1335
   1336		usleep_range(2000, 3000);
   1337
   1338		if (drm_dp_dpcd_read_link_status(&intel_dp->aux, link_status) < 0) {
   1339			drm_err(&i915->drm,
   1340				"[ENCODER:%d:%s] Failed to read link status\n",
   1341				encoder->base.base.id, encoder->base.name);
   1342			return false;
   1343		}
   1344
   1345		if (drm_dp_128b132b_eq_interlane_align_done(link_status) &&
   1346		    drm_dp_128b132b_cds_interlane_align_done(link_status) &&
   1347		    drm_dp_128b132b_lane_symbol_locked(link_status, crtc_state->lane_count)) {
   1348			drm_dbg_kms(&i915->drm,
   1349				    "[ENCODER:%d:%s] CDS interlane align done\n",
   1350				    encoder->base.base.id, encoder->base.name);
   1351			break;
   1352		}
   1353
   1354		if (drm_dp_128b132b_link_training_failed(link_status)) {
   1355			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1356			drm_err(&i915->drm,
   1357				"[ENCODER:%d:%s] Downstream link training failure\n",
   1358				encoder->base.base.id, encoder->base.name);
   1359			return false;
   1360		}
   1361
   1362		if (timeout) {
   1363			intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status);
   1364			drm_err(&i915->drm,
   1365				"[ENCODER:%d:%s] CDS timeout\n",
   1366				encoder->base.base.id, encoder->base.name);
   1367			return false;
   1368		}
   1369	}
   1370
   1371	/* FIXME: Should DP_TRAINING_PATTERN_DISABLE be written first? */
   1372	if (intel_dp->set_idle_link_train)
   1373		intel_dp->set_idle_link_train(intel_dp, crtc_state);
   1374
   1375	return true;
   1376}
   1377
   1378/*
   1379 * 128b/132b link training sequence. (DP 2.0 E11 SCR on link training.)
   1380 */
   1381static bool
   1382intel_dp_128b132b_link_train(struct intel_dp *intel_dp,
   1383			     const struct intel_crtc_state *crtc_state,
   1384			     int lttpr_count)
   1385{
   1386	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
   1387	struct intel_connector *connector = intel_dp->attached_connector;
   1388	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
   1389	bool passed = false;
   1390
   1391	if (wait_for(intel_dp_128b132b_intra_hop(intel_dp, crtc_state) == 0, 500)) {
   1392		drm_err(&i915->drm,
   1393			"[ENCODER:%d:%s] 128b/132b intra-hop not clear\n",
   1394			encoder->base.base.id, encoder->base.name);
   1395		return false;
   1396	}
   1397
   1398	if (intel_dp_128b132b_lane_eq(intel_dp, crtc_state) &&
   1399	    intel_dp_128b132b_lane_cds(intel_dp, crtc_state, lttpr_count))
   1400		passed = true;
   1401
   1402	drm_dbg_kms(&i915->drm,
   1403		    "[CONNECTOR:%d:%s][ENCODER:%d:%s] 128b/132b Link Training %s at link rate = %d, lane count = %d\n",
   1404		    connector->base.base.id, connector->base.name,
   1405		    encoder->base.base.id, encoder->base.name,
   1406		    passed ? "passed" : "failed",
   1407		    crtc_state->port_clock, crtc_state->lane_count);
   1408
   1409	return passed;
   1410}
   1411
   1412/**
   1413 * intel_dp_start_link_train - start link training
   1414 * @intel_dp: DP struct
   1415 * @crtc_state: state for CRTC attached to the encoder
   1416 *
   1417 * Start the link training of the @intel_dp port, scheduling a fallback
   1418 * retraining with reduced link rate/lane parameters if the link training
   1419 * fails.
   1420 * After calling this function intel_dp_stop_link_train() must be called.
   1421 */
   1422void intel_dp_start_link_train(struct intel_dp *intel_dp,
   1423			       const struct intel_crtc_state *crtc_state)
   1424{
   1425	bool passed;
   1426	/*
   1427	 * TODO: Reiniting LTTPRs here won't be needed once proper connector
   1428	 * HW state readout is added.
   1429	 */
   1430	int lttpr_count = intel_dp_init_lttpr_and_dprx_caps(intel_dp);
   1431
   1432	if (lttpr_count < 0)
   1433		/* Still continue with enabling the port and link training. */
   1434		lttpr_count = 0;
   1435
   1436	intel_dp_prepare_link_train(intel_dp, crtc_state);
   1437
   1438	if (intel_dp_is_uhbr(crtc_state))
   1439		passed = intel_dp_128b132b_link_train(intel_dp, crtc_state, lttpr_count);
   1440	else
   1441		passed = intel_dp_link_train_all_phys(intel_dp, crtc_state, lttpr_count);
   1442
   1443	if (!passed)
   1444		intel_dp_schedule_fallback_link_training(intel_dp, crtc_state);
   1445}