1 /* 2 * Copyright © 2012 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 22 * 23 * Authors: 24 * Eugeni Dodonov <eugeni.dodonov@intel.com> 25 * 26 */ 27 28 #include <drm/drm_scdc_helper.h> 29 30 #include "i915_drv.h" 31 #include "i915_trace.h" 32 #include "intel_audio.h" 33 #include "intel_combo_phy.h" 34 #include "intel_connector.h" 35 #include "intel_ddi.h" 36 #include "intel_display_types.h" 37 #include "intel_dp.h" 38 #include "intel_dp_mst.h" 39 #include "intel_dp_link_training.h" 40 #include "intel_dpio_phy.h" 41 #include "intel_dsi.h" 42 #include "intel_fifo_underrun.h" 43 #include "intel_gmbus.h" 44 #include "intel_hdcp.h" 45 #include "intel_hdmi.h" 46 #include "intel_hotplug.h" 47 #include "intel_lspcon.h" 48 #include "intel_panel.h" 49 #include "intel_pps.h" 50 #include "intel_psr.h" 51 #include "intel_sprite.h" 52 #include "intel_tc.h" 53 #include "intel_vdsc.h" 54 #include "intel_vrr.h" 55 56 struct ddi_buf_trans { 57 u32 trans1; /* balance leg enable, de-emph level */ 58 u32 trans2; /* vref sel, vswing */ 59 u8 i_boost; /* SKL: I_boost; valid: 0x0, 0x1, 0x3, 0x7 */ 60 }; 61 62 static const u8 index_to_dp_signal_levels[] = { 63 [0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0, 64 [1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1, 65 [2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2, 66 [3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3, 67 [4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0, 68 [5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1, 69 [6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2, 70 [7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0, 71 [8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1, 72 [9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0, 73 }; 74 75 /* HDMI/DVI modes ignore everything but the last 2 items. 
So we share 76 * them for both DP and FDI transports, allowing those ports to 77 * automatically adapt to HDMI connections as well 78 */ 79 static const struct ddi_buf_trans hsw_ddi_translations_dp[] = { 80 { 0x00FFFFFF, 0x0006000E, 0x0 }, 81 { 0x00D75FFF, 0x0005000A, 0x0 }, 82 { 0x00C30FFF, 0x00040006, 0x0 }, 83 { 0x80AAAFFF, 0x000B0000, 0x0 }, 84 { 0x00FFFFFF, 0x0005000A, 0x0 }, 85 { 0x00D75FFF, 0x000C0004, 0x0 }, 86 { 0x80C30FFF, 0x000B0000, 0x0 }, 87 { 0x00FFFFFF, 0x00040006, 0x0 }, 88 { 0x80D75FFF, 0x000B0000, 0x0 }, 89 }; 90 91 static const struct ddi_buf_trans hsw_ddi_translations_fdi[] = { 92 { 0x00FFFFFF, 0x0007000E, 0x0 }, 93 { 0x00D75FFF, 0x000F000A, 0x0 }, 94 { 0x00C30FFF, 0x00060006, 0x0 }, 95 { 0x00AAAFFF, 0x001E0000, 0x0 }, 96 { 0x00FFFFFF, 0x000F000A, 0x0 }, 97 { 0x00D75FFF, 0x00160004, 0x0 }, 98 { 0x00C30FFF, 0x001E0000, 0x0 }, 99 { 0x00FFFFFF, 0x00060006, 0x0 }, 100 { 0x00D75FFF, 0x001E0000, 0x0 }, 101 }; 102 103 static const struct ddi_buf_trans hsw_ddi_translations_hdmi[] = { 104 /* Idx NT mV d T mV d db */ 105 { 0x00FFFFFF, 0x0006000E, 0x0 },/* 0: 400 400 0 */ 106 { 0x00E79FFF, 0x000E000C, 0x0 },/* 1: 400 500 2 */ 107 { 0x00D75FFF, 0x0005000A, 0x0 },/* 2: 400 600 3.5 */ 108 { 0x00FFFFFF, 0x0005000A, 0x0 },/* 3: 600 600 0 */ 109 { 0x00E79FFF, 0x001D0007, 0x0 },/* 4: 600 750 2 */ 110 { 0x00D75FFF, 0x000C0004, 0x0 },/* 5: 600 900 3.5 */ 111 { 0x00FFFFFF, 0x00040006, 0x0 },/* 6: 800 800 0 */ 112 { 0x80E79FFF, 0x00030002, 0x0 },/* 7: 800 1000 2 */ 113 { 0x00FFFFFF, 0x00140005, 0x0 },/* 8: 850 850 0 */ 114 { 0x00FFFFFF, 0x000C0004, 0x0 },/* 9: 900 900 0 */ 115 { 0x00FFFFFF, 0x001C0003, 0x0 },/* 10: 950 950 0 */ 116 { 0x80FFFFFF, 0x00030002, 0x0 },/* 11: 1000 1000 0 */ 117 }; 118 119 static const struct ddi_buf_trans bdw_ddi_translations_edp[] = { 120 { 0x00FFFFFF, 0x00000012, 0x0 }, 121 { 0x00EBAFFF, 0x00020011, 0x0 }, 122 { 0x00C71FFF, 0x0006000F, 0x0 }, 123 { 0x00AAAFFF, 0x000E000A, 0x0 }, 124 { 0x00FFFFFF, 0x00020011, 0x0 }, 125 { 0x00DB6FFF, 0x0005000F, 0x0 }, 126 { 0x00BEEFFF, 0x000A000C, 0x0 }, 127 { 0x00FFFFFF, 0x0005000F, 0x0 }, 128 { 0x00DB6FFF, 0x000A000C, 0x0 }, 129 }; 130 131 static const struct ddi_buf_trans bdw_ddi_translations_dp[] = { 132 { 0x00FFFFFF, 0x0007000E, 0x0 }, 133 { 0x00D75FFF, 0x000E000A, 0x0 }, 134 { 0x00BEFFFF, 0x00140006, 0x0 }, 135 { 0x80B2CFFF, 0x001B0002, 0x0 }, 136 { 0x00FFFFFF, 0x000E000A, 0x0 }, 137 { 0x00DB6FFF, 0x00160005, 0x0 }, 138 { 0x80C71FFF, 0x001A0002, 0x0 }, 139 { 0x00F7DFFF, 0x00180004, 0x0 }, 140 { 0x80D75FFF, 0x001B0002, 0x0 }, 141 }; 142 143 static const struct ddi_buf_trans bdw_ddi_translations_fdi[] = { 144 { 0x00FFFFFF, 0x0001000E, 0x0 }, 145 { 0x00D75FFF, 0x0004000A, 0x0 }, 146 { 0x00C30FFF, 0x00070006, 0x0 }, 147 { 0x00AAAFFF, 0x000C0000, 0x0 }, 148 { 0x00FFFFFF, 0x0004000A, 0x0 }, 149 { 0x00D75FFF, 0x00090004, 0x0 }, 150 { 0x00C30FFF, 0x000C0000, 0x0 }, 151 { 0x00FFFFFF, 0x00070006, 0x0 }, 152 { 0x00D75FFF, 0x000C0000, 0x0 }, 153 }; 154 155 static const struct ddi_buf_trans bdw_ddi_translations_hdmi[] = { 156 /* Idx NT mV d T mV df db */ 157 { 0x00FFFFFF, 0x0007000E, 0x0 },/* 0: 400 400 0 */ 158 { 0x00D75FFF, 0x000E000A, 0x0 },/* 1: 400 600 3.5 */ 159 { 0x00BEFFFF, 0x00140006, 0x0 },/* 2: 400 800 6 */ 160 { 0x00FFFFFF, 0x0009000D, 0x0 },/* 3: 450 450 0 */ 161 { 0x00FFFFFF, 0x000E000A, 0x0 },/* 4: 600 600 0 */ 162 { 0x00D7FFFF, 0x00140006, 0x0 },/* 5: 600 800 2.5 */ 163 { 0x80CB2FFF, 0x001B0002, 0x0 },/* 6: 600 1000 4.5 */ 164 { 0x00FFFFFF, 0x00140006, 0x0 },/* 7: 800 800 0 */ 165 { 0x80E79FFF, 0x001B0002, 0x0 },/* 8: 
800 1000 2 */ 166 { 0x80FFFFFF, 0x001B0002, 0x0 },/* 9: 1000 1000 0 */ 167 }; 168 169 /* Skylake H and S */ 170 static const struct ddi_buf_trans skl_ddi_translations_dp[] = { 171 { 0x00002016, 0x000000A0, 0x0 }, 172 { 0x00005012, 0x0000009B, 0x0 }, 173 { 0x00007011, 0x00000088, 0x0 }, 174 { 0x80009010, 0x000000C0, 0x1 }, 175 { 0x00002016, 0x0000009B, 0x0 }, 176 { 0x00005012, 0x00000088, 0x0 }, 177 { 0x80007011, 0x000000C0, 0x1 }, 178 { 0x00002016, 0x000000DF, 0x0 }, 179 { 0x80005012, 0x000000C0, 0x1 }, 180 }; 181 182 /* Skylake U */ 183 static const struct ddi_buf_trans skl_u_ddi_translations_dp[] = { 184 { 0x0000201B, 0x000000A2, 0x0 }, 185 { 0x00005012, 0x00000088, 0x0 }, 186 { 0x80007011, 0x000000CD, 0x1 }, 187 { 0x80009010, 0x000000C0, 0x1 }, 188 { 0x0000201B, 0x0000009D, 0x0 }, 189 { 0x80005012, 0x000000C0, 0x1 }, 190 { 0x80007011, 0x000000C0, 0x1 }, 191 { 0x00002016, 0x00000088, 0x0 }, 192 { 0x80005012, 0x000000C0, 0x1 }, 193 }; 194 195 /* Skylake Y */ 196 static const struct ddi_buf_trans skl_y_ddi_translations_dp[] = { 197 { 0x00000018, 0x000000A2, 0x0 }, 198 { 0x00005012, 0x00000088, 0x0 }, 199 { 0x80007011, 0x000000CD, 0x3 }, 200 { 0x80009010, 0x000000C0, 0x3 }, 201 { 0x00000018, 0x0000009D, 0x0 }, 202 { 0x80005012, 0x000000C0, 0x3 }, 203 { 0x80007011, 0x000000C0, 0x3 }, 204 { 0x00000018, 0x00000088, 0x0 }, 205 { 0x80005012, 0x000000C0, 0x3 }, 206 }; 207 208 /* Kabylake H and S */ 209 static const struct ddi_buf_trans kbl_ddi_translations_dp[] = { 210 { 0x00002016, 0x000000A0, 0x0 }, 211 { 0x00005012, 0x0000009B, 0x0 }, 212 { 0x00007011, 0x00000088, 0x0 }, 213 { 0x80009010, 0x000000C0, 0x1 }, 214 { 0x00002016, 0x0000009B, 0x0 }, 215 { 0x00005012, 0x00000088, 0x0 }, 216 { 0x80007011, 0x000000C0, 0x1 }, 217 { 0x00002016, 0x00000097, 0x0 }, 218 { 0x80005012, 0x000000C0, 0x1 }, 219 }; 220 221 /* Kabylake U */ 222 static const struct ddi_buf_trans kbl_u_ddi_translations_dp[] = { 223 { 0x0000201B, 0x000000A1, 0x0 }, 224 { 0x00005012, 0x00000088, 0x0 }, 225 { 0x80007011, 0x000000CD, 0x3 }, 226 { 0x80009010, 0x000000C0, 0x3 }, 227 { 0x0000201B, 0x0000009D, 0x0 }, 228 { 0x80005012, 0x000000C0, 0x3 }, 229 { 0x80007011, 0x000000C0, 0x3 }, 230 { 0x00002016, 0x0000004F, 0x0 }, 231 { 0x80005012, 0x000000C0, 0x3 }, 232 }; 233 234 /* Kabylake Y */ 235 static const struct ddi_buf_trans kbl_y_ddi_translations_dp[] = { 236 { 0x00001017, 0x000000A1, 0x0 }, 237 { 0x00005012, 0x00000088, 0x0 }, 238 { 0x80007011, 0x000000CD, 0x3 }, 239 { 0x8000800F, 0x000000C0, 0x3 }, 240 { 0x00001017, 0x0000009D, 0x0 }, 241 { 0x80005012, 0x000000C0, 0x3 }, 242 { 0x80007011, 0x000000C0, 0x3 }, 243 { 0x00001017, 0x0000004C, 0x0 }, 244 { 0x80005012, 0x000000C0, 0x3 }, 245 }; 246 247 /* 248 * Skylake/Kabylake H and S 249 * eDP 1.4 low vswing translation parameters 250 */ 251 static const struct ddi_buf_trans skl_ddi_translations_edp[] = { 252 { 0x00000018, 0x000000A8, 0x0 }, 253 { 0x00004013, 0x000000A9, 0x0 }, 254 { 0x00007011, 0x000000A2, 0x0 }, 255 { 0x00009010, 0x0000009C, 0x0 }, 256 { 0x00000018, 0x000000A9, 0x0 }, 257 { 0x00006013, 0x000000A2, 0x0 }, 258 { 0x00007011, 0x000000A6, 0x0 }, 259 { 0x00000018, 0x000000AB, 0x0 }, 260 { 0x00007013, 0x0000009F, 0x0 }, 261 { 0x00000018, 0x000000DF, 0x0 }, 262 }; 263 264 /* 265 * Skylake/Kabylake U 266 * eDP 1.4 low vswing translation parameters 267 */ 268 static const struct ddi_buf_trans skl_u_ddi_translations_edp[] = { 269 { 0x00000018, 0x000000A8, 0x0 }, 270 { 0x00004013, 0x000000A9, 0x0 }, 271 { 0x00007011, 0x000000A2, 0x0 }, 272 { 0x00009010, 0x0000009C, 0x0 }, 273 { 
0x00000018, 0x000000A9, 0x0 }, 274 { 0x00006013, 0x000000A2, 0x0 }, 275 { 0x00007011, 0x000000A6, 0x0 }, 276 { 0x00002016, 0x000000AB, 0x0 }, 277 { 0x00005013, 0x0000009F, 0x0 }, 278 { 0x00000018, 0x000000DF, 0x0 }, 279 }; 280 281 /* 282 * Skylake/Kabylake Y 283 * eDP 1.4 low vswing translation parameters 284 */ 285 static const struct ddi_buf_trans skl_y_ddi_translations_edp[] = { 286 { 0x00000018, 0x000000A8, 0x0 }, 287 { 0x00004013, 0x000000AB, 0x0 }, 288 { 0x00007011, 0x000000A4, 0x0 }, 289 { 0x00009010, 0x000000DF, 0x0 }, 290 { 0x00000018, 0x000000AA, 0x0 }, 291 { 0x00006013, 0x000000A4, 0x0 }, 292 { 0x00007011, 0x0000009D, 0x0 }, 293 { 0x00000018, 0x000000A0, 0x0 }, 294 { 0x00006012, 0x000000DF, 0x0 }, 295 { 0x00000018, 0x0000008A, 0x0 }, 296 }; 297 298 /* Skylake/Kabylake U, H and S */ 299 static const struct ddi_buf_trans skl_ddi_translations_hdmi[] = { 300 { 0x00000018, 0x000000AC, 0x0 }, 301 { 0x00005012, 0x0000009D, 0x0 }, 302 { 0x00007011, 0x00000088, 0x0 }, 303 { 0x00000018, 0x000000A1, 0x0 }, 304 { 0x00000018, 0x00000098, 0x0 }, 305 { 0x00004013, 0x00000088, 0x0 }, 306 { 0x80006012, 0x000000CD, 0x1 }, 307 { 0x00000018, 0x000000DF, 0x0 }, 308 { 0x80003015, 0x000000CD, 0x1 }, /* Default */ 309 { 0x80003015, 0x000000C0, 0x1 }, 310 { 0x80000018, 0x000000C0, 0x1 }, 311 }; 312 313 /* Skylake/Kabylake Y */ 314 static const struct ddi_buf_trans skl_y_ddi_translations_hdmi[] = { 315 { 0x00000018, 0x000000A1, 0x0 }, 316 { 0x00005012, 0x000000DF, 0x0 }, 317 { 0x80007011, 0x000000CB, 0x3 }, 318 { 0x00000018, 0x000000A4, 0x0 }, 319 { 0x00000018, 0x0000009D, 0x0 }, 320 { 0x00004013, 0x00000080, 0x0 }, 321 { 0x80006013, 0x000000C0, 0x3 }, 322 { 0x00000018, 0x0000008A, 0x0 }, 323 { 0x80003015, 0x000000C0, 0x3 }, /* Default */ 324 { 0x80003015, 0x000000C0, 0x3 }, 325 { 0x80000018, 0x000000C0, 0x3 }, 326 }; 327 328 struct bxt_ddi_buf_trans { 329 u8 margin; /* swing value */ 330 u8 scale; /* scale value */ 331 u8 enable; /* scale enable */ 332 u8 deemphasis; 333 }; 334 335 static const struct bxt_ddi_buf_trans bxt_ddi_translations_dp[] = { 336 /* Idx NT mV diff db */ 337 { 52, 0x9A, 0, 128, }, /* 0: 400 0 */ 338 { 78, 0x9A, 0, 85, }, /* 1: 400 3.5 */ 339 { 104, 0x9A, 0, 64, }, /* 2: 400 6 */ 340 { 154, 0x9A, 0, 43, }, /* 3: 400 9.5 */ 341 { 77, 0x9A, 0, 128, }, /* 4: 600 0 */ 342 { 116, 0x9A, 0, 85, }, /* 5: 600 3.5 */ 343 { 154, 0x9A, 0, 64, }, /* 6: 600 6 */ 344 { 102, 0x9A, 0, 128, }, /* 7: 800 0 */ 345 { 154, 0x9A, 0, 85, }, /* 8: 800 3.5 */ 346 { 154, 0x9A, 1, 128, }, /* 9: 1200 0 */ 347 }; 348 349 static const struct bxt_ddi_buf_trans bxt_ddi_translations_edp[] = { 350 /* Idx NT mV diff db */ 351 { 26, 0, 0, 128, }, /* 0: 200 0 */ 352 { 38, 0, 0, 112, }, /* 1: 200 1.5 */ 353 { 48, 0, 0, 96, }, /* 2: 200 4 */ 354 { 54, 0, 0, 69, }, /* 3: 200 6 */ 355 { 32, 0, 0, 128, }, /* 4: 250 0 */ 356 { 48, 0, 0, 104, }, /* 5: 250 1.5 */ 357 { 54, 0, 0, 85, }, /* 6: 250 4 */ 358 { 43, 0, 0, 128, }, /* 7: 300 0 */ 359 { 54, 0, 0, 101, }, /* 8: 300 1.5 */ 360 { 48, 0, 0, 128, }, /* 9: 300 0 */ 361 }; 362 363 /* BSpec has 2 recommended values - entries 0 and 8. 364 * Using the entry with higher vswing. 
365 */ 366 static const struct bxt_ddi_buf_trans bxt_ddi_translations_hdmi[] = { 367 /* Idx NT mV diff db */ 368 { 52, 0x9A, 0, 128, }, /* 0: 400 0 */ 369 { 52, 0x9A, 0, 85, }, /* 1: 400 3.5 */ 370 { 52, 0x9A, 0, 64, }, /* 2: 400 6 */ 371 { 42, 0x9A, 0, 43, }, /* 3: 400 9.5 */ 372 { 77, 0x9A, 0, 128, }, /* 4: 600 0 */ 373 { 77, 0x9A, 0, 85, }, /* 5: 600 3.5 */ 374 { 77, 0x9A, 0, 64, }, /* 6: 600 6 */ 375 { 102, 0x9A, 0, 128, }, /* 7: 800 0 */ 376 { 102, 0x9A, 0, 85, }, /* 8: 800 3.5 */ 377 { 154, 0x9A, 1, 128, }, /* 9: 1200 0 */ 378 }; 379 380 struct cnl_ddi_buf_trans { 381 u8 dw2_swing_sel; 382 u8 dw7_n_scalar; 383 u8 dw4_cursor_coeff; 384 u8 dw4_post_cursor_2; 385 u8 dw4_post_cursor_1; 386 }; 387 388 /* Voltage Swing Programming for VccIO 0.85V for DP */ 389 static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_85V[] = { 390 /* NT mV Trans mV db */ 391 { 0xA, 0x5D, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 392 { 0xA, 0x6A, 0x38, 0x00, 0x07 }, /* 350 500 3.1 */ 393 { 0xB, 0x7A, 0x32, 0x00, 0x0D }, /* 350 700 6.0 */ 394 { 0x6, 0x7C, 0x2D, 0x00, 0x12 }, /* 350 900 8.2 */ 395 { 0xA, 0x69, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 396 { 0xB, 0x7A, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 397 { 0x6, 0x7C, 0x30, 0x00, 0x0F }, /* 500 900 5.1 */ 398 { 0xB, 0x7D, 0x3C, 0x00, 0x03 }, /* 650 725 0.9 */ 399 { 0x6, 0x7C, 0x34, 0x00, 0x0B }, /* 600 900 3.5 */ 400 { 0x6, 0x7B, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 401 }; 402 403 /* Voltage Swing Programming for VccIO 0.85V for HDMI */ 404 static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_85V[] = { 405 /* NT mV Trans mV db */ 406 { 0xA, 0x60, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 407 { 0xB, 0x73, 0x36, 0x00, 0x09 }, /* 450 650 3.2 */ 408 { 0x6, 0x7F, 0x31, 0x00, 0x0E }, /* 450 850 5.5 */ 409 { 0xB, 0x73, 0x3F, 0x00, 0x00 }, /* 650 650 0.0 */ 410 { 0x6, 0x7F, 0x37, 0x00, 0x08 }, /* 650 850 2.3 */ 411 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 850 850 0.0 */ 412 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 413 }; 414 415 /* Voltage Swing Programming for VccIO 0.85V for eDP */ 416 static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_85V[] = { 417 /* NT mV Trans mV db */ 418 { 0xA, 0x66, 0x3A, 0x00, 0x05 }, /* 384 500 2.3 */ 419 { 0x0, 0x7F, 0x38, 0x00, 0x07 }, /* 153 200 2.3 */ 420 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 192 250 2.3 */ 421 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 230 300 2.3 */ 422 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 269 350 2.3 */ 423 { 0xA, 0x66, 0x3C, 0x00, 0x03 }, /* 446 500 1.0 */ 424 { 0xB, 0x70, 0x3C, 0x00, 0x03 }, /* 460 600 2.3 */ 425 { 0xC, 0x75, 0x3C, 0x00, 0x03 }, /* 537 700 2.3 */ 426 { 0x2, 0x7F, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 427 }; 428 429 /* Voltage Swing Programming for VccIO 0.95V for DP */ 430 static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_95V[] = { 431 /* NT mV Trans mV db */ 432 { 0xA, 0x5D, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 433 { 0xA, 0x6A, 0x38, 0x00, 0x07 }, /* 350 500 3.1 */ 434 { 0xB, 0x7A, 0x32, 0x00, 0x0D }, /* 350 700 6.0 */ 435 { 0x6, 0x7C, 0x2D, 0x00, 0x12 }, /* 350 900 8.2 */ 436 { 0xA, 0x69, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 437 { 0xB, 0x7A, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 438 { 0x6, 0x7C, 0x30, 0x00, 0x0F }, /* 500 900 5.1 */ 439 { 0xB, 0x7D, 0x3C, 0x00, 0x03 }, /* 650 725 0.9 */ 440 { 0x6, 0x7C, 0x34, 0x00, 0x0B }, /* 600 900 3.5 */ 441 { 0x6, 0x7B, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 442 }; 443 444 /* Voltage Swing Programming for VccIO 0.95V for HDMI */ 445 static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_95V[] = { 446 /* NT mV 
Trans mV db */ 447 { 0xA, 0x5C, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 448 { 0xB, 0x69, 0x37, 0x00, 0x08 }, /* 400 600 3.5 */ 449 { 0x5, 0x76, 0x31, 0x00, 0x0E }, /* 400 800 6.0 */ 450 { 0xA, 0x5E, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 451 { 0xB, 0x69, 0x3F, 0x00, 0x00 }, /* 600 600 0.0 */ 452 { 0xB, 0x79, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 453 { 0x6, 0x7D, 0x32, 0x00, 0x0D }, /* 600 1000 4.4 */ 454 { 0x5, 0x76, 0x3F, 0x00, 0x00 }, /* 800 800 0.0 */ 455 { 0x6, 0x7D, 0x39, 0x00, 0x06 }, /* 800 1000 1.9 */ 456 { 0x6, 0x7F, 0x39, 0x00, 0x06 }, /* 850 1050 1.8 */ 457 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1050 1050 0.0 */ 458 }; 459 460 /* Voltage Swing Programming for VccIO 0.95V for eDP */ 461 static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_95V[] = { 462 /* NT mV Trans mV db */ 463 { 0xA, 0x61, 0x3A, 0x00, 0x05 }, /* 384 500 2.3 */ 464 { 0x0, 0x7F, 0x38, 0x00, 0x07 }, /* 153 200 2.3 */ 465 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 192 250 2.3 */ 466 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 230 300 2.3 */ 467 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 269 350 2.3 */ 468 { 0xA, 0x61, 0x3C, 0x00, 0x03 }, /* 446 500 1.0 */ 469 { 0xB, 0x68, 0x39, 0x00, 0x06 }, /* 460 600 2.3 */ 470 { 0xC, 0x6E, 0x39, 0x00, 0x06 }, /* 537 700 2.3 */ 471 { 0x4, 0x7F, 0x3A, 0x00, 0x05 }, /* 460 600 2.3 */ 472 { 0x2, 0x7F, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 473 }; 474 475 /* Voltage Swing Programming for VccIO 1.05V for DP */ 476 static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_1_05V[] = { 477 /* NT mV Trans mV db */ 478 { 0xA, 0x58, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 479 { 0xB, 0x64, 0x37, 0x00, 0x08 }, /* 400 600 3.5 */ 480 { 0x5, 0x70, 0x31, 0x00, 0x0E }, /* 400 800 6.0 */ 481 { 0x6, 0x7F, 0x2C, 0x00, 0x13 }, /* 400 1050 8.4 */ 482 { 0xB, 0x64, 0x3F, 0x00, 0x00 }, /* 600 600 0.0 */ 483 { 0x5, 0x73, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 484 { 0x6, 0x7F, 0x30, 0x00, 0x0F }, /* 550 1050 5.6 */ 485 { 0x5, 0x76, 0x3E, 0x00, 0x01 }, /* 850 900 0.5 */ 486 { 0x6, 0x7F, 0x36, 0x00, 0x09 }, /* 750 1050 2.9 */ 487 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1050 1050 0.0 */ 488 }; 489 490 /* Voltage Swing Programming for VccIO 1.05V for HDMI */ 491 static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_1_05V[] = { 492 /* NT mV Trans mV db */ 493 { 0xA, 0x58, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 494 { 0xB, 0x64, 0x37, 0x00, 0x08 }, /* 400 600 3.5 */ 495 { 0x5, 0x70, 0x31, 0x00, 0x0E }, /* 400 800 6.0 */ 496 { 0xA, 0x5B, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 497 { 0xB, 0x64, 0x3F, 0x00, 0x00 }, /* 600 600 0.0 */ 498 { 0x5, 0x73, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 499 { 0x6, 0x7C, 0x32, 0x00, 0x0D }, /* 600 1000 4.4 */ 500 { 0x5, 0x70, 0x3F, 0x00, 0x00 }, /* 800 800 0.0 */ 501 { 0x6, 0x7C, 0x39, 0x00, 0x06 }, /* 800 1000 1.9 */ 502 { 0x6, 0x7F, 0x39, 0x00, 0x06 }, /* 850 1050 1.8 */ 503 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1050 1050 0.0 */ 504 }; 505 506 /* Voltage Swing Programming for VccIO 1.05V for eDP */ 507 static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_1_05V[] = { 508 /* NT mV Trans mV db */ 509 { 0xA, 0x5E, 0x3A, 0x00, 0x05 }, /* 384 500 2.3 */ 510 { 0x0, 0x7F, 0x38, 0x00, 0x07 }, /* 153 200 2.3 */ 511 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 192 250 2.3 */ 512 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 230 300 2.3 */ 513 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 269 350 2.3 */ 514 { 0xA, 0x5E, 0x3C, 0x00, 0x03 }, /* 446 500 1.0 */ 515 { 0xB, 0x64, 0x39, 0x00, 0x06 }, /* 460 600 2.3 */ 516 { 0xE, 0x6A, 0x39, 0x00, 0x06 }, /* 537 700 2.3 */ 517 { 0x2, 0x7F, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 518 }; 
519 520 /* icl_combo_phy_ddi_translations */ 521 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_dp_hbr2[] = { 522 /* NT mV Trans mV db */ 523 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 524 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 525 { 0xC, 0x71, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 526 { 0x6, 0x7F, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 527 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 528 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 529 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 530 { 0xC, 0x6C, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 531 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 532 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 533 }; 534 535 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr2[] = { 536 /* NT mV Trans mV db */ 537 { 0x0, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 200 0.0 */ 538 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 200 250 1.9 */ 539 { 0x1, 0x7F, 0x33, 0x00, 0x0C }, /* 200 300 3.5 */ 540 { 0x9, 0x7F, 0x31, 0x00, 0x0E }, /* 200 350 4.9 */ 541 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 250 0.0 */ 542 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 250 300 1.6 */ 543 { 0x9, 0x7F, 0x35, 0x00, 0x0A }, /* 250 350 2.9 */ 544 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 300 300 0.0 */ 545 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 300 350 1.3 */ 546 { 0x9, 0x7F, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 547 }; 548 549 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr3[] = { 550 /* NT mV Trans mV db */ 551 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 552 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 553 { 0xC, 0x71, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 554 { 0x6, 0x7F, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 555 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 556 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 557 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 558 { 0xC, 0x6C, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 559 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 560 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 561 }; 562 563 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_hdmi[] = { 564 /* NT mV Trans mV db */ 565 { 0xA, 0x60, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 566 { 0xB, 0x73, 0x36, 0x00, 0x09 }, /* 450 650 3.2 */ 567 { 0x6, 0x7F, 0x31, 0x00, 0x0E }, /* 450 850 5.5 */ 568 { 0xB, 0x73, 0x3F, 0x00, 0x00 }, /* 650 650 0.0 ALS */ 569 { 0x6, 0x7F, 0x37, 0x00, 0x08 }, /* 650 850 2.3 */ 570 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 850 850 0.0 */ 571 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 572 }; 573 574 static const struct cnl_ddi_buf_trans ehl_combo_phy_ddi_translations_dp[] = { 575 /* NT mV Trans mV db */ 576 { 0xA, 0x33, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 577 { 0xA, 0x47, 0x36, 0x00, 0x09 }, /* 350 500 3.1 */ 578 { 0xC, 0x64, 0x34, 0x00, 0x0B }, /* 350 700 6.0 */ 579 { 0x6, 0x7F, 0x30, 0x00, 0x0F }, /* 350 900 8.2 */ 580 { 0xA, 0x46, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 581 { 0xC, 0x64, 0x38, 0x00, 0x07 }, /* 500 700 2.9 */ 582 { 0x6, 0x7F, 0x32, 0x00, 0x0D }, /* 500 900 5.1 */ 583 { 0xC, 0x61, 0x3F, 0x00, 0x00 }, /* 650 700 0.6 */ 584 { 0x6, 0x7F, 0x38, 0x00, 0x07 }, /* 600 900 3.5 */ 585 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 586 }; 587 588 static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr[] = { 589 /* NT mV Trans mV db */ 590 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 200 0.0 */ 591 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 200 250 1.9 */ 592 { 0x1, 0x7F, 0x33, 0x00, 0x0C }, /* 200 300 3.5 */ 593 { 0xA, 0x35, 
0x36, 0x00, 0x09 }, /* 200 350 4.9 */ 594 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 250 0.0 */ 595 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 250 300 1.6 */ 596 { 0xA, 0x35, 0x35, 0x00, 0x0A }, /* 250 350 2.9 */ 597 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 300 300 0.0 */ 598 { 0xA, 0x35, 0x38, 0x00, 0x07 }, /* 300 350 1.3 */ 599 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 600 }; 601 602 static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr2[] = { 603 /* NT mV Trans mV db */ 604 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 200 0.0 */ 605 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 250 1.9 */ 606 { 0x1, 0x7F, 0x3D, 0x00, 0x02 }, /* 200 300 3.5 */ 607 { 0xA, 0x35, 0x38, 0x00, 0x07 }, /* 200 350 4.9 */ 608 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 250 0.0 */ 609 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 300 1.6 */ 610 { 0xA, 0x35, 0x3A, 0x00, 0x05 }, /* 250 350 2.9 */ 611 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 300 300 0.0 */ 612 { 0xA, 0x35, 0x38, 0x00, 0x07 }, /* 300 350 1.3 */ 613 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 614 }; 615 616 static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_rbr_hbr[] = { 617 /* NT mV Trans mV db */ 618 { 0xA, 0x32, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 619 { 0xA, 0x48, 0x35, 0x00, 0x0A }, /* 350 500 3.1 */ 620 { 0xC, 0x63, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 621 { 0x6, 0x7F, 0x2C, 0x00, 0x13 }, /* 350 900 8.2 */ 622 { 0xA, 0x43, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 623 { 0xC, 0x60, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 624 { 0x6, 0x7F, 0x30, 0x00, 0x0F }, /* 500 900 5.1 */ 625 { 0xC, 0x60, 0x3F, 0x00, 0x00 }, /* 650 700 0.6 */ 626 { 0x6, 0x7F, 0x37, 0x00, 0x08 }, /* 600 900 3.5 */ 627 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 628 }; 629 630 static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_hbr2_hbr3[] = { 631 /* NT mV Trans mV db */ 632 { 0xA, 0x32, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 633 { 0xA, 0x48, 0x35, 0x00, 0x0A }, /* 350 500 3.1 */ 634 { 0xC, 0x63, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 635 { 0x6, 0x7F, 0x2C, 0x00, 0x13 }, /* 350 900 8.2 */ 636 { 0xA, 0x43, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 637 { 0xC, 0x60, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 638 { 0x6, 0x7F, 0x30, 0x00, 0x0F }, /* 500 900 5.1 */ 639 { 0xC, 0x58, 0x3F, 0x00, 0x00 }, /* 650 700 0.6 */ 640 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 641 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 642 }; 643 644 struct icl_mg_phy_ddi_buf_trans { 645 u32 cri_txdeemph_override_11_6; 646 u32 cri_txdeemph_override_5_0; 647 u32 cri_txdeemph_override_17_12; 648 }; 649 650 static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_rbr_hbr[] = { 651 /* Voltage swing pre-emphasis */ 652 { 0x18, 0x00, 0x00 }, /* 0 0 */ 653 { 0x1D, 0x00, 0x05 }, /* 0 1 */ 654 { 0x24, 0x00, 0x0C }, /* 0 2 */ 655 { 0x2B, 0x00, 0x14 }, /* 0 3 */ 656 { 0x21, 0x00, 0x00 }, /* 1 0 */ 657 { 0x2B, 0x00, 0x08 }, /* 1 1 */ 658 { 0x30, 0x00, 0x0F }, /* 1 2 */ 659 { 0x31, 0x00, 0x03 }, /* 2 0 */ 660 { 0x34, 0x00, 0x0B }, /* 2 1 */ 661 { 0x3F, 0x00, 0x00 }, /* 3 0 */ 662 }; 663 664 static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hbr2_hbr3[] = { 665 /* Voltage swing pre-emphasis */ 666 { 0x18, 0x00, 0x00 }, /* 0 0 */ 667 { 0x1D, 0x00, 0x05 }, /* 0 1 */ 668 { 0x24, 0x00, 0x0C }, /* 0 2 */ 669 { 0x2B, 0x00, 0x14 }, /* 0 3 */ 670 { 0x26, 0x00, 0x00 }, /* 1 0 */ 671 { 0x2C, 0x00, 0x07 }, /* 1 1 */ 672 { 0x33, 0x00, 0x0C }, /* 1 2 */ 673 { 0x2E, 0x00, 0x00 }, /* 2 0 */ 674 { 0x36, 0x00, 0x09 }, /* 2 1 */ 675 { 0x3F, 0x00, 0x00 }, /* 
3 0 */ 676 }; 677 678 static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hdmi[] = { 679 /* HDMI Preset VS Pre-emph */ 680 { 0x1A, 0x0, 0x0 }, /* 1 400mV 0dB */ 681 { 0x20, 0x0, 0x0 }, /* 2 500mV 0dB */ 682 { 0x29, 0x0, 0x0 }, /* 3 650mV 0dB */ 683 { 0x32, 0x0, 0x0 }, /* 4 800mV 0dB */ 684 { 0x3F, 0x0, 0x0 }, /* 5 1000mV 0dB */ 685 { 0x3A, 0x0, 0x5 }, /* 6 Full -1.5 dB */ 686 { 0x39, 0x0, 0x6 }, /* 7 Full -1.8 dB */ 687 { 0x38, 0x0, 0x7 }, /* 8 Full -2 dB */ 688 { 0x37, 0x0, 0x8 }, /* 9 Full -2.5 dB */ 689 { 0x36, 0x0, 0x9 }, /* 10 Full -3 dB */ 690 }; 691 692 struct tgl_dkl_phy_ddi_buf_trans { 693 u32 dkl_vswing_control; 694 u32 dkl_preshoot_control; 695 u32 dkl_de_emphasis_control; 696 }; 697 698 static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans[] = { 699 /* VS pre-emp Non-trans mV Pre-emph dB */ 700 { 0x7, 0x0, 0x00 }, /* 0 0 400mV 0 dB */ 701 { 0x5, 0x0, 0x05 }, /* 0 1 400mV 3.5 dB */ 702 { 0x2, 0x0, 0x0B }, /* 0 2 400mV 6 dB */ 703 { 0x0, 0x0, 0x18 }, /* 0 3 400mV 9.5 dB */ 704 { 0x5, 0x0, 0x00 }, /* 1 0 600mV 0 dB */ 705 { 0x2, 0x0, 0x08 }, /* 1 1 600mV 3.5 dB */ 706 { 0x0, 0x0, 0x14 }, /* 1 2 600mV 6 dB */ 707 { 0x2, 0x0, 0x00 }, /* 2 0 800mV 0 dB */ 708 { 0x0, 0x0, 0x0B }, /* 2 1 800mV 3.5 dB */ 709 { 0x0, 0x0, 0x00 }, /* 3 0 1200mV 0 dB HDMI default */ 710 }; 711 712 static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans_hbr2[] = { 713 /* VS pre-emp Non-trans mV Pre-emph dB */ 714 { 0x7, 0x0, 0x00 }, /* 0 0 400mV 0 dB */ 715 { 0x5, 0x0, 0x05 }, /* 0 1 400mV 3.5 dB */ 716 { 0x2, 0x0, 0x0B }, /* 0 2 400mV 6 dB */ 717 { 0x0, 0x0, 0x19 }, /* 0 3 400mV 9.5 dB */ 718 { 0x5, 0x0, 0x00 }, /* 1 0 600mV 0 dB */ 719 { 0x2, 0x0, 0x08 }, /* 1 1 600mV 3.5 dB */ 720 { 0x0, 0x0, 0x14 }, /* 1 2 600mV 6 dB */ 721 { 0x2, 0x0, 0x00 }, /* 2 0 800mV 0 dB */ 722 { 0x0, 0x0, 0x0B }, /* 2 1 800mV 3.5 dB */ 723 { 0x0, 0x0, 0x00 }, /* 3 0 1200mV 0 dB HDMI default */ 724 }; 725 726 static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_hdmi_ddi_trans[] = { 727 /* HDMI Preset VS Pre-emph */ 728 { 0x7, 0x0, 0x0 }, /* 1 400mV 0dB */ 729 { 0x6, 0x0, 0x0 }, /* 2 500mV 0dB */ 730 { 0x4, 0x0, 0x0 }, /* 3 650mV 0dB */ 731 { 0x2, 0x0, 0x0 }, /* 4 800mV 0dB */ 732 { 0x0, 0x0, 0x0 }, /* 5 1000mV 0dB */ 733 { 0x0, 0x0, 0x5 }, /* 6 Full -1.5 dB */ 734 { 0x0, 0x0, 0x6 }, /* 7 Full -1.8 dB */ 735 { 0x0, 0x0, 0x7 }, /* 8 Full -2 dB */ 736 { 0x0, 0x0, 0x8 }, /* 9 Full -2.5 dB */ 737 { 0x0, 0x0, 0xA }, /* 10 Full -3 dB */ 738 }; 739 740 static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr[] = { 741 /* NT mV Trans mV db */ 742 { 0xA, 0x32, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 743 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 744 { 0xC, 0x71, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 745 { 0x6, 0x7D, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 746 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 747 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 748 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 749 { 0xC, 0x6C, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 750 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 751 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 752 }; 753 754 static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr2[] = { 755 /* NT mV Trans mV db */ 756 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 757 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 758 { 0xC, 0x63, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 759 { 0x6, 0x7F, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 760 { 0xA, 0x47, 0x3F, 0x00, 0x00 }, /* 500 
500 0.0 */ 761 { 0xC, 0x63, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 762 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 763 { 0xC, 0x61, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 764 { 0x6, 0x7B, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 765 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 766 }; 767 768 static const struct cnl_ddi_buf_trans tgl_uy_combo_phy_ddi_translations_dp_hbr2[] = { 769 /* NT mV Trans mV db */ 770 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 771 { 0xA, 0x4F, 0x36, 0x00, 0x09 }, /* 350 500 3.1 */ 772 { 0xC, 0x60, 0x32, 0x00, 0x0D }, /* 350 700 6.0 */ 773 { 0xC, 0x7F, 0x2D, 0x00, 0x12 }, /* 350 900 8.2 */ 774 { 0xC, 0x47, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 775 { 0xC, 0x6F, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 776 { 0x6, 0x7D, 0x32, 0x00, 0x0D }, /* 500 900 5.1 */ 777 { 0x6, 0x60, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 778 { 0x6, 0x7F, 0x34, 0x00, 0x0B }, /* 600 900 3.5 */ 779 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 780 }; 781 782 /* 783 * Cloned the HOBL entry to comply with the voltage and pre-emphasis entries 784 * that DisplayPort specification requires 785 */ 786 static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_edp_hbr2_hobl[] = { 787 /* VS pre-emp */ 788 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 0 */ 789 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 1 */ 790 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 2 */ 791 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 3 */ 792 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1 0 */ 793 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1 1 */ 794 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1 2 */ 795 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 2 0 */ 796 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 2 1 */ 797 }; 798 799 static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr[] = { 800 /* NT mV Trans mV db */ 801 { 0xA, 0x2F, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 802 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 803 { 0xC, 0x63, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 804 { 0x6, 0x7D, 0x2A, 0x00, 0x15 }, /* 350 900 8.2 */ 805 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 806 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 807 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 808 { 0xC, 0x6E, 0x3E, 0x00, 0x01 }, /* 650 700 0.6 */ 809 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 810 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 811 }; 812 813 static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr2_hbr3[] = { 814 /* NT mV Trans mV db */ 815 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 816 { 0xA, 0x50, 0x38, 0x00, 0x07 }, /* 350 500 3.1 */ 817 { 0xC, 0x61, 0x33, 0x00, 0x0C }, /* 350 700 6.0 */ 818 { 0x6, 0x7F, 0x2E, 0x00, 0x11 }, /* 350 900 8.2 */ 819 { 0xA, 0x47, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 820 { 0xC, 0x5F, 0x38, 0x00, 0x07 }, /* 500 700 2.9 */ 821 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 822 { 0xC, 0x5F, 0x3F, 0x00, 0x00 }, /* 650 700 0.6 */ 823 { 0x6, 0x7E, 0x36, 0x00, 0x09 }, /* 600 900 3.5 */ 824 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 825 }; 826 827 static bool is_hobl_buf_trans(const struct cnl_ddi_buf_trans *table) 828 { 829 return table == tgl_combo_phy_ddi_translations_edp_hbr2_hobl; 830 } 831 832 static const struct ddi_buf_trans * 833 bdw_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 834 { 835 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 836 837 if (dev_priv->vbt.edp.low_vswing) { 838 *n_entries = ARRAY_SIZE(bdw_ddi_translations_edp); 839 return bdw_ddi_translations_edp; 840 } else { 841 *n_entries = 
ARRAY_SIZE(bdw_ddi_translations_dp); 842 return bdw_ddi_translations_dp; 843 } 844 } 845 846 static const struct ddi_buf_trans * 847 skl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 848 { 849 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 850 851 if (IS_SKL_ULX(dev_priv)) { 852 *n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp); 853 return skl_y_ddi_translations_dp; 854 } else if (IS_SKL_ULT(dev_priv)) { 855 *n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp); 856 return skl_u_ddi_translations_dp; 857 } else { 858 *n_entries = ARRAY_SIZE(skl_ddi_translations_dp); 859 return skl_ddi_translations_dp; 860 } 861 } 862 863 static const struct ddi_buf_trans * 864 kbl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 865 { 866 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 867 868 if (IS_KBL_ULX(dev_priv) || 869 IS_CFL_ULX(dev_priv) || 870 IS_CML_ULX(dev_priv)) { 871 *n_entries = ARRAY_SIZE(kbl_y_ddi_translations_dp); 872 return kbl_y_ddi_translations_dp; 873 } else if (IS_KBL_ULT(dev_priv) || 874 IS_CFL_ULT(dev_priv) || 875 IS_CML_ULT(dev_priv)) { 876 *n_entries = ARRAY_SIZE(kbl_u_ddi_translations_dp); 877 return kbl_u_ddi_translations_dp; 878 } else { 879 *n_entries = ARRAY_SIZE(kbl_ddi_translations_dp); 880 return kbl_ddi_translations_dp; 881 } 882 } 883 884 static const struct ddi_buf_trans * 885 skl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 886 { 887 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 888 889 if (dev_priv->vbt.edp.low_vswing) { 890 if (IS_SKL_ULX(dev_priv) || 891 IS_KBL_ULX(dev_priv) || 892 IS_CFL_ULX(dev_priv) || 893 IS_CML_ULX(dev_priv)) { 894 *n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp); 895 return skl_y_ddi_translations_edp; 896 } else if (IS_SKL_ULT(dev_priv) || 897 IS_KBL_ULT(dev_priv) || 898 IS_CFL_ULT(dev_priv) || 899 IS_CML_ULT(dev_priv)) { 900 *n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp); 901 return skl_u_ddi_translations_edp; 902 } else { 903 *n_entries = ARRAY_SIZE(skl_ddi_translations_edp); 904 return skl_ddi_translations_edp; 905 } 906 } 907 908 if (IS_KABYLAKE(dev_priv) || 909 IS_COFFEELAKE(dev_priv) || 910 IS_COMETLAKE(dev_priv)) 911 return kbl_get_buf_trans_dp(encoder, n_entries); 912 else 913 return skl_get_buf_trans_dp(encoder, n_entries); 914 } 915 916 static const struct ddi_buf_trans * 917 skl_get_buf_trans_hdmi(struct drm_i915_private *dev_priv, int *n_entries) 918 { 919 if (IS_SKL_ULX(dev_priv) || 920 IS_KBL_ULX(dev_priv) || 921 IS_CFL_ULX(dev_priv) || 922 IS_CML_ULX(dev_priv)) { 923 *n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi); 924 return skl_y_ddi_translations_hdmi; 925 } else { 926 *n_entries = ARRAY_SIZE(skl_ddi_translations_hdmi); 927 return skl_ddi_translations_hdmi; 928 } 929 } 930 931 static int skl_buf_trans_num_entries(enum port port, int n_entries) 932 { 933 /* Only DDIA and DDIE can select the 10th register with DP */ 934 if (port == PORT_A || port == PORT_E) 935 return min(n_entries, 10); 936 else 937 return min(n_entries, 9); 938 } 939 940 static const struct ddi_buf_trans * 941 intel_ddi_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 942 { 943 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 944 945 if (IS_KABYLAKE(dev_priv) || 946 IS_COFFEELAKE(dev_priv) || 947 IS_COMETLAKE(dev_priv)) { 948 const struct ddi_buf_trans *ddi_translations = 949 kbl_get_buf_trans_dp(encoder, n_entries); 950 *n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries); 951 return ddi_translations; 952 
} else if (IS_SKYLAKE(dev_priv)) { 953 const struct ddi_buf_trans *ddi_translations = 954 skl_get_buf_trans_dp(encoder, n_entries); 955 *n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries); 956 return ddi_translations; 957 } else if (IS_BROADWELL(dev_priv)) { 958 *n_entries = ARRAY_SIZE(bdw_ddi_translations_dp); 959 return bdw_ddi_translations_dp; 960 } else if (IS_HASWELL(dev_priv)) { 961 *n_entries = ARRAY_SIZE(hsw_ddi_translations_dp); 962 return hsw_ddi_translations_dp; 963 } 964 965 *n_entries = 0; 966 return NULL; 967 } 968 969 static const struct ddi_buf_trans * 970 intel_ddi_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 971 { 972 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 973 974 if (IS_GEN9_BC(dev_priv)) { 975 const struct ddi_buf_trans *ddi_translations = 976 skl_get_buf_trans_edp(encoder, n_entries); 977 *n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries); 978 return ddi_translations; 979 } else if (IS_BROADWELL(dev_priv)) { 980 return bdw_get_buf_trans_edp(encoder, n_entries); 981 } else if (IS_HASWELL(dev_priv)) { 982 *n_entries = ARRAY_SIZE(hsw_ddi_translations_dp); 983 return hsw_ddi_translations_dp; 984 } 985 986 *n_entries = 0; 987 return NULL; 988 } 989 990 static const struct ddi_buf_trans * 991 intel_ddi_get_buf_trans_fdi(struct drm_i915_private *dev_priv, 992 int *n_entries) 993 { 994 if (IS_BROADWELL(dev_priv)) { 995 *n_entries = ARRAY_SIZE(bdw_ddi_translations_fdi); 996 return bdw_ddi_translations_fdi; 997 } else if (IS_HASWELL(dev_priv)) { 998 *n_entries = ARRAY_SIZE(hsw_ddi_translations_fdi); 999 return hsw_ddi_translations_fdi; 1000 } 1001 1002 *n_entries = 0; 1003 return NULL; 1004 } 1005 1006 static const struct ddi_buf_trans * 1007 intel_ddi_get_buf_trans_hdmi(struct intel_encoder *encoder, 1008 int *n_entries) 1009 { 1010 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1011 1012 if (IS_GEN9_BC(dev_priv)) { 1013 return skl_get_buf_trans_hdmi(dev_priv, n_entries); 1014 } else if (IS_BROADWELL(dev_priv)) { 1015 *n_entries = ARRAY_SIZE(bdw_ddi_translations_hdmi); 1016 return bdw_ddi_translations_hdmi; 1017 } else if (IS_HASWELL(dev_priv)) { 1018 *n_entries = ARRAY_SIZE(hsw_ddi_translations_hdmi); 1019 return hsw_ddi_translations_hdmi; 1020 } 1021 1022 *n_entries = 0; 1023 return NULL; 1024 } 1025 1026 static const struct bxt_ddi_buf_trans * 1027 bxt_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 1028 { 1029 *n_entries = ARRAY_SIZE(bxt_ddi_translations_dp); 1030 return bxt_ddi_translations_dp; 1031 } 1032 1033 static const struct bxt_ddi_buf_trans * 1034 bxt_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 1035 { 1036 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1037 1038 if (dev_priv->vbt.edp.low_vswing) { 1039 *n_entries = ARRAY_SIZE(bxt_ddi_translations_edp); 1040 return bxt_ddi_translations_edp; 1041 } 1042 1043 return bxt_get_buf_trans_dp(encoder, n_entries); 1044 } 1045 1046 static const struct bxt_ddi_buf_trans * 1047 bxt_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries) 1048 { 1049 *n_entries = ARRAY_SIZE(bxt_ddi_translations_hdmi); 1050 return bxt_ddi_translations_hdmi; 1051 } 1052 1053 static const struct cnl_ddi_buf_trans * 1054 cnl_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries) 1055 { 1056 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1057 u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK; 1058 1059 if (voltage == VOLTAGE_INFO_0_85V) { 1060 
*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_85V); 1061 return cnl_ddi_translations_hdmi_0_85V; 1062 } else if (voltage == VOLTAGE_INFO_0_95V) { 1063 *n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_95V); 1064 return cnl_ddi_translations_hdmi_0_95V; 1065 } else if (voltage == VOLTAGE_INFO_1_05V) { 1066 *n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_1_05V); 1067 return cnl_ddi_translations_hdmi_1_05V; 1068 } else { 1069 *n_entries = 1; /* shut up gcc */ 1070 MISSING_CASE(voltage); 1071 } 1072 return NULL; 1073 } 1074 1075 static const struct cnl_ddi_buf_trans * 1076 cnl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 1077 { 1078 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1079 u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK; 1080 1081 if (voltage == VOLTAGE_INFO_0_85V) { 1082 *n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_85V); 1083 return cnl_ddi_translations_dp_0_85V; 1084 } else if (voltage == VOLTAGE_INFO_0_95V) { 1085 *n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_95V); 1086 return cnl_ddi_translations_dp_0_95V; 1087 } else if (voltage == VOLTAGE_INFO_1_05V) { 1088 *n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_1_05V); 1089 return cnl_ddi_translations_dp_1_05V; 1090 } else { 1091 *n_entries = 1; /* shut up gcc */ 1092 MISSING_CASE(voltage); 1093 } 1094 return NULL; 1095 } 1096 1097 static const struct cnl_ddi_buf_trans * 1098 cnl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 1099 { 1100 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1101 u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK; 1102 1103 if (dev_priv->vbt.edp.low_vswing) { 1104 if (voltage == VOLTAGE_INFO_0_85V) { 1105 *n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_85V); 1106 return cnl_ddi_translations_edp_0_85V; 1107 } else if (voltage == VOLTAGE_INFO_0_95V) { 1108 *n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_95V); 1109 return cnl_ddi_translations_edp_0_95V; 1110 } else if (voltage == VOLTAGE_INFO_1_05V) { 1111 *n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_1_05V); 1112 return cnl_ddi_translations_edp_1_05V; 1113 } else { 1114 *n_entries = 1; /* shut up gcc */ 1115 MISSING_CASE(voltage); 1116 } 1117 return NULL; 1118 } else { 1119 return cnl_get_buf_trans_dp(encoder, n_entries); 1120 } 1121 } 1122 1123 static const struct cnl_ddi_buf_trans * 1124 icl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1125 const struct intel_crtc_state *crtc_state, 1126 int *n_entries) 1127 { 1128 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1129 return icl_combo_phy_ddi_translations_hdmi; 1130 } 1131 1132 static const struct cnl_ddi_buf_trans * 1133 icl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1134 const struct intel_crtc_state *crtc_state, 1135 int *n_entries) 1136 { 1137 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2); 1138 return icl_combo_phy_ddi_translations_dp_hbr2; 1139 } 1140 1141 static const struct cnl_ddi_buf_trans * 1142 icl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1143 const struct intel_crtc_state *crtc_state, 1144 int *n_entries) 1145 { 1146 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1147 1148 if (crtc_state->port_clock > 540000) { 1149 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3); 1150 return icl_combo_phy_ddi_translations_edp_hbr3; 1151 } else if (dev_priv->vbt.edp.low_vswing) { 1152 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2); 1153 return 
icl_combo_phy_ddi_translations_edp_hbr2; 1154 } else if (IS_DG1(dev_priv) && crtc_state->port_clock > 270000) { 1155 *n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_hbr2_hbr3); 1156 return dg1_combo_phy_ddi_translations_dp_hbr2_hbr3; 1157 } else if (IS_DG1(dev_priv)) { 1158 *n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_rbr_hbr); 1159 return dg1_combo_phy_ddi_translations_dp_rbr_hbr; 1160 } 1161 1162 return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1163 } 1164 1165 static const struct cnl_ddi_buf_trans * 1166 icl_get_combo_buf_trans(struct intel_encoder *encoder, 1167 const struct intel_crtc_state *crtc_state, 1168 int *n_entries) 1169 { 1170 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1171 return icl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1172 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1173 return icl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1174 else 1175 return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1176 } 1177 1178 static const struct icl_mg_phy_ddi_buf_trans * 1179 icl_get_mg_buf_trans_hdmi(struct intel_encoder *encoder, 1180 const struct intel_crtc_state *crtc_state, 1181 int *n_entries) 1182 { 1183 *n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hdmi); 1184 return icl_mg_phy_ddi_translations_hdmi; 1185 } 1186 1187 static const struct icl_mg_phy_ddi_buf_trans * 1188 icl_get_mg_buf_trans_dp(struct intel_encoder *encoder, 1189 const struct intel_crtc_state *crtc_state, 1190 int *n_entries) 1191 { 1192 if (crtc_state->port_clock > 270000) { 1193 *n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hbr2_hbr3); 1194 return icl_mg_phy_ddi_translations_hbr2_hbr3; 1195 } else { 1196 *n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_rbr_hbr); 1197 return icl_mg_phy_ddi_translations_rbr_hbr; 1198 } 1199 } 1200 1201 static const struct icl_mg_phy_ddi_buf_trans * 1202 icl_get_mg_buf_trans(struct intel_encoder *encoder, 1203 const struct intel_crtc_state *crtc_state, 1204 int *n_entries) 1205 { 1206 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1207 return icl_get_mg_buf_trans_hdmi(encoder, crtc_state, n_entries); 1208 else 1209 return icl_get_mg_buf_trans_dp(encoder, crtc_state, n_entries); 1210 } 1211 1212 static const struct cnl_ddi_buf_trans * 1213 ehl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1214 const struct intel_crtc_state *crtc_state, 1215 int *n_entries) 1216 { 1217 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1218 return icl_combo_phy_ddi_translations_hdmi; 1219 } 1220 1221 static const struct cnl_ddi_buf_trans * 1222 ehl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1223 const struct intel_crtc_state *crtc_state, 1224 int *n_entries) 1225 { 1226 *n_entries = ARRAY_SIZE(ehl_combo_phy_ddi_translations_dp); 1227 return ehl_combo_phy_ddi_translations_dp; 1228 } 1229 1230 static const struct cnl_ddi_buf_trans * 1231 ehl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1232 const struct intel_crtc_state *crtc_state, 1233 int *n_entries) 1234 { 1235 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1236 1237 if (dev_priv->vbt.edp.low_vswing) { 1238 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2); 1239 return icl_combo_phy_ddi_translations_edp_hbr2; 1240 } 1241 1242 return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1243 } 1244 1245 static const struct cnl_ddi_buf_trans * 1246 ehl_get_combo_buf_trans(struct intel_encoder *encoder, 1247 const struct intel_crtc_state 
*crtc_state, 1248 int *n_entries) 1249 { 1250 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1251 return ehl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1252 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1253 return ehl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1254 else 1255 return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1256 } 1257 1258 static const struct cnl_ddi_buf_trans * 1259 jsl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1260 const struct intel_crtc_state *crtc_state, 1261 int *n_entries) 1262 { 1263 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1264 return icl_combo_phy_ddi_translations_hdmi; 1265 } 1266 1267 static const struct cnl_ddi_buf_trans * 1268 jsl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1269 const struct intel_crtc_state *crtc_state, 1270 int *n_entries) 1271 { 1272 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2); 1273 return icl_combo_phy_ddi_translations_dp_hbr2; 1274 } 1275 1276 static const struct cnl_ddi_buf_trans * 1277 jsl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1278 const struct intel_crtc_state *crtc_state, 1279 int *n_entries) 1280 { 1281 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1282 1283 if (dev_priv->vbt.edp.low_vswing) { 1284 if (crtc_state->port_clock > 270000) { 1285 *n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr2); 1286 return jsl_combo_phy_ddi_translations_edp_hbr2; 1287 } else { 1288 *n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr); 1289 return jsl_combo_phy_ddi_translations_edp_hbr; 1290 } 1291 } 1292 1293 return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1294 } 1295 1296 static const struct cnl_ddi_buf_trans * 1297 jsl_get_combo_buf_trans(struct intel_encoder *encoder, 1298 const struct intel_crtc_state *crtc_state, 1299 int *n_entries) 1300 { 1301 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1302 return jsl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1303 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1304 return jsl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1305 else 1306 return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1307 } 1308 1309 static const struct cnl_ddi_buf_trans * 1310 tgl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1311 const struct intel_crtc_state *crtc_state, 1312 int *n_entries) 1313 { 1314 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1315 return icl_combo_phy_ddi_translations_hdmi; 1316 } 1317 1318 static const struct cnl_ddi_buf_trans * 1319 tgl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1320 const struct intel_crtc_state *crtc_state, 1321 int *n_entries) 1322 { 1323 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1324 1325 if (crtc_state->port_clock > 270000) { 1326 if (IS_ROCKETLAKE(dev_priv)) { 1327 *n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr2_hbr3); 1328 return rkl_combo_phy_ddi_translations_dp_hbr2_hbr3; 1329 } else if (IS_TGL_U(dev_priv) || IS_TGL_Y(dev_priv)) { 1330 *n_entries = ARRAY_SIZE(tgl_uy_combo_phy_ddi_translations_dp_hbr2); 1331 return tgl_uy_combo_phy_ddi_translations_dp_hbr2; 1332 } else { 1333 *n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr2); 1334 return tgl_combo_phy_ddi_translations_dp_hbr2; 1335 } 1336 } else { 1337 if (IS_ROCKETLAKE(dev_priv)) { 1338 *n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr); 1339 return 
rkl_combo_phy_ddi_translations_dp_hbr; 1340 } else { 1341 *n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr); 1342 return tgl_combo_phy_ddi_translations_dp_hbr; 1343 } 1344 } 1345 } 1346 1347 static const struct cnl_ddi_buf_trans * 1348 tgl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1349 const struct intel_crtc_state *crtc_state, 1350 int *n_entries) 1351 { 1352 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1353 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1354 1355 if (crtc_state->port_clock > 540000) { 1356 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3); 1357 return icl_combo_phy_ddi_translations_edp_hbr3; 1358 } else if (dev_priv->vbt.edp.hobl && !intel_dp->hobl_failed) { 1359 *n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_edp_hbr2_hobl); 1360 return tgl_combo_phy_ddi_translations_edp_hbr2_hobl; 1361 } else if (dev_priv->vbt.edp.low_vswing) { 1362 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2); 1363 return icl_combo_phy_ddi_translations_edp_hbr2; 1364 } 1365 1366 return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1367 } 1368 1369 static const struct cnl_ddi_buf_trans * 1370 tgl_get_combo_buf_trans(struct intel_encoder *encoder, 1371 const struct intel_crtc_state *crtc_state, 1372 int *n_entries) 1373 { 1374 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1375 return tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1376 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1377 return tgl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1378 else 1379 return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1380 } 1381 1382 static const struct tgl_dkl_phy_ddi_buf_trans * 1383 tgl_get_dkl_buf_trans_hdmi(struct intel_encoder *encoder, 1384 const struct intel_crtc_state *crtc_state, 1385 int *n_entries) 1386 { 1387 *n_entries = ARRAY_SIZE(tgl_dkl_phy_hdmi_ddi_trans); 1388 return tgl_dkl_phy_hdmi_ddi_trans; 1389 } 1390 1391 static const struct tgl_dkl_phy_ddi_buf_trans * 1392 tgl_get_dkl_buf_trans_dp(struct intel_encoder *encoder, 1393 const struct intel_crtc_state *crtc_state, 1394 int *n_entries) 1395 { 1396 if (crtc_state->port_clock > 270000) { 1397 *n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans_hbr2); 1398 return tgl_dkl_phy_dp_ddi_trans_hbr2; 1399 } else { 1400 *n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans); 1401 return tgl_dkl_phy_dp_ddi_trans; 1402 } 1403 } 1404 1405 static const struct tgl_dkl_phy_ddi_buf_trans * 1406 tgl_get_dkl_buf_trans(struct intel_encoder *encoder, 1407 const struct intel_crtc_state *crtc_state, 1408 int *n_entries) 1409 { 1410 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1411 return tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, n_entries); 1412 else 1413 return tgl_get_dkl_buf_trans_dp(encoder, crtc_state, n_entries); 1414 } 1415 1416 static int intel_ddi_hdmi_level(struct intel_encoder *encoder, 1417 const struct intel_crtc_state *crtc_state) 1418 { 1419 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1420 int n_entries, level, default_entry; 1421 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1422 1423 if (INTEL_GEN(dev_priv) >= 12) { 1424 if (intel_phy_is_combo(dev_priv, phy)) 1425 tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries); 1426 else 1427 tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, &n_entries); 1428 default_entry = n_entries - 1; 1429 } else if (INTEL_GEN(dev_priv) == 11) { 1430 if (intel_phy_is_combo(dev_priv, phy)) 1431 
icl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries); 1432 else 1433 icl_get_mg_buf_trans_hdmi(encoder, crtc_state, &n_entries); 1434 default_entry = n_entries - 1; 1435 } else if (IS_CANNONLAKE(dev_priv)) { 1436 cnl_get_buf_trans_hdmi(encoder, &n_entries); 1437 default_entry = n_entries - 1; 1438 } else if (IS_GEN9_LP(dev_priv)) { 1439 bxt_get_buf_trans_hdmi(encoder, &n_entries); 1440 default_entry = n_entries - 1; 1441 } else if (IS_GEN9_BC(dev_priv)) { 1442 intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 1443 default_entry = 8; 1444 } else if (IS_BROADWELL(dev_priv)) { 1445 intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 1446 default_entry = 7; 1447 } else if (IS_HASWELL(dev_priv)) { 1448 intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 1449 default_entry = 6; 1450 } else { 1451 drm_WARN(&dev_priv->drm, 1, "ddi translation table missing\n"); 1452 return 0; 1453 } 1454 1455 if (drm_WARN_ON_ONCE(&dev_priv->drm, n_entries == 0)) 1456 return 0; 1457 1458 level = intel_bios_hdmi_level_shift(encoder); 1459 if (level < 0) 1460 level = default_entry; 1461 1462 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1463 level = n_entries - 1; 1464 1465 return level; 1466 } 1467 1468 /* 1469 * Starting with Haswell, DDI port buffers must be programmed with correct 1470 * values in advance. This function programs the correct values for 1471 * DP/eDP/FDI use cases. 1472 */ 1473 static void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder, 1474 const struct intel_crtc_state *crtc_state) 1475 { 1476 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1477 u32 iboost_bit = 0; 1478 int i, n_entries; 1479 enum port port = encoder->port; 1480 const struct ddi_buf_trans *ddi_translations; 1481 1482 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) 1483 ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv, 1484 &n_entries); 1485 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1486 ddi_translations = intel_ddi_get_buf_trans_edp(encoder, 1487 &n_entries); 1488 else 1489 ddi_translations = intel_ddi_get_buf_trans_dp(encoder, 1490 &n_entries); 1491 1492 /* If we're boosting the current, set bit 31 of trans1 */ 1493 if (IS_GEN9_BC(dev_priv) && intel_bios_dp_boost_level(encoder)) 1494 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 1495 1496 for (i = 0; i < n_entries; i++) { 1497 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i), 1498 ddi_translations[i].trans1 | iboost_bit); 1499 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i), 1500 ddi_translations[i].trans2); 1501 } 1502 } 1503 1504 /* 1505 * Starting with Haswell, DDI port buffers must be programmed with correct 1506 * values in advance. This function programs the correct values for 1507 * HDMI/DVI use cases. 
1508 */ 1509 static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder, 1510 int level) 1511 { 1512 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1513 u32 iboost_bit = 0; 1514 int n_entries; 1515 enum port port = encoder->port; 1516 const struct ddi_buf_trans *ddi_translations; 1517 1518 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 1519 1520 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 1521 return; 1522 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1523 level = n_entries - 1; 1524 1525 /* If we're boosting the current, set bit 31 of trans1 */ 1526 if (IS_GEN9_BC(dev_priv) && intel_bios_hdmi_boost_level(encoder)) 1527 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 1528 1529 /* Entry 9 is for HDMI: */ 1530 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9), 1531 ddi_translations[level].trans1 | iboost_bit); 1532 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9), 1533 ddi_translations[level].trans2); 1534 } 1535 1536 static void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv, 1537 enum port port) 1538 { 1539 if (IS_BROXTON(dev_priv)) { 1540 udelay(16); 1541 return; 1542 } 1543 1544 if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 1545 DDI_BUF_IS_IDLE), 8)) 1546 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n", 1547 port_name(port)); 1548 } 1549 1550 static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv, 1551 enum port port) 1552 { 1553 /* Wait > 518 usecs for DDI_BUF_CTL to be non-idle */ 1554 if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) { 1555 usleep_range(518, 1000); 1556 return; 1557 } 1558 1559 if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 1560 DDI_BUF_IS_IDLE), 500)) 1561 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n", 1562 port_name(port)); 1563 } 1564 1565 static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll) 1566 { 1567 switch (pll->info->id) { 1568 case DPLL_ID_WRPLL1: 1569 return PORT_CLK_SEL_WRPLL1; 1570 case DPLL_ID_WRPLL2: 1571 return PORT_CLK_SEL_WRPLL2; 1572 case DPLL_ID_SPLL: 1573 return PORT_CLK_SEL_SPLL; 1574 case DPLL_ID_LCPLL_810: 1575 return PORT_CLK_SEL_LCPLL_810; 1576 case DPLL_ID_LCPLL_1350: 1577 return PORT_CLK_SEL_LCPLL_1350; 1578 case DPLL_ID_LCPLL_2700: 1579 return PORT_CLK_SEL_LCPLL_2700; 1580 default: 1581 MISSING_CASE(pll->info->id); 1582 return PORT_CLK_SEL_NONE; 1583 } 1584 } 1585 1586 static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder, 1587 const struct intel_crtc_state *crtc_state) 1588 { 1589 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1590 int clock = crtc_state->port_clock; 1591 const enum intel_dpll_id id = pll->info->id; 1592 1593 switch (id) { 1594 default: 1595 /* 1596 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used 1597 * here, so do warn if this gets passed in 1598 */ 1599 MISSING_CASE(id); 1600 return DDI_CLK_SEL_NONE; 1601 case DPLL_ID_ICL_TBTPLL: 1602 switch (clock) { 1603 case 162000: 1604 return DDI_CLK_SEL_TBT_162; 1605 case 270000: 1606 return DDI_CLK_SEL_TBT_270; 1607 case 540000: 1608 return DDI_CLK_SEL_TBT_540; 1609 case 810000: 1610 return DDI_CLK_SEL_TBT_810; 1611 default: 1612 MISSING_CASE(clock); 1613 return DDI_CLK_SEL_NONE; 1614 } 1615 case DPLL_ID_ICL_MGPLL1: 1616 case DPLL_ID_ICL_MGPLL2: 1617 case DPLL_ID_ICL_MGPLL3: 1618 case DPLL_ID_ICL_MGPLL4: 1619 case DPLL_ID_TGL_MGPLL5: 1620 case DPLL_ID_TGL_MGPLL6: 1621 return DDI_CLK_SEL_MG; 1622 } 1623 } 1624 1625 /* Starting with
Haswell, different DDI ports can work in FDI mode for 1626 * connection to the PCH-located connectors. For this, it is necessary to train 1627 * both the DDI port and PCH receiver for the desired DDI buffer settings. 1628 * 1629 * The recommended port to work in FDI mode is DDI E, which we use here. Also, 1630 * please note that when FDI mode is active on DDI E, it shares 2 lines with 1631 * DDI A (which is used for eDP) 1632 */ 1633 1634 void hsw_fdi_link_train(struct intel_encoder *encoder, 1635 const struct intel_crtc_state *crtc_state) 1636 { 1637 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1638 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1639 u32 temp, i, rx_ctl_val, ddi_pll_sel; 1640 1641 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 1642 1643 /* Set the FDI_RX_MISC pwrdn lanes and the 2 workarounds listed at the 1644 * mode set "sequence for CRT port" document: 1645 * - TP1 to TP2 time with the default value 1646 * - FDI delay to 90h 1647 * 1648 * WaFDIAutoLinkSetTimingOverrride:hsw 1649 */ 1650 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), 1651 FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2) | FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90); 1652 1653 /* Enable the PCH Receiver FDI PLL */ 1654 rx_ctl_val = dev_priv->fdi_rx_config | FDI_RX_ENHANCE_FRAME_ENABLE | 1655 FDI_RX_PLL_ENABLE | 1656 FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes); 1657 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1658 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1659 udelay(220); 1660 1661 /* Switch from Rawclk to PCDclk */ 1662 rx_ctl_val |= FDI_PCDCLK; 1663 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1664 1665 /* Configure Port Clock Select */ 1666 ddi_pll_sel = hsw_pll_to_ddi_pll_sel(crtc_state->shared_dpll); 1667 intel_de_write(dev_priv, PORT_CLK_SEL(PORT_E), ddi_pll_sel); 1668 drm_WARN_ON(&dev_priv->drm, ddi_pll_sel != PORT_CLK_SEL_SPLL); 1669 1670 /* Start the training iterating through available voltages and emphasis, 1671 * testing each value twice. */ 1672 for (i = 0; i < ARRAY_SIZE(hsw_ddi_translations_fdi) * 2; i++) { 1673 /* Configure DP_TP_CTL with auto-training */ 1674 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), 1675 DP_TP_CTL_FDI_AUTOTRAIN | 1676 DP_TP_CTL_ENHANCED_FRAME_ENABLE | 1677 DP_TP_CTL_LINK_TRAIN_PAT1 | 1678 DP_TP_CTL_ENABLE); 1679 1680 /* Configure and enable DDI_BUF_CTL for DDI E with next voltage. 
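 * DDI_BUF_TRANS_SELECT(i / 2) below walks the hsw_ddi_translations_fdi
 * table one entry at a time, trying each voltage/emphasis setting twice
 * before moving on to the next one.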
1681 * DDI E does not support port reversal, the functionality is 1682 * achieved on the PCH side in FDI_RX_CTL, so no need to set the 1683 * port reversal bit */ 1684 intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), 1685 DDI_BUF_CTL_ENABLE | ((crtc_state->fdi_lanes - 1) << 1) | DDI_BUF_TRANS_SELECT(i / 2)); 1686 intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1687 1688 udelay(600); 1689 1690 /* Program PCH FDI Receiver TU */ 1691 intel_de_write(dev_priv, FDI_RX_TUSIZE1(PIPE_A), TU_SIZE(64)); 1692 1693 /* Enable PCH FDI Receiver with auto-training */ 1694 rx_ctl_val |= FDI_RX_ENABLE | FDI_LINK_TRAIN_AUTO; 1695 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1696 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1697 1698 /* Wait for FDI receiver lane calibration */ 1699 udelay(30); 1700 1701 /* Unset FDI_RX_MISC pwrdn lanes */ 1702 temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1703 temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 1704 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp); 1705 intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1706 1707 /* Wait for FDI auto training time */ 1708 udelay(5); 1709 1710 temp = intel_de_read(dev_priv, DP_TP_STATUS(PORT_E)); 1711 if (temp & DP_TP_STATUS_AUTOTRAIN_DONE) { 1712 drm_dbg_kms(&dev_priv->drm, 1713 "FDI link training done on step %d\n", i); 1714 break; 1715 } 1716 1717 /* 1718 * Leave things enabled even if we failed to train FDI. 1719 * Results in less fireworks from the state checker. 1720 */ 1721 if (i == ARRAY_SIZE(hsw_ddi_translations_fdi) * 2 - 1) { 1722 drm_err(&dev_priv->drm, "FDI link training failed!\n"); 1723 break; 1724 } 1725 1726 rx_ctl_val &= ~FDI_RX_ENABLE; 1727 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1728 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1729 1730 temp = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1731 temp &= ~DDI_BUF_CTL_ENABLE; 1732 intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), temp); 1733 intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1734 1735 /* Disable DP_TP_CTL and FDI_RX_CTL and retry */ 1736 temp = intel_de_read(dev_priv, DP_TP_CTL(PORT_E)); 1737 temp &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 1738 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 1739 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), temp); 1740 intel_de_posting_read(dev_priv, DP_TP_CTL(PORT_E)); 1741 1742 intel_wait_ddi_buf_idle(dev_priv, PORT_E); 1743 1744 /* Reset FDI_RX_MISC pwrdn lanes */ 1745 temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1746 temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 1747 temp |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 1748 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp); 1749 intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1750 } 1751 1752 /* Enable normal pixel sending for FDI */ 1753 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), 1754 DP_TP_CTL_FDI_AUTOTRAIN | 1755 DP_TP_CTL_LINK_TRAIN_NORMAL | 1756 DP_TP_CTL_ENHANCED_FRAME_ENABLE | 1757 DP_TP_CTL_ENABLE); 1758 } 1759 1760 static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder, 1761 const struct intel_crtc_state *crtc_state) 1762 { 1763 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1764 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 1765 1766 intel_dp->DP = dig_port->saved_port_bits | 1767 DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0); 1768 intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count); 1769 } 1770 1771 static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv, 1772 enum port port) 1773 { 1774 
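	/*
	 * Map the DDI_CLK_SEL_TBT_* value read back from the hardware onto
	 * the TBT link clock it was programmed for (the inverse of the TBT
	 * cases in icl_pll_to_ddi_clk_sel()).
	 */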
u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK; 1775 1776 switch (val) { 1777 case DDI_CLK_SEL_NONE: 1778 return 0; 1779 case DDI_CLK_SEL_TBT_162: 1780 return 162000; 1781 case DDI_CLK_SEL_TBT_270: 1782 return 270000; 1783 case DDI_CLK_SEL_TBT_540: 1784 return 540000; 1785 case DDI_CLK_SEL_TBT_810: 1786 return 810000; 1787 default: 1788 MISSING_CASE(val); 1789 return 0; 1790 } 1791 } 1792 1793 static void ddi_dotclock_get(struct intel_crtc_state *pipe_config) 1794 { 1795 int dotclock; 1796 1797 if (pipe_config->has_pch_encoder) 1798 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 1799 &pipe_config->fdi_m_n); 1800 else if (intel_crtc_has_dp_encoder(pipe_config)) 1801 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 1802 &pipe_config->dp_m_n); 1803 else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24) 1804 dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp; 1805 else 1806 dotclock = pipe_config->port_clock; 1807 1808 if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 && 1809 !intel_crtc_has_dp_encoder(pipe_config)) 1810 dotclock *= 2; 1811 1812 if (pipe_config->pixel_multiplier) 1813 dotclock /= pipe_config->pixel_multiplier; 1814 1815 pipe_config->hw.adjusted_mode.crtc_clock = dotclock; 1816 } 1817 1818 static void intel_ddi_clock_get(struct intel_encoder *encoder, 1819 struct intel_crtc_state *pipe_config) 1820 { 1821 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1822 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1823 1824 if (intel_phy_is_tc(dev_priv, phy) && 1825 intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll) == 1826 DPLL_ID_ICL_TBTPLL) 1827 pipe_config->port_clock = icl_calc_tbt_pll_link(dev_priv, 1828 encoder->port); 1829 else 1830 pipe_config->port_clock = 1831 intel_dpll_get_freq(dev_priv, pipe_config->shared_dpll, 1832 &pipe_config->dpll_hw_state); 1833 1834 ddi_dotclock_get(pipe_config); 1835 } 1836 1837 void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state, 1838 const struct drm_connector_state *conn_state) 1839 { 1840 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1841 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1842 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1843 u32 temp; 1844 1845 if (!intel_crtc_has_dp_encoder(crtc_state)) 1846 return; 1847 1848 drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)); 1849 1850 temp = DP_MSA_MISC_SYNC_CLOCK; 1851 1852 switch (crtc_state->pipe_bpp) { 1853 case 18: 1854 temp |= DP_MSA_MISC_6_BPC; 1855 break; 1856 case 24: 1857 temp |= DP_MSA_MISC_8_BPC; 1858 break; 1859 case 30: 1860 temp |= DP_MSA_MISC_10_BPC; 1861 break; 1862 case 36: 1863 temp |= DP_MSA_MISC_12_BPC; 1864 break; 1865 default: 1866 MISSING_CASE(crtc_state->pipe_bpp); 1867 break; 1868 } 1869 1870 /* nonsense combination */ 1871 drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range && 1872 crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB); 1873 1874 if (crtc_state->limited_color_range) 1875 temp |= DP_MSA_MISC_COLOR_CEA_RGB; 1876 1877 /* 1878 * As per DP 1.2 spec section 2.3.4.3 while sending 1879 * YCBCR 444 signals we should program MSA MISC1/0 fields with 1880 * colorspace information. 
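 * The YCbCr 4:4:4 case below always signals BT.709 colorimetry
 * (DP_MSA_MISC_COLOR_YCBCR_444_BT709).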
1881 */ 1882 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) 1883 temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709; 1884 1885 /* 1886 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication 1887 * of Color Encoding Format and Content Color Gamut] while sending 1888 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields 1889 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format. 1890 */ 1891 if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) 1892 temp |= DP_MSA_MISC_COLOR_VSC_SDP; 1893 1894 intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp); 1895 } 1896 1897 static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder) 1898 { 1899 if (master_transcoder == TRANSCODER_EDP) 1900 return 0; 1901 else 1902 return master_transcoder + 1; 1903 } 1904 1905 /* 1906 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state. 1907 * 1908 * Only intended to be used by intel_ddi_enable_transcoder_func() and 1909 * intel_ddi_config_transcoder_func(). 1910 */ 1911 static u32 1912 intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder, 1913 const struct intel_crtc_state *crtc_state) 1914 { 1915 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1916 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1917 enum pipe pipe = crtc->pipe; 1918 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1919 enum port port = encoder->port; 1920 u32 temp; 1921 1922 /* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */ 1923 temp = TRANS_DDI_FUNC_ENABLE; 1924 if (INTEL_GEN(dev_priv) >= 12) 1925 temp |= TGL_TRANS_DDI_SELECT_PORT(port); 1926 else 1927 temp |= TRANS_DDI_SELECT_PORT(port); 1928 1929 switch (crtc_state->pipe_bpp) { 1930 case 18: 1931 temp |= TRANS_DDI_BPC_6; 1932 break; 1933 case 24: 1934 temp |= TRANS_DDI_BPC_8; 1935 break; 1936 case 30: 1937 temp |= TRANS_DDI_BPC_10; 1938 break; 1939 case 36: 1940 temp |= TRANS_DDI_BPC_12; 1941 break; 1942 default: 1943 BUG(); 1944 } 1945 1946 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC) 1947 temp |= TRANS_DDI_PVSYNC; 1948 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC) 1949 temp |= TRANS_DDI_PHSYNC; 1950 1951 if (cpu_transcoder == TRANSCODER_EDP) { 1952 switch (pipe) { 1953 case PIPE_A: 1954 /* On Haswell, can only use the always-on power well for 1955 * eDP when not using the panel fitter, and when not 1956 * using motion blur mitigation (which we don't 1957 * support). 
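 * That is why pch_pfit.force_thru switches pipe A to the on/off eDP input
 * (TRANS_DDI_EDP_INPUT_A_ONOFF) below instead of the always-on one.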
*/ 1958 if (crtc_state->pch_pfit.force_thru) 1959 temp |= TRANS_DDI_EDP_INPUT_A_ONOFF; 1960 else 1961 temp |= TRANS_DDI_EDP_INPUT_A_ON; 1962 break; 1963 case PIPE_B: 1964 temp |= TRANS_DDI_EDP_INPUT_B_ONOFF; 1965 break; 1966 case PIPE_C: 1967 temp |= TRANS_DDI_EDP_INPUT_C_ONOFF; 1968 break; 1969 default: 1970 BUG(); 1971 break; 1972 } 1973 } 1974 1975 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 1976 if (crtc_state->has_hdmi_sink) 1977 temp |= TRANS_DDI_MODE_SELECT_HDMI; 1978 else 1979 temp |= TRANS_DDI_MODE_SELECT_DVI; 1980 1981 if (crtc_state->hdmi_scrambling) 1982 temp |= TRANS_DDI_HDMI_SCRAMBLING; 1983 if (crtc_state->hdmi_high_tmds_clock_ratio) 1984 temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE; 1985 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) { 1986 temp |= TRANS_DDI_MODE_SELECT_FDI; 1987 temp |= (crtc_state->fdi_lanes - 1) << 1; 1988 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 1989 temp |= TRANS_DDI_MODE_SELECT_DP_MST; 1990 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 1991 1992 if (INTEL_GEN(dev_priv) >= 12) { 1993 enum transcoder master; 1994 1995 master = crtc_state->mst_master_transcoder; 1996 drm_WARN_ON(&dev_priv->drm, 1997 master == INVALID_TRANSCODER); 1998 temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master); 1999 } 2000 } else { 2001 temp |= TRANS_DDI_MODE_SELECT_DP_SST; 2002 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 2003 } 2004 2005 if (IS_GEN_RANGE(dev_priv, 8, 10) && 2006 crtc_state->master_transcoder != INVALID_TRANSCODER) { 2007 u8 master_select = 2008 bdw_trans_port_sync_master_select(crtc_state->master_transcoder); 2009 2010 temp |= TRANS_DDI_PORT_SYNC_ENABLE | 2011 TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select); 2012 } 2013 2014 return temp; 2015 } 2016 2017 void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder, 2018 const struct intel_crtc_state *crtc_state) 2019 { 2020 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2021 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2022 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2023 2024 if (INTEL_GEN(dev_priv) >= 11) { 2025 enum transcoder master_transcoder = crtc_state->master_transcoder; 2026 u32 ctl2 = 0; 2027 2028 if (master_transcoder != INVALID_TRANSCODER) { 2029 u8 master_select = 2030 bdw_trans_port_sync_master_select(master_transcoder); 2031 2032 ctl2 |= PORT_SYNC_MODE_ENABLE | 2033 PORT_SYNC_MODE_MASTER_SELECT(master_select); 2034 } 2035 2036 intel_de_write(dev_priv, 2037 TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2); 2038 } 2039 2040 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), 2041 intel_ddi_transcoder_func_reg_val_get(encoder, 2042 crtc_state)); 2043 } 2044 2045 /* 2046 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable 2047 * bit. 
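 * The register still receives the full value computed by
 * intel_ddi_transcoder_func_reg_val_get(), only with TRANS_DDI_FUNC_ENABLE
 * masked out.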
2048 */ 2049 static void 2050 intel_ddi_config_transcoder_func(struct intel_encoder *encoder, 2051 const struct intel_crtc_state *crtc_state) 2052 { 2053 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2054 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2055 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2056 u32 ctl; 2057 2058 ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state); 2059 ctl &= ~TRANS_DDI_FUNC_ENABLE; 2060 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 2061 } 2062 2063 void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state) 2064 { 2065 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2066 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2067 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2068 u32 ctl; 2069 2070 if (INTEL_GEN(dev_priv) >= 11) 2071 intel_de_write(dev_priv, 2072 TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0); 2073 2074 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2075 2076 drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING); 2077 2078 ctl &= ~TRANS_DDI_FUNC_ENABLE; 2079 2080 if (IS_GEN_RANGE(dev_priv, 8, 10)) 2081 ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE | 2082 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK); 2083 2084 if (INTEL_GEN(dev_priv) >= 12) { 2085 if (!intel_dp_mst_is_master_trans(crtc_state)) { 2086 ctl &= ~(TGL_TRANS_DDI_PORT_MASK | 2087 TRANS_DDI_MODE_SELECT_MASK); 2088 } 2089 } else { 2090 ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK); 2091 } 2092 2093 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 2094 2095 if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME && 2096 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 2097 drm_dbg_kms(&dev_priv->drm, 2098 "Quirk Increase DDI disabled time\n"); 2099 /* Quirk time at 100ms for reliable operation */ 2100 msleep(100); 2101 } 2102 } 2103 2104 int intel_ddi_toggle_hdcp_bits(struct intel_encoder *intel_encoder, 2105 enum transcoder cpu_transcoder, 2106 bool enable, u32 hdcp_mask) 2107 { 2108 struct drm_device *dev = intel_encoder->base.dev; 2109 struct drm_i915_private *dev_priv = to_i915(dev); 2110 intel_wakeref_t wakeref; 2111 int ret = 0; 2112 u32 tmp; 2113 2114 wakeref = intel_display_power_get_if_enabled(dev_priv, 2115 intel_encoder->power_domain); 2116 if (drm_WARN_ON(dev, !wakeref)) 2117 return -ENXIO; 2118 2119 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2120 if (enable) 2121 tmp |= hdcp_mask; 2122 else 2123 tmp &= ~hdcp_mask; 2124 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp); 2125 intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref); 2126 return ret; 2127 } 2128 2129 bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector) 2130 { 2131 struct drm_device *dev = intel_connector->base.dev; 2132 struct drm_i915_private *dev_priv = to_i915(dev); 2133 struct intel_encoder *encoder = intel_attached_encoder(intel_connector); 2134 int type = intel_connector->base.connector_type; 2135 enum port port = encoder->port; 2136 enum transcoder cpu_transcoder; 2137 intel_wakeref_t wakeref; 2138 enum pipe pipe = 0; 2139 u32 tmp; 2140 bool ret; 2141 2142 wakeref = intel_display_power_get_if_enabled(dev_priv, 2143 encoder->power_domain); 2144 if (!wakeref) 2145 return false; 2146 2147 if (!encoder->get_hw_state(encoder, &pipe)) { 2148 ret = false; 2149 goto out; 2150 } 2151 2152 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 
2153 cpu_transcoder = TRANSCODER_EDP; 2154 else 2155 cpu_transcoder = (enum transcoder) pipe; 2156 2157 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2158 2159 switch (tmp & TRANS_DDI_MODE_SELECT_MASK) { 2160 case TRANS_DDI_MODE_SELECT_HDMI: 2161 case TRANS_DDI_MODE_SELECT_DVI: 2162 ret = type == DRM_MODE_CONNECTOR_HDMIA; 2163 break; 2164 2165 case TRANS_DDI_MODE_SELECT_DP_SST: 2166 ret = type == DRM_MODE_CONNECTOR_eDP || 2167 type == DRM_MODE_CONNECTOR_DisplayPort; 2168 break; 2169 2170 case TRANS_DDI_MODE_SELECT_DP_MST: 2171 /* if the transcoder is in MST state then 2172 * connector isn't connected */ 2173 ret = false; 2174 break; 2175 2176 case TRANS_DDI_MODE_SELECT_FDI: 2177 ret = type == DRM_MODE_CONNECTOR_VGA; 2178 break; 2179 2180 default: 2181 ret = false; 2182 break; 2183 } 2184 2185 out: 2186 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 2187 2188 return ret; 2189 } 2190 2191 static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder, 2192 u8 *pipe_mask, bool *is_dp_mst) 2193 { 2194 struct drm_device *dev = encoder->base.dev; 2195 struct drm_i915_private *dev_priv = to_i915(dev); 2196 enum port port = encoder->port; 2197 intel_wakeref_t wakeref; 2198 enum pipe p; 2199 u32 tmp; 2200 u8 mst_pipe_mask; 2201 2202 *pipe_mask = 0; 2203 *is_dp_mst = false; 2204 2205 wakeref = intel_display_power_get_if_enabled(dev_priv, 2206 encoder->power_domain); 2207 if (!wakeref) 2208 return; 2209 2210 tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 2211 if (!(tmp & DDI_BUF_CTL_ENABLE)) 2212 goto out; 2213 2214 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) { 2215 tmp = intel_de_read(dev_priv, 2216 TRANS_DDI_FUNC_CTL(TRANSCODER_EDP)); 2217 2218 switch (tmp & TRANS_DDI_EDP_INPUT_MASK) { 2219 default: 2220 MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK); 2221 fallthrough; 2222 case TRANS_DDI_EDP_INPUT_A_ON: 2223 case TRANS_DDI_EDP_INPUT_A_ONOFF: 2224 *pipe_mask = BIT(PIPE_A); 2225 break; 2226 case TRANS_DDI_EDP_INPUT_B_ONOFF: 2227 *pipe_mask = BIT(PIPE_B); 2228 break; 2229 case TRANS_DDI_EDP_INPUT_C_ONOFF: 2230 *pipe_mask = BIT(PIPE_C); 2231 break; 2232 } 2233 2234 goto out; 2235 } 2236 2237 mst_pipe_mask = 0; 2238 for_each_pipe(dev_priv, p) { 2239 enum transcoder cpu_transcoder = (enum transcoder)p; 2240 unsigned int port_mask, ddi_select; 2241 intel_wakeref_t trans_wakeref; 2242 2243 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 2244 POWER_DOMAIN_TRANSCODER(cpu_transcoder)); 2245 if (!trans_wakeref) 2246 continue; 2247 2248 if (INTEL_GEN(dev_priv) >= 12) { 2249 port_mask = TGL_TRANS_DDI_PORT_MASK; 2250 ddi_select = TGL_TRANS_DDI_SELECT_PORT(port); 2251 } else { 2252 port_mask = TRANS_DDI_PORT_MASK; 2253 ddi_select = TRANS_DDI_SELECT_PORT(port); 2254 } 2255 2256 tmp = intel_de_read(dev_priv, 2257 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2258 intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder), 2259 trans_wakeref); 2260 2261 if ((tmp & port_mask) != ddi_select) 2262 continue; 2263 2264 if ((tmp & TRANS_DDI_MODE_SELECT_MASK) == 2265 TRANS_DDI_MODE_SELECT_DP_MST) 2266 mst_pipe_mask |= BIT(p); 2267 2268 *pipe_mask |= BIT(p); 2269 } 2270 2271 if (!*pipe_mask) 2272 drm_dbg_kms(&dev_priv->drm, 2273 "No pipe for [ENCODER:%d:%s] found\n", 2274 encoder->base.base.id, encoder->base.name); 2275 2276 if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) { 2277 drm_dbg_kms(&dev_priv->drm, 2278 "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n", 2279 encoder->base.base.id, encoder->base.name, 2280 *pipe_mask); 2281 
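		/*
		 * Multiple pipes on a non-MST encoder is an inconsistent
		 * state; keep only the lowest numbered pipe.
		 */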
*pipe_mask = BIT(ffs(*pipe_mask) - 1); 2282 } 2283 2284 if (mst_pipe_mask && mst_pipe_mask != *pipe_mask) 2285 drm_dbg_kms(&dev_priv->drm, 2286 "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n", 2287 encoder->base.base.id, encoder->base.name, 2288 *pipe_mask, mst_pipe_mask); 2289 else 2290 *is_dp_mst = mst_pipe_mask; 2291 2292 out: 2293 if (*pipe_mask && IS_GEN9_LP(dev_priv)) { 2294 tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port)); 2295 if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK | 2296 BXT_PHY_LANE_POWERDOWN_ACK | 2297 BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED) 2298 drm_err(&dev_priv->drm, 2299 "[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n", 2300 encoder->base.base.id, encoder->base.name, tmp); 2301 } 2302 2303 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 2304 } 2305 2306 bool intel_ddi_get_hw_state(struct intel_encoder *encoder, 2307 enum pipe *pipe) 2308 { 2309 u8 pipe_mask; 2310 bool is_mst; 2311 2312 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 2313 2314 if (is_mst || !pipe_mask) 2315 return false; 2316 2317 *pipe = ffs(pipe_mask) - 1; 2318 2319 return true; 2320 } 2321 2322 static enum intel_display_power_domain 2323 intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port) 2324 { 2325 /* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with 2326 * DC states enabled at the same time, while for driver initiated AUX 2327 * transfers we need the same AUX IOs to be powered but with DC states 2328 * disabled. Accordingly use the AUX power domain here which leaves DC 2329 * states enabled. 2330 * However, for non-A AUX ports the corresponding non-EDP transcoders 2331 * would have already enabled power well 2 and DC_OFF. This means we can 2332 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a 2333 * specific AUX_IO reference without powering up any extra wells. 2334 * Note that PSR is enabled only on Port A even though this function 2335 * returns the correct domain for other ports too. 2336 */ 2337 return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A : 2338 intel_aux_power_domain(dig_port); 2339 } 2340 2341 static void intel_ddi_get_power_domains(struct intel_encoder *encoder, 2342 struct intel_crtc_state *crtc_state) 2343 { 2344 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2345 struct intel_digital_port *dig_port; 2346 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2347 2348 /* 2349 * TODO: Add support for MST encoders. Atm, the following should never 2350 * happen since fake-MST encoders don't set their get_power_domains() 2351 * hook. 2352 */ 2353 if (drm_WARN_ON(&dev_priv->drm, 2354 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))) 2355 return; 2356 2357 dig_port = enc_to_dig_port(encoder); 2358 2359 if (!intel_phy_is_tc(dev_priv, phy) || 2360 dig_port->tc_mode != TC_PORT_TBT_ALT) { 2361 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2362 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2363 dig_port->ddi_io_power_domain); 2364 } 2365 2366 /* 2367 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC 2368 * ports. 
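 * The condition below therefore takes the AUX power reference both for DP
 * encoders and for any encoder sitting on a TC PHY.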
2369 */ 2370 if (intel_crtc_has_dp_encoder(crtc_state) || 2371 intel_phy_is_tc(dev_priv, phy)) { 2372 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 2373 dig_port->aux_wakeref = 2374 intel_display_power_get(dev_priv, 2375 intel_ddi_main_link_aux_domain(dig_port)); 2376 } 2377 } 2378 2379 void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder, 2380 const struct intel_crtc_state *crtc_state) 2381 { 2382 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2383 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2384 enum port port = encoder->port; 2385 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2386 2387 if (cpu_transcoder != TRANSCODER_EDP) { 2388 if (INTEL_GEN(dev_priv) >= 12) 2389 intel_de_write(dev_priv, 2390 TRANS_CLK_SEL(cpu_transcoder), 2391 TGL_TRANS_CLK_SEL_PORT(port)); 2392 else 2393 intel_de_write(dev_priv, 2394 TRANS_CLK_SEL(cpu_transcoder), 2395 TRANS_CLK_SEL_PORT(port)); 2396 } 2397 } 2398 2399 void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state) 2400 { 2401 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 2402 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2403 2404 if (cpu_transcoder != TRANSCODER_EDP) { 2405 if (INTEL_GEN(dev_priv) >= 12) 2406 intel_de_write(dev_priv, 2407 TRANS_CLK_SEL(cpu_transcoder), 2408 TGL_TRANS_CLK_SEL_DISABLED); 2409 else 2410 intel_de_write(dev_priv, 2411 TRANS_CLK_SEL(cpu_transcoder), 2412 TRANS_CLK_SEL_DISABLED); 2413 } 2414 } 2415 2416 static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv, 2417 enum port port, u8 iboost) 2418 { 2419 u32 tmp; 2420 2421 tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0); 2422 tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port)); 2423 if (iboost) 2424 tmp |= iboost << BALANCE_LEG_SHIFT(port); 2425 else 2426 tmp |= BALANCE_LEG_DISABLE(port); 2427 intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp); 2428 } 2429 2430 static void skl_ddi_set_iboost(struct intel_encoder *encoder, 2431 const struct intel_crtc_state *crtc_state, 2432 int level) 2433 { 2434 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2435 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2436 u8 iboost; 2437 2438 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2439 iboost = intel_bios_hdmi_boost_level(encoder); 2440 else 2441 iboost = intel_bios_dp_boost_level(encoder); 2442 2443 if (iboost == 0) { 2444 const struct ddi_buf_trans *ddi_translations; 2445 int n_entries; 2446 2447 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2448 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 2449 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2450 ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries); 2451 else 2452 ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries); 2453 2454 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2455 return; 2456 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2457 level = n_entries - 1; 2458 2459 iboost = ddi_translations[level].i_boost; 2460 } 2461 2462 /* Make sure that the requested I_boost is valid */ 2463 if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) { 2464 drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost); 2465 return; 2466 } 2467 2468 _skl_ddi_set_iboost(dev_priv, encoder->port, iboost); 2469 2470 if (encoder->port == PORT_A && dig_port->max_lanes == 4) 2471 _skl_ddi_set_iboost(dev_priv, PORT_E, iboost); 2472 } 2473 2474 static void 
bxt_ddi_vswing_sequence(struct intel_encoder *encoder, 2475 const struct intel_crtc_state *crtc_state, 2476 int level) 2477 { 2478 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2479 const struct bxt_ddi_buf_trans *ddi_translations; 2480 enum port port = encoder->port; 2481 int n_entries; 2482 2483 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2484 ddi_translations = bxt_get_buf_trans_hdmi(encoder, &n_entries); 2485 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2486 ddi_translations = bxt_get_buf_trans_edp(encoder, &n_entries); 2487 else 2488 ddi_translations = bxt_get_buf_trans_dp(encoder, &n_entries); 2489 2490 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2491 return; 2492 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2493 level = n_entries - 1; 2494 2495 bxt_ddi_phy_set_signal_level(dev_priv, port, 2496 ddi_translations[level].margin, 2497 ddi_translations[level].scale, 2498 ddi_translations[level].enable, 2499 ddi_translations[level].deemphasis); 2500 } 2501 2502 static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp, 2503 const struct intel_crtc_state *crtc_state) 2504 { 2505 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 2506 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2507 enum port port = encoder->port; 2508 enum phy phy = intel_port_to_phy(dev_priv, port); 2509 int n_entries; 2510 2511 if (INTEL_GEN(dev_priv) >= 12) { 2512 if (intel_phy_is_combo(dev_priv, phy)) 2513 tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2514 else 2515 tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 2516 } else if (INTEL_GEN(dev_priv) == 11) { 2517 if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 2518 jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2519 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 2520 ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2521 else if (intel_phy_is_combo(dev_priv, phy)) 2522 icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2523 else 2524 icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 2525 } else if (IS_CANNONLAKE(dev_priv)) { 2526 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2527 cnl_get_buf_trans_edp(encoder, &n_entries); 2528 else 2529 cnl_get_buf_trans_dp(encoder, &n_entries); 2530 } else if (IS_GEN9_LP(dev_priv)) { 2531 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2532 bxt_get_buf_trans_edp(encoder, &n_entries); 2533 else 2534 bxt_get_buf_trans_dp(encoder, &n_entries); 2535 } else { 2536 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2537 intel_ddi_get_buf_trans_edp(encoder, &n_entries); 2538 else 2539 intel_ddi_get_buf_trans_dp(encoder, &n_entries); 2540 } 2541 2542 if (drm_WARN_ON(&dev_priv->drm, n_entries < 1)) 2543 n_entries = 1; 2544 if (drm_WARN_ON(&dev_priv->drm, 2545 n_entries > ARRAY_SIZE(index_to_dp_signal_levels))) 2546 n_entries = ARRAY_SIZE(index_to_dp_signal_levels); 2547 2548 return index_to_dp_signal_levels[n_entries - 1] & 2549 DP_TRAIN_VOLTAGE_SWING_MASK; 2550 } 2551 2552 /* 2553 * We assume that the full set of pre-emphasis values can be 2554 * used on all DDI platforms. Should that change we need to 2555 * rethink this code. 
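 * Hence intel_ddi_dp_preemph_max() below always reports
 * DP_TRAIN_PRE_EMPH_LEVEL_3, independent of platform.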
2556 */ 2557 static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp) 2558 { 2559 return DP_TRAIN_PRE_EMPH_LEVEL_3; 2560 } 2561 2562 static void cnl_ddi_vswing_program(struct intel_encoder *encoder, 2563 const struct intel_crtc_state *crtc_state, 2564 int level) 2565 { 2566 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2567 const struct cnl_ddi_buf_trans *ddi_translations; 2568 enum port port = encoder->port; 2569 int n_entries, ln; 2570 u32 val; 2571 2572 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2573 ddi_translations = cnl_get_buf_trans_hdmi(encoder, &n_entries); 2574 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2575 ddi_translations = cnl_get_buf_trans_edp(encoder, &n_entries); 2576 else 2577 ddi_translations = cnl_get_buf_trans_dp(encoder, &n_entries); 2578 2579 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2580 return; 2581 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2582 level = n_entries - 1; 2583 2584 /* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */ 2585 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2586 val &= ~SCALING_MODE_SEL_MASK; 2587 val |= SCALING_MODE_SEL(2); 2588 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2589 2590 /* Program PORT_TX_DW2 */ 2591 val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port)); 2592 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 2593 RCOMP_SCALAR_MASK); 2594 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 2595 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 2596 /* Rcomp scalar is fixed as 0x98 for every table entry */ 2597 val |= RCOMP_SCALAR(0x98); 2598 intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val); 2599 2600 /* Program PORT_TX_DW4 */ 2601 /* We cannot write to GRP. It would overwrite individual loadgen */ 2602 for (ln = 0; ln < 4; ln++) { 2603 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 2604 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 2605 CURSOR_COEFF_MASK); 2606 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 2607 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 2608 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 2609 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 2610 } 2611 2612 /* Program PORT_TX_DW5 */ 2613 /* All DW5 values are fixed for every table entry */ 2614 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2615 val &= ~RTERM_SELECT_MASK; 2616 val |= RTERM_SELECT(6); 2617 val |= TAP3_DISABLE; 2618 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2619 2620 /* Program PORT_TX_DW7 */ 2621 val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port)); 2622 val &= ~N_SCALAR_MASK; 2623 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 2624 intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val); 2625 } 2626 2627 static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder, 2628 const struct intel_crtc_state *crtc_state, 2629 int level) 2630 { 2631 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2632 enum port port = encoder->port; 2633 int width, rate, ln; 2634 u32 val; 2635 2636 width = crtc_state->lane_count; 2637 rate = crtc_state->port_clock; 2638 2639 /* 2640 * 1. If port type is eDP or DP, 2641 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 2642 * else clear to 0b.
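 * (i.e. the common keeper is enabled for DP/eDP and disabled for HDMI below)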
2643 */ 2644 val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port)); 2645 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2646 val &= ~COMMON_KEEPER_EN; 2647 else 2648 val |= COMMON_KEEPER_EN; 2649 intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val); 2650 2651 /* 2. Program loadgen select */ 2652 /* 2653 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 2654 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 2655 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 2656 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 2657 */ 2658 for (ln = 0; ln <= 3; ln++) { 2659 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 2660 val &= ~LOADGEN_SELECT; 2661 2662 if ((rate <= 600000 && width == 4 && ln >= 1) || 2663 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 2664 val |= LOADGEN_SELECT; 2665 } 2666 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 2667 } 2668 2669 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 2670 val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5); 2671 val |= SUS_CLOCK_CONFIG; 2672 intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val); 2673 2674 /* 4. Clear training enable to change swing values */ 2675 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2676 val &= ~TX_TRAINING_EN; 2677 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2678 2679 /* 5. Program swing and de-emphasis */ 2680 cnl_ddi_vswing_program(encoder, crtc_state, level); 2681 2682 /* 6. Set training enable to trigger update */ 2683 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2684 val |= TX_TRAINING_EN; 2685 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2686 } 2687 2688 static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder, 2689 const struct intel_crtc_state *crtc_state, 2690 int level) 2691 { 2692 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2693 const struct cnl_ddi_buf_trans *ddi_translations; 2694 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2695 int n_entries, ln; 2696 u32 val; 2697 2698 if (INTEL_GEN(dev_priv) >= 12) 2699 ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2700 else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 2701 ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2702 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 2703 ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2704 else 2705 ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2706 2707 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2708 return; 2709 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2710 level = n_entries - 1; 2711 2712 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) { 2713 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2714 2715 val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED; 2716 intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations); 2717 intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val, 2718 intel_dp->hobl_active ? 
val : 0); 2719 } 2720 2721 /* Set PORT_TX_DW5 */ 2722 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2723 val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK | 2724 TAP2_DISABLE | TAP3_DISABLE); 2725 val |= SCALING_MODE_SEL(0x2); 2726 val |= RTERM_SELECT(0x6); 2727 val |= TAP3_DISABLE; 2728 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2729 2730 /* Program PORT_TX_DW2 */ 2731 val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy)); 2732 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 2733 RCOMP_SCALAR_MASK); 2734 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 2735 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 2736 /* Program Rcomp scalar for every table entry */ 2737 val |= RCOMP_SCALAR(0x98); 2738 intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val); 2739 2740 /* Program PORT_TX_DW4 */ 2741 /* We cannot write to GRP. It would overwrite individual loadgen. */ 2742 for (ln = 0; ln <= 3; ln++) { 2743 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 2744 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 2745 CURSOR_COEFF_MASK); 2746 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 2747 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 2748 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 2749 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 2750 } 2751 2752 /* Program PORT_TX_DW7 */ 2753 val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy)); 2754 val &= ~N_SCALAR_MASK; 2755 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 2756 intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val); 2757 } 2758 2759 static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2760 const struct intel_crtc_state *crtc_state, 2761 int level) 2762 { 2763 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2764 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2765 int width, rate, ln; 2766 u32 val; 2767 2768 width = crtc_state->lane_count; 2769 rate = crtc_state->port_clock; 2770 2771 /* 2772 * 1. If port type is eDP or DP, 2773 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 2774 * else clear to 0b. 2775 */ 2776 val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy)); 2777 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2778 val &= ~COMMON_KEEPER_EN; 2779 else 2780 val |= COMMON_KEEPER_EN; 2781 intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val); 2782 2783 /* 2. Program loadgen select */ 2784 /* 2785 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 2786 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 2787 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 2788 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 2789 */ 2790 for (ln = 0; ln <= 3; ln++) { 2791 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 2792 val &= ~LOADGEN_SELECT; 2793 2794 if ((rate <= 600000 && width == 4 && ln >= 1) || 2795 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 2796 val |= LOADGEN_SELECT; 2797 } 2798 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 2799 } 2800 2801 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 2802 val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy)); 2803 val |= SUS_CLOCK_CONFIG; 2804 intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val); 2805 2806 /* 4. Clear training enable to change swing values */ 2807 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2808 val &= ~TX_TRAINING_EN; 2809 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2810 2811 /* 5. 
Program swing and de-emphasis */ 2812 icl_ddi_combo_vswing_program(encoder, crtc_state, level); 2813 2814 /* 6. Set training enable to trigger update */ 2815 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2816 val |= TX_TRAINING_EN; 2817 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2818 } 2819 2820 static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2821 const struct intel_crtc_state *crtc_state, 2822 int level) 2823 { 2824 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2825 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 2826 const struct icl_mg_phy_ddi_buf_trans *ddi_translations; 2827 int n_entries, ln; 2828 u32 val; 2829 2830 if (enc_to_dig_port(encoder)->tc_mode == TC_PORT_TBT_ALT) 2831 return; 2832 2833 ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 2834 2835 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2836 return; 2837 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2838 level = n_entries - 1; 2839 2840 /* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */ 2841 for (ln = 0; ln < 2; ln++) { 2842 val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port)); 2843 val &= ~CRI_USE_FS32; 2844 intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val); 2845 2846 val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port)); 2847 val &= ~CRI_USE_FS32; 2848 intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val); 2849 } 2850 2851 /* Program MG_TX_SWINGCTRL with values from vswing table */ 2852 for (ln = 0; ln < 2; ln++) { 2853 val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port)); 2854 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 2855 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 2856 ddi_translations[level].cri_txdeemph_override_17_12); 2857 intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val); 2858 2859 val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port)); 2860 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 2861 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 2862 ddi_translations[level].cri_txdeemph_override_17_12); 2863 intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val); 2864 } 2865 2866 /* Program MG_TX_DRVCTRL with values from vswing table */ 2867 for (ln = 0; ln < 2; ln++) { 2868 val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port)); 2869 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 2870 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 2871 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 2872 ddi_translations[level].cri_txdeemph_override_5_0) | 2873 CRI_TXDEEMPH_OVERRIDE_11_6( 2874 ddi_translations[level].cri_txdeemph_override_11_6) | 2875 CRI_TXDEEMPH_OVERRIDE_EN; 2876 intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val); 2877 2878 val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port)); 2879 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 2880 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 2881 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 2882 ddi_translations[level].cri_txdeemph_override_5_0) | 2883 CRI_TXDEEMPH_OVERRIDE_11_6( 2884 ddi_translations[level].cri_txdeemph_override_11_6) | 2885 CRI_TXDEEMPH_OVERRIDE_EN; 2886 intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val); 2887 2888 /* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */ 2889 } 2890 2891 /* 2892 * Program MG_CLKHUB<LN, port being used> with value from frequency table 2893 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the 2894 * values from table for which TX1 and TX2 enabled. 
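 * CFG_LOW_RATE_LKREN_EN below is set only for port clocks under 300000, and
 * the MG_TX_DCC AMI clock divider override is enabled only above 500000.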
2895 */ 2896 for (ln = 0; ln < 2; ln++) { 2897 val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port)); 2898 if (crtc_state->port_clock < 300000) 2899 val |= CFG_LOW_RATE_LKREN_EN; 2900 else 2901 val &= ~CFG_LOW_RATE_LKREN_EN; 2902 intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val); 2903 } 2904 2905 /* Program the MG_TX_DCC<LN, port being used> based on the link frequency */ 2906 for (ln = 0; ln < 2; ln++) { 2907 val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port)); 2908 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 2909 if (crtc_state->port_clock <= 500000) { 2910 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 2911 } else { 2912 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 2913 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 2914 } 2915 intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val); 2916 2917 val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port)); 2918 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 2919 if (crtc_state->port_clock <= 500000) { 2920 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 2921 } else { 2922 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 2923 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 2924 } 2925 intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val); 2926 } 2927 2928 /* Program MG_TX_PISO_READLOAD with values from vswing table */ 2929 for (ln = 0; ln < 2; ln++) { 2930 val = intel_de_read(dev_priv, 2931 MG_TX1_PISO_READLOAD(ln, tc_port)); 2932 val |= CRI_CALCINIT; 2933 intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port), 2934 val); 2935 2936 val = intel_de_read(dev_priv, 2937 MG_TX2_PISO_READLOAD(ln, tc_port)); 2938 val |= CRI_CALCINIT; 2939 intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port), 2940 val); 2941 } 2942 } 2943 2944 static void icl_ddi_vswing_sequence(struct intel_encoder *encoder, 2945 const struct intel_crtc_state *crtc_state, 2946 int level) 2947 { 2948 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2949 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2950 2951 if (intel_phy_is_combo(dev_priv, phy)) 2952 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2953 else 2954 icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2955 } 2956 2957 static void 2958 tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2959 const struct intel_crtc_state *crtc_state, 2960 int level) 2961 { 2962 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2963 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 2964 const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations; 2965 u32 val, dpcnt_mask, dpcnt_val; 2966 int n_entries, ln; 2967 2968 if (enc_to_dig_port(encoder)->tc_mode == TC_PORT_TBT_ALT) 2969 return; 2970 2971 ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 2972 2973 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2974 return; 2975 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2976 level = n_entries - 1; 2977 2978 dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK | 2979 DKL_TX_DE_EMPAHSIS_COEFF_MASK | 2980 DKL_TX_VSWING_CONTROL_MASK); 2981 dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control); 2982 dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control); 2983 dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control); 2984 2985 for (ln = 0; ln < 2; ln++) { 2986 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2987 HIP_INDEX_VAL(tc_port, ln)); 2988 2989 intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0); 2990 2991 /* All the registers are RMW */ 2992 val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port)); 2993 val 
&= ~dpcnt_mask; 2994 val |= dpcnt_val; 2995 intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val); 2996 2997 val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port)); 2998 val &= ~dpcnt_mask; 2999 val |= dpcnt_val; 3000 intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val); 3001 3002 val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port)); 3003 val &= ~DKL_TX_DP20BITMODE; 3004 intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val); 3005 } 3006 } 3007 3008 static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder, 3009 const struct intel_crtc_state *crtc_state, 3010 int level) 3011 { 3012 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3013 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3014 3015 if (intel_phy_is_combo(dev_priv, phy)) 3016 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 3017 else 3018 tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level); 3019 } 3020 3021 static int translate_signal_level(struct intel_dp *intel_dp, 3022 u8 signal_levels) 3023 { 3024 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3025 int i; 3026 3027 for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) { 3028 if (index_to_dp_signal_levels[i] == signal_levels) 3029 return i; 3030 } 3031 3032 drm_WARN(&i915->drm, 1, 3033 "Unsupported voltage swing/pre-emphasis level: 0x%x\n", 3034 signal_levels); 3035 3036 return 0; 3037 } 3038 3039 static int intel_ddi_dp_level(struct intel_dp *intel_dp) 3040 { 3041 u8 train_set = intel_dp->train_set[0]; 3042 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 3043 DP_TRAIN_PRE_EMPHASIS_MASK); 3044 3045 return translate_signal_level(intel_dp, signal_levels); 3046 } 3047 3048 static void 3049 tgl_set_signal_levels(struct intel_dp *intel_dp, 3050 const struct intel_crtc_state *crtc_state) 3051 { 3052 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3053 int level = intel_ddi_dp_level(intel_dp); 3054 3055 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3056 } 3057 3058 static void 3059 icl_set_signal_levels(struct intel_dp *intel_dp, 3060 const struct intel_crtc_state *crtc_state) 3061 { 3062 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3063 int level = intel_ddi_dp_level(intel_dp); 3064 3065 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3066 } 3067 3068 static void 3069 cnl_set_signal_levels(struct intel_dp *intel_dp, 3070 const struct intel_crtc_state *crtc_state) 3071 { 3072 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3073 int level = intel_ddi_dp_level(intel_dp); 3074 3075 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3076 } 3077 3078 static void 3079 bxt_set_signal_levels(struct intel_dp *intel_dp, 3080 const struct intel_crtc_state *crtc_state) 3081 { 3082 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3083 int level = intel_ddi_dp_level(intel_dp); 3084 3085 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3086 } 3087 3088 static void 3089 hsw_set_signal_levels(struct intel_dp *intel_dp, 3090 const struct intel_crtc_state *crtc_state) 3091 { 3092 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3093 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3094 int level = intel_ddi_dp_level(intel_dp); 3095 enum port port = encoder->port; 3096 u32 signal_levels; 3097 3098 signal_levels = DDI_BUF_TRANS_SELECT(level); 3099 3100 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n", 3101 signal_levels); 3102 3103 intel_dp->DP &= ~DDI_BUF_EMP_MASK; 3104 intel_dp->DP |= 
signal_levels; 3105 3106 if (IS_GEN9_BC(dev_priv)) 3107 skl_ddi_set_iboost(encoder, crtc_state, level); 3108 3109 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 3110 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 3111 } 3112 3113 static u32 icl_dpclka_cfgcr0_clk_off(struct drm_i915_private *dev_priv, 3114 enum phy phy) 3115 { 3116 if (IS_ROCKETLAKE(dev_priv)) { 3117 return RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3118 } else if (intel_phy_is_combo(dev_priv, phy)) { 3119 return ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3120 } else if (intel_phy_is_tc(dev_priv, phy)) { 3121 enum tc_port tc_port = intel_port_to_tc(dev_priv, 3122 (enum port)phy); 3123 3124 return ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port); 3125 } 3126 3127 return 0; 3128 } 3129 3130 static void dg1_map_plls_to_ports(struct intel_encoder *encoder, 3131 const struct intel_crtc_state *crtc_state) 3132 { 3133 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3134 struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3135 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3136 u32 val; 3137 3138 /* 3139 * If we fail this, something went very wrong: first 2 PLLs should be 3140 * used by first 2 phys and last 2 PLLs by last phys 3141 */ 3142 if (drm_WARN_ON(&dev_priv->drm, 3143 (pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) || 3144 (pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C))) 3145 return; 3146 3147 mutex_lock(&dev_priv->dpll.lock); 3148 3149 val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3150 drm_WARN_ON(&dev_priv->drm, 3151 (val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)) == 0); 3152 3153 val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3154 val |= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3155 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3156 intel_de_posting_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3157 3158 val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3159 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3160 3161 mutex_unlock(&dev_priv->dpll.lock); 3162 } 3163 3164 static void icl_map_plls_to_ports(struct intel_encoder *encoder, 3165 const struct intel_crtc_state *crtc_state) 3166 { 3167 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3168 struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3169 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3170 u32 val; 3171 3172 mutex_lock(&dev_priv->dpll.lock); 3173 3174 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3175 drm_WARN_ON(&dev_priv->drm, 3176 (val & icl_dpclka_cfgcr0_clk_off(dev_priv, phy)) == 0); 3177 3178 if (intel_phy_is_combo(dev_priv, phy)) { 3179 u32 mask, sel; 3180 3181 if (IS_ROCKETLAKE(dev_priv)) { 3182 mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3183 sel = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3184 } else { 3185 mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3186 sel = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3187 } 3188 3189 /* 3190 * Even though this register references DDIs, note that we 3191 * want to pass the PHY rather than the port (DDI). For 3192 * ICL, port=phy in all cases so it doesn't matter, but for 3193 * EHL the bspec notes the following: 3194 * 3195 * "DDID clock tied to DDIA clock, so DPCLKA_CFGCR0 DDIA 3196 * Clock Select chooses the PLL for both DDIA and DDID and 3197 * drives port A in all cases." 
3198 */ 3199 val &= ~mask; 3200 val |= sel; 3201 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3202 intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0); 3203 } 3204 3205 val &= ~icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3206 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3207 3208 mutex_unlock(&dev_priv->dpll.lock); 3209 } 3210 3211 static void dg1_unmap_plls_to_ports(struct intel_encoder *encoder) 3212 { 3213 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3214 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3215 3216 mutex_lock(&dev_priv->dpll.lock); 3217 3218 intel_de_rmw(dev_priv, DG1_DPCLKA_CFGCR0(phy), 0, 3219 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 3220 3221 mutex_unlock(&dev_priv->dpll.lock); 3222 } 3223 3224 static void icl_unmap_plls_to_ports(struct intel_encoder *encoder) 3225 { 3226 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3227 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3228 u32 val; 3229 3230 mutex_lock(&dev_priv->dpll.lock); 3231 3232 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3233 val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3234 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3235 3236 mutex_unlock(&dev_priv->dpll.lock); 3237 } 3238 3239 static void dg1_sanitize_port_clk_off(struct drm_i915_private *dev_priv, 3240 u32 port_mask, bool ddi_clk_needed) 3241 { 3242 enum port port; 3243 u32 val; 3244 3245 for_each_port_masked(port, port_mask) { 3246 enum phy phy = intel_port_to_phy(dev_priv, port); 3247 bool ddi_clk_off; 3248 3249 val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3250 ddi_clk_off = val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3251 3252 if (ddi_clk_needed == !ddi_clk_off) 3253 continue; 3254 3255 /* 3256 * Punt on the case now where clock is gated, but it would 3257 * be needed by the port. Something else is really broken then. 3258 */ 3259 if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed)) 3260 continue; 3261 3262 drm_notice(&dev_priv->drm, 3263 "PHY %c is disabled with an ungated DDI clock, gate it\n", 3264 phy_name(phy)); 3265 val |= DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3266 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3267 } 3268 } 3269 3270 static void icl_sanitize_port_clk_off(struct drm_i915_private *dev_priv, 3271 u32 port_mask, bool ddi_clk_needed) 3272 { 3273 enum port port; 3274 u32 val; 3275 3276 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3277 for_each_port_masked(port, port_mask) { 3278 enum phy phy = intel_port_to_phy(dev_priv, port); 3279 bool ddi_clk_off = val & icl_dpclka_cfgcr0_clk_off(dev_priv, 3280 phy); 3281 3282 if (ddi_clk_needed == !ddi_clk_off) 3283 continue; 3284 3285 /* 3286 * Punt on the case now where clock is gated, but it would 3287 * be needed by the port. Something else is really broken then. 3288 */ 3289 if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed)) 3290 continue; 3291 3292 drm_notice(&dev_priv->drm, 3293 "PHY %c is disabled/in DSI mode with an ungated DDI clock, gate it\n", 3294 phy_name(phy)); 3295 val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3296 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3297 } 3298 } 3299 3300 void icl_sanitize_encoder_pll_mapping(struct intel_encoder *encoder) 3301 { 3302 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3303 u32 port_mask; 3304 bool ddi_clk_needed; 3305 3306 /* 3307 * In case of DP MST, we sanitize the primary encoder only, not the 3308 * virtual ones. 
3309 */ 3310 if (encoder->type == INTEL_OUTPUT_DP_MST) 3311 return; 3312 3313 if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) { 3314 u8 pipe_mask; 3315 bool is_mst; 3316 3317 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 3318 /* 3319 * In the unlikely case that BIOS enables DP in MST mode, just 3320 * warn since our MST HW readout is incomplete. 3321 */ 3322 if (drm_WARN_ON(&dev_priv->drm, is_mst)) 3323 return; 3324 } 3325 3326 port_mask = BIT(encoder->port); 3327 ddi_clk_needed = encoder->base.crtc; 3328 3329 if (encoder->type == INTEL_OUTPUT_DSI) { 3330 struct intel_encoder *other_encoder; 3331 3332 port_mask = intel_dsi_encoder_ports(encoder); 3333 /* 3334 * Sanity check that we haven't incorrectly registered another 3335 * encoder using any of the ports of this DSI encoder. 3336 */ 3337 for_each_intel_encoder(&dev_priv->drm, other_encoder) { 3338 if (other_encoder == encoder) 3339 continue; 3340 3341 if (drm_WARN_ON(&dev_priv->drm, 3342 port_mask & BIT(other_encoder->port))) 3343 return; 3344 } 3345 /* 3346 * For DSI we keep the ddi clocks gated 3347 * except during enable/disable sequence. 3348 */ 3349 ddi_clk_needed = false; 3350 } 3351 3352 if (IS_DG1(dev_priv)) 3353 dg1_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed); 3354 else 3355 icl_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed); 3356 } 3357 3358 static void intel_ddi_clk_select(struct intel_encoder *encoder, 3359 const struct intel_crtc_state *crtc_state) 3360 { 3361 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3362 enum port port = encoder->port; 3363 enum phy phy = intel_port_to_phy(dev_priv, port); 3364 u32 val; 3365 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3366 3367 if (drm_WARN_ON(&dev_priv->drm, !pll)) 3368 return; 3369 3370 mutex_lock(&dev_priv->dpll.lock); 3371 3372 if (INTEL_GEN(dev_priv) >= 11) { 3373 if (!intel_phy_is_combo(dev_priv, phy)) 3374 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3375 icl_pll_to_ddi_clk_sel(encoder, crtc_state)); 3376 else if (IS_JSL_EHL(dev_priv) && port >= PORT_C) 3377 /* 3378 * MG does not exist but the programming is required 3379 * to ungate DDIC and DDID 3380 */ 3381 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3382 DDI_CLK_SEL_MG); 3383 } else if (IS_CANNONLAKE(dev_priv)) { 3384 /* Configure DPCLKA_CFGCR0 to map the DPLL to the DDI. */ 3385 val = intel_de_read(dev_priv, DPCLKA_CFGCR0); 3386 val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port); 3387 val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port); 3388 intel_de_write(dev_priv, DPCLKA_CFGCR0, val); 3389 3390 /* 3391 * Configure DPCLKA_CFGCR0 to turn on the clock for the DDI. 3392 * This step and the step before must be done with separate 3393 * register writes. 
3394 */ 3395 val = intel_de_read(dev_priv, DPCLKA_CFGCR0); 3396 val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port); 3397 intel_de_write(dev_priv, DPCLKA_CFGCR0, val); 3398 } else if (IS_GEN9_BC(dev_priv)) { 3399 /* DDI -> PLL mapping */ 3400 val = intel_de_read(dev_priv, DPLL_CTRL2); 3401 3402 val &= ~(DPLL_CTRL2_DDI_CLK_OFF(port) | 3403 DPLL_CTRL2_DDI_CLK_SEL_MASK(port)); 3404 val |= (DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) | 3405 DPLL_CTRL2_DDI_SEL_OVERRIDE(port)); 3406 3407 intel_de_write(dev_priv, DPLL_CTRL2, val); 3408 3409 } else if (INTEL_GEN(dev_priv) < 9) { 3410 intel_de_write(dev_priv, PORT_CLK_SEL(port), 3411 hsw_pll_to_ddi_pll_sel(pll)); 3412 } 3413 3414 mutex_unlock(&dev_priv->dpll.lock); 3415 } 3416 3417 static void intel_ddi_clk_disable(struct intel_encoder *encoder) 3418 { 3419 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3420 enum port port = encoder->port; 3421 enum phy phy = intel_port_to_phy(dev_priv, port); 3422 3423 if (INTEL_GEN(dev_priv) >= 11) { 3424 if (!intel_phy_is_combo(dev_priv, phy) || 3425 (IS_JSL_EHL(dev_priv) && port >= PORT_C)) 3426 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3427 DDI_CLK_SEL_NONE); 3428 } else if (IS_CANNONLAKE(dev_priv)) { 3429 intel_de_write(dev_priv, DPCLKA_CFGCR0, 3430 intel_de_read(dev_priv, DPCLKA_CFGCR0) | DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 3431 } else if (IS_GEN9_BC(dev_priv)) { 3432 intel_de_write(dev_priv, DPLL_CTRL2, 3433 intel_de_read(dev_priv, DPLL_CTRL2) | DPLL_CTRL2_DDI_CLK_OFF(port)); 3434 } else if (INTEL_GEN(dev_priv) < 9) { 3435 intel_de_write(dev_priv, PORT_CLK_SEL(port), 3436 PORT_CLK_SEL_NONE); 3437 } 3438 } 3439 3440 static void 3441 icl_program_mg_dp_mode(struct intel_digital_port *dig_port, 3442 const struct intel_crtc_state *crtc_state) 3443 { 3444 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 3445 enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port); 3446 u32 ln0, ln1, pin_assignment; 3447 u8 width; 3448 3449 if (dig_port->tc_mode == TC_PORT_TBT_ALT) 3450 return; 3451 3452 if (INTEL_GEN(dev_priv) >= 12) { 3453 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3454 HIP_INDEX_VAL(tc_port, 0x0)); 3455 ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 3456 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3457 HIP_INDEX_VAL(tc_port, 0x1)); 3458 ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 3459 } else { 3460 ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port)); 3461 ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port)); 3462 } 3463 3464 ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 3465 ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 3466 3467 /* DPPATC */ 3468 pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port); 3469 width = crtc_state->lane_count; 3470 3471 switch (pin_assignment) { 3472 case 0x0: 3473 drm_WARN_ON(&dev_priv->drm, 3474 dig_port->tc_mode != TC_PORT_LEGACY); 3475 if (width == 1) { 3476 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3477 } else { 3478 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3479 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3480 } 3481 break; 3482 case 0x1: 3483 if (width == 4) { 3484 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3485 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3486 } 3487 break; 3488 case 0x2: 3489 if (width == 2) { 3490 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3491 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3492 } 3493 break; 3494 case 0x3: 3495 case 0x5: 3496 if (width == 1) { 3497 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 3498 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3499 } else { 3500 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3501 ln1 |= 
MG_DP_MODE_CFG_DP_X2_MODE; 3502 } 3503 break; 3504 case 0x4: 3505 case 0x6: 3506 if (width == 1) { 3507 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 3508 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3509 } else { 3510 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3511 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3512 } 3513 break; 3514 default: 3515 MISSING_CASE(pin_assignment); 3516 } 3517 3518 if (INTEL_GEN(dev_priv) >= 12) { 3519 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3520 HIP_INDEX_VAL(tc_port, 0x0)); 3521 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0); 3522 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3523 HIP_INDEX_VAL(tc_port, 0x1)); 3524 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1); 3525 } else { 3526 intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0); 3527 intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1); 3528 } 3529 } 3530 3531 static enum transcoder 3532 tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state) 3533 { 3534 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 3535 return crtc_state->mst_master_transcoder; 3536 else 3537 return crtc_state->cpu_transcoder; 3538 } 3539 3540 i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder, 3541 const struct intel_crtc_state *crtc_state) 3542 { 3543 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3544 3545 if (INTEL_GEN(dev_priv) >= 12) 3546 return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state)); 3547 else 3548 return DP_TP_CTL(encoder->port); 3549 } 3550 3551 i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder, 3552 const struct intel_crtc_state *crtc_state) 3553 { 3554 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3555 3556 if (INTEL_GEN(dev_priv) >= 12) 3557 return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state)); 3558 else 3559 return DP_TP_STATUS(encoder->port); 3560 } 3561 3562 static void intel_dp_sink_set_msa_timing_par_ignore_state(struct intel_dp *intel_dp, 3563 const struct intel_crtc_state *crtc_state, 3564 bool enable) 3565 { 3566 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3567 3568 if (!crtc_state->vrr.enable) 3569 return; 3570 3571 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_DOWNSPREAD_CTRL, 3572 enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0) <= 0) 3573 drm_dbg_kms(&i915->drm, 3574 "Failed to set MSA_TIMING_PAR_IGNORE %s in the sink\n", 3575 enable ? 
"enable" : "disable"); 3576 } 3577 3578 static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp, 3579 const struct intel_crtc_state *crtc_state) 3580 { 3581 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3582 3583 if (!crtc_state->fec_enable) 3584 return; 3585 3586 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0) 3587 drm_dbg_kms(&i915->drm, 3588 "Failed to set FEC_READY in the sink\n"); 3589 } 3590 3591 static void intel_ddi_enable_fec(struct intel_encoder *encoder, 3592 const struct intel_crtc_state *crtc_state) 3593 { 3594 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3595 struct intel_dp *intel_dp; 3596 u32 val; 3597 3598 if (!crtc_state->fec_enable) 3599 return; 3600 3601 intel_dp = enc_to_intel_dp(encoder); 3602 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3603 val |= DP_TP_CTL_FEC_ENABLE; 3604 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3605 } 3606 3607 static void intel_ddi_disable_fec_state(struct intel_encoder *encoder, 3608 const struct intel_crtc_state *crtc_state) 3609 { 3610 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3611 struct intel_dp *intel_dp; 3612 u32 val; 3613 3614 if (!crtc_state->fec_enable) 3615 return; 3616 3617 intel_dp = enc_to_intel_dp(encoder); 3618 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3619 val &= ~DP_TP_CTL_FEC_ENABLE; 3620 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3621 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3622 } 3623 3624 static void intel_ddi_power_up_lanes(struct intel_encoder *encoder, 3625 const struct intel_crtc_state *crtc_state) 3626 { 3627 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3628 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3629 enum phy phy = intel_port_to_phy(i915, encoder->port); 3630 3631 if (intel_phy_is_combo(i915, phy)) { 3632 bool lane_reversal = 3633 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 3634 3635 intel_combo_phy_power_up_lanes(i915, phy, false, 3636 crtc_state->lane_count, 3637 lane_reversal); 3638 } 3639 } 3640 3641 static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state, 3642 struct intel_encoder *encoder, 3643 const struct intel_crtc_state *crtc_state, 3644 const struct drm_connector_state *conn_state) 3645 { 3646 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3647 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3648 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3649 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3650 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 3651 int level = intel_ddi_dp_level(intel_dp); 3652 3653 intel_dp_set_link_params(intel_dp, 3654 crtc_state->port_clock, 3655 crtc_state->lane_count); 3656 3657 /* 3658 * 1. Enable Power Wells 3659 * 3660 * This was handled at the beginning of intel_atomic_commit_tail(), 3661 * before we called down into this function. 3662 */ 3663 3664 /* 2. Enable Panel Power if PPS is required */ 3665 intel_pps_on(intel_dp); 3666 3667 /* 3668 * 3. For non-TBT Type-C ports, set FIA lane count 3669 * (DFLEXDPSP.DPX4TXLATC) 3670 * 3671 * This was done before tgl_ddi_pre_enable_dp by 3672 * hsw_crtc_enable()->intel_encoders_pre_pll_enable(). 3673 */ 3674 3675 /* 3676 * 4. Enable the port PLL. 3677 * 3678 * The PLL enabling itself was already done before this function by 3679 * hsw_crtc_enable()->intel_enable_shared_dpll(). 
We need only 3680 * configure the PLL to port mapping here. 3681 */ 3682 intel_ddi_clk_select(encoder, crtc_state); 3683 3684 /* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */ 3685 if (!intel_phy_is_tc(dev_priv, phy) || 3686 dig_port->tc_mode != TC_PORT_TBT_ALT) { 3687 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 3688 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 3689 dig_port->ddi_io_power_domain); 3690 } 3691 3692 /* 6. Program DP_MODE */ 3693 icl_program_mg_dp_mode(dig_port, crtc_state); 3694 3695 /* 3696 * 7. The rest of the below are substeps under the bspec's "Enable and 3697 * Train Display Port" step. Note that steps that are specific to 3698 * MST will be handled by intel_mst_pre_enable_dp() before/after it 3699 * calls into this function. Also intel_mst_pre_enable_dp() only calls 3700 * us when active_mst_links==0, so any steps designated for "single 3701 * stream or multi-stream master transcoder" can just be performed 3702 * unconditionally here. 3703 */ 3704 3705 /* 3706 * 7.a Configure Transcoder Clock Select to direct the Port clock to the 3707 * Transcoder. 3708 */ 3709 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3710 3711 /* 3712 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST 3713 * Transport Select 3714 */ 3715 intel_ddi_config_transcoder_func(encoder, crtc_state); 3716 3717 /* 3718 * 7.c Configure & enable DP_TP_CTL with link training pattern 1 3719 * selected 3720 * 3721 * This will be handled by the intel_dp_start_link_train() farther 3722 * down this function. 3723 */ 3724 3725 /* 7.e Configure voltage swing and related IO settings */ 3726 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3727 3728 /* 3729 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up 3730 * the used lanes of the DDI. 3731 */ 3732 intel_ddi_power_up_lanes(encoder, crtc_state); 3733 3734 /* 3735 * 7.g Configure and enable DDI_BUF_CTL 3736 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout 3737 * after 500 us. 3738 * 3739 * We only configure what the register value will be here. Actual 3740 * enabling happens during link training farther down. 
3741 */ 3742 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 3743 3744 if (!is_mst) 3745 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 3746 3747 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 3748 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true); 3749 /* 3750 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit 3751 * in the FEC_CONFIGURATION register to 1 before initiating link 3752 * training 3753 */ 3754 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 3755 3756 intel_dp_check_frl_training(intel_dp); 3757 intel_dp_pcon_dsc_configure(intel_dp, crtc_state); 3758 3759 /* 3760 * 7.i Follow DisplayPort specification training sequence (see notes for 3761 * failure handling) 3762 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle 3763 * Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent) 3764 * (timeout after 800 us) 3765 */ 3766 intel_dp_start_link_train(intel_dp, crtc_state); 3767 3768 /* 7.k Set DP_TP_CTL link training to Normal */ 3769 if (!is_trans_port_sync_mode(crtc_state)) 3770 intel_dp_stop_link_train(intel_dp, crtc_state); 3771 3772 /* 7.l Configure and enable FEC if needed */ 3773 intel_ddi_enable_fec(encoder, crtc_state); 3774 if (!crtc_state->bigjoiner) 3775 intel_dsc_enable(encoder, crtc_state); 3776 } 3777 3778 static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state, 3779 struct intel_encoder *encoder, 3780 const struct intel_crtc_state *crtc_state, 3781 const struct drm_connector_state *conn_state) 3782 { 3783 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3784 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3785 enum port port = encoder->port; 3786 enum phy phy = intel_port_to_phy(dev_priv, port); 3787 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3788 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 3789 int level = intel_ddi_dp_level(intel_dp); 3790 3791 if (INTEL_GEN(dev_priv) < 11) 3792 drm_WARN_ON(&dev_priv->drm, 3793 is_mst && (port == PORT_A || port == PORT_E)); 3794 else 3795 drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A); 3796 3797 intel_dp_set_link_params(intel_dp, 3798 crtc_state->port_clock, 3799 crtc_state->lane_count); 3800 3801 intel_pps_on(intel_dp); 3802 3803 intel_ddi_clk_select(encoder, crtc_state); 3804 3805 if (!intel_phy_is_tc(dev_priv, phy) || 3806 dig_port->tc_mode != TC_PORT_TBT_ALT) { 3807 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 3808 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 3809 dig_port->ddi_io_power_domain); 3810 } 3811 3812 icl_program_mg_dp_mode(dig_port, crtc_state); 3813 3814 if (INTEL_GEN(dev_priv) >= 11) 3815 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3816 else if (IS_CANNONLAKE(dev_priv)) 3817 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3818 else if (IS_GEN9_LP(dev_priv)) 3819 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3820 else 3821 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 3822 3823 intel_ddi_power_up_lanes(encoder, crtc_state); 3824 3825 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 3826 if (!is_mst) 3827 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 3828 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 3829 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, 3830 true); 3831 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 3832 intel_dp_start_link_train(intel_dp, crtc_state); 3833 if ((port != PORT_A || INTEL_GEN(dev_priv) >= 9) && 3834 !is_trans_port_sync_mode(crtc_state)) 3835 
intel_dp_stop_link_train(intel_dp, crtc_state); 3836 3837 intel_ddi_enable_fec(encoder, crtc_state); 3838 3839 if (!is_mst) 3840 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3841 3842 if (!crtc_state->bigjoiner) 3843 intel_dsc_enable(encoder, crtc_state); 3844 } 3845 3846 static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state, 3847 struct intel_encoder *encoder, 3848 const struct intel_crtc_state *crtc_state, 3849 const struct drm_connector_state *conn_state) 3850 { 3851 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3852 3853 if (INTEL_GEN(dev_priv) >= 12) 3854 tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 3855 else 3856 hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 3857 3858 /* MST will call a setting of MSA after an allocating of Virtual Channel 3859 * from MST encoder pre_enable callback. 3860 */ 3861 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 3862 intel_ddi_set_dp_msa(crtc_state, conn_state); 3863 3864 intel_dp_set_m_n(crtc_state, M1_N1); 3865 } 3866 } 3867 3868 static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state, 3869 struct intel_encoder *encoder, 3870 const struct intel_crtc_state *crtc_state, 3871 const struct drm_connector_state *conn_state) 3872 { 3873 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3874 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 3875 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3876 int level = intel_ddi_hdmi_level(encoder, crtc_state); 3877 3878 intel_dp_dual_mode_set_tmds_output(intel_hdmi, true); 3879 intel_ddi_clk_select(encoder, crtc_state); 3880 3881 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 3882 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 3883 dig_port->ddi_io_power_domain); 3884 3885 icl_program_mg_dp_mode(dig_port, crtc_state); 3886 3887 if (INTEL_GEN(dev_priv) >= 12) 3888 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3889 else if (INTEL_GEN(dev_priv) == 11) 3890 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3891 else if (IS_CANNONLAKE(dev_priv)) 3892 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3893 else if (IS_GEN9_LP(dev_priv)) 3894 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3895 else 3896 intel_prepare_hdmi_ddi_buffers(encoder, level); 3897 3898 if (IS_GEN9_BC(dev_priv)) 3899 skl_ddi_set_iboost(encoder, crtc_state, level); 3900 3901 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3902 3903 dig_port->set_infoframes(encoder, 3904 crtc_state->has_infoframe, 3905 crtc_state, conn_state); 3906 } 3907 3908 static void intel_ddi_pre_enable(struct intel_atomic_state *state, 3909 struct intel_encoder *encoder, 3910 const struct intel_crtc_state *crtc_state, 3911 const struct drm_connector_state *conn_state) 3912 { 3913 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 3914 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 3915 enum pipe pipe = crtc->pipe; 3916 3917 /* 3918 * When called from DP MST code: 3919 * - conn_state will be NULL 3920 * - encoder will be the main encoder (ie. 
mst->primary) 3921 * - the main connector associated with this port 3922 * won't be active or linked to a crtc 3923 * - crtc_state will be the state of the first stream to 3924 * be activated on this port, and it may not be the same 3925 * stream that will be deactivated last, but each stream 3926 * should have a state that is identical when it comes to 3927 * the DP link parameters 3928 */ 3929 3930 drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder); 3931 3932 if (IS_DG1(dev_priv)) 3933 dg1_map_plls_to_ports(encoder, crtc_state); 3934 else if (INTEL_GEN(dev_priv) >= 11) 3935 icl_map_plls_to_ports(encoder, crtc_state); 3936 3937 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true); 3938 3939 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 3940 intel_ddi_pre_enable_hdmi(state, encoder, crtc_state, 3941 conn_state); 3942 } else { 3943 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3944 3945 intel_ddi_pre_enable_dp(state, encoder, crtc_state, 3946 conn_state); 3947 3948 /* FIXME precompute everything properly */ 3949 /* FIXME how do we turn infoframes off again? */ 3950 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 3951 dig_port->set_infoframes(encoder, 3952 crtc_state->has_infoframe, 3953 crtc_state, conn_state); 3954 } 3955 } 3956 3957 static void intel_disable_ddi_buf(struct intel_encoder *encoder, 3958 const struct intel_crtc_state *crtc_state) 3959 { 3960 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3961 enum port port = encoder->port; 3962 bool wait = false; 3963 u32 val; 3964 3965 val = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 3966 if (val & DDI_BUF_CTL_ENABLE) { 3967 val &= ~DDI_BUF_CTL_ENABLE; 3968 intel_de_write(dev_priv, DDI_BUF_CTL(port), val); 3969 wait = true; 3970 } 3971 3972 if (intel_crtc_has_dp_encoder(crtc_state)) { 3973 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3974 val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 3975 val |= DP_TP_CTL_LINK_TRAIN_PAT1; 3976 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3977 } 3978 3979 /* Disable FEC in DP Sink */ 3980 intel_ddi_disable_fec_state(encoder, crtc_state); 3981 3982 if (wait) 3983 intel_wait_ddi_buf_idle(dev_priv, port); 3984 } 3985 3986 static void intel_ddi_post_disable_dp(struct intel_atomic_state *state, 3987 struct intel_encoder *encoder, 3988 const struct intel_crtc_state *old_crtc_state, 3989 const struct drm_connector_state *old_conn_state) 3990 { 3991 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3992 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3993 struct intel_dp *intel_dp = &dig_port->dp; 3994 bool is_mst = intel_crtc_has_type(old_crtc_state, 3995 INTEL_OUTPUT_DP_MST); 3996 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3997 3998 if (!is_mst) 3999 intel_dp_set_infoframes(encoder, false, 4000 old_crtc_state, old_conn_state); 4001 4002 /* 4003 * Power down the sink before disabling the port, otherwise we end 4004 * up getting interrupts from the sink on detecting link loss.
4005 */ 4006 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 4007 4008 if (INTEL_GEN(dev_priv) >= 12) { 4009 if (is_mst) { 4010 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder; 4011 u32 val; 4012 4013 val = intel_de_read(dev_priv, 4014 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4015 val &= ~(TGL_TRANS_DDI_PORT_MASK | 4016 TRANS_DDI_MODE_SELECT_MASK); 4017 intel_de_write(dev_priv, 4018 TRANS_DDI_FUNC_CTL(cpu_transcoder), 4019 val); 4020 } 4021 } else { 4022 if (!is_mst) 4023 intel_ddi_disable_pipe_clock(old_crtc_state); 4024 } 4025 4026 intel_disable_ddi_buf(encoder, old_crtc_state); 4027 4028 /* 4029 * From TGL spec: "If single stream or multi-stream master transcoder: 4030 * Configure Transcoder Clock select to direct no clock to the 4031 * transcoder" 4032 */ 4033 if (INTEL_GEN(dev_priv) >= 12) 4034 intel_ddi_disable_pipe_clock(old_crtc_state); 4035 4036 intel_pps_vdd_on(intel_dp); 4037 intel_pps_off(intel_dp); 4038 4039 if (!intel_phy_is_tc(dev_priv, phy) || 4040 dig_port->tc_mode != TC_PORT_TBT_ALT) 4041 intel_display_power_put(dev_priv, 4042 dig_port->ddi_io_power_domain, 4043 fetch_and_zero(&dig_port->ddi_io_wakeref)); 4044 4045 intel_ddi_clk_disable(encoder); 4046 } 4047 4048 static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state, 4049 struct intel_encoder *encoder, 4050 const struct intel_crtc_state *old_crtc_state, 4051 const struct drm_connector_state *old_conn_state) 4052 { 4053 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4054 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4055 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 4056 4057 dig_port->set_infoframes(encoder, false, 4058 old_crtc_state, old_conn_state); 4059 4060 intel_ddi_disable_pipe_clock(old_crtc_state); 4061 4062 intel_disable_ddi_buf(encoder, old_crtc_state); 4063 4064 intel_display_power_put(dev_priv, 4065 dig_port->ddi_io_power_domain, 4066 fetch_and_zero(&dig_port->ddi_io_wakeref)); 4067 4068 intel_ddi_clk_disable(encoder); 4069 4070 intel_dp_dual_mode_set_tmds_output(intel_hdmi, false); 4071 } 4072 4073 static void intel_ddi_post_disable(struct intel_atomic_state *state, 4074 struct intel_encoder *encoder, 4075 const struct intel_crtc_state *old_crtc_state, 4076 const struct drm_connector_state *old_conn_state) 4077 { 4078 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4079 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4080 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 4081 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 4082 4083 if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) { 4084 intel_crtc_vblank_off(old_crtc_state); 4085 4086 intel_disable_pipe(old_crtc_state); 4087 4088 intel_vrr_disable(old_crtc_state); 4089 4090 intel_ddi_disable_transcoder_func(old_crtc_state); 4091 4092 intel_dsc_disable(old_crtc_state); 4093 4094 if (INTEL_GEN(dev_priv) >= 9) 4095 skl_scaler_disable(old_crtc_state); 4096 else 4097 ilk_pfit_disable(old_crtc_state); 4098 } 4099 4100 if (old_crtc_state->bigjoiner_linked_crtc) { 4101 struct intel_atomic_state *state = 4102 to_intel_atomic_state(old_crtc_state->uapi.state); 4103 struct intel_crtc *slave = 4104 old_crtc_state->bigjoiner_linked_crtc; 4105 const struct intel_crtc_state *old_slave_crtc_state = 4106 intel_atomic_get_old_crtc_state(state, slave); 4107 4108 intel_crtc_vblank_off(old_slave_crtc_state); 4109 trace_intel_pipe_disable(slave); 4110 4111 intel_dsc_disable(old_slave_crtc_state); 4112 skl_scaler_disable(old_slave_crtc_state); 4113 } 4114 
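/* The HDMI/DP specific post disable below disables the DDI buffer, gates the DDI clock and drops the DDI IO power reference; the PLL to port mapping and the AUX power reference (if held) are released afterwards in this function. */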
4115 /* 4116 * When called from DP MST code: 4117 * - old_conn_state will be NULL 4118 * - encoder will be the main encoder (i.e. mst->primary) 4119 * - the main connector associated with this port 4120 * won't be active or linked to a crtc 4121 * - old_crtc_state will be the state of the last stream to 4122 * be deactivated on this port, and it may not be the same 4123 * stream that was activated last, but each stream 4124 * should have a state that is identical when it comes to 4125 * the DP link parameters 4126 */ 4127 4128 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 4129 intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state, 4130 old_conn_state); 4131 else 4132 intel_ddi_post_disable_dp(state, encoder, old_crtc_state, 4133 old_conn_state); 4134 4135 if (IS_DG1(dev_priv)) 4136 dg1_unmap_plls_to_ports(encoder); 4137 else if (INTEL_GEN(dev_priv) >= 11) 4138 icl_unmap_plls_to_ports(encoder); 4139 4140 if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port) 4141 intel_display_power_put(dev_priv, 4142 intel_ddi_main_link_aux_domain(dig_port), 4143 fetch_and_zero(&dig_port->aux_wakeref)); 4144 4145 if (is_tc_port) 4146 intel_tc_port_put_link(dig_port); 4147 } 4148 4149 void intel_ddi_fdi_post_disable(struct intel_atomic_state *state, 4150 struct intel_encoder *encoder, 4151 const struct intel_crtc_state *old_crtc_state, 4152 const struct drm_connector_state *old_conn_state) 4153 { 4154 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4155 u32 val; 4156 4157 /* 4158 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable) 4159 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN, 4160 * step 13 is the correct place for it. Step 18 is where it was 4161 * originally before the BUN. 4162 */ 4163 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4164 val &= ~FDI_RX_ENABLE; 4165 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4166 4167 intel_disable_ddi_buf(encoder, old_crtc_state); 4168 intel_ddi_clk_disable(encoder); 4169 4170 val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 4171 val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 4172 val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 4173 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val); 4174 4175 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4176 val &= ~FDI_PCDCLK; 4177 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4178 4179 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4180 val &= ~FDI_RX_PLL_ENABLE; 4181 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4182 } 4183 4184 static void trans_port_sync_stop_link_train(struct intel_atomic_state *state, 4185 struct intel_encoder *encoder, 4186 const struct intel_crtc_state *crtc_state) 4187 { 4188 const struct drm_connector_state *conn_state; 4189 struct drm_connector *conn; 4190 int i; 4191 4192 if (!crtc_state->sync_mode_slaves_mask) 4193 return; 4194 4195 for_each_new_connector_in_state(&state->base, conn, conn_state, i) { 4196 struct intel_encoder *slave_encoder = 4197 to_intel_encoder(conn_state->best_encoder); 4198 struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc); 4199 const struct intel_crtc_state *slave_crtc_state; 4200 4201 if (!slave_crtc) 4202 continue; 4203 4204 slave_crtc_state = 4205 intel_atomic_get_new_crtc_state(state, slave_crtc); 4206 4207 if (slave_crtc_state->master_transcoder != 4208 crtc_state->cpu_transcoder) 4209 continue; 4210 4211 intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder), 4212 slave_crtc_state); 4213 } 4214 4215 usleep_range(200, 400); 4216
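/* The slaves were switched to normal output in the loop above; now stop link training on the port sync master itself. */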
intel_dp_stop_link_train(enc_to_intel_dp(encoder), 4218 crtc_state); 4219 } 4220 4221 static void intel_enable_ddi_dp(struct intel_atomic_state *state, 4222 struct intel_encoder *encoder, 4223 const struct intel_crtc_state *crtc_state, 4224 const struct drm_connector_state *conn_state) 4225 { 4226 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4227 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4228 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4229 enum port port = encoder->port; 4230 4231 if (port == PORT_A && INTEL_GEN(dev_priv) < 9) 4232 intel_dp_stop_link_train(intel_dp, crtc_state); 4233 4234 intel_edp_backlight_on(crtc_state, conn_state); 4235 intel_psr_enable(intel_dp, crtc_state, conn_state); 4236 4237 if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink) 4238 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 4239 4240 intel_edp_drrs_enable(intel_dp, crtc_state); 4241 4242 if (crtc_state->has_audio) 4243 intel_audio_codec_enable(encoder, crtc_state, conn_state); 4244 4245 trans_port_sync_stop_link_train(state, encoder, crtc_state); 4246 } 4247 4248 static i915_reg_t 4249 gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv, 4250 enum port port) 4251 { 4252 static const enum transcoder trans[] = { 4253 [PORT_A] = TRANSCODER_EDP, 4254 [PORT_B] = TRANSCODER_A, 4255 [PORT_C] = TRANSCODER_B, 4256 [PORT_D] = TRANSCODER_C, 4257 [PORT_E] = TRANSCODER_A, 4258 }; 4259 4260 drm_WARN_ON(&dev_priv->drm, INTEL_GEN(dev_priv) < 9); 4261 4262 if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E)) 4263 port = PORT_A; 4264 4265 return CHICKEN_TRANS(trans[port]); 4266 } 4267 4268 static void intel_enable_ddi_hdmi(struct intel_atomic_state *state, 4269 struct intel_encoder *encoder, 4270 const struct intel_crtc_state *crtc_state, 4271 const struct drm_connector_state *conn_state) 4272 { 4273 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4274 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4275 struct drm_connector *connector = conn_state->connector; 4276 enum port port = encoder->port; 4277 4278 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 4279 crtc_state->hdmi_high_tmds_clock_ratio, 4280 crtc_state->hdmi_scrambling)) 4281 drm_dbg_kms(&dev_priv->drm, 4282 "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n", 4283 connector->base.id, connector->name); 4284 4285 /* Display WA #1143: skl,kbl,cfl */ 4286 if (IS_GEN9_BC(dev_priv)) { 4287 /* 4288 * For some reason these chicken bits have been 4289 * stuffed into a transcoder register, even though 4290 * the bits affect a specific DDI port rather than 4291 * a specific transcoder.
4292 */ 4293 i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port); 4294 u32 val; 4295 4296 val = intel_de_read(dev_priv, reg); 4297 4298 if (port == PORT_E) 4299 val |= DDIE_TRAINING_OVERRIDE_ENABLE | 4300 DDIE_TRAINING_OVERRIDE_VALUE; 4301 else 4302 val |= DDI_TRAINING_OVERRIDE_ENABLE | 4303 DDI_TRAINING_OVERRIDE_VALUE; 4304 4305 intel_de_write(dev_priv, reg, val); 4306 intel_de_posting_read(dev_priv, reg); 4307 4308 udelay(1); 4309 4310 if (port == PORT_E) 4311 val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE | 4312 DDIE_TRAINING_OVERRIDE_VALUE); 4313 else 4314 val &= ~(DDI_TRAINING_OVERRIDE_ENABLE | 4315 DDI_TRAINING_OVERRIDE_VALUE); 4316 4317 intel_de_write(dev_priv, reg, val); 4318 } 4319 4320 intel_ddi_power_up_lanes(encoder, crtc_state); 4321 4322 /* In HDMI/DVI mode, the port width, and swing/emphasis values 4323 * are ignored so nothing special needs to be done besides 4324 * enabling the port. 4325 */ 4326 intel_de_write(dev_priv, DDI_BUF_CTL(port), 4327 dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE); 4328 4329 if (crtc_state->has_audio) 4330 intel_audio_codec_enable(encoder, crtc_state, conn_state); 4331 } 4332 4333 static void intel_enable_ddi(struct intel_atomic_state *state, 4334 struct intel_encoder *encoder, 4335 const struct intel_crtc_state *crtc_state, 4336 const struct drm_connector_state *conn_state) 4337 { 4338 drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder); 4339 4340 if (!crtc_state->bigjoiner_slave) 4341 intel_ddi_enable_transcoder_func(encoder, crtc_state); 4342 4343 intel_vrr_enable(encoder, crtc_state); 4344 4345 intel_enable_pipe(crtc_state); 4346 4347 intel_crtc_vblank_on(crtc_state); 4348 4349 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 4350 intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state); 4351 else 4352 intel_enable_ddi_dp(state, encoder, crtc_state, conn_state); 4353 4354 /* Enable hdcp if it's desired */ 4355 if (conn_state->content_protection == 4356 DRM_MODE_CONTENT_PROTECTION_DESIRED) 4357 intel_hdcp_enable(to_intel_connector(conn_state->connector), 4358 crtc_state, 4359 (u8)conn_state->hdcp_content_type); 4360 } 4361 4362 static void intel_disable_ddi_dp(struct intel_atomic_state *state, 4363 struct intel_encoder *encoder, 4364 const struct intel_crtc_state *old_crtc_state, 4365 const struct drm_connector_state *old_conn_state) 4366 { 4367 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4368 4369 intel_dp->link_trained = false; 4370 4371 if (old_crtc_state->has_audio) 4372 intel_audio_codec_disable(encoder, 4373 old_crtc_state, old_conn_state); 4374 4375 intel_edp_drrs_disable(intel_dp, old_crtc_state); 4376 intel_psr_disable(intel_dp, old_crtc_state); 4377 intel_edp_backlight_off(old_conn_state); 4378 /* Disable the decompression in DP Sink */ 4379 intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state, 4380 false); 4381 /* Disable Ignore_MSA bit in DP Sink */ 4382 intel_dp_sink_set_msa_timing_par_ignore_state(intel_dp, old_crtc_state, 4383 false); 4384 } 4385 4386 static void intel_disable_ddi_hdmi(struct intel_atomic_state *state, 4387 struct intel_encoder *encoder, 4388 const struct intel_crtc_state *old_crtc_state, 4389 const struct drm_connector_state *old_conn_state) 4390 { 4391 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 4392 struct drm_connector *connector = old_conn_state->connector; 4393 4394 if (old_crtc_state->has_audio) 4395 intel_audio_codec_disable(encoder, 4396 old_crtc_state, old_conn_state); 4397 4398 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 4399 
false, false)) 4400 drm_dbg_kms(&i915->drm, 4401 "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n", 4402 connector->base.id, connector->name); 4403 } 4404 4405 static void intel_disable_ddi(struct intel_atomic_state *state, 4406 struct intel_encoder *encoder, 4407 const struct intel_crtc_state *old_crtc_state, 4408 const struct drm_connector_state *old_conn_state) 4409 { 4410 intel_hdcp_disable(to_intel_connector(old_conn_state->connector)); 4411 4412 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 4413 intel_disable_ddi_hdmi(state, encoder, old_crtc_state, 4414 old_conn_state); 4415 else 4416 intel_disable_ddi_dp(state, encoder, old_crtc_state, 4417 old_conn_state); 4418 } 4419 4420 static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state, 4421 struct intel_encoder *encoder, 4422 const struct intel_crtc_state *crtc_state, 4423 const struct drm_connector_state *conn_state) 4424 { 4425 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4426 4427 intel_ddi_set_dp_msa(crtc_state, conn_state); 4428 4429 intel_psr_update(intel_dp, crtc_state, conn_state); 4430 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 4431 intel_edp_drrs_update(intel_dp, crtc_state); 4432 4433 intel_panel_update_backlight(state, encoder, crtc_state, conn_state); 4434 } 4435 4436 void intel_ddi_update_pipe(struct intel_atomic_state *state, 4437 struct intel_encoder *encoder, 4438 const struct intel_crtc_state *crtc_state, 4439 const struct drm_connector_state *conn_state) 4440 { 4441 4442 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) && 4443 !intel_encoder_is_mst(encoder)) 4444 intel_ddi_update_pipe_dp(state, encoder, crtc_state, 4445 conn_state); 4446 4447 intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state); 4448 } 4449 4450 static void 4451 intel_ddi_update_prepare(struct intel_atomic_state *state, 4452 struct intel_encoder *encoder, 4453 struct intel_crtc *crtc) 4454 { 4455 struct intel_crtc_state *crtc_state = 4456 crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL; 4457 int required_lanes = crtc_state ? 
crtc_state->lane_count : 1; 4458 4459 drm_WARN_ON(state->base.dev, crtc && crtc->active); 4460 4461 intel_tc_port_get_link(enc_to_dig_port(encoder), 4462 required_lanes); 4463 if (crtc_state && crtc_state->hw.active) 4464 intel_update_active_dpll(state, crtc, encoder); 4465 } 4466 4467 static void 4468 intel_ddi_update_complete(struct intel_atomic_state *state, 4469 struct intel_encoder *encoder, 4470 struct intel_crtc *crtc) 4471 { 4472 intel_tc_port_put_link(enc_to_dig_port(encoder)); 4473 } 4474 4475 static void 4476 intel_ddi_pre_pll_enable(struct intel_atomic_state *state, 4477 struct intel_encoder *encoder, 4478 const struct intel_crtc_state *crtc_state, 4479 const struct drm_connector_state *conn_state) 4480 { 4481 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4482 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4483 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 4484 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 4485 4486 if (is_tc_port) 4487 intel_tc_port_get_link(dig_port, crtc_state->lane_count); 4488 4489 if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) { 4490 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 4491 dig_port->aux_wakeref = 4492 intel_display_power_get(dev_priv, 4493 intel_ddi_main_link_aux_domain(dig_port)); 4494 } 4495 4496 if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT) 4497 /* 4498 * Program the lane count for static/dynamic connections on 4499 * Type-C ports. Skip this step for TBT. 4500 */ 4501 intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count); 4502 else if (IS_GEN9_LP(dev_priv)) 4503 bxt_ddi_phy_set_lane_optim_mask(encoder, 4504 crtc_state->lane_lat_optim_mask); 4505 } 4506 4507 static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp, 4508 const struct intel_crtc_state *crtc_state) 4509 { 4510 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4511 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4512 enum port port = encoder->port; 4513 u32 dp_tp_ctl, ddi_buf_ctl; 4514 bool wait = false; 4515 4516 dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4517 4518 if (dp_tp_ctl & DP_TP_CTL_ENABLE) { 4519 ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 4520 if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) { 4521 intel_de_write(dev_priv, DDI_BUF_CTL(port), 4522 ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE); 4523 wait = true; 4524 } 4525 4526 dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 4527 dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1; 4528 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 4529 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4530 4531 if (wait) 4532 intel_wait_ddi_buf_idle(dev_priv, port); 4533 } 4534 4535 dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1; 4536 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 4537 dp_tp_ctl |= DP_TP_CTL_MODE_MST; 4538 } else { 4539 dp_tp_ctl |= DP_TP_CTL_MODE_SST; 4540 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 4541 dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE; 4542 } 4543 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 4544 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4545 4546 intel_dp->DP |= DDI_BUF_CTL_ENABLE; 4547 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 4548 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 4549 4550 intel_wait_ddi_buf_active(dev_priv, port); 4551 } 4552 4553 static void intel_ddi_set_link_train(struct intel_dp 
*intel_dp, 4554 const struct intel_crtc_state *crtc_state, 4555 u8 dp_train_pat) 4556 { 4557 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4558 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4559 u32 temp; 4560 4561 temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4562 4563 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; 4564 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 4565 case DP_TRAINING_PATTERN_DISABLE: 4566 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; 4567 break; 4568 case DP_TRAINING_PATTERN_1: 4569 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 4570 break; 4571 case DP_TRAINING_PATTERN_2: 4572 temp |= DP_TP_CTL_LINK_TRAIN_PAT2; 4573 break; 4574 case DP_TRAINING_PATTERN_3: 4575 temp |= DP_TP_CTL_LINK_TRAIN_PAT3; 4576 break; 4577 case DP_TRAINING_PATTERN_4: 4578 temp |= DP_TP_CTL_LINK_TRAIN_PAT4; 4579 break; 4580 } 4581 4582 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp); 4583 } 4584 4585 static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp, 4586 const struct intel_crtc_state *crtc_state) 4587 { 4588 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4589 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4590 enum port port = encoder->port; 4591 u32 val; 4592 4593 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4594 val &= ~DP_TP_CTL_LINK_TRAIN_MASK; 4595 val |= DP_TP_CTL_LINK_TRAIN_IDLE; 4596 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 4597 4598 /* 4599 * Until TGL on PORT_A we can have only eDP in SST mode. There the only 4600 * reason we need to set idle transmission mode is to work around a HW 4601 * issue where we enable the pipe while not in idle link-training mode. 4602 * In this case there is requirement to wait for a minimum number of 4603 * idle patterns to be sent. 
4604 */ 4605 if (port == PORT_A && INTEL_GEN(dev_priv) < 12) 4606 return; 4607 4608 if (intel_de_wait_for_set(dev_priv, 4609 dp_tp_status_reg(encoder, crtc_state), 4610 DP_TP_STATUS_IDLE_DONE, 1)) 4611 drm_err(&dev_priv->drm, 4612 "Timed out waiting for DP idle patterns\n"); 4613 } 4614 4615 static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv, 4616 enum transcoder cpu_transcoder) 4617 { 4618 if (cpu_transcoder == TRANSCODER_EDP) 4619 return false; 4620 4621 if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO)) 4622 return false; 4623 4624 return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) & 4625 AUDIO_OUTPUT_ENABLE(cpu_transcoder); 4626 } 4627 4628 void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv, 4629 struct intel_crtc_state *crtc_state) 4630 { 4631 if (INTEL_GEN(dev_priv) >= 12 && crtc_state->port_clock > 594000) 4632 crtc_state->min_voltage_level = 2; 4633 else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000) 4634 crtc_state->min_voltage_level = 3; 4635 else if (INTEL_GEN(dev_priv) >= 11 && crtc_state->port_clock > 594000) 4636 crtc_state->min_voltage_level = 1; 4637 else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000) 4638 crtc_state->min_voltage_level = 2; 4639 } 4640 4641 static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv, 4642 enum transcoder cpu_transcoder) 4643 { 4644 u32 master_select; 4645 4646 if (INTEL_GEN(dev_priv) >= 11) { 4647 u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder)); 4648 4649 if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0) 4650 return INVALID_TRANSCODER; 4651 4652 master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2); 4653 } else { 4654 u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4655 4656 if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0) 4657 return INVALID_TRANSCODER; 4658 4659 master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl); 4660 } 4661 4662 if (master_select == 0) 4663 return TRANSCODER_EDP; 4664 else 4665 return master_select - 1; 4666 } 4667 4668 static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state) 4669 { 4670 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 4671 u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | 4672 BIT(TRANSCODER_C) | BIT(TRANSCODER_D); 4673 enum transcoder cpu_transcoder; 4674 4675 crtc_state->master_transcoder = 4676 bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder); 4677 4678 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { 4679 enum intel_display_power_domain power_domain; 4680 intel_wakeref_t trans_wakeref; 4681 4682 power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder); 4683 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 4684 power_domain); 4685 4686 if (!trans_wakeref) 4687 continue; 4688 4689 if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) == 4690 crtc_state->cpu_transcoder) 4691 crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder); 4692 4693 intel_display_power_put(dev_priv, power_domain, trans_wakeref); 4694 } 4695 4696 drm_WARN_ON(&dev_priv->drm, 4697 crtc_state->master_transcoder != INVALID_TRANSCODER && 4698 crtc_state->sync_mode_slaves_mask); 4699 } 4700 4701 static void intel_ddi_read_func_ctl(struct intel_encoder *encoder, 4702 struct intel_crtc_state *pipe_config) 4703 { 4704 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4705 struct intel_crtc *intel_crtc = 
to_intel_crtc(pipe_config->uapi.crtc); 4706 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 4707 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4708 u32 temp, flags = 0; 4709 4710 temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4711 if (temp & TRANS_DDI_PHSYNC) 4712 flags |= DRM_MODE_FLAG_PHSYNC; 4713 else 4714 flags |= DRM_MODE_FLAG_NHSYNC; 4715 if (temp & TRANS_DDI_PVSYNC) 4716 flags |= DRM_MODE_FLAG_PVSYNC; 4717 else 4718 flags |= DRM_MODE_FLAG_NVSYNC; 4719 4720 pipe_config->hw.adjusted_mode.flags |= flags; 4721 4722 switch (temp & TRANS_DDI_BPC_MASK) { 4723 case TRANS_DDI_BPC_6: 4724 pipe_config->pipe_bpp = 18; 4725 break; 4726 case TRANS_DDI_BPC_8: 4727 pipe_config->pipe_bpp = 24; 4728 break; 4729 case TRANS_DDI_BPC_10: 4730 pipe_config->pipe_bpp = 30; 4731 break; 4732 case TRANS_DDI_BPC_12: 4733 pipe_config->pipe_bpp = 36; 4734 break; 4735 default: 4736 break; 4737 } 4738 4739 switch (temp & TRANS_DDI_MODE_SELECT_MASK) { 4740 case TRANS_DDI_MODE_SELECT_HDMI: 4741 pipe_config->has_hdmi_sink = true; 4742 4743 pipe_config->infoframes.enable |= 4744 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4745 4746 if (pipe_config->infoframes.enable) 4747 pipe_config->has_infoframe = true; 4748 4749 if (temp & TRANS_DDI_HDMI_SCRAMBLING) 4750 pipe_config->hdmi_scrambling = true; 4751 if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE) 4752 pipe_config->hdmi_high_tmds_clock_ratio = true; 4753 fallthrough; 4754 case TRANS_DDI_MODE_SELECT_DVI: 4755 pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI); 4756 pipe_config->lane_count = 4; 4757 break; 4758 case TRANS_DDI_MODE_SELECT_FDI: 4759 pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG); 4760 break; 4761 case TRANS_DDI_MODE_SELECT_DP_SST: 4762 if (encoder->type == INTEL_OUTPUT_EDP) 4763 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 4764 else 4765 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 4766 pipe_config->lane_count = 4767 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 4768 intel_dp_get_m_n(intel_crtc, pipe_config); 4769 4770 if (INTEL_GEN(dev_priv) >= 11) { 4771 i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config); 4772 4773 pipe_config->fec_enable = 4774 intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE; 4775 4776 drm_dbg_kms(&dev_priv->drm, 4777 "[ENCODER:%d:%s] Fec status: %u\n", 4778 encoder->base.base.id, encoder->base.name, 4779 pipe_config->fec_enable); 4780 } 4781 4782 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 4783 pipe_config->infoframes.enable |= 4784 intel_lspcon_infoframes_enabled(encoder, pipe_config); 4785 else 4786 pipe_config->infoframes.enable |= 4787 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4788 break; 4789 case TRANS_DDI_MODE_SELECT_DP_MST: 4790 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST); 4791 pipe_config->lane_count = 4792 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 4793 4794 if (INTEL_GEN(dev_priv) >= 12) 4795 pipe_config->mst_master_transcoder = 4796 REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp); 4797 4798 intel_dp_get_m_n(intel_crtc, pipe_config); 4799 4800 pipe_config->infoframes.enable |= 4801 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4802 break; 4803 default: 4804 break; 4805 } 4806 } 4807 4808 void intel_ddi_get_config(struct intel_encoder *encoder, 4809 struct intel_crtc_state *pipe_config) 4810 { 4811 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4812 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 4813 4814 /* XXX: DSI 
transcoder paranoia */ 4815 if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder))) 4816 return; 4817 4818 if (pipe_config->bigjoiner_slave) { 4819 /* read out pipe settings from master */ 4820 enum transcoder save = pipe_config->cpu_transcoder; 4821 4822 /* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */ 4823 WARN_ON(pipe_config->output_types); 4824 pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe; 4825 intel_ddi_read_func_ctl(encoder, pipe_config); 4826 pipe_config->cpu_transcoder = save; 4827 } else { 4828 intel_ddi_read_func_ctl(encoder, pipe_config); 4829 } 4830 4831 pipe_config->has_audio = 4832 intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder); 4833 4834 if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp && 4835 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) { 4836 /* 4837 * This is a big fat ugly hack. 4838 * 4839 * Some machines in UEFI boot mode provide us a VBT that has 18 4840 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons 4841 * unknown we fail to light up. Yet the same BIOS boots up with 4842 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as 4843 * max, not what it tells us to use. 4844 * 4845 * Note: This will still be broken if the eDP panel is not lit 4846 * up by the BIOS, and thus we can't get the mode at module 4847 * load. 4848 */ 4849 drm_dbg_kms(&dev_priv->drm, 4850 "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n", 4851 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp); 4852 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp; 4853 } 4854 4855 if (!pipe_config->bigjoiner_slave) 4856 intel_ddi_clock_get(encoder, pipe_config); 4857 4858 if (IS_GEN9_LP(dev_priv)) 4859 pipe_config->lane_lat_optim_mask = 4860 bxt_ddi_phy_get_lane_lat_optim_mask(encoder); 4861 4862 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 4863 4864 intel_hdmi_read_gcp_infoframe(encoder, pipe_config); 4865 4866 intel_read_infoframe(encoder, pipe_config, 4867 HDMI_INFOFRAME_TYPE_AVI, 4868 &pipe_config->infoframes.avi); 4869 intel_read_infoframe(encoder, pipe_config, 4870 HDMI_INFOFRAME_TYPE_SPD, 4871 &pipe_config->infoframes.spd); 4872 intel_read_infoframe(encoder, pipe_config, 4873 HDMI_INFOFRAME_TYPE_VENDOR, 4874 &pipe_config->infoframes.hdmi); 4875 intel_read_infoframe(encoder, pipe_config, 4876 HDMI_INFOFRAME_TYPE_DRM, 4877 &pipe_config->infoframes.drm); 4878 4879 if (INTEL_GEN(dev_priv) >= 8) 4880 bdw_get_trans_port_sync_config(pipe_config); 4881 4882 intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA); 4883 intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC); 4884 } 4885 4886 static void intel_ddi_sync_state(struct intel_encoder *encoder, 4887 const struct intel_crtc_state *crtc_state) 4888 { 4889 if (intel_crtc_has_dp_encoder(crtc_state)) 4890 intel_dp_sync_state(encoder, crtc_state); 4891 } 4892 4893 static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder, 4894 struct intel_crtc_state *crtc_state) 4895 { 4896 if (intel_crtc_has_dp_encoder(crtc_state)) 4897 return intel_dp_initial_fastset_check(encoder, crtc_state); 4898 4899 return true; 4900 } 4901 4902 static enum intel_output_type 4903 intel_ddi_compute_output_type(struct intel_encoder *encoder, 4904 struct intel_crtc_state *crtc_state, 4905 struct drm_connector_state *conn_state) 4906 { 4907 switch (conn_state->connector->connector_type) { 4908 case DRM_MODE_CONNECTOR_HDMIA: 4909 return INTEL_OUTPUT_HDMI; 4910 case DRM_MODE_CONNECTOR_eDP: 4911 
return INTEL_OUTPUT_EDP; 4912 case DRM_MODE_CONNECTOR_DisplayPort: 4913 return INTEL_OUTPUT_DP; 4914 default: 4915 MISSING_CASE(conn_state->connector->connector_type); 4916 return INTEL_OUTPUT_UNUSED; 4917 } 4918 } 4919 4920 static int intel_ddi_compute_config(struct intel_encoder *encoder, 4921 struct intel_crtc_state *pipe_config, 4922 struct drm_connector_state *conn_state) 4923 { 4924 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 4925 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4926 enum port port = encoder->port; 4927 int ret; 4928 4929 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 4930 pipe_config->cpu_transcoder = TRANSCODER_EDP; 4931 4932 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) { 4933 ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state); 4934 } else { 4935 ret = intel_dp_compute_config(encoder, pipe_config, conn_state); 4936 } 4937 4938 if (ret) 4939 return ret; 4940 4941 if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A && 4942 pipe_config->cpu_transcoder == TRANSCODER_EDP) 4943 pipe_config->pch_pfit.force_thru = 4944 pipe_config->pch_pfit.enabled || 4945 pipe_config->crc_enabled; 4946 4947 if (IS_GEN9_LP(dev_priv)) 4948 pipe_config->lane_lat_optim_mask = 4949 bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 4950 4951 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 4952 4953 return 0; 4954 } 4955 4956 static bool mode_equal(const struct drm_display_mode *mode1, 4957 const struct drm_display_mode *mode2) 4958 { 4959 return drm_mode_match(mode1, mode2, 4960 DRM_MODE_MATCH_TIMINGS | 4961 DRM_MODE_MATCH_FLAGS | 4962 DRM_MODE_MATCH_3D_FLAGS) && 4963 mode1->clock == mode2->clock; /* we want an exact match */ 4964 } 4965 4966 static bool m_n_equal(const struct intel_link_m_n *m_n_1, 4967 const struct intel_link_m_n *m_n_2) 4968 { 4969 return m_n_1->tu == m_n_2->tu && 4970 m_n_1->gmch_m == m_n_2->gmch_m && 4971 m_n_1->gmch_n == m_n_2->gmch_n && 4972 m_n_1->link_m == m_n_2->link_m && 4973 m_n_1->link_n == m_n_2->link_n; 4974 } 4975 4976 static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1, 4977 const struct intel_crtc_state *crtc_state2) 4978 { 4979 return crtc_state1->hw.active && crtc_state2->hw.active && 4980 crtc_state1->output_types == crtc_state2->output_types && 4981 crtc_state1->output_format == crtc_state2->output_format && 4982 crtc_state1->lane_count == crtc_state2->lane_count && 4983 crtc_state1->port_clock == crtc_state2->port_clock && 4984 mode_equal(&crtc_state1->hw.adjusted_mode, 4985 &crtc_state2->hw.adjusted_mode) && 4986 m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n); 4987 } 4988 4989 static u8 4990 intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state, 4991 int tile_group_id) 4992 { 4993 struct drm_connector *connector; 4994 const struct drm_connector_state *conn_state; 4995 struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev); 4996 struct intel_atomic_state *state = 4997 to_intel_atomic_state(ref_crtc_state->uapi.state); 4998 u8 transcoders = 0; 4999 int i; 5000 5001 /* 5002 * We don't enable port sync on BDW due to missing w/as and 5003 * due to not having adjusted the modeset sequence appropriately. 
5004 */ 5005 if (INTEL_GEN(dev_priv) < 9) 5006 return 0; 5007 5008 if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP)) 5009 return 0; 5010 5011 for_each_new_connector_in_state(&state->base, connector, conn_state, i) { 5012 struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc); 5013 const struct intel_crtc_state *crtc_state; 5014 5015 if (!crtc) 5016 continue; 5017 5018 if (!connector->has_tile || 5019 connector->tile_group->id != 5020 tile_group_id) 5021 continue; 5022 crtc_state = intel_atomic_get_new_crtc_state(state, 5023 crtc); 5024 if (!crtcs_port_sync_compatible(ref_crtc_state, 5025 crtc_state)) 5026 continue; 5027 transcoders |= BIT(crtc_state->cpu_transcoder); 5028 } 5029 5030 return transcoders; 5031 } 5032 5033 static int intel_ddi_compute_config_late(struct intel_encoder *encoder, 5034 struct intel_crtc_state *crtc_state, 5035 struct drm_connector_state *conn_state) 5036 { 5037 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 5038 struct drm_connector *connector = conn_state->connector; 5039 u8 port_sync_transcoders = 0; 5040 5041 drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]", 5042 encoder->base.base.id, encoder->base.name, 5043 crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name); 5044 5045 if (connector->has_tile) 5046 port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state, 5047 connector->tile_group->id); 5048 5049 /* 5050 * eDP transcoders cannot be enslaved, so 5051 * always make them a master when present. 5052 */ 5053 if (port_sync_transcoders & BIT(TRANSCODER_EDP)) 5054 crtc_state->master_transcoder = TRANSCODER_EDP; 5055 else 5056 crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1; 5057 5058 if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) { 5059 crtc_state->master_transcoder = INVALID_TRANSCODER; 5060 crtc_state->sync_mode_slaves_mask = 5061 port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder); 5062 } 5063 5064 return 0; 5065 } 5066 5067 static void intel_ddi_encoder_destroy(struct drm_encoder *encoder) 5068 { 5069 struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder)); 5070 5071 intel_dp_encoder_flush_work(encoder); 5072 5073 drm_encoder_cleanup(encoder); 5074 if (dig_port) 5075 kfree(dig_port->hdcp_port_data.streams); 5076 kfree(dig_port); 5077 } 5078 5079 static const struct drm_encoder_funcs intel_ddi_funcs = { 5080 .reset = intel_dp_encoder_reset, 5081 .destroy = intel_ddi_encoder_destroy, 5082 }; 5083 5084 static struct intel_connector * 5085 intel_ddi_init_dp_connector(struct intel_digital_port *dig_port) 5086 { 5087 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 5088 struct intel_connector *connector; 5089 enum port port = dig_port->base.port; 5090 5091 connector = intel_connector_alloc(); 5092 if (!connector) 5093 return NULL; 5094 5095 dig_port->dp.output_reg = DDI_BUF_CTL(port); 5096 dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain; 5097 dig_port->dp.set_link_train = intel_ddi_set_link_train; 5098 dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train; 5099 5100 if (INTEL_GEN(dev_priv) >= 12) 5101 dig_port->dp.set_signal_levels = tgl_set_signal_levels; 5102 else if (INTEL_GEN(dev_priv) >= 11) 5103 dig_port->dp.set_signal_levels = icl_set_signal_levels; 5104 else if (IS_CANNONLAKE(dev_priv)) 5105 dig_port->dp.set_signal_levels = cnl_set_signal_levels; 5106 else if (IS_GEN9_LP(dev_priv)) 5107 dig_port->dp.set_signal_levels = bxt_set_signal_levels; 5108 else 5109 dig_port->dp.set_signal_levels =
hsw_set_signal_levels; 5110 5111 dig_port->dp.voltage_max = intel_ddi_dp_voltage_max; 5112 dig_port->dp.preemph_max = intel_ddi_dp_preemph_max; 5113 5114 if (!intel_dp_init_connector(dig_port, connector)) { 5115 kfree(connector); 5116 return NULL; 5117 } 5118 5119 return connector; 5120 } 5121 5122 static int modeset_pipe(struct drm_crtc *crtc, 5123 struct drm_modeset_acquire_ctx *ctx) 5124 { 5125 struct drm_atomic_state *state; 5126 struct drm_crtc_state *crtc_state; 5127 int ret; 5128 5129 state = drm_atomic_state_alloc(crtc->dev); 5130 if (!state) 5131 return -ENOMEM; 5132 5133 state->acquire_ctx = ctx; 5134 5135 crtc_state = drm_atomic_get_crtc_state(state, crtc); 5136 if (IS_ERR(crtc_state)) { 5137 ret = PTR_ERR(crtc_state); 5138 goto out; 5139 } 5140 5141 crtc_state->connectors_changed = true; 5142 5143 ret = drm_atomic_commit(state); 5144 out: 5145 drm_atomic_state_put(state); 5146 5147 return ret; 5148 } 5149 5150 static int intel_hdmi_reset_link(struct intel_encoder *encoder, 5151 struct drm_modeset_acquire_ctx *ctx) 5152 { 5153 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5154 struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder); 5155 struct intel_connector *connector = hdmi->attached_connector; 5156 struct i2c_adapter *adapter = 5157 intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus); 5158 struct drm_connector_state *conn_state; 5159 struct intel_crtc_state *crtc_state; 5160 struct intel_crtc *crtc; 5161 u8 config; 5162 int ret; 5163 5164 if (!connector || connector->base.status != connector_status_connected) 5165 return 0; 5166 5167 ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex, 5168 ctx); 5169 if (ret) 5170 return ret; 5171 5172 conn_state = connector->base.state; 5173 5174 crtc = to_intel_crtc(conn_state->crtc); 5175 if (!crtc) 5176 return 0; 5177 5178 ret = drm_modeset_lock(&crtc->base.mutex, ctx); 5179 if (ret) 5180 return ret; 5181 5182 crtc_state = to_intel_crtc_state(crtc->base.state); 5183 5184 drm_WARN_ON(&dev_priv->drm, 5185 !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)); 5186 5187 if (!crtc_state->hw.active) 5188 return 0; 5189 5190 if (!crtc_state->hdmi_high_tmds_clock_ratio && 5191 !crtc_state->hdmi_scrambling) 5192 return 0; 5193 5194 if (conn_state->commit && 5195 !try_wait_for_completion(&conn_state->commit->hw_done)) 5196 return 0; 5197 5198 ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config); 5199 if (ret < 0) { 5200 drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n", 5201 ret); 5202 return 0; 5203 } 5204 5205 if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) == 5206 crtc_state->hdmi_high_tmds_clock_ratio && 5207 !!(config & SCDC_SCRAMBLING_ENABLE) == 5208 crtc_state->hdmi_scrambling) 5209 return 0; 5210 5211 /* 5212 * HDMI 2.0 says that one should not send scrambled data 5213 * prior to configuring the sink scrambling, and that 5214 * TMDS clock/data transmission should be suspended when 5215 * changing the TMDS clock rate in the sink. So let's 5216 * just do a full modeset here, even though some sinks 5217 * would be perfectly happy if we were to just reconfigure 5218 * the SCDC settings on the fly.
5219 */ 5220 return modeset_pipe(&crtc->base, ctx); 5221 } 5222 5223 static enum intel_hotplug_state 5224 intel_ddi_hotplug(struct intel_encoder *encoder, 5225 struct intel_connector *connector) 5226 { 5227 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 5228 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5229 struct intel_dp *intel_dp = &dig_port->dp; 5230 enum phy phy = intel_port_to_phy(i915, encoder->port); 5231 bool is_tc = intel_phy_is_tc(i915, phy); 5232 struct drm_modeset_acquire_ctx ctx; 5233 enum intel_hotplug_state state; 5234 int ret; 5235 5236 if (intel_dp->compliance.test_active && 5237 intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) { 5238 intel_dp_phy_test(encoder); 5239 /* just do the PHY test and nothing else */ 5240 return INTEL_HOTPLUG_UNCHANGED; 5241 } 5242 5243 state = intel_encoder_hotplug(encoder, connector); 5244 5245 drm_modeset_acquire_init(&ctx, 0); 5246 5247 for (;;) { 5248 if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA) 5249 ret = intel_hdmi_reset_link(encoder, &ctx); 5250 else 5251 ret = intel_dp_retrain_link(encoder, &ctx); 5252 5253 if (ret == -EDEADLK) { 5254 drm_modeset_backoff(&ctx); 5255 continue; 5256 } 5257 5258 break; 5259 } 5260 5261 drm_modeset_drop_locks(&ctx); 5262 drm_modeset_acquire_fini(&ctx); 5263 drm_WARN(encoder->base.dev, ret, 5264 "Acquiring modeset locks failed with %i\n", ret); 5265 5266 /* 5267 * Unpowered type-c dongles can take some time to boot and become 5268 * responsive, so give those dongles some time to power up 5269 * and then retry the probe. 5270 * 5271 * On many platforms the HDMI live state signal is known to be 5272 * unreliable, so we can't use it to detect if a sink is connected or 5273 * not. Instead we detect if it's connected based on whether we can 5274 * read the EDID or not. That in turn has a problem during disconnect, 5275 * since the HPD interrupt may be raised before the DDC lines get 5276 * disconnected (due to how the required lengths of the DDC vs. HPD 5277 * connector pins are specified) and so we'll still be able to get a 5278 * valid EDID. To solve this, schedule another detection cycle if this 5279 * time around we didn't detect any change in the sink's connection 5280 * status. 5281 * 5282 * Type-c connectors which get their HPD signal deasserted then 5283 * reasserted, without unplugging/replugging the sink from the 5284 * connector, introduce a delay until the AUX channel communication 5285 * becomes functional. Retry the detection for 5 seconds on type-c 5286 * connectors to account for this delay. 5287 */ 5288 if (state == INTEL_HOTPLUG_UNCHANGED && 5289 connector->hotplug_retries < (is_tc ?
5 : 1) && 5290 !dig_port->dp.is_mst) 5291 state = INTEL_HOTPLUG_RETRY; 5292 5293 return state; 5294 } 5295 5296 static bool lpt_digital_port_connected(struct intel_encoder *encoder) 5297 { 5298 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5299 u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin]; 5300 5301 return intel_de_read(dev_priv, SDEISR) & bit; 5302 } 5303 5304 static bool hsw_digital_port_connected(struct intel_encoder *encoder) 5305 { 5306 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5307 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin]; 5308 5309 return intel_de_read(dev_priv, DEISR) & bit; 5310 } 5311 5312 static bool bdw_digital_port_connected(struct intel_encoder *encoder) 5313 { 5314 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5315 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin]; 5316 5317 return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit; 5318 } 5319 5320 static struct intel_connector * 5321 intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port) 5322 { 5323 struct intel_connector *connector; 5324 enum port port = dig_port->base.port; 5325 5326 connector = intel_connector_alloc(); 5327 if (!connector) 5328 return NULL; 5329 5330 dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port); 5331 intel_hdmi_init_connector(dig_port, connector); 5332 5333 return connector; 5334 } 5335 5336 static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port) 5337 { 5338 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 5339 5340 if (dig_port->base.port != PORT_A) 5341 return false; 5342 5343 if (dig_port->saved_port_bits & DDI_A_4_LANES) 5344 return false; 5345 5346 /* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only 5347 * supported configuration 5348 */ 5349 if (IS_GEN9_LP(dev_priv)) 5350 return true; 5351 5352 /* Cannonlake: Most SKUs don't support DDI_E, and the only 5353 * one that does also has a full A/E split called 5354 * DDI_F, which makes DDI_E useless. However, for this 5355 * case let's trust the VBT info. 5356 */ 5357 if (IS_CANNONLAKE(dev_priv) && 5358 !intel_bios_is_port_present(dev_priv, PORT_E)) 5359 return true; 5360 5361 return false; 5362 } 5363 5364 static int 5365 intel_ddi_max_lanes(struct intel_digital_port *dig_port) 5366 { 5367 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 5368 enum port port = dig_port->base.port; 5369 int max_lanes = 4; 5370 5371 if (INTEL_GEN(dev_priv) >= 11) 5372 return max_lanes; 5373 5374 if (port == PORT_A || port == PORT_E) { 5375 if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES) 5376 max_lanes = port == PORT_A ? 4 : 0; 5377 else 5378 /* Both A and E share 2 lanes */ 5379 max_lanes = 2; 5380 } 5381 5382 /* 5383 * Some BIOSes might fail to set this bit on port A if eDP 5384 * wasn't lit up at boot. Force this bit set when needed 5385 * so we use the proper lane count for our calculations.
5386 */ 5387 if (intel_ddi_a_force_4_lanes(dig_port)) { 5388 drm_dbg_kms(&dev_priv->drm, 5389 "Forcing DDI_A_4_LANES for port A\n"); 5390 dig_port->saved_port_bits |= DDI_A_4_LANES; 5391 max_lanes = 4; 5392 } 5393 5394 return max_lanes; 5395 } 5396 5397 static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy) 5398 { 5399 return i915->hti_state & HDPORT_ENABLED && 5400 i915->hti_state & HDPORT_DDI_USED(phy); 5401 } 5402 5403 static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv, 5404 enum port port) 5405 { 5406 if (port >= PORT_TC1) 5407 return HPD_PORT_C + port - PORT_TC1; 5408 else 5409 return HPD_PORT_A + port - PORT_A; 5410 } 5411 5412 static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv, 5413 enum port port) 5414 { 5415 if (port >= PORT_TC1) 5416 return HPD_PORT_TC1 + port - PORT_TC1; 5417 else 5418 return HPD_PORT_A + port - PORT_A; 5419 } 5420 5421 static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv, 5422 enum port port) 5423 { 5424 if (HAS_PCH_TGP(dev_priv)) 5425 return tgl_hpd_pin(dev_priv, port); 5426 5427 if (port >= PORT_TC1) 5428 return HPD_PORT_C + port - PORT_TC1; 5429 else 5430 return HPD_PORT_A + port - PORT_A; 5431 } 5432 5433 static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv, 5434 enum port port) 5435 { 5436 if (port >= PORT_C) 5437 return HPD_PORT_TC1 + port - PORT_C; 5438 else 5439 return HPD_PORT_A + port - PORT_A; 5440 } 5441 5442 static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv, 5443 enum port port) 5444 { 5445 if (port == PORT_D) 5446 return HPD_PORT_A; 5447 5448 if (HAS_PCH_MCC(dev_priv)) 5449 return icl_hpd_pin(dev_priv, port); 5450 5451 return HPD_PORT_A + port - PORT_A; 5452 } 5453 5454 static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv, 5455 enum port port) 5456 { 5457 if (port == PORT_F) 5458 return HPD_PORT_E; 5459 5460 return HPD_PORT_A + port - PORT_A; 5461 } 5462 5463 #define port_tc_name(port) ((port) - PORT_TC1 + '1') 5464 #define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1') 5465 5466 void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port) 5467 { 5468 struct intel_digital_port *dig_port; 5469 struct intel_encoder *encoder; 5470 bool init_hdmi, init_dp; 5471 enum phy phy = intel_port_to_phy(dev_priv, port); 5472 5473 /* 5474 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may 5475 * have taken over some of the PHYs and made them unavailable to the 5476 * driver. In that case we should skip initializing the corresponding 5477 * outputs. 5478 */ 5479 if (hti_uses_phy(dev_priv, phy)) { 5480 drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n", 5481 port_name(port), phy_name(phy)); 5482 return; 5483 } 5484 5485 init_hdmi = intel_bios_port_supports_dvi(dev_priv, port) || 5486 intel_bios_port_supports_hdmi(dev_priv, port); 5487 init_dp = intel_bios_port_supports_dp(dev_priv, port); 5488 5489 if (intel_bios_is_lspcon_present(dev_priv, port)) { 5490 /* 5491 * Lspcon device needs to be driven with DP connector 5492 * with special detection sequence. So make sure DP 5493 * is initialized before lspcon. 
5494 */ 5495 init_dp = true; 5496 init_hdmi = false; 5497 drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n", 5498 port_name(port)); 5499 } 5500 5501 if (!init_dp && !init_hdmi) { 5502 drm_dbg_kms(&dev_priv->drm, 5503 "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n", 5504 port_name(port)); 5505 return; 5506 } 5507 5508 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL); 5509 if (!dig_port) 5510 return; 5511 5512 encoder = &dig_port->base; 5513 5514 if (INTEL_GEN(dev_priv) >= 12) { 5515 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 5516 5517 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 5518 DRM_MODE_ENCODER_TMDS, 5519 "DDI %s%c/PHY %s%c", 5520 port >= PORT_TC1 ? "TC" : "", 5521 port >= PORT_TC1 ? port_tc_name(port) : port_name(port), 5522 tc_port != TC_PORT_NONE ? "TC" : "", 5523 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 5524 } else if (INTEL_GEN(dev_priv) >= 11) { 5525 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 5526 5527 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 5528 DRM_MODE_ENCODER_TMDS, 5529 "DDI %c%s/PHY %s%c", 5530 port_name(port), 5531 port >= PORT_C ? " (TC)" : "", 5532 tc_port != TC_PORT_NONE ? "TC" : "", 5533 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 5534 } else { 5535 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 5536 DRM_MODE_ENCODER_TMDS, 5537 "DDI %c/PHY %c", port_name(port), phy_name(phy)); 5538 } 5539 5540 mutex_init(&dig_port->hdcp_mutex); 5541 dig_port->num_hdcp_streams = 0; 5542 5543 encoder->hotplug = intel_ddi_hotplug; 5544 encoder->compute_output_type = intel_ddi_compute_output_type; 5545 encoder->compute_config = intel_ddi_compute_config; 5546 encoder->compute_config_late = intel_ddi_compute_config_late; 5547 encoder->enable = intel_enable_ddi; 5548 encoder->pre_pll_enable = intel_ddi_pre_pll_enable; 5549 encoder->pre_enable = intel_ddi_pre_enable; 5550 encoder->disable = intel_disable_ddi; 5551 encoder->post_disable = intel_ddi_post_disable; 5552 encoder->update_pipe = intel_ddi_update_pipe; 5553 encoder->get_hw_state = intel_ddi_get_hw_state; 5554 encoder->get_config = intel_ddi_get_config; 5555 encoder->sync_state = intel_ddi_sync_state; 5556 encoder->initial_fastset_check = intel_ddi_initial_fastset_check; 5557 encoder->suspend = intel_dp_encoder_suspend; 5558 encoder->shutdown = intel_dp_encoder_shutdown; 5559 encoder->get_power_domains = intel_ddi_get_power_domains; 5560 5561 encoder->type = INTEL_OUTPUT_DDI; 5562 encoder->power_domain = intel_port_to_power_domain(port); 5563 encoder->port = port; 5564 encoder->cloneable = 0; 5565 encoder->pipe_mask = ~0; 5566 5567 if (IS_DG1(dev_priv)) 5568 encoder->hpd_pin = dg1_hpd_pin(dev_priv, port); 5569 else if (IS_ROCKETLAKE(dev_priv)) 5570 encoder->hpd_pin = rkl_hpd_pin(dev_priv, port); 5571 else if (INTEL_GEN(dev_priv) >= 12) 5572 encoder->hpd_pin = tgl_hpd_pin(dev_priv, port); 5573 else if (IS_JSL_EHL(dev_priv)) 5574 encoder->hpd_pin = ehl_hpd_pin(dev_priv, port); 5575 else if (IS_GEN(dev_priv, 11)) 5576 encoder->hpd_pin = icl_hpd_pin(dev_priv, port); 5577 else if (IS_GEN(dev_priv, 10)) 5578 encoder->hpd_pin = cnl_hpd_pin(dev_priv, port); 5579 else 5580 encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port); 5581 5582 if (INTEL_GEN(dev_priv) >= 11) 5583 dig_port->saved_port_bits = 5584 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 5585 & DDI_BUF_PORT_REVERSAL; 5586 else 5587 dig_port->saved_port_bits = 5588 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 5589 & 
(DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES); 5590 5591 dig_port->dp.output_reg = INVALID_MMIO_REG; 5592 dig_port->max_lanes = intel_ddi_max_lanes(dig_port); 5593 dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port); 5594 5595 if (intel_phy_is_tc(dev_priv, phy)) { 5596 bool is_legacy = 5597 !intel_bios_port_supports_typec_usb(dev_priv, port) && 5598 !intel_bios_port_supports_tbt(dev_priv, port); 5599 5600 intel_tc_port_init(dig_port, is_legacy); 5601 5602 encoder->update_prepare = intel_ddi_update_prepare; 5603 encoder->update_complete = intel_ddi_update_complete; 5604 } 5605 5606 drm_WARN_ON(&dev_priv->drm, port > PORT_I); 5607 dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO + 5608 port - PORT_A; 5609 5610 if (init_dp) { 5611 if (!intel_ddi_init_dp_connector(dig_port)) 5612 goto err; 5613 5614 dig_port->hpd_pulse = intel_dp_hpd_pulse; 5615 } 5616 5617 /* In theory we don't need the encoder->type check, but leave it just in 5618 * case we have some really bad VBTs... */ 5619 if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) { 5620 if (!intel_ddi_init_hdmi_connector(dig_port)) 5621 goto err; 5622 } 5623 5624 if (INTEL_GEN(dev_priv) >= 11) { 5625 if (intel_phy_is_tc(dev_priv, phy)) 5626 dig_port->connected = intel_tc_port_connected; 5627 else 5628 dig_port->connected = lpt_digital_port_connected; 5629 } else if (INTEL_GEN(dev_priv) >= 8) { 5630 if (port == PORT_A || IS_GEN9_LP(dev_priv)) 5631 dig_port->connected = bdw_digital_port_connected; 5632 else 5633 dig_port->connected = lpt_digital_port_connected; 5634 } else { 5635 if (port == PORT_A) 5636 dig_port->connected = hsw_digital_port_connected; 5637 else 5638 dig_port->connected = lpt_digital_port_connected; 5639 } 5640 5641 intel_infoframe_init(dig_port); 5642 5643 return; 5644 5645 err: 5646 drm_encoder_cleanup(&encoder->base); 5647 kfree(dig_port); 5648 } 5649