/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <drm/drm_scdc_helper.h>

#include "i915_drv.h"
#include "i915_trace.h"
#include "intel_audio.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_mst.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_dsi.h"
#include "intel_fifo_underrun.h"
#include "intel_gmbus.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_lspcon.h"
#include "intel_panel.h"
#include "intel_psr.h"
#include "intel_sprite.h"
#include "intel_tc.h"
#include "intel_vdsc.h"

struct ddi_buf_trans {
	u32 trans1;	/* balance leg enable, de-emph level */
	u32 trans2;	/* vref sel, vswing */
	u8 i_boost;	/* SKL: I_boost; valid: 0x0, 0x1, 0x3, 0x7 */
};

static const u8 index_to_dp_signal_levels[] = {
	[0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3,
	[4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0,
};

/* HDMI/DVI modes ignore everything but the last 2 items.
So we share 74 * them for both DP and FDI transports, allowing those ports to 75 * automatically adapt to HDMI connections as well 76 */ 77 static const struct ddi_buf_trans hsw_ddi_translations_dp[] = { 78 { 0x00FFFFFF, 0x0006000E, 0x0 }, 79 { 0x00D75FFF, 0x0005000A, 0x0 }, 80 { 0x00C30FFF, 0x00040006, 0x0 }, 81 { 0x80AAAFFF, 0x000B0000, 0x0 }, 82 { 0x00FFFFFF, 0x0005000A, 0x0 }, 83 { 0x00D75FFF, 0x000C0004, 0x0 }, 84 { 0x80C30FFF, 0x000B0000, 0x0 }, 85 { 0x00FFFFFF, 0x00040006, 0x0 }, 86 { 0x80D75FFF, 0x000B0000, 0x0 }, 87 }; 88 89 static const struct ddi_buf_trans hsw_ddi_translations_fdi[] = { 90 { 0x00FFFFFF, 0x0007000E, 0x0 }, 91 { 0x00D75FFF, 0x000F000A, 0x0 }, 92 { 0x00C30FFF, 0x00060006, 0x0 }, 93 { 0x00AAAFFF, 0x001E0000, 0x0 }, 94 { 0x00FFFFFF, 0x000F000A, 0x0 }, 95 { 0x00D75FFF, 0x00160004, 0x0 }, 96 { 0x00C30FFF, 0x001E0000, 0x0 }, 97 { 0x00FFFFFF, 0x00060006, 0x0 }, 98 { 0x00D75FFF, 0x001E0000, 0x0 }, 99 }; 100 101 static const struct ddi_buf_trans hsw_ddi_translations_hdmi[] = { 102 /* Idx NT mV d T mV d db */ 103 { 0x00FFFFFF, 0x0006000E, 0x0 },/* 0: 400 400 0 */ 104 { 0x00E79FFF, 0x000E000C, 0x0 },/* 1: 400 500 2 */ 105 { 0x00D75FFF, 0x0005000A, 0x0 },/* 2: 400 600 3.5 */ 106 { 0x00FFFFFF, 0x0005000A, 0x0 },/* 3: 600 600 0 */ 107 { 0x00E79FFF, 0x001D0007, 0x0 },/* 4: 600 750 2 */ 108 { 0x00D75FFF, 0x000C0004, 0x0 },/* 5: 600 900 3.5 */ 109 { 0x00FFFFFF, 0x00040006, 0x0 },/* 6: 800 800 0 */ 110 { 0x80E79FFF, 0x00030002, 0x0 },/* 7: 800 1000 2 */ 111 { 0x00FFFFFF, 0x00140005, 0x0 },/* 8: 850 850 0 */ 112 { 0x00FFFFFF, 0x000C0004, 0x0 },/* 9: 900 900 0 */ 113 { 0x00FFFFFF, 0x001C0003, 0x0 },/* 10: 950 950 0 */ 114 { 0x80FFFFFF, 0x00030002, 0x0 },/* 11: 1000 1000 0 */ 115 }; 116 117 static const struct ddi_buf_trans bdw_ddi_translations_edp[] = { 118 { 0x00FFFFFF, 0x00000012, 0x0 }, 119 { 0x00EBAFFF, 0x00020011, 0x0 }, 120 { 0x00C71FFF, 0x0006000F, 0x0 }, 121 { 0x00AAAFFF, 0x000E000A, 0x0 }, 122 { 0x00FFFFFF, 0x00020011, 0x0 }, 123 { 0x00DB6FFF, 0x0005000F, 0x0 }, 124 { 0x00BEEFFF, 0x000A000C, 0x0 }, 125 { 0x00FFFFFF, 0x0005000F, 0x0 }, 126 { 0x00DB6FFF, 0x000A000C, 0x0 }, 127 }; 128 129 static const struct ddi_buf_trans bdw_ddi_translations_dp[] = { 130 { 0x00FFFFFF, 0x0007000E, 0x0 }, 131 { 0x00D75FFF, 0x000E000A, 0x0 }, 132 { 0x00BEFFFF, 0x00140006, 0x0 }, 133 { 0x80B2CFFF, 0x001B0002, 0x0 }, 134 { 0x00FFFFFF, 0x000E000A, 0x0 }, 135 { 0x00DB6FFF, 0x00160005, 0x0 }, 136 { 0x80C71FFF, 0x001A0002, 0x0 }, 137 { 0x00F7DFFF, 0x00180004, 0x0 }, 138 { 0x80D75FFF, 0x001B0002, 0x0 }, 139 }; 140 141 static const struct ddi_buf_trans bdw_ddi_translations_fdi[] = { 142 { 0x00FFFFFF, 0x0001000E, 0x0 }, 143 { 0x00D75FFF, 0x0004000A, 0x0 }, 144 { 0x00C30FFF, 0x00070006, 0x0 }, 145 { 0x00AAAFFF, 0x000C0000, 0x0 }, 146 { 0x00FFFFFF, 0x0004000A, 0x0 }, 147 { 0x00D75FFF, 0x00090004, 0x0 }, 148 { 0x00C30FFF, 0x000C0000, 0x0 }, 149 { 0x00FFFFFF, 0x00070006, 0x0 }, 150 { 0x00D75FFF, 0x000C0000, 0x0 }, 151 }; 152 153 static const struct ddi_buf_trans bdw_ddi_translations_hdmi[] = { 154 /* Idx NT mV d T mV df db */ 155 { 0x00FFFFFF, 0x0007000E, 0x0 },/* 0: 400 400 0 */ 156 { 0x00D75FFF, 0x000E000A, 0x0 },/* 1: 400 600 3.5 */ 157 { 0x00BEFFFF, 0x00140006, 0x0 },/* 2: 400 800 6 */ 158 { 0x00FFFFFF, 0x0009000D, 0x0 },/* 3: 450 450 0 */ 159 { 0x00FFFFFF, 0x000E000A, 0x0 },/* 4: 600 600 0 */ 160 { 0x00D7FFFF, 0x00140006, 0x0 },/* 5: 600 800 2.5 */ 161 { 0x80CB2FFF, 0x001B0002, 0x0 },/* 6: 600 1000 4.5 */ 162 { 0x00FFFFFF, 0x00140006, 0x0 },/* 7: 800 800 0 */ 163 { 0x80E79FFF, 0x001B0002, 0x0 },/* 8: 800 
1000 2 */ 164 { 0x80FFFFFF, 0x001B0002, 0x0 },/* 9: 1000 1000 0 */ 165 }; 166 167 /* Skylake H and S */ 168 static const struct ddi_buf_trans skl_ddi_translations_dp[] = { 169 { 0x00002016, 0x000000A0, 0x0 }, 170 { 0x00005012, 0x0000009B, 0x0 }, 171 { 0x00007011, 0x00000088, 0x0 }, 172 { 0x80009010, 0x000000C0, 0x1 }, 173 { 0x00002016, 0x0000009B, 0x0 }, 174 { 0x00005012, 0x00000088, 0x0 }, 175 { 0x80007011, 0x000000C0, 0x1 }, 176 { 0x00002016, 0x000000DF, 0x0 }, 177 { 0x80005012, 0x000000C0, 0x1 }, 178 }; 179 180 /* Skylake U */ 181 static const struct ddi_buf_trans skl_u_ddi_translations_dp[] = { 182 { 0x0000201B, 0x000000A2, 0x0 }, 183 { 0x00005012, 0x00000088, 0x0 }, 184 { 0x80007011, 0x000000CD, 0x1 }, 185 { 0x80009010, 0x000000C0, 0x1 }, 186 { 0x0000201B, 0x0000009D, 0x0 }, 187 { 0x80005012, 0x000000C0, 0x1 }, 188 { 0x80007011, 0x000000C0, 0x1 }, 189 { 0x00002016, 0x00000088, 0x0 }, 190 { 0x80005012, 0x000000C0, 0x1 }, 191 }; 192 193 /* Skylake Y */ 194 static const struct ddi_buf_trans skl_y_ddi_translations_dp[] = { 195 { 0x00000018, 0x000000A2, 0x0 }, 196 { 0x00005012, 0x00000088, 0x0 }, 197 { 0x80007011, 0x000000CD, 0x3 }, 198 { 0x80009010, 0x000000C0, 0x3 }, 199 { 0x00000018, 0x0000009D, 0x0 }, 200 { 0x80005012, 0x000000C0, 0x3 }, 201 { 0x80007011, 0x000000C0, 0x3 }, 202 { 0x00000018, 0x00000088, 0x0 }, 203 { 0x80005012, 0x000000C0, 0x3 }, 204 }; 205 206 /* Kabylake H and S */ 207 static const struct ddi_buf_trans kbl_ddi_translations_dp[] = { 208 { 0x00002016, 0x000000A0, 0x0 }, 209 { 0x00005012, 0x0000009B, 0x0 }, 210 { 0x00007011, 0x00000088, 0x0 }, 211 { 0x80009010, 0x000000C0, 0x1 }, 212 { 0x00002016, 0x0000009B, 0x0 }, 213 { 0x00005012, 0x00000088, 0x0 }, 214 { 0x80007011, 0x000000C0, 0x1 }, 215 { 0x00002016, 0x00000097, 0x0 }, 216 { 0x80005012, 0x000000C0, 0x1 }, 217 }; 218 219 /* Kabylake U */ 220 static const struct ddi_buf_trans kbl_u_ddi_translations_dp[] = { 221 { 0x0000201B, 0x000000A1, 0x0 }, 222 { 0x00005012, 0x00000088, 0x0 }, 223 { 0x80007011, 0x000000CD, 0x3 }, 224 { 0x80009010, 0x000000C0, 0x3 }, 225 { 0x0000201B, 0x0000009D, 0x0 }, 226 { 0x80005012, 0x000000C0, 0x3 }, 227 { 0x80007011, 0x000000C0, 0x3 }, 228 { 0x00002016, 0x0000004F, 0x0 }, 229 { 0x80005012, 0x000000C0, 0x3 }, 230 }; 231 232 /* Kabylake Y */ 233 static const struct ddi_buf_trans kbl_y_ddi_translations_dp[] = { 234 { 0x00001017, 0x000000A1, 0x0 }, 235 { 0x00005012, 0x00000088, 0x0 }, 236 { 0x80007011, 0x000000CD, 0x3 }, 237 { 0x8000800F, 0x000000C0, 0x3 }, 238 { 0x00001017, 0x0000009D, 0x0 }, 239 { 0x80005012, 0x000000C0, 0x3 }, 240 { 0x80007011, 0x000000C0, 0x3 }, 241 { 0x00001017, 0x0000004C, 0x0 }, 242 { 0x80005012, 0x000000C0, 0x3 }, 243 }; 244 245 /* 246 * Skylake/Kabylake H and S 247 * eDP 1.4 low vswing translation parameters 248 */ 249 static const struct ddi_buf_trans skl_ddi_translations_edp[] = { 250 { 0x00000018, 0x000000A8, 0x0 }, 251 { 0x00004013, 0x000000A9, 0x0 }, 252 { 0x00007011, 0x000000A2, 0x0 }, 253 { 0x00009010, 0x0000009C, 0x0 }, 254 { 0x00000018, 0x000000A9, 0x0 }, 255 { 0x00006013, 0x000000A2, 0x0 }, 256 { 0x00007011, 0x000000A6, 0x0 }, 257 { 0x00000018, 0x000000AB, 0x0 }, 258 { 0x00007013, 0x0000009F, 0x0 }, 259 { 0x00000018, 0x000000DF, 0x0 }, 260 }; 261 262 /* 263 * Skylake/Kabylake U 264 * eDP 1.4 low vswing translation parameters 265 */ 266 static const struct ddi_buf_trans skl_u_ddi_translations_edp[] = { 267 { 0x00000018, 0x000000A8, 0x0 }, 268 { 0x00004013, 0x000000A9, 0x0 }, 269 { 0x00007011, 0x000000A2, 0x0 }, 270 { 0x00009010, 0x0000009C, 0x0 }, 271 { 
0x00000018, 0x000000A9, 0x0 }, 272 { 0x00006013, 0x000000A2, 0x0 }, 273 { 0x00007011, 0x000000A6, 0x0 }, 274 { 0x00002016, 0x000000AB, 0x0 }, 275 { 0x00005013, 0x0000009F, 0x0 }, 276 { 0x00000018, 0x000000DF, 0x0 }, 277 }; 278 279 /* 280 * Skylake/Kabylake Y 281 * eDP 1.4 low vswing translation parameters 282 */ 283 static const struct ddi_buf_trans skl_y_ddi_translations_edp[] = { 284 { 0x00000018, 0x000000A8, 0x0 }, 285 { 0x00004013, 0x000000AB, 0x0 }, 286 { 0x00007011, 0x000000A4, 0x0 }, 287 { 0x00009010, 0x000000DF, 0x0 }, 288 { 0x00000018, 0x000000AA, 0x0 }, 289 { 0x00006013, 0x000000A4, 0x0 }, 290 { 0x00007011, 0x0000009D, 0x0 }, 291 { 0x00000018, 0x000000A0, 0x0 }, 292 { 0x00006012, 0x000000DF, 0x0 }, 293 { 0x00000018, 0x0000008A, 0x0 }, 294 }; 295 296 /* Skylake/Kabylake U, H and S */ 297 static const struct ddi_buf_trans skl_ddi_translations_hdmi[] = { 298 { 0x00000018, 0x000000AC, 0x0 }, 299 { 0x00005012, 0x0000009D, 0x0 }, 300 { 0x00007011, 0x00000088, 0x0 }, 301 { 0x00000018, 0x000000A1, 0x0 }, 302 { 0x00000018, 0x00000098, 0x0 }, 303 { 0x00004013, 0x00000088, 0x0 }, 304 { 0x80006012, 0x000000CD, 0x1 }, 305 { 0x00000018, 0x000000DF, 0x0 }, 306 { 0x80003015, 0x000000CD, 0x1 }, /* Default */ 307 { 0x80003015, 0x000000C0, 0x1 }, 308 { 0x80000018, 0x000000C0, 0x1 }, 309 }; 310 311 /* Skylake/Kabylake Y */ 312 static const struct ddi_buf_trans skl_y_ddi_translations_hdmi[] = { 313 { 0x00000018, 0x000000A1, 0x0 }, 314 { 0x00005012, 0x000000DF, 0x0 }, 315 { 0x80007011, 0x000000CB, 0x3 }, 316 { 0x00000018, 0x000000A4, 0x0 }, 317 { 0x00000018, 0x0000009D, 0x0 }, 318 { 0x00004013, 0x00000080, 0x0 }, 319 { 0x80006013, 0x000000C0, 0x3 }, 320 { 0x00000018, 0x0000008A, 0x0 }, 321 { 0x80003015, 0x000000C0, 0x3 }, /* Default */ 322 { 0x80003015, 0x000000C0, 0x3 }, 323 { 0x80000018, 0x000000C0, 0x3 }, 324 }; 325 326 struct bxt_ddi_buf_trans { 327 u8 margin; /* swing value */ 328 u8 scale; /* scale value */ 329 u8 enable; /* scale enable */ 330 u8 deemphasis; 331 }; 332 333 static const struct bxt_ddi_buf_trans bxt_ddi_translations_dp[] = { 334 /* Idx NT mV diff db */ 335 { 52, 0x9A, 0, 128, }, /* 0: 400 0 */ 336 { 78, 0x9A, 0, 85, }, /* 1: 400 3.5 */ 337 { 104, 0x9A, 0, 64, }, /* 2: 400 6 */ 338 { 154, 0x9A, 0, 43, }, /* 3: 400 9.5 */ 339 { 77, 0x9A, 0, 128, }, /* 4: 600 0 */ 340 { 116, 0x9A, 0, 85, }, /* 5: 600 3.5 */ 341 { 154, 0x9A, 0, 64, }, /* 6: 600 6 */ 342 { 102, 0x9A, 0, 128, }, /* 7: 800 0 */ 343 { 154, 0x9A, 0, 85, }, /* 8: 800 3.5 */ 344 { 154, 0x9A, 1, 128, }, /* 9: 1200 0 */ 345 }; 346 347 static const struct bxt_ddi_buf_trans bxt_ddi_translations_edp[] = { 348 /* Idx NT mV diff db */ 349 { 26, 0, 0, 128, }, /* 0: 200 0 */ 350 { 38, 0, 0, 112, }, /* 1: 200 1.5 */ 351 { 48, 0, 0, 96, }, /* 2: 200 4 */ 352 { 54, 0, 0, 69, }, /* 3: 200 6 */ 353 { 32, 0, 0, 128, }, /* 4: 250 0 */ 354 { 48, 0, 0, 104, }, /* 5: 250 1.5 */ 355 { 54, 0, 0, 85, }, /* 6: 250 4 */ 356 { 43, 0, 0, 128, }, /* 7: 300 0 */ 357 { 54, 0, 0, 101, }, /* 8: 300 1.5 */ 358 { 48, 0, 0, 128, }, /* 9: 300 0 */ 359 }; 360 361 /* BSpec has 2 recommended values - entries 0 and 8. 362 * Using the entry with higher vswing. 
363 */ 364 static const struct bxt_ddi_buf_trans bxt_ddi_translations_hdmi[] = { 365 /* Idx NT mV diff db */ 366 { 52, 0x9A, 0, 128, }, /* 0: 400 0 */ 367 { 52, 0x9A, 0, 85, }, /* 1: 400 3.5 */ 368 { 52, 0x9A, 0, 64, }, /* 2: 400 6 */ 369 { 42, 0x9A, 0, 43, }, /* 3: 400 9.5 */ 370 { 77, 0x9A, 0, 128, }, /* 4: 600 0 */ 371 { 77, 0x9A, 0, 85, }, /* 5: 600 3.5 */ 372 { 77, 0x9A, 0, 64, }, /* 6: 600 6 */ 373 { 102, 0x9A, 0, 128, }, /* 7: 800 0 */ 374 { 102, 0x9A, 0, 85, }, /* 8: 800 3.5 */ 375 { 154, 0x9A, 1, 128, }, /* 9: 1200 0 */ 376 }; 377 378 struct cnl_ddi_buf_trans { 379 u8 dw2_swing_sel; 380 u8 dw7_n_scalar; 381 u8 dw4_cursor_coeff; 382 u8 dw4_post_cursor_2; 383 u8 dw4_post_cursor_1; 384 }; 385 386 /* Voltage Swing Programming for VccIO 0.85V for DP */ 387 static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_85V[] = { 388 /* NT mV Trans mV db */ 389 { 0xA, 0x5D, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 390 { 0xA, 0x6A, 0x38, 0x00, 0x07 }, /* 350 500 3.1 */ 391 { 0xB, 0x7A, 0x32, 0x00, 0x0D }, /* 350 700 6.0 */ 392 { 0x6, 0x7C, 0x2D, 0x00, 0x12 }, /* 350 900 8.2 */ 393 { 0xA, 0x69, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 394 { 0xB, 0x7A, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 395 { 0x6, 0x7C, 0x30, 0x00, 0x0F }, /* 500 900 5.1 */ 396 { 0xB, 0x7D, 0x3C, 0x00, 0x03 }, /* 650 725 0.9 */ 397 { 0x6, 0x7C, 0x34, 0x00, 0x0B }, /* 600 900 3.5 */ 398 { 0x6, 0x7B, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 399 }; 400 401 /* Voltage Swing Programming for VccIO 0.85V for HDMI */ 402 static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_85V[] = { 403 /* NT mV Trans mV db */ 404 { 0xA, 0x60, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 405 { 0xB, 0x73, 0x36, 0x00, 0x09 }, /* 450 650 3.2 */ 406 { 0x6, 0x7F, 0x31, 0x00, 0x0E }, /* 450 850 5.5 */ 407 { 0xB, 0x73, 0x3F, 0x00, 0x00 }, /* 650 650 0.0 */ 408 { 0x6, 0x7F, 0x37, 0x00, 0x08 }, /* 650 850 2.3 */ 409 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 850 850 0.0 */ 410 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 411 }; 412 413 /* Voltage Swing Programming for VccIO 0.85V for eDP */ 414 static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_85V[] = { 415 /* NT mV Trans mV db */ 416 { 0xA, 0x66, 0x3A, 0x00, 0x05 }, /* 384 500 2.3 */ 417 { 0x0, 0x7F, 0x38, 0x00, 0x07 }, /* 153 200 2.3 */ 418 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 192 250 2.3 */ 419 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 230 300 2.3 */ 420 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 269 350 2.3 */ 421 { 0xA, 0x66, 0x3C, 0x00, 0x03 }, /* 446 500 1.0 */ 422 { 0xB, 0x70, 0x3C, 0x00, 0x03 }, /* 460 600 2.3 */ 423 { 0xC, 0x75, 0x3C, 0x00, 0x03 }, /* 537 700 2.3 */ 424 { 0x2, 0x7F, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 425 }; 426 427 /* Voltage Swing Programming for VccIO 0.95V for DP */ 428 static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_95V[] = { 429 /* NT mV Trans mV db */ 430 { 0xA, 0x5D, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 431 { 0xA, 0x6A, 0x38, 0x00, 0x07 }, /* 350 500 3.1 */ 432 { 0xB, 0x7A, 0x32, 0x00, 0x0D }, /* 350 700 6.0 */ 433 { 0x6, 0x7C, 0x2D, 0x00, 0x12 }, /* 350 900 8.2 */ 434 { 0xA, 0x69, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 435 { 0xB, 0x7A, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 436 { 0x6, 0x7C, 0x30, 0x00, 0x0F }, /* 500 900 5.1 */ 437 { 0xB, 0x7D, 0x3C, 0x00, 0x03 }, /* 650 725 0.9 */ 438 { 0x6, 0x7C, 0x34, 0x00, 0x0B }, /* 600 900 3.5 */ 439 { 0x6, 0x7B, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 440 }; 441 442 /* Voltage Swing Programming for VccIO 0.95V for HDMI */ 443 static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_95V[] = { 444 /* NT mV 
Trans mV db */ 445 { 0xA, 0x5C, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 446 { 0xB, 0x69, 0x37, 0x00, 0x08 }, /* 400 600 3.5 */ 447 { 0x5, 0x76, 0x31, 0x00, 0x0E }, /* 400 800 6.0 */ 448 { 0xA, 0x5E, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 449 { 0xB, 0x69, 0x3F, 0x00, 0x00 }, /* 600 600 0.0 */ 450 { 0xB, 0x79, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 451 { 0x6, 0x7D, 0x32, 0x00, 0x0D }, /* 600 1000 4.4 */ 452 { 0x5, 0x76, 0x3F, 0x00, 0x00 }, /* 800 800 0.0 */ 453 { 0x6, 0x7D, 0x39, 0x00, 0x06 }, /* 800 1000 1.9 */ 454 { 0x6, 0x7F, 0x39, 0x00, 0x06 }, /* 850 1050 1.8 */ 455 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1050 1050 0.0 */ 456 }; 457 458 /* Voltage Swing Programming for VccIO 0.95V for eDP */ 459 static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_95V[] = { 460 /* NT mV Trans mV db */ 461 { 0xA, 0x61, 0x3A, 0x00, 0x05 }, /* 384 500 2.3 */ 462 { 0x0, 0x7F, 0x38, 0x00, 0x07 }, /* 153 200 2.3 */ 463 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 192 250 2.3 */ 464 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 230 300 2.3 */ 465 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 269 350 2.3 */ 466 { 0xA, 0x61, 0x3C, 0x00, 0x03 }, /* 446 500 1.0 */ 467 { 0xB, 0x68, 0x39, 0x00, 0x06 }, /* 460 600 2.3 */ 468 { 0xC, 0x6E, 0x39, 0x00, 0x06 }, /* 537 700 2.3 */ 469 { 0x4, 0x7F, 0x3A, 0x00, 0x05 }, /* 460 600 2.3 */ 470 { 0x2, 0x7F, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 471 }; 472 473 /* Voltage Swing Programming for VccIO 1.05V for DP */ 474 static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_1_05V[] = { 475 /* NT mV Trans mV db */ 476 { 0xA, 0x58, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 477 { 0xB, 0x64, 0x37, 0x00, 0x08 }, /* 400 600 3.5 */ 478 { 0x5, 0x70, 0x31, 0x00, 0x0E }, /* 400 800 6.0 */ 479 { 0x6, 0x7F, 0x2C, 0x00, 0x13 }, /* 400 1050 8.4 */ 480 { 0xB, 0x64, 0x3F, 0x00, 0x00 }, /* 600 600 0.0 */ 481 { 0x5, 0x73, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 482 { 0x6, 0x7F, 0x30, 0x00, 0x0F }, /* 550 1050 5.6 */ 483 { 0x5, 0x76, 0x3E, 0x00, 0x01 }, /* 850 900 0.5 */ 484 { 0x6, 0x7F, 0x36, 0x00, 0x09 }, /* 750 1050 2.9 */ 485 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1050 1050 0.0 */ 486 }; 487 488 /* Voltage Swing Programming for VccIO 1.05V for HDMI */ 489 static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_1_05V[] = { 490 /* NT mV Trans mV db */ 491 { 0xA, 0x58, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 492 { 0xB, 0x64, 0x37, 0x00, 0x08 }, /* 400 600 3.5 */ 493 { 0x5, 0x70, 0x31, 0x00, 0x0E }, /* 400 800 6.0 */ 494 { 0xA, 0x5B, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 495 { 0xB, 0x64, 0x3F, 0x00, 0x00 }, /* 600 600 0.0 */ 496 { 0x5, 0x73, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 497 { 0x6, 0x7C, 0x32, 0x00, 0x0D }, /* 600 1000 4.4 */ 498 { 0x5, 0x70, 0x3F, 0x00, 0x00 }, /* 800 800 0.0 */ 499 { 0x6, 0x7C, 0x39, 0x00, 0x06 }, /* 800 1000 1.9 */ 500 { 0x6, 0x7F, 0x39, 0x00, 0x06 }, /* 850 1050 1.8 */ 501 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1050 1050 0.0 */ 502 }; 503 504 /* Voltage Swing Programming for VccIO 1.05V for eDP */ 505 static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_1_05V[] = { 506 /* NT mV Trans mV db */ 507 { 0xA, 0x5E, 0x3A, 0x00, 0x05 }, /* 384 500 2.3 */ 508 { 0x0, 0x7F, 0x38, 0x00, 0x07 }, /* 153 200 2.3 */ 509 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 192 250 2.3 */ 510 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 230 300 2.3 */ 511 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 269 350 2.3 */ 512 { 0xA, 0x5E, 0x3C, 0x00, 0x03 }, /* 446 500 1.0 */ 513 { 0xB, 0x64, 0x39, 0x00, 0x06 }, /* 460 600 2.3 */ 514 { 0xE, 0x6A, 0x39, 0x00, 0x06 }, /* 537 700 2.3 */ 515 { 0x2, 0x7F, 0x3F, 0x00, 0x00 }, /* 400 400 0.0 */ 516 }; 
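
/*
 * Note: the CNL tables above come in three variants, one per VccIO rail
 * voltage (0.85V, 0.95V, 1.05V). cnl_get_buf_trans_{dp,edp,hdmi}() below
 * pick the variant at runtime from CNL_PORT_COMP_DW3, and the DP/eDP tables
 * carry one entry per voltage-swing/pre-emphasis combination, matching the
 * ordering of index_to_dp_signal_levels[] at the top of this file.
 */
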
517 518 /* icl_combo_phy_ddi_translations */ 519 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_dp_hbr2[] = { 520 /* NT mV Trans mV db */ 521 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 522 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 523 { 0xC, 0x71, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 524 { 0x6, 0x7F, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 525 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 526 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 527 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 528 { 0xC, 0x6C, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 529 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 530 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 531 }; 532 533 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr2[] = { 534 /* NT mV Trans mV db */ 535 { 0x0, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 200 0.0 */ 536 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 200 250 1.9 */ 537 { 0x1, 0x7F, 0x33, 0x00, 0x0C }, /* 200 300 3.5 */ 538 { 0x9, 0x7F, 0x31, 0x00, 0x0E }, /* 200 350 4.9 */ 539 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 250 0.0 */ 540 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 250 300 1.6 */ 541 { 0x9, 0x7F, 0x35, 0x00, 0x0A }, /* 250 350 2.9 */ 542 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 300 300 0.0 */ 543 { 0x9, 0x7F, 0x38, 0x00, 0x07 }, /* 300 350 1.3 */ 544 { 0x9, 0x7F, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 545 }; 546 547 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr3[] = { 548 /* NT mV Trans mV db */ 549 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 550 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 551 { 0xC, 0x71, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 552 { 0x6, 0x7F, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 553 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 554 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 555 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 556 { 0xC, 0x6C, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 557 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 558 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 559 }; 560 561 static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_hdmi[] = { 562 /* NT mV Trans mV db */ 563 { 0xA, 0x60, 0x3F, 0x00, 0x00 }, /* 450 450 0.0 */ 564 { 0xB, 0x73, 0x36, 0x00, 0x09 }, /* 450 650 3.2 */ 565 { 0x6, 0x7F, 0x31, 0x00, 0x0E }, /* 450 850 5.5 */ 566 { 0xB, 0x73, 0x3F, 0x00, 0x00 }, /* 650 650 0.0 ALS */ 567 { 0x6, 0x7F, 0x37, 0x00, 0x08 }, /* 650 850 2.3 */ 568 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 850 850 0.0 */ 569 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 850 3.0 */ 570 }; 571 572 static const struct cnl_ddi_buf_trans ehl_combo_phy_ddi_translations_dp[] = { 573 /* NT mV Trans mV db */ 574 { 0xA, 0x33, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 575 { 0xA, 0x47, 0x36, 0x00, 0x09 }, /* 350 500 3.1 */ 576 { 0xC, 0x64, 0x34, 0x00, 0x0B }, /* 350 700 6.0 */ 577 { 0x6, 0x7F, 0x30, 0x00, 0x0F }, /* 350 900 8.2 */ 578 { 0xA, 0x46, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 579 { 0xC, 0x64, 0x38, 0x00, 0x07 }, /* 500 700 2.9 */ 580 { 0x6, 0x7F, 0x32, 0x00, 0x0D }, /* 500 900 5.1 */ 581 { 0xC, 0x61, 0x3F, 0x00, 0x00 }, /* 650 700 0.6 */ 582 { 0x6, 0x7F, 0x38, 0x00, 0x07 }, /* 600 900 3.5 */ 583 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 584 }; 585 586 static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr[] = { 587 /* NT mV Trans mV db */ 588 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 200 0.0 */ 589 { 0x8, 0x7F, 0x38, 0x00, 0x07 }, /* 200 250 1.9 */ 590 { 0x1, 0x7F, 0x33, 0x00, 0x0C }, /* 200 300 3.5 */ 591 { 0xA, 0x35, 
0x36, 0x00, 0x09 }, /* 200 350 4.9 */ 592 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 250 0.0 */ 593 { 0x1, 0x7F, 0x38, 0x00, 0x07 }, /* 250 300 1.6 */ 594 { 0xA, 0x35, 0x35, 0x00, 0x0A }, /* 250 350 2.9 */ 595 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 300 300 0.0 */ 596 { 0xA, 0x35, 0x38, 0x00, 0x07 }, /* 300 350 1.3 */ 597 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 598 }; 599 600 static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr2[] = { 601 /* NT mV Trans mV db */ 602 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 200 0.0 */ 603 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 200 250 1.9 */ 604 { 0x1, 0x7F, 0x3D, 0x00, 0x02 }, /* 200 300 3.5 */ 605 { 0xA, 0x35, 0x38, 0x00, 0x07 }, /* 200 350 4.9 */ 606 { 0x8, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 250 0.0 */ 607 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 250 300 1.6 */ 608 { 0xA, 0x35, 0x3A, 0x00, 0x05 }, /* 250 350 2.9 */ 609 { 0x1, 0x7F, 0x3F, 0x00, 0x00 }, /* 300 300 0.0 */ 610 { 0xA, 0x35, 0x38, 0x00, 0x07 }, /* 300 350 1.3 */ 611 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 612 }; 613 614 struct icl_mg_phy_ddi_buf_trans { 615 u32 cri_txdeemph_override_11_6; 616 u32 cri_txdeemph_override_5_0; 617 u32 cri_txdeemph_override_17_12; 618 }; 619 620 static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_rbr_hbr[] = { 621 /* Voltage swing pre-emphasis */ 622 { 0x18, 0x00, 0x00 }, /* 0 0 */ 623 { 0x1D, 0x00, 0x05 }, /* 0 1 */ 624 { 0x24, 0x00, 0x0C }, /* 0 2 */ 625 { 0x2B, 0x00, 0x14 }, /* 0 3 */ 626 { 0x21, 0x00, 0x00 }, /* 1 0 */ 627 { 0x2B, 0x00, 0x08 }, /* 1 1 */ 628 { 0x30, 0x00, 0x0F }, /* 1 2 */ 629 { 0x31, 0x00, 0x03 }, /* 2 0 */ 630 { 0x34, 0x00, 0x0B }, /* 2 1 */ 631 { 0x3F, 0x00, 0x00 }, /* 3 0 */ 632 }; 633 634 static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hbr2_hbr3[] = { 635 /* Voltage swing pre-emphasis */ 636 { 0x18, 0x00, 0x00 }, /* 0 0 */ 637 { 0x1D, 0x00, 0x05 }, /* 0 1 */ 638 { 0x24, 0x00, 0x0C }, /* 0 2 */ 639 { 0x2B, 0x00, 0x14 }, /* 0 3 */ 640 { 0x26, 0x00, 0x00 }, /* 1 0 */ 641 { 0x2C, 0x00, 0x07 }, /* 1 1 */ 642 { 0x33, 0x00, 0x0C }, /* 1 2 */ 643 { 0x2E, 0x00, 0x00 }, /* 2 0 */ 644 { 0x36, 0x00, 0x09 }, /* 2 1 */ 645 { 0x3F, 0x00, 0x00 }, /* 3 0 */ 646 }; 647 648 static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hdmi[] = { 649 /* HDMI Preset VS Pre-emph */ 650 { 0x1A, 0x0, 0x0 }, /* 1 400mV 0dB */ 651 { 0x20, 0x0, 0x0 }, /* 2 500mV 0dB */ 652 { 0x29, 0x0, 0x0 }, /* 3 650mV 0dB */ 653 { 0x32, 0x0, 0x0 }, /* 4 800mV 0dB */ 654 { 0x3F, 0x0, 0x0 }, /* 5 1000mV 0dB */ 655 { 0x3A, 0x0, 0x5 }, /* 6 Full -1.5 dB */ 656 { 0x39, 0x0, 0x6 }, /* 7 Full -1.8 dB */ 657 { 0x38, 0x0, 0x7 }, /* 8 Full -2 dB */ 658 { 0x37, 0x0, 0x8 }, /* 9 Full -2.5 dB */ 659 { 0x36, 0x0, 0x9 }, /* 10 Full -3 dB */ 660 }; 661 662 struct tgl_dkl_phy_ddi_buf_trans { 663 u32 dkl_vswing_control; 664 u32 dkl_preshoot_control; 665 u32 dkl_de_emphasis_control; 666 }; 667 668 static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans[] = { 669 /* VS pre-emp Non-trans mV Pre-emph dB */ 670 { 0x7, 0x0, 0x00 }, /* 0 0 400mV 0 dB */ 671 { 0x5, 0x0, 0x05 }, /* 0 1 400mV 3.5 dB */ 672 { 0x2, 0x0, 0x0B }, /* 0 2 400mV 6 dB */ 673 { 0x0, 0x0, 0x18 }, /* 0 3 400mV 9.5 dB */ 674 { 0x5, 0x0, 0x00 }, /* 1 0 600mV 0 dB */ 675 { 0x2, 0x0, 0x08 }, /* 1 1 600mV 3.5 dB */ 676 { 0x0, 0x0, 0x14 }, /* 1 2 600mV 6 dB */ 677 { 0x2, 0x0, 0x00 }, /* 2 0 800mV 0 dB */ 678 { 0x0, 0x0, 0x0B }, /* 2 1 800mV 3.5 dB */ 679 { 0x0, 0x0, 0x00 }, /* 3 0 1200mV 0 dB HDMI default */ 680 }; 681 682 static const struct 
tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans_hbr2[] = { 683 /* VS pre-emp Non-trans mV Pre-emph dB */ 684 { 0x7, 0x0, 0x00 }, /* 0 0 400mV 0 dB */ 685 { 0x5, 0x0, 0x05 }, /* 0 1 400mV 3.5 dB */ 686 { 0x2, 0x0, 0x0B }, /* 0 2 400mV 6 dB */ 687 { 0x0, 0x0, 0x19 }, /* 0 3 400mV 9.5 dB */ 688 { 0x5, 0x0, 0x00 }, /* 1 0 600mV 0 dB */ 689 { 0x2, 0x0, 0x08 }, /* 1 1 600mV 3.5 dB */ 690 { 0x0, 0x0, 0x14 }, /* 1 2 600mV 6 dB */ 691 { 0x2, 0x0, 0x00 }, /* 2 0 800mV 0 dB */ 692 { 0x0, 0x0, 0x0B }, /* 2 1 800mV 3.5 dB */ 693 { 0x0, 0x0, 0x00 }, /* 3 0 1200mV 0 dB HDMI default */ 694 }; 695 696 static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_hdmi_ddi_trans[] = { 697 /* HDMI Preset VS Pre-emph */ 698 { 0x7, 0x0, 0x0 }, /* 1 400mV 0dB */ 699 { 0x6, 0x0, 0x0 }, /* 2 500mV 0dB */ 700 { 0x4, 0x0, 0x0 }, /* 3 650mV 0dB */ 701 { 0x2, 0x0, 0x0 }, /* 4 800mV 0dB */ 702 { 0x0, 0x0, 0x0 }, /* 5 1000mV 0dB */ 703 { 0x0, 0x0, 0x5 }, /* 6 Full -1.5 dB */ 704 { 0x0, 0x0, 0x6 }, /* 7 Full -1.8 dB */ 705 { 0x0, 0x0, 0x7 }, /* 8 Full -2 dB */ 706 { 0x0, 0x0, 0x8 }, /* 9 Full -2.5 dB */ 707 { 0x0, 0x0, 0xA }, /* 10 Full -3 dB */ 708 }; 709 710 static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr[] = { 711 /* NT mV Trans mV db */ 712 { 0xA, 0x32, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 713 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 714 { 0xC, 0x71, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 715 { 0x6, 0x7D, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 716 { 0xA, 0x4C, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 717 { 0xC, 0x73, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 718 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 719 { 0xC, 0x6C, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 720 { 0x6, 0x7F, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 721 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 722 }; 723 724 static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr2[] = { 725 /* NT mV Trans mV db */ 726 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 727 { 0xA, 0x4F, 0x37, 0x00, 0x08 }, /* 350 500 3.1 */ 728 { 0xC, 0x63, 0x2F, 0x00, 0x10 }, /* 350 700 6.0 */ 729 { 0x6, 0x7F, 0x2B, 0x00, 0x14 }, /* 350 900 8.2 */ 730 { 0xA, 0x47, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 731 { 0xC, 0x63, 0x34, 0x00, 0x0B }, /* 500 700 2.9 */ 732 { 0x6, 0x7F, 0x2F, 0x00, 0x10 }, /* 500 900 5.1 */ 733 { 0xC, 0x61, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 734 { 0x6, 0x7B, 0x35, 0x00, 0x0A }, /* 600 900 3.5 */ 735 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 736 }; 737 738 static const struct cnl_ddi_buf_trans tgl_uy_combo_phy_ddi_translations_dp_hbr2[] = { 739 /* NT mV Trans mV db */ 740 { 0xA, 0x35, 0x3F, 0x00, 0x00 }, /* 350 350 0.0 */ 741 { 0xA, 0x4F, 0x36, 0x00, 0x09 }, /* 350 500 3.1 */ 742 { 0xC, 0x60, 0x32, 0x00, 0x0D }, /* 350 700 6.0 */ 743 { 0xC, 0x7F, 0x2D, 0x00, 0x12 }, /* 350 900 8.2 */ 744 { 0xC, 0x47, 0x3F, 0x00, 0x00 }, /* 500 500 0.0 */ 745 { 0xC, 0x6F, 0x36, 0x00, 0x09 }, /* 500 700 2.9 */ 746 { 0x6, 0x7D, 0x32, 0x00, 0x0D }, /* 500 900 5.1 */ 747 { 0x6, 0x60, 0x3C, 0x00, 0x03 }, /* 650 700 0.6 */ 748 { 0x6, 0x7F, 0x34, 0x00, 0x0B }, /* 600 900 3.5 */ 749 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 900 900 0.0 */ 750 }; 751 752 /* 753 * Cloned the HOBL entry to comply with the voltage and pre-emphasis entries 754 * that DisplayPort specification requires 755 */ 756 static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_edp_hbr2_hobl[] = { 757 /* VS pre-emp */ 758 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 0 */ 759 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 1 */ 760 { 0x6, 0x7F, 0x3F, 0x00, 0x00 
}, /* 0 2 */ 761 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 0 3 */ 762 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1 0 */ 763 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1 1 */ 764 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 1 2 */ 765 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 2 0 */ 766 { 0x6, 0x7F, 0x3F, 0x00, 0x00 }, /* 2 1 */ 767 }; 768 769 static bool is_hobl_buf_trans(const struct cnl_ddi_buf_trans *table) 770 { 771 return table == tgl_combo_phy_ddi_translations_edp_hbr2_hobl; 772 } 773 774 static const struct ddi_buf_trans * 775 bdw_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 776 { 777 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 778 779 if (dev_priv->vbt.edp.low_vswing) { 780 *n_entries = ARRAY_SIZE(bdw_ddi_translations_edp); 781 return bdw_ddi_translations_edp; 782 } else { 783 *n_entries = ARRAY_SIZE(bdw_ddi_translations_dp); 784 return bdw_ddi_translations_dp; 785 } 786 } 787 788 static const struct ddi_buf_trans * 789 skl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 790 { 791 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 792 793 if (IS_SKL_ULX(dev_priv)) { 794 *n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp); 795 return skl_y_ddi_translations_dp; 796 } else if (IS_SKL_ULT(dev_priv)) { 797 *n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp); 798 return skl_u_ddi_translations_dp; 799 } else { 800 *n_entries = ARRAY_SIZE(skl_ddi_translations_dp); 801 return skl_ddi_translations_dp; 802 } 803 } 804 805 static const struct ddi_buf_trans * 806 kbl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 807 { 808 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 809 810 if (IS_KBL_ULX(dev_priv) || 811 IS_CFL_ULX(dev_priv) || 812 IS_CML_ULX(dev_priv)) { 813 *n_entries = ARRAY_SIZE(kbl_y_ddi_translations_dp); 814 return kbl_y_ddi_translations_dp; 815 } else if (IS_KBL_ULT(dev_priv) || 816 IS_CFL_ULT(dev_priv) || 817 IS_CML_ULT(dev_priv)) { 818 *n_entries = ARRAY_SIZE(kbl_u_ddi_translations_dp); 819 return kbl_u_ddi_translations_dp; 820 } else { 821 *n_entries = ARRAY_SIZE(kbl_ddi_translations_dp); 822 return kbl_ddi_translations_dp; 823 } 824 } 825 826 static const struct ddi_buf_trans * 827 skl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 828 { 829 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 830 831 if (dev_priv->vbt.edp.low_vswing) { 832 if (IS_SKL_ULX(dev_priv) || 833 IS_KBL_ULX(dev_priv) || 834 IS_CFL_ULX(dev_priv) || 835 IS_CML_ULX(dev_priv)) { 836 *n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp); 837 return skl_y_ddi_translations_edp; 838 } else if (IS_SKL_ULT(dev_priv) || 839 IS_KBL_ULT(dev_priv) || 840 IS_CFL_ULT(dev_priv) || 841 IS_CML_ULT(dev_priv)) { 842 *n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp); 843 return skl_u_ddi_translations_edp; 844 } else { 845 *n_entries = ARRAY_SIZE(skl_ddi_translations_edp); 846 return skl_ddi_translations_edp; 847 } 848 } 849 850 if (IS_KABYLAKE(dev_priv) || 851 IS_COFFEELAKE(dev_priv) || 852 IS_COMETLAKE(dev_priv)) 853 return kbl_get_buf_trans_dp(encoder, n_entries); 854 else 855 return skl_get_buf_trans_dp(encoder, n_entries); 856 } 857 858 static const struct ddi_buf_trans * 859 skl_get_buf_trans_hdmi(struct drm_i915_private *dev_priv, int *n_entries) 860 { 861 if (IS_SKL_ULX(dev_priv) || 862 IS_KBL_ULX(dev_priv) || 863 IS_CFL_ULX(dev_priv) || 864 IS_CML_ULX(dev_priv)) { 865 *n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi); 866 return skl_y_ddi_translations_hdmi; 867 } else { 868 *n_entries = 
ARRAY_SIZE(skl_ddi_translations_hdmi); 869 return skl_ddi_translations_hdmi; 870 } 871 } 872 873 static int skl_buf_trans_num_entries(enum port port, int n_entries) 874 { 875 /* Only DDIA and DDIE can select the 10th register with DP */ 876 if (port == PORT_A || port == PORT_E) 877 return min(n_entries, 10); 878 else 879 return min(n_entries, 9); 880 } 881 882 static const struct ddi_buf_trans * 883 intel_ddi_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 884 { 885 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 886 887 if (IS_KABYLAKE(dev_priv) || 888 IS_COFFEELAKE(dev_priv) || 889 IS_COMETLAKE(dev_priv)) { 890 const struct ddi_buf_trans *ddi_translations = 891 kbl_get_buf_trans_dp(encoder, n_entries); 892 *n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries); 893 return ddi_translations; 894 } else if (IS_SKYLAKE(dev_priv)) { 895 const struct ddi_buf_trans *ddi_translations = 896 skl_get_buf_trans_dp(encoder, n_entries); 897 *n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries); 898 return ddi_translations; 899 } else if (IS_BROADWELL(dev_priv)) { 900 *n_entries = ARRAY_SIZE(bdw_ddi_translations_dp); 901 return bdw_ddi_translations_dp; 902 } else if (IS_HASWELL(dev_priv)) { 903 *n_entries = ARRAY_SIZE(hsw_ddi_translations_dp); 904 return hsw_ddi_translations_dp; 905 } 906 907 *n_entries = 0; 908 return NULL; 909 } 910 911 static const struct ddi_buf_trans * 912 intel_ddi_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 913 { 914 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 915 916 if (IS_GEN9_BC(dev_priv)) { 917 const struct ddi_buf_trans *ddi_translations = 918 skl_get_buf_trans_edp(encoder, n_entries); 919 *n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries); 920 return ddi_translations; 921 } else if (IS_BROADWELL(dev_priv)) { 922 return bdw_get_buf_trans_edp(encoder, n_entries); 923 } else if (IS_HASWELL(dev_priv)) { 924 *n_entries = ARRAY_SIZE(hsw_ddi_translations_dp); 925 return hsw_ddi_translations_dp; 926 } 927 928 *n_entries = 0; 929 return NULL; 930 } 931 932 static const struct ddi_buf_trans * 933 intel_ddi_get_buf_trans_fdi(struct drm_i915_private *dev_priv, 934 int *n_entries) 935 { 936 if (IS_BROADWELL(dev_priv)) { 937 *n_entries = ARRAY_SIZE(bdw_ddi_translations_fdi); 938 return bdw_ddi_translations_fdi; 939 } else if (IS_HASWELL(dev_priv)) { 940 *n_entries = ARRAY_SIZE(hsw_ddi_translations_fdi); 941 return hsw_ddi_translations_fdi; 942 } 943 944 *n_entries = 0; 945 return NULL; 946 } 947 948 static const struct ddi_buf_trans * 949 intel_ddi_get_buf_trans_hdmi(struct intel_encoder *encoder, 950 int *n_entries) 951 { 952 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 953 954 if (IS_GEN9_BC(dev_priv)) { 955 return skl_get_buf_trans_hdmi(dev_priv, n_entries); 956 } else if (IS_BROADWELL(dev_priv)) { 957 *n_entries = ARRAY_SIZE(bdw_ddi_translations_hdmi); 958 return bdw_ddi_translations_hdmi; 959 } else if (IS_HASWELL(dev_priv)) { 960 *n_entries = ARRAY_SIZE(hsw_ddi_translations_hdmi); 961 return hsw_ddi_translations_hdmi; 962 } 963 964 *n_entries = 0; 965 return NULL; 966 } 967 968 static const struct bxt_ddi_buf_trans * 969 bxt_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 970 { 971 *n_entries = ARRAY_SIZE(bxt_ddi_translations_dp); 972 return bxt_ddi_translations_dp; 973 } 974 975 static const struct bxt_ddi_buf_trans * 976 bxt_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 977 { 978 struct drm_i915_private *dev_priv = 
to_i915(encoder->base.dev); 979 980 if (dev_priv->vbt.edp.low_vswing) { 981 *n_entries = ARRAY_SIZE(bxt_ddi_translations_edp); 982 return bxt_ddi_translations_edp; 983 } 984 985 return bxt_get_buf_trans_dp(encoder, n_entries); 986 } 987 988 static const struct bxt_ddi_buf_trans * 989 bxt_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries) 990 { 991 *n_entries = ARRAY_SIZE(bxt_ddi_translations_hdmi); 992 return bxt_ddi_translations_hdmi; 993 } 994 995 static const struct cnl_ddi_buf_trans * 996 cnl_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries) 997 { 998 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 999 u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK; 1000 1001 if (voltage == VOLTAGE_INFO_0_85V) { 1002 *n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_85V); 1003 return cnl_ddi_translations_hdmi_0_85V; 1004 } else if (voltage == VOLTAGE_INFO_0_95V) { 1005 *n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_95V); 1006 return cnl_ddi_translations_hdmi_0_95V; 1007 } else if (voltage == VOLTAGE_INFO_1_05V) { 1008 *n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_1_05V); 1009 return cnl_ddi_translations_hdmi_1_05V; 1010 } else { 1011 *n_entries = 1; /* shut up gcc */ 1012 MISSING_CASE(voltage); 1013 } 1014 return NULL; 1015 } 1016 1017 static const struct cnl_ddi_buf_trans * 1018 cnl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries) 1019 { 1020 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1021 u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK; 1022 1023 if (voltage == VOLTAGE_INFO_0_85V) { 1024 *n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_85V); 1025 return cnl_ddi_translations_dp_0_85V; 1026 } else if (voltage == VOLTAGE_INFO_0_95V) { 1027 *n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_95V); 1028 return cnl_ddi_translations_dp_0_95V; 1029 } else if (voltage == VOLTAGE_INFO_1_05V) { 1030 *n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_1_05V); 1031 return cnl_ddi_translations_dp_1_05V; 1032 } else { 1033 *n_entries = 1; /* shut up gcc */ 1034 MISSING_CASE(voltage); 1035 } 1036 return NULL; 1037 } 1038 1039 static const struct cnl_ddi_buf_trans * 1040 cnl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries) 1041 { 1042 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1043 u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK; 1044 1045 if (dev_priv->vbt.edp.low_vswing) { 1046 if (voltage == VOLTAGE_INFO_0_85V) { 1047 *n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_85V); 1048 return cnl_ddi_translations_edp_0_85V; 1049 } else if (voltage == VOLTAGE_INFO_0_95V) { 1050 *n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_95V); 1051 return cnl_ddi_translations_edp_0_95V; 1052 } else if (voltage == VOLTAGE_INFO_1_05V) { 1053 *n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_1_05V); 1054 return cnl_ddi_translations_edp_1_05V; 1055 } else { 1056 *n_entries = 1; /* shut up gcc */ 1057 MISSING_CASE(voltage); 1058 } 1059 return NULL; 1060 } else { 1061 return cnl_get_buf_trans_dp(encoder, n_entries); 1062 } 1063 } 1064 1065 static const struct cnl_ddi_buf_trans * 1066 icl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1067 const struct intel_crtc_state *crtc_state, 1068 int *n_entries) 1069 { 1070 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1071 return icl_combo_phy_ddi_translations_hdmi; 1072 } 1073 1074 static const struct cnl_ddi_buf_trans * 1075 icl_get_combo_buf_trans_dp(struct 
intel_encoder *encoder, 1076 const struct intel_crtc_state *crtc_state, 1077 int *n_entries) 1078 { 1079 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2); 1080 return icl_combo_phy_ddi_translations_dp_hbr2; 1081 } 1082 1083 static const struct cnl_ddi_buf_trans * 1084 icl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1085 const struct intel_crtc_state *crtc_state, 1086 int *n_entries) 1087 { 1088 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1089 1090 if (crtc_state->port_clock > 540000) { 1091 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3); 1092 return icl_combo_phy_ddi_translations_edp_hbr3; 1093 } else if (dev_priv->vbt.edp.low_vswing) { 1094 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2); 1095 return icl_combo_phy_ddi_translations_edp_hbr2; 1096 } 1097 1098 return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1099 } 1100 1101 static const struct cnl_ddi_buf_trans * 1102 icl_get_combo_buf_trans(struct intel_encoder *encoder, 1103 const struct intel_crtc_state *crtc_state, 1104 int *n_entries) 1105 { 1106 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1107 return icl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1108 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1109 return icl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1110 else 1111 return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1112 } 1113 1114 static const struct icl_mg_phy_ddi_buf_trans * 1115 icl_get_mg_buf_trans_hdmi(struct intel_encoder *encoder, 1116 const struct intel_crtc_state *crtc_state, 1117 int *n_entries) 1118 { 1119 *n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hdmi); 1120 return icl_mg_phy_ddi_translations_hdmi; 1121 } 1122 1123 static const struct icl_mg_phy_ddi_buf_trans * 1124 icl_get_mg_buf_trans_dp(struct intel_encoder *encoder, 1125 const struct intel_crtc_state *crtc_state, 1126 int *n_entries) 1127 { 1128 if (crtc_state->port_clock > 270000) { 1129 *n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hbr2_hbr3); 1130 return icl_mg_phy_ddi_translations_hbr2_hbr3; 1131 } else { 1132 *n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_rbr_hbr); 1133 return icl_mg_phy_ddi_translations_rbr_hbr; 1134 } 1135 } 1136 1137 static const struct icl_mg_phy_ddi_buf_trans * 1138 icl_get_mg_buf_trans(struct intel_encoder *encoder, 1139 const struct intel_crtc_state *crtc_state, 1140 int *n_entries) 1141 { 1142 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1143 return icl_get_mg_buf_trans_hdmi(encoder, crtc_state, n_entries); 1144 else 1145 return icl_get_mg_buf_trans_dp(encoder, crtc_state, n_entries); 1146 } 1147 1148 static const struct cnl_ddi_buf_trans * 1149 ehl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1150 const struct intel_crtc_state *crtc_state, 1151 int *n_entries) 1152 { 1153 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1154 return icl_combo_phy_ddi_translations_hdmi; 1155 } 1156 1157 static const struct cnl_ddi_buf_trans * 1158 ehl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1159 const struct intel_crtc_state *crtc_state, 1160 int *n_entries) 1161 { 1162 *n_entries = ARRAY_SIZE(ehl_combo_phy_ddi_translations_dp); 1163 return ehl_combo_phy_ddi_translations_dp; 1164 } 1165 1166 static const struct cnl_ddi_buf_trans * 1167 ehl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1168 const struct intel_crtc_state *crtc_state, 1169 int *n_entries) 1170 { 1171 struct drm_i915_private *dev_priv = 
to_i915(encoder->base.dev); 1172 1173 if (dev_priv->vbt.edp.low_vswing) { 1174 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2); 1175 return icl_combo_phy_ddi_translations_edp_hbr2; 1176 } 1177 1178 return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1179 } 1180 1181 static const struct cnl_ddi_buf_trans * 1182 ehl_get_combo_buf_trans(struct intel_encoder *encoder, 1183 const struct intel_crtc_state *crtc_state, 1184 int *n_entries) 1185 { 1186 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1187 return ehl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1188 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1189 return ehl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1190 else 1191 return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1192 } 1193 1194 static const struct cnl_ddi_buf_trans * 1195 jsl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1196 const struct intel_crtc_state *crtc_state, 1197 int *n_entries) 1198 { 1199 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1200 return icl_combo_phy_ddi_translations_hdmi; 1201 } 1202 1203 static const struct cnl_ddi_buf_trans * 1204 jsl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1205 const struct intel_crtc_state *crtc_state, 1206 int *n_entries) 1207 { 1208 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2); 1209 return icl_combo_phy_ddi_translations_dp_hbr2; 1210 } 1211 1212 static const struct cnl_ddi_buf_trans * 1213 jsl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1214 const struct intel_crtc_state *crtc_state, 1215 int *n_entries) 1216 { 1217 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1218 1219 if (dev_priv->vbt.edp.low_vswing) { 1220 if (crtc_state->port_clock > 270000) { 1221 *n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr2); 1222 return jsl_combo_phy_ddi_translations_edp_hbr2; 1223 } else { 1224 *n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr); 1225 return jsl_combo_phy_ddi_translations_edp_hbr; 1226 } 1227 } 1228 1229 return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1230 } 1231 1232 static const struct cnl_ddi_buf_trans * 1233 jsl_get_combo_buf_trans(struct intel_encoder *encoder, 1234 const struct intel_crtc_state *crtc_state, 1235 int *n_entries) 1236 { 1237 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1238 return jsl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1239 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1240 return jsl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1241 else 1242 return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1243 } 1244 1245 static const struct cnl_ddi_buf_trans * 1246 tgl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder, 1247 const struct intel_crtc_state *crtc_state, 1248 int *n_entries) 1249 { 1250 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi); 1251 return icl_combo_phy_ddi_translations_hdmi; 1252 } 1253 1254 static const struct cnl_ddi_buf_trans * 1255 tgl_get_combo_buf_trans_dp(struct intel_encoder *encoder, 1256 const struct intel_crtc_state *crtc_state, 1257 int *n_entries) 1258 { 1259 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1260 1261 if (crtc_state->port_clock > 270000) { 1262 if (IS_TGL_U(dev_priv) || IS_TGL_Y(dev_priv)) { 1263 *n_entries = ARRAY_SIZE(tgl_uy_combo_phy_ddi_translations_dp_hbr2); 1264 return tgl_uy_combo_phy_ddi_translations_dp_hbr2; 1265 } else { 1266 
*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr2); 1267 return tgl_combo_phy_ddi_translations_dp_hbr2; 1268 } 1269 } else { 1270 *n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr); 1271 return tgl_combo_phy_ddi_translations_dp_hbr; 1272 } 1273 } 1274 1275 static const struct cnl_ddi_buf_trans * 1276 tgl_get_combo_buf_trans_edp(struct intel_encoder *encoder, 1277 const struct intel_crtc_state *crtc_state, 1278 int *n_entries) 1279 { 1280 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1281 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1282 1283 if (crtc_state->port_clock > 540000) { 1284 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3); 1285 return icl_combo_phy_ddi_translations_edp_hbr3; 1286 } else if (dev_priv->vbt.edp.hobl && !intel_dp->hobl_failed) { 1287 *n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_edp_hbr2_hobl); 1288 return tgl_combo_phy_ddi_translations_edp_hbr2_hobl; 1289 } else if (dev_priv->vbt.edp.low_vswing) { 1290 *n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2); 1291 return icl_combo_phy_ddi_translations_edp_hbr2; 1292 } 1293 1294 return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1295 } 1296 1297 static const struct cnl_ddi_buf_trans * 1298 tgl_get_combo_buf_trans(struct intel_encoder *encoder, 1299 const struct intel_crtc_state *crtc_state, 1300 int *n_entries) 1301 { 1302 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1303 return tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries); 1304 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 1305 return tgl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries); 1306 else 1307 return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries); 1308 } 1309 1310 static const struct tgl_dkl_phy_ddi_buf_trans * 1311 tgl_get_dkl_buf_trans_hdmi(struct intel_encoder *encoder, 1312 const struct intel_crtc_state *crtc_state, 1313 int *n_entries) 1314 { 1315 *n_entries = ARRAY_SIZE(tgl_dkl_phy_hdmi_ddi_trans); 1316 return tgl_dkl_phy_hdmi_ddi_trans; 1317 } 1318 1319 static const struct tgl_dkl_phy_ddi_buf_trans * 1320 tgl_get_dkl_buf_trans_dp(struct intel_encoder *encoder, 1321 const struct intel_crtc_state *crtc_state, 1322 int *n_entries) 1323 { 1324 if (crtc_state->port_clock > 270000) { 1325 *n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans_hbr2); 1326 return tgl_dkl_phy_dp_ddi_trans_hbr2; 1327 } else { 1328 *n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans); 1329 return tgl_dkl_phy_dp_ddi_trans; 1330 } 1331 } 1332 1333 static const struct tgl_dkl_phy_ddi_buf_trans * 1334 tgl_get_dkl_buf_trans(struct intel_encoder *encoder, 1335 const struct intel_crtc_state *crtc_state, 1336 int *n_entries) 1337 { 1338 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 1339 return tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, n_entries); 1340 else 1341 return tgl_get_dkl_buf_trans_dp(encoder, crtc_state, n_entries); 1342 } 1343 1344 static int intel_ddi_hdmi_level(struct intel_encoder *encoder, 1345 const struct intel_crtc_state *crtc_state) 1346 { 1347 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1348 int n_entries, level, default_entry; 1349 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1350 1351 if (INTEL_GEN(dev_priv) >= 12) { 1352 if (intel_phy_is_combo(dev_priv, phy)) 1353 tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries); 1354 else 1355 tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, &n_entries); 1356 default_entry = n_entries - 1; 1357 } else if 
(INTEL_GEN(dev_priv) == 11) {
		if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		else
			icl_get_mg_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_CANNONLAKE(dev_priv)) {
		cnl_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_LP(dev_priv)) {
		bxt_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 8;
	} else if (IS_BROADWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 7;
	} else if (IS_HASWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 6;
	} else {
		drm_WARN(&dev_priv->drm, 1, "ddi translation table missing\n");
		return 0;
	}

	if (drm_WARN_ON_ONCE(&dev_priv->drm, n_entries == 0))
		return 0;

	/* The VBT HDMI level shift, if any, overrides the platform default */
	level = intel_bios_hdmi_level_shift(encoder);
	if (level < 0)
		level = default_entry;

	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	return level;
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * DP/eDP/FDI use cases.
 */
static void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder,
					 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int i, n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv,
							       &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
							       &n_entries);
	else
		ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
							      &n_entries);

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_dp_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	for (i = 0; i < n_entries; i++) {
		intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i),
			       ddi_translations[i].trans1 | iboost_bit);
		intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i),
			       ddi_translations[i].trans2);
	}
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * HDMI/DVI use cases.
 */
static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_hdmi_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	/* Entry 9 is for HDMI: */
	intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9),
		       ddi_translations[level].trans1 | iboost_bit);
	intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9),
		       ddi_translations[level].trans2);
}

static void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv,
				    enum port port)
{
	if (IS_BROXTON(dev_priv)) {
		udelay(16);
		return;
	}

	if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			 DDI_BUF_IS_IDLE), 8))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n",
			port_name(port));
}

static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv,
				      enum port port)
{
	/* Wait > 518 usecs for DDI_BUF_CTL to be non idle */
	if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) {
		usleep_range(518, 1000);
		return;
	}

	if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			  DDI_BUF_IS_IDLE), 500))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n",
			port_name(port));
}

static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll)
{
	switch (pll->info->id) {
	case DPLL_ID_WRPLL1:
		return PORT_CLK_SEL_WRPLL1;
	case DPLL_ID_WRPLL2:
		return PORT_CLK_SEL_WRPLL2;
	case DPLL_ID_SPLL:
		return PORT_CLK_SEL_SPLL;
	case DPLL_ID_LCPLL_810:
		return PORT_CLK_SEL_LCPLL_810;
	case DPLL_ID_LCPLL_1350:
		return PORT_CLK_SEL_LCPLL_1350;
	case DPLL_ID_LCPLL_2700:
		return PORT_CLK_SEL_LCPLL_2700;
	default:
		MISSING_CASE(pll->info->id);
		return PORT_CLK_SEL_NONE;
	}
}

static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	int clock = crtc_state->port_clock;
	const enum intel_dpll_id id = pll->info->id;

	switch (id) {
	default:
		/*
		 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used
		 * here, so do warn if this gets passed in
		 */
		MISSING_CASE(id);
		return DDI_CLK_SEL_NONE;
	case DPLL_ID_ICL_TBTPLL:
		switch (clock) {
		case 162000:
			return DDI_CLK_SEL_TBT_162;
		case 270000:
			return DDI_CLK_SEL_TBT_270;
		case 540000:
			return DDI_CLK_SEL_TBT_540;
		case 810000:
			return DDI_CLK_SEL_TBT_810;
		default:
			MISSING_CASE(clock);
			return DDI_CLK_SEL_NONE;
		}
	case DPLL_ID_ICL_MGPLL1:
	case DPLL_ID_ICL_MGPLL2:
	case DPLL_ID_ICL_MGPLL3:
	case DPLL_ID_ICL_MGPLL4:
	case DPLL_ID_TGL_MGPLL5:
	case DPLL_ID_TGL_MGPLL6:
		return DDI_CLK_SEL_MG;
	}
}
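
/*
 * The translation entries programmed by intel_prepare_dp_ddi_buffers() and
 * intel_prepare_hdmi_ddi_buffers() above are referenced by index through the
 * DDI_BUF_TRANS_SELECT field of DDI_BUF_CTL; see hsw_fdi_link_train() and
 * intel_ddi_init_dp_buf_reg() below.
 */
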
Haswell, different DDI ports can work in FDI mode for 1554 * connection to the PCH-located connectors. For this, it is necessary to train 1555 * both the DDI port and PCH receiver for the desired DDI buffer settings. 1556 * 1557 * The recommended port to work in FDI mode is DDI E, which we use here. Also, 1558 * please note that when FDI mode is active on DDI E, it shares 2 lines with 1559 * DDI A (which is used for eDP) 1560 */ 1561 1562 void hsw_fdi_link_train(struct intel_encoder *encoder, 1563 const struct intel_crtc_state *crtc_state) 1564 { 1565 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1566 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1567 u32 temp, i, rx_ctl_val, ddi_pll_sel; 1568 1569 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 1570 1571 /* Set the FDI_RX_MISC pwrdn lanes and the 2 workarounds listed at the 1572 * mode set "sequence for CRT port" document: 1573 * - TP1 to TP2 time with the default value 1574 * - FDI delay to 90h 1575 * 1576 * WaFDIAutoLinkSetTimingOverrride:hsw 1577 */ 1578 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), 1579 FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2) | FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90); 1580 1581 /* Enable the PCH Receiver FDI PLL */ 1582 rx_ctl_val = dev_priv->fdi_rx_config | FDI_RX_ENHANCE_FRAME_ENABLE | 1583 FDI_RX_PLL_ENABLE | 1584 FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes); 1585 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1586 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1587 udelay(220); 1588 1589 /* Switch from Rawclk to PCDclk */ 1590 rx_ctl_val |= FDI_PCDCLK; 1591 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1592 1593 /* Configure Port Clock Select */ 1594 ddi_pll_sel = hsw_pll_to_ddi_pll_sel(crtc_state->shared_dpll); 1595 intel_de_write(dev_priv, PORT_CLK_SEL(PORT_E), ddi_pll_sel); 1596 drm_WARN_ON(&dev_priv->drm, ddi_pll_sel != PORT_CLK_SEL_SPLL); 1597 1598 /* Start the training iterating through available voltages and emphasis, 1599 * testing each value twice. */ 1600 for (i = 0; i < ARRAY_SIZE(hsw_ddi_translations_fdi) * 2; i++) { 1601 /* Configure DP_TP_CTL with auto-training */ 1602 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), 1603 DP_TP_CTL_FDI_AUTOTRAIN | 1604 DP_TP_CTL_ENHANCED_FRAME_ENABLE | 1605 DP_TP_CTL_LINK_TRAIN_PAT1 | 1606 DP_TP_CTL_ENABLE); 1607 1608 /* Configure and enable DDI_BUF_CTL for DDI E with next voltage. 
1609 * DDI E does not support port reversal, the functionality is 1610 * achieved on the PCH side in FDI_RX_CTL, so no need to set the 1611 * port reversal bit */ 1612 intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), 1613 DDI_BUF_CTL_ENABLE | ((crtc_state->fdi_lanes - 1) << 1) | DDI_BUF_TRANS_SELECT(i / 2)); 1614 intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1615 1616 udelay(600); 1617 1618 /* Program PCH FDI Receiver TU */ 1619 intel_de_write(dev_priv, FDI_RX_TUSIZE1(PIPE_A), TU_SIZE(64)); 1620 1621 /* Enable PCH FDI Receiver with auto-training */ 1622 rx_ctl_val |= FDI_RX_ENABLE | FDI_LINK_TRAIN_AUTO; 1623 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1624 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1625 1626 /* Wait for FDI receiver lane calibration */ 1627 udelay(30); 1628 1629 /* Unset FDI_RX_MISC pwrdn lanes */ 1630 temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1631 temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 1632 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp); 1633 intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1634 1635 /* Wait for FDI auto training time */ 1636 udelay(5); 1637 1638 temp = intel_de_read(dev_priv, DP_TP_STATUS(PORT_E)); 1639 if (temp & DP_TP_STATUS_AUTOTRAIN_DONE) { 1640 drm_dbg_kms(&dev_priv->drm, 1641 "FDI link training done on step %d\n", i); 1642 break; 1643 } 1644 1645 /* 1646 * Leave things enabled even if we failed to train FDI. 1647 * Results in less fireworks from the state checker. 1648 */ 1649 if (i == ARRAY_SIZE(hsw_ddi_translations_fdi) * 2 - 1) { 1650 drm_err(&dev_priv->drm, "FDI link training failed!\n"); 1651 break; 1652 } 1653 1654 rx_ctl_val &= ~FDI_RX_ENABLE; 1655 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1656 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1657 1658 temp = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1659 temp &= ~DDI_BUF_CTL_ENABLE; 1660 intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), temp); 1661 intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1662 1663 /* Disable DP_TP_CTL and FDI_RX_CTL and retry */ 1664 temp = intel_de_read(dev_priv, DP_TP_CTL(PORT_E)); 1665 temp &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 1666 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 1667 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), temp); 1668 intel_de_posting_read(dev_priv, DP_TP_CTL(PORT_E)); 1669 1670 intel_wait_ddi_buf_idle(dev_priv, PORT_E); 1671 1672 /* Reset FDI_RX_MISC pwrdn lanes */ 1673 temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1674 temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 1675 temp |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 1676 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp); 1677 intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1678 } 1679 1680 /* Enable normal pixel sending for FDI */ 1681 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), 1682 DP_TP_CTL_FDI_AUTOTRAIN | 1683 DP_TP_CTL_LINK_TRAIN_NORMAL | 1684 DP_TP_CTL_ENHANCED_FRAME_ENABLE | 1685 DP_TP_CTL_ENABLE); 1686 } 1687 1688 static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder, 1689 const struct intel_crtc_state *crtc_state) 1690 { 1691 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1692 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 1693 1694 intel_dp->DP = dig_port->saved_port_bits | 1695 DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0); 1696 intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count); 1697 } 1698 1699 static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv, 1700 enum port port) 1701 { 1702 
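	/*
	 * Map the TBT PLL frequency selection currently programmed in
	 * DDI_CLK_SEL back to a port clock in kHz; returns 0 when no (or an
	 * unknown) TBT clock is selected.
	 */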
u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK; 1703 1704 switch (val) { 1705 case DDI_CLK_SEL_NONE: 1706 return 0; 1707 case DDI_CLK_SEL_TBT_162: 1708 return 162000; 1709 case DDI_CLK_SEL_TBT_270: 1710 return 270000; 1711 case DDI_CLK_SEL_TBT_540: 1712 return 540000; 1713 case DDI_CLK_SEL_TBT_810: 1714 return 810000; 1715 default: 1716 MISSING_CASE(val); 1717 return 0; 1718 } 1719 } 1720 1721 static void ddi_dotclock_get(struct intel_crtc_state *pipe_config) 1722 { 1723 int dotclock; 1724 1725 if (pipe_config->has_pch_encoder) 1726 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 1727 &pipe_config->fdi_m_n); 1728 else if (intel_crtc_has_dp_encoder(pipe_config)) 1729 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 1730 &pipe_config->dp_m_n); 1731 else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24) 1732 dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp; 1733 else 1734 dotclock = pipe_config->port_clock; 1735 1736 if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 && 1737 !intel_crtc_has_dp_encoder(pipe_config)) 1738 dotclock *= 2; 1739 1740 if (pipe_config->pixel_multiplier) 1741 dotclock /= pipe_config->pixel_multiplier; 1742 1743 pipe_config->hw.adjusted_mode.crtc_clock = dotclock; 1744 } 1745 1746 static void intel_ddi_clock_get(struct intel_encoder *encoder, 1747 struct intel_crtc_state *pipe_config) 1748 { 1749 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1750 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1751 1752 if (intel_phy_is_tc(dev_priv, phy) && 1753 intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll) == 1754 DPLL_ID_ICL_TBTPLL) 1755 pipe_config->port_clock = icl_calc_tbt_pll_link(dev_priv, 1756 encoder->port); 1757 else 1758 pipe_config->port_clock = 1759 intel_dpll_get_freq(dev_priv, pipe_config->shared_dpll, 1760 &pipe_config->dpll_hw_state); 1761 1762 ddi_dotclock_get(pipe_config); 1763 } 1764 1765 void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state, 1766 const struct drm_connector_state *conn_state) 1767 { 1768 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1769 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1770 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1771 u32 temp; 1772 1773 if (!intel_crtc_has_dp_encoder(crtc_state)) 1774 return; 1775 1776 drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)); 1777 1778 temp = DP_MSA_MISC_SYNC_CLOCK; 1779 1780 switch (crtc_state->pipe_bpp) { 1781 case 18: 1782 temp |= DP_MSA_MISC_6_BPC; 1783 break; 1784 case 24: 1785 temp |= DP_MSA_MISC_8_BPC; 1786 break; 1787 case 30: 1788 temp |= DP_MSA_MISC_10_BPC; 1789 break; 1790 case 36: 1791 temp |= DP_MSA_MISC_12_BPC; 1792 break; 1793 default: 1794 MISSING_CASE(crtc_state->pipe_bpp); 1795 break; 1796 } 1797 1798 /* nonsense combination */ 1799 drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range && 1800 crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB); 1801 1802 if (crtc_state->limited_color_range) 1803 temp |= DP_MSA_MISC_COLOR_CEA_RGB; 1804 1805 /* 1806 * As per DP 1.2 spec section 2.3.4.3 while sending 1807 * YCBCR 444 signals we should program MSA MISC1/0 fields with 1808 * colorspace information. 
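 * Only BT.709 colorimetry is signalled for YCbCr 4:4:4 here; when a VSC
 * SDP is used (see below) the sink is told to take the pixel encoding and
 * colorimetry from the SDP instead.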
1809 */ 1810 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) 1811 temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709; 1812 1813 /* 1814 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication 1815 * of Color Encoding Format and Content Color Gamut] while sending 1816 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields 1817 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format. 1818 */ 1819 if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) 1820 temp |= DP_MSA_MISC_COLOR_VSC_SDP; 1821 1822 intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp); 1823 } 1824 1825 static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder) 1826 { 1827 if (master_transcoder == TRANSCODER_EDP) 1828 return 0; 1829 else 1830 return master_transcoder + 1; 1831 } 1832 1833 /* 1834 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state. 1835 * 1836 * Only intended to be used by intel_ddi_enable_transcoder_func() and 1837 * intel_ddi_config_transcoder_func(). 1838 */ 1839 static u32 1840 intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder, 1841 const struct intel_crtc_state *crtc_state) 1842 { 1843 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1844 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1845 enum pipe pipe = crtc->pipe; 1846 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1847 enum port port = encoder->port; 1848 u32 temp; 1849 1850 /* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */ 1851 temp = TRANS_DDI_FUNC_ENABLE; 1852 if (INTEL_GEN(dev_priv) >= 12) 1853 temp |= TGL_TRANS_DDI_SELECT_PORT(port); 1854 else 1855 temp |= TRANS_DDI_SELECT_PORT(port); 1856 1857 switch (crtc_state->pipe_bpp) { 1858 case 18: 1859 temp |= TRANS_DDI_BPC_6; 1860 break; 1861 case 24: 1862 temp |= TRANS_DDI_BPC_8; 1863 break; 1864 case 30: 1865 temp |= TRANS_DDI_BPC_10; 1866 break; 1867 case 36: 1868 temp |= TRANS_DDI_BPC_12; 1869 break; 1870 default: 1871 BUG(); 1872 } 1873 1874 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC) 1875 temp |= TRANS_DDI_PVSYNC; 1876 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC) 1877 temp |= TRANS_DDI_PHSYNC; 1878 1879 if (cpu_transcoder == TRANSCODER_EDP) { 1880 switch (pipe) { 1881 case PIPE_A: 1882 /* On Haswell, can only use the always-on power well for 1883 * eDP when not using the panel fitter, and when not 1884 * using motion blur mitigation (which we don't 1885 * support). 
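 * crtc_state->pch_pfit.force_thru tracks that requirement, which is why
 * the ON/OFF eDP input (routed through the regular power well) is
 * selected below whenever it is set.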
*/ 1886 if (crtc_state->pch_pfit.force_thru) 1887 temp |= TRANS_DDI_EDP_INPUT_A_ONOFF; 1888 else 1889 temp |= TRANS_DDI_EDP_INPUT_A_ON; 1890 break; 1891 case PIPE_B: 1892 temp |= TRANS_DDI_EDP_INPUT_B_ONOFF; 1893 break; 1894 case PIPE_C: 1895 temp |= TRANS_DDI_EDP_INPUT_C_ONOFF; 1896 break; 1897 default: 1898 BUG(); 1899 break; 1900 } 1901 } 1902 1903 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 1904 if (crtc_state->has_hdmi_sink) 1905 temp |= TRANS_DDI_MODE_SELECT_HDMI; 1906 else 1907 temp |= TRANS_DDI_MODE_SELECT_DVI; 1908 1909 if (crtc_state->hdmi_scrambling) 1910 temp |= TRANS_DDI_HDMI_SCRAMBLING; 1911 if (crtc_state->hdmi_high_tmds_clock_ratio) 1912 temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE; 1913 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) { 1914 temp |= TRANS_DDI_MODE_SELECT_FDI; 1915 temp |= (crtc_state->fdi_lanes - 1) << 1; 1916 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 1917 temp |= TRANS_DDI_MODE_SELECT_DP_MST; 1918 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 1919 1920 if (INTEL_GEN(dev_priv) >= 12) { 1921 enum transcoder master; 1922 1923 master = crtc_state->mst_master_transcoder; 1924 drm_WARN_ON(&dev_priv->drm, 1925 master == INVALID_TRANSCODER); 1926 temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master); 1927 } 1928 } else { 1929 temp |= TRANS_DDI_MODE_SELECT_DP_SST; 1930 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 1931 } 1932 1933 if (IS_GEN_RANGE(dev_priv, 8, 10) && 1934 crtc_state->master_transcoder != INVALID_TRANSCODER) { 1935 u8 master_select = 1936 bdw_trans_port_sync_master_select(crtc_state->master_transcoder); 1937 1938 temp |= TRANS_DDI_PORT_SYNC_ENABLE | 1939 TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select); 1940 } 1941 1942 return temp; 1943 } 1944 1945 void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder, 1946 const struct intel_crtc_state *crtc_state) 1947 { 1948 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1949 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1950 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1951 1952 if (INTEL_GEN(dev_priv) >= 11) { 1953 enum transcoder master_transcoder = crtc_state->master_transcoder; 1954 u32 ctl2 = 0; 1955 1956 if (master_transcoder != INVALID_TRANSCODER) { 1957 u8 master_select = 1958 bdw_trans_port_sync_master_select(master_transcoder); 1959 1960 ctl2 |= PORT_SYNC_MODE_ENABLE | 1961 PORT_SYNC_MODE_MASTER_SELECT(master_select); 1962 } 1963 1964 intel_de_write(dev_priv, 1965 TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2); 1966 } 1967 1968 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), 1969 intel_ddi_transcoder_func_reg_val_get(encoder, 1970 crtc_state)); 1971 } 1972 1973 /* 1974 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable 1975 * bit. 
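 * This lets callers fully configure TRANS_DDI_FUNC_CTL ahead of time and
 * then turn the transcoder on later via intel_ddi_enable_transcoder_func(),
 * which writes the same value with the enable bit set.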
1976 */ 1977 static void 1978 intel_ddi_config_transcoder_func(struct intel_encoder *encoder, 1979 const struct intel_crtc_state *crtc_state) 1980 { 1981 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1982 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1983 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1984 u32 ctl; 1985 1986 ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state); 1987 ctl &= ~TRANS_DDI_FUNC_ENABLE; 1988 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 1989 } 1990 1991 void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state) 1992 { 1993 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1994 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1995 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1996 u32 ctl; 1997 1998 if (INTEL_GEN(dev_priv) >= 11) 1999 intel_de_write(dev_priv, 2000 TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0); 2001 2002 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2003 2004 drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING); 2005 2006 ctl &= ~TRANS_DDI_FUNC_ENABLE; 2007 2008 if (IS_GEN_RANGE(dev_priv, 8, 10)) 2009 ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE | 2010 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK); 2011 2012 if (INTEL_GEN(dev_priv) >= 12) { 2013 if (!intel_dp_mst_is_master_trans(crtc_state)) { 2014 ctl &= ~(TGL_TRANS_DDI_PORT_MASK | 2015 TRANS_DDI_MODE_SELECT_MASK); 2016 } 2017 } else { 2018 ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK); 2019 } 2020 2021 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 2022 2023 if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME && 2024 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 2025 drm_dbg_kms(&dev_priv->drm, 2026 "Quirk Increase DDI disabled time\n"); 2027 /* Quirk time at 100ms for reliable operation */ 2028 msleep(100); 2029 } 2030 } 2031 2032 int intel_ddi_toggle_hdcp_signalling(struct intel_encoder *intel_encoder, 2033 enum transcoder cpu_transcoder, 2034 bool enable) 2035 { 2036 struct drm_device *dev = intel_encoder->base.dev; 2037 struct drm_i915_private *dev_priv = to_i915(dev); 2038 intel_wakeref_t wakeref; 2039 int ret = 0; 2040 u32 tmp; 2041 2042 wakeref = intel_display_power_get_if_enabled(dev_priv, 2043 intel_encoder->power_domain); 2044 if (drm_WARN_ON(dev, !wakeref)) 2045 return -ENXIO; 2046 2047 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2048 if (enable) 2049 tmp |= TRANS_DDI_HDCP_SIGNALLING; 2050 else 2051 tmp &= ~TRANS_DDI_HDCP_SIGNALLING; 2052 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp); 2053 intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref); 2054 return ret; 2055 } 2056 2057 bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector) 2058 { 2059 struct drm_device *dev = intel_connector->base.dev; 2060 struct drm_i915_private *dev_priv = to_i915(dev); 2061 struct intel_encoder *encoder = intel_attached_encoder(intel_connector); 2062 int type = intel_connector->base.connector_type; 2063 enum port port = encoder->port; 2064 enum transcoder cpu_transcoder; 2065 intel_wakeref_t wakeref; 2066 enum pipe pipe = 0; 2067 u32 tmp; 2068 bool ret; 2069 2070 wakeref = intel_display_power_get_if_enabled(dev_priv, 2071 encoder->power_domain); 2072 if (!wakeref) 2073 return false; 2074 2075 if (!encoder->get_hw_state(encoder, &pipe)) { 2076 ret = false; 2077 goto out; 2078 } 2079 2080 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) 
&& port == PORT_A) 2081 cpu_transcoder = TRANSCODER_EDP; 2082 else 2083 cpu_transcoder = (enum transcoder) pipe; 2084 2085 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2086 2087 switch (tmp & TRANS_DDI_MODE_SELECT_MASK) { 2088 case TRANS_DDI_MODE_SELECT_HDMI: 2089 case TRANS_DDI_MODE_SELECT_DVI: 2090 ret = type == DRM_MODE_CONNECTOR_HDMIA; 2091 break; 2092 2093 case TRANS_DDI_MODE_SELECT_DP_SST: 2094 ret = type == DRM_MODE_CONNECTOR_eDP || 2095 type == DRM_MODE_CONNECTOR_DisplayPort; 2096 break; 2097 2098 case TRANS_DDI_MODE_SELECT_DP_MST: 2099 /* if the transcoder is in MST state then 2100 * connector isn't connected */ 2101 ret = false; 2102 break; 2103 2104 case TRANS_DDI_MODE_SELECT_FDI: 2105 ret = type == DRM_MODE_CONNECTOR_VGA; 2106 break; 2107 2108 default: 2109 ret = false; 2110 break; 2111 } 2112 2113 out: 2114 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 2115 2116 return ret; 2117 } 2118 2119 static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder, 2120 u8 *pipe_mask, bool *is_dp_mst) 2121 { 2122 struct drm_device *dev = encoder->base.dev; 2123 struct drm_i915_private *dev_priv = to_i915(dev); 2124 enum port port = encoder->port; 2125 intel_wakeref_t wakeref; 2126 enum pipe p; 2127 u32 tmp; 2128 u8 mst_pipe_mask; 2129 2130 *pipe_mask = 0; 2131 *is_dp_mst = false; 2132 2133 wakeref = intel_display_power_get_if_enabled(dev_priv, 2134 encoder->power_domain); 2135 if (!wakeref) 2136 return; 2137 2138 tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 2139 if (!(tmp & DDI_BUF_CTL_ENABLE)) 2140 goto out; 2141 2142 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) { 2143 tmp = intel_de_read(dev_priv, 2144 TRANS_DDI_FUNC_CTL(TRANSCODER_EDP)); 2145 2146 switch (tmp & TRANS_DDI_EDP_INPUT_MASK) { 2147 default: 2148 MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK); 2149 fallthrough; 2150 case TRANS_DDI_EDP_INPUT_A_ON: 2151 case TRANS_DDI_EDP_INPUT_A_ONOFF: 2152 *pipe_mask = BIT(PIPE_A); 2153 break; 2154 case TRANS_DDI_EDP_INPUT_B_ONOFF: 2155 *pipe_mask = BIT(PIPE_B); 2156 break; 2157 case TRANS_DDI_EDP_INPUT_C_ONOFF: 2158 *pipe_mask = BIT(PIPE_C); 2159 break; 2160 } 2161 2162 goto out; 2163 } 2164 2165 mst_pipe_mask = 0; 2166 for_each_pipe(dev_priv, p) { 2167 enum transcoder cpu_transcoder = (enum transcoder)p; 2168 unsigned int port_mask, ddi_select; 2169 intel_wakeref_t trans_wakeref; 2170 2171 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 2172 POWER_DOMAIN_TRANSCODER(cpu_transcoder)); 2173 if (!trans_wakeref) 2174 continue; 2175 2176 if (INTEL_GEN(dev_priv) >= 12) { 2177 port_mask = TGL_TRANS_DDI_PORT_MASK; 2178 ddi_select = TGL_TRANS_DDI_SELECT_PORT(port); 2179 } else { 2180 port_mask = TRANS_DDI_PORT_MASK; 2181 ddi_select = TRANS_DDI_SELECT_PORT(port); 2182 } 2183 2184 tmp = intel_de_read(dev_priv, 2185 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2186 intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder), 2187 trans_wakeref); 2188 2189 if ((tmp & port_mask) != ddi_select) 2190 continue; 2191 2192 if ((tmp & TRANS_DDI_MODE_SELECT_MASK) == 2193 TRANS_DDI_MODE_SELECT_DP_MST) 2194 mst_pipe_mask |= BIT(p); 2195 2196 *pipe_mask |= BIT(p); 2197 } 2198 2199 if (!*pipe_mask) 2200 drm_dbg_kms(&dev_priv->drm, 2201 "No pipe for [ENCODER:%d:%s] found\n", 2202 encoder->base.base.id, encoder->base.name); 2203 2204 if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) { 2205 drm_dbg_kms(&dev_priv->drm, 2206 "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n", 2207 encoder->base.base.id, encoder->base.name, 2208 
*pipe_mask); 2209 *pipe_mask = BIT(ffs(*pipe_mask) - 1); 2210 } 2211 2212 if (mst_pipe_mask && mst_pipe_mask != *pipe_mask) 2213 drm_dbg_kms(&dev_priv->drm, 2214 "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n", 2215 encoder->base.base.id, encoder->base.name, 2216 *pipe_mask, mst_pipe_mask); 2217 else 2218 *is_dp_mst = mst_pipe_mask; 2219 2220 out: 2221 if (*pipe_mask && IS_GEN9_LP(dev_priv)) { 2222 tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port)); 2223 if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK | 2224 BXT_PHY_LANE_POWERDOWN_ACK | 2225 BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED) 2226 drm_err(&dev_priv->drm, 2227 "[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n", 2228 encoder->base.base.id, encoder->base.name, tmp); 2229 } 2230 2231 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 2232 } 2233 2234 bool intel_ddi_get_hw_state(struct intel_encoder *encoder, 2235 enum pipe *pipe) 2236 { 2237 u8 pipe_mask; 2238 bool is_mst; 2239 2240 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 2241 2242 if (is_mst || !pipe_mask) 2243 return false; 2244 2245 *pipe = ffs(pipe_mask) - 1; 2246 2247 return true; 2248 } 2249 2250 static enum intel_display_power_domain 2251 intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port) 2252 { 2253 /* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with 2254 * DC states enabled at the same time, while for driver initiated AUX 2255 * transfers we need the same AUX IOs to be powered but with DC states 2256 * disabled. Accordingly use the AUX power domain here which leaves DC 2257 * states enabled. 2258 * However, for non-A AUX ports the corresponding non-EDP transcoders 2259 * would have already enabled power well 2 and DC_OFF. This means we can 2260 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a 2261 * specific AUX_IO reference without powering up any extra wells. 2262 * Note that PSR is enabled only on Port A even though this function 2263 * returns the correct domain for other ports too. 2264 */ 2265 return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A : 2266 intel_aux_power_domain(dig_port); 2267 } 2268 2269 static void intel_ddi_get_power_domains(struct intel_encoder *encoder, 2270 struct intel_crtc_state *crtc_state) 2271 { 2272 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2273 struct intel_digital_port *dig_port; 2274 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2275 2276 /* 2277 * TODO: Add support for MST encoders. Atm, the following should never 2278 * happen since fake-MST encoders don't set their get_power_domains() 2279 * hook. 2280 */ 2281 if (drm_WARN_ON(&dev_priv->drm, 2282 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))) 2283 return; 2284 2285 dig_port = enc_to_dig_port(encoder); 2286 2287 if (!intel_phy_is_tc(dev_priv, phy) || 2288 dig_port->tc_mode != TC_PORT_TBT_ALT) 2289 intel_display_power_get(dev_priv, 2290 dig_port->ddi_io_power_domain); 2291 2292 /* 2293 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC 2294 * ports. 
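 * (The check below therefore also takes the AUX domain reference for HDMI
 * on TC PHYs, presumably because the TC PHY itself is managed through the
 * AUX power well.)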
2295 */ 2296 if (intel_crtc_has_dp_encoder(crtc_state) || 2297 intel_phy_is_tc(dev_priv, phy)) 2298 intel_display_power_get(dev_priv, 2299 intel_ddi_main_link_aux_domain(dig_port)); 2300 } 2301 2302 void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder, 2303 const struct intel_crtc_state *crtc_state) 2304 { 2305 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2306 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2307 enum port port = encoder->port; 2308 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2309 2310 if (cpu_transcoder != TRANSCODER_EDP) { 2311 if (INTEL_GEN(dev_priv) >= 12) 2312 intel_de_write(dev_priv, 2313 TRANS_CLK_SEL(cpu_transcoder), 2314 TGL_TRANS_CLK_SEL_PORT(port)); 2315 else 2316 intel_de_write(dev_priv, 2317 TRANS_CLK_SEL(cpu_transcoder), 2318 TRANS_CLK_SEL_PORT(port)); 2319 } 2320 } 2321 2322 void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state) 2323 { 2324 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 2325 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2326 2327 if (cpu_transcoder != TRANSCODER_EDP) { 2328 if (INTEL_GEN(dev_priv) >= 12) 2329 intel_de_write(dev_priv, 2330 TRANS_CLK_SEL(cpu_transcoder), 2331 TGL_TRANS_CLK_SEL_DISABLED); 2332 else 2333 intel_de_write(dev_priv, 2334 TRANS_CLK_SEL(cpu_transcoder), 2335 TRANS_CLK_SEL_DISABLED); 2336 } 2337 } 2338 2339 static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv, 2340 enum port port, u8 iboost) 2341 { 2342 u32 tmp; 2343 2344 tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0); 2345 tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port)); 2346 if (iboost) 2347 tmp |= iboost << BALANCE_LEG_SHIFT(port); 2348 else 2349 tmp |= BALANCE_LEG_DISABLE(port); 2350 intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp); 2351 } 2352 2353 static void skl_ddi_set_iboost(struct intel_encoder *encoder, 2354 const struct intel_crtc_state *crtc_state, 2355 int level) 2356 { 2357 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2358 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2359 u8 iboost; 2360 2361 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2362 iboost = intel_bios_hdmi_boost_level(encoder); 2363 else 2364 iboost = intel_bios_dp_boost_level(encoder); 2365 2366 if (iboost == 0) { 2367 const struct ddi_buf_trans *ddi_translations; 2368 int n_entries; 2369 2370 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2371 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 2372 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2373 ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries); 2374 else 2375 ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries); 2376 2377 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2378 return; 2379 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2380 level = n_entries - 1; 2381 2382 iboost = ddi_translations[level].i_boost; 2383 } 2384 2385 /* Make sure that the requested I_boost is valid */ 2386 if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) { 2387 drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost); 2388 return; 2389 } 2390 2391 _skl_ddi_set_iboost(dev_priv, encoder->port, iboost); 2392 2393 if (encoder->port == PORT_A && dig_port->max_lanes == 4) 2394 _skl_ddi_set_iboost(dev_priv, PORT_E, iboost); 2395 } 2396 2397 static void bxt_ddi_vswing_sequence(struct intel_encoder *encoder, 2398 const struct intel_crtc_state *crtc_state, 
2399 int level) 2400 { 2401 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2402 const struct bxt_ddi_buf_trans *ddi_translations; 2403 enum port port = encoder->port; 2404 int n_entries; 2405 2406 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2407 ddi_translations = bxt_get_buf_trans_hdmi(encoder, &n_entries); 2408 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2409 ddi_translations = bxt_get_buf_trans_edp(encoder, &n_entries); 2410 else 2411 ddi_translations = bxt_get_buf_trans_dp(encoder, &n_entries); 2412 2413 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2414 return; 2415 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2416 level = n_entries - 1; 2417 2418 bxt_ddi_phy_set_signal_level(dev_priv, port, 2419 ddi_translations[level].margin, 2420 ddi_translations[level].scale, 2421 ddi_translations[level].enable, 2422 ddi_translations[level].deemphasis); 2423 } 2424 2425 static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp, 2426 const struct intel_crtc_state *crtc_state) 2427 { 2428 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 2429 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2430 enum port port = encoder->port; 2431 enum phy phy = intel_port_to_phy(dev_priv, port); 2432 int n_entries; 2433 2434 if (INTEL_GEN(dev_priv) >= 12) { 2435 if (intel_phy_is_combo(dev_priv, phy)) 2436 tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2437 else 2438 tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 2439 } else if (INTEL_GEN(dev_priv) == 11) { 2440 if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 2441 jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2442 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 2443 ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2444 else if (intel_phy_is_combo(dev_priv, phy)) 2445 icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2446 else 2447 icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 2448 } else if (IS_CANNONLAKE(dev_priv)) { 2449 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2450 cnl_get_buf_trans_edp(encoder, &n_entries); 2451 else 2452 cnl_get_buf_trans_dp(encoder, &n_entries); 2453 } else if (IS_GEN9_LP(dev_priv)) { 2454 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2455 bxt_get_buf_trans_edp(encoder, &n_entries); 2456 else 2457 bxt_get_buf_trans_dp(encoder, &n_entries); 2458 } else { 2459 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2460 intel_ddi_get_buf_trans_edp(encoder, &n_entries); 2461 else 2462 intel_ddi_get_buf_trans_dp(encoder, &n_entries); 2463 } 2464 2465 if (drm_WARN_ON(&dev_priv->drm, n_entries < 1)) 2466 n_entries = 1; 2467 if (drm_WARN_ON(&dev_priv->drm, 2468 n_entries > ARRAY_SIZE(index_to_dp_signal_levels))) 2469 n_entries = ARRAY_SIZE(index_to_dp_signal_levels); 2470 2471 return index_to_dp_signal_levels[n_entries - 1] & 2472 DP_TRAIN_VOLTAGE_SWING_MASK; 2473 } 2474 2475 /* 2476 * We assume that the full set of pre-emphasis values can be 2477 * used on all DDI platforms. Should that change we need to 2478 * rethink this code. 
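 * (That is why intel_ddi_dp_preemph_max() below unconditionally reports
 * DP_TRAIN_PRE_EMPH_LEVEL_3.)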
 */
static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void cnl_ddi_vswing_program(struct intel_encoder *encoder,
				   const struct intel_crtc_state *crtc_state,
				   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct cnl_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries, ln;
	u32 val;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		ddi_translations = cnl_get_buf_trans_hdmi(encoder, &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = cnl_get_buf_trans_edp(encoder, &n_entries);
	else
		ddi_translations = cnl_get_buf_trans_dp(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~SCALING_MODE_SEL_MASK;
	val |= SCALING_MODE_SEL(2);
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW2 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port));
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Rcomp scalar is fixed as 0x98 for every table entry */
	val |= RCOMP_SCALAR(0x98);
	intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val);

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen. */
	for (ln = 0; ln < 4; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* Program PORT_TX_DW5 */
	/* All DW5 values are fixed for every table entry */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~RTERM_SELECT_MASK;
	val |= RTERM_SELECT(6);
	val |= TAP3_DISABLE;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW7 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port));
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
	intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val);
}

static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int width, rate, ln;
	u32 val;

	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;

	/*
	 * 1. If port type is eDP or DP,
	 *    set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 *    else clear to 0b.
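	 *    The value is read back from lane 0 and written through the GRP
	 *    register so that all lanes pick it up.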
2566 */ 2567 val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port)); 2568 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2569 val &= ~COMMON_KEEPER_EN; 2570 else 2571 val |= COMMON_KEEPER_EN; 2572 intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val); 2573 2574 /* 2. Program loadgen select */ 2575 /* 2576 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 2577 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 2578 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 2579 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 2580 */ 2581 for (ln = 0; ln <= 3; ln++) { 2582 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 2583 val &= ~LOADGEN_SELECT; 2584 2585 if ((rate <= 600000 && width == 4 && ln >= 1) || 2586 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 2587 val |= LOADGEN_SELECT; 2588 } 2589 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 2590 } 2591 2592 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 2593 val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5); 2594 val |= SUS_CLOCK_CONFIG; 2595 intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val); 2596 2597 /* 4. Clear training enable to change swing values */ 2598 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2599 val &= ~TX_TRAINING_EN; 2600 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2601 2602 /* 5. Program swing and de-emphasis */ 2603 cnl_ddi_vswing_program(encoder, crtc_state, level); 2604 2605 /* 6. Set training enable to trigger update */ 2606 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2607 val |= TX_TRAINING_EN; 2608 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2609 } 2610 2611 static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder, 2612 const struct intel_crtc_state *crtc_state, 2613 int level) 2614 { 2615 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2616 const struct cnl_ddi_buf_trans *ddi_translations; 2617 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2618 int n_entries, ln; 2619 u32 val; 2620 2621 if (INTEL_GEN(dev_priv) >= 12) 2622 ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2623 else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 2624 ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2625 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 2626 ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2627 else 2628 ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2629 if (!ddi_translations) 2630 return; 2631 2632 if (level >= n_entries) { 2633 drm_dbg_kms(&dev_priv->drm, 2634 "DDI translation not found for level %d. Using %d instead.", 2635 level, n_entries - 1); 2636 level = n_entries - 1; 2637 } 2638 2639 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) { 2640 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2641 2642 val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED; 2643 intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations); 2644 intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val, 2645 intel_dp->hobl_active ? 
val : 0); 2646 } 2647 2648 /* Set PORT_TX_DW5 */ 2649 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2650 val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK | 2651 TAP2_DISABLE | TAP3_DISABLE); 2652 val |= SCALING_MODE_SEL(0x2); 2653 val |= RTERM_SELECT(0x6); 2654 val |= TAP3_DISABLE; 2655 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2656 2657 /* Program PORT_TX_DW2 */ 2658 val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy)); 2659 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 2660 RCOMP_SCALAR_MASK); 2661 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 2662 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 2663 /* Program Rcomp scalar for every table entry */ 2664 val |= RCOMP_SCALAR(0x98); 2665 intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val); 2666 2667 /* Program PORT_TX_DW4 */ 2668 /* We cannot write to GRP. It would overwrite individual loadgen. */ 2669 for (ln = 0; ln <= 3; ln++) { 2670 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 2671 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 2672 CURSOR_COEFF_MASK); 2673 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 2674 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 2675 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 2676 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 2677 } 2678 2679 /* Program PORT_TX_DW7 */ 2680 val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy)); 2681 val &= ~N_SCALAR_MASK; 2682 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 2683 intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val); 2684 } 2685 2686 static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2687 const struct intel_crtc_state *crtc_state, 2688 int level) 2689 { 2690 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2691 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2692 int width, rate, ln; 2693 u32 val; 2694 2695 width = crtc_state->lane_count; 2696 rate = crtc_state->port_clock; 2697 2698 /* 2699 * 1. If port type is eDP or DP, 2700 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 2701 * else clear to 0b. 2702 */ 2703 val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy)); 2704 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2705 val &= ~COMMON_KEEPER_EN; 2706 else 2707 val |= COMMON_KEEPER_EN; 2708 intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val); 2709 2710 /* 2. Program loadgen select */ 2711 /* 2712 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 2713 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 2714 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 2715 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 2716 */ 2717 for (ln = 0; ln <= 3; ln++) { 2718 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 2719 val &= ~LOADGEN_SELECT; 2720 2721 if ((rate <= 600000 && width == 4 && ln >= 1) || 2722 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 2723 val |= LOADGEN_SELECT; 2724 } 2725 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 2726 } 2727 2728 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 2729 val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy)); 2730 val |= SUS_CLOCK_CONFIG; 2731 intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val); 2732 2733 /* 4. Clear training enable to change swing values */ 2734 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2735 val &= ~TX_TRAINING_EN; 2736 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2737 2738 /* 5. 
Program swing and de-emphasis */ 2739 icl_ddi_combo_vswing_program(encoder, crtc_state, level); 2740 2741 /* 6. Set training enable to trigger update */ 2742 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2743 val |= TX_TRAINING_EN; 2744 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2745 } 2746 2747 static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2748 const struct intel_crtc_state *crtc_state, 2749 int level) 2750 { 2751 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2752 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 2753 const struct icl_mg_phy_ddi_buf_trans *ddi_translations; 2754 int n_entries, ln; 2755 u32 val; 2756 2757 ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 2758 /* The table does not have values for level 3 and level 9. */ 2759 if (level >= n_entries || level == 3 || level == 9) { 2760 drm_dbg_kms(&dev_priv->drm, 2761 "DDI translation not found for level %d. Using %d instead.", 2762 level, n_entries - 2); 2763 level = n_entries - 2; 2764 } 2765 2766 /* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */ 2767 for (ln = 0; ln < 2; ln++) { 2768 val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port)); 2769 val &= ~CRI_USE_FS32; 2770 intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val); 2771 2772 val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port)); 2773 val &= ~CRI_USE_FS32; 2774 intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val); 2775 } 2776 2777 /* Program MG_TX_SWINGCTRL with values from vswing table */ 2778 for (ln = 0; ln < 2; ln++) { 2779 val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port)); 2780 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 2781 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 2782 ddi_translations[level].cri_txdeemph_override_17_12); 2783 intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val); 2784 2785 val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port)); 2786 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 2787 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 2788 ddi_translations[level].cri_txdeemph_override_17_12); 2789 intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val); 2790 } 2791 2792 /* Program MG_TX_DRVCTRL with values from vswing table */ 2793 for (ln = 0; ln < 2; ln++) { 2794 val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port)); 2795 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 2796 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 2797 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 2798 ddi_translations[level].cri_txdeemph_override_5_0) | 2799 CRI_TXDEEMPH_OVERRIDE_11_6( 2800 ddi_translations[level].cri_txdeemph_override_11_6) | 2801 CRI_TXDEEMPH_OVERRIDE_EN; 2802 intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val); 2803 2804 val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port)); 2805 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 2806 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 2807 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 2808 ddi_translations[level].cri_txdeemph_override_5_0) | 2809 CRI_TXDEEMPH_OVERRIDE_11_6( 2810 ddi_translations[level].cri_txdeemph_override_11_6) | 2811 CRI_TXDEEMPH_OVERRIDE_EN; 2812 intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val); 2813 2814 /* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */ 2815 } 2816 2817 /* 2818 * Program MG_CLKHUB<LN, port being used> with value from frequency table 2819 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the 2820 * values from table for which TX1 and TX2 enabled. 
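 * In practice this means setting CFG_LOW_RATE_LKREN_EN for port clocks
 * below 300000 kHz and clearing it otherwise, as done below.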
2821 */ 2822 for (ln = 0; ln < 2; ln++) { 2823 val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port)); 2824 if (crtc_state->port_clock < 300000) 2825 val |= CFG_LOW_RATE_LKREN_EN; 2826 else 2827 val &= ~CFG_LOW_RATE_LKREN_EN; 2828 intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val); 2829 } 2830 2831 /* Program the MG_TX_DCC<LN, port being used> based on the link frequency */ 2832 for (ln = 0; ln < 2; ln++) { 2833 val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port)); 2834 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 2835 if (crtc_state->port_clock <= 500000) { 2836 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 2837 } else { 2838 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 2839 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 2840 } 2841 intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val); 2842 2843 val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port)); 2844 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 2845 if (crtc_state->port_clock <= 500000) { 2846 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 2847 } else { 2848 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 2849 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 2850 } 2851 intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val); 2852 } 2853 2854 /* Program MG_TX_PISO_READLOAD with values from vswing table */ 2855 for (ln = 0; ln < 2; ln++) { 2856 val = intel_de_read(dev_priv, 2857 MG_TX1_PISO_READLOAD(ln, tc_port)); 2858 val |= CRI_CALCINIT; 2859 intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port), 2860 val); 2861 2862 val = intel_de_read(dev_priv, 2863 MG_TX2_PISO_READLOAD(ln, tc_port)); 2864 val |= CRI_CALCINIT; 2865 intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port), 2866 val); 2867 } 2868 } 2869 2870 static void icl_ddi_vswing_sequence(struct intel_encoder *encoder, 2871 const struct intel_crtc_state *crtc_state, 2872 int level) 2873 { 2874 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2875 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2876 2877 if (intel_phy_is_combo(dev_priv, phy)) 2878 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2879 else 2880 icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2881 } 2882 2883 static void 2884 tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2885 const struct intel_crtc_state *crtc_state, 2886 int level) 2887 { 2888 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2889 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 2890 const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations; 2891 u32 val, dpcnt_mask, dpcnt_val; 2892 int n_entries, ln; 2893 2894 ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 2895 2896 if (level >= n_entries) 2897 level = n_entries - 1; 2898 2899 dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK | 2900 DKL_TX_DE_EMPAHSIS_COEFF_MASK | 2901 DKL_TX_VSWING_CONTROL_MASK); 2902 dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control); 2903 dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control); 2904 dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control); 2905 2906 for (ln = 0; ln < 2; ln++) { 2907 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2908 HIP_INDEX_VAL(tc_port, ln)); 2909 2910 intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0); 2911 2912 /* All the registers are RMW */ 2913 val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port)); 2914 val &= ~dpcnt_mask; 2915 val |= dpcnt_val; 2916 intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val); 2917 2918 val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port)); 2919 val &= ~dpcnt_mask; 
2920 val |= dpcnt_val; 2921 intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val); 2922 2923 val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port)); 2924 val &= ~DKL_TX_DP20BITMODE; 2925 intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val); 2926 } 2927 } 2928 2929 static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder, 2930 const struct intel_crtc_state *crtc_state, 2931 int level) 2932 { 2933 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2934 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2935 2936 if (intel_phy_is_combo(dev_priv, phy)) 2937 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2938 else 2939 tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2940 } 2941 2942 static int translate_signal_level(struct intel_dp *intel_dp, 2943 u8 signal_levels) 2944 { 2945 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2946 int i; 2947 2948 for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) { 2949 if (index_to_dp_signal_levels[i] == signal_levels) 2950 return i; 2951 } 2952 2953 drm_WARN(&i915->drm, 1, 2954 "Unsupported voltage swing/pre-emphasis level: 0x%x\n", 2955 signal_levels); 2956 2957 return 0; 2958 } 2959 2960 static int intel_ddi_dp_level(struct intel_dp *intel_dp) 2961 { 2962 u8 train_set = intel_dp->train_set[0]; 2963 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 2964 DP_TRAIN_PRE_EMPHASIS_MASK); 2965 2966 return translate_signal_level(intel_dp, signal_levels); 2967 } 2968 2969 static void 2970 tgl_set_signal_levels(struct intel_dp *intel_dp, 2971 const struct intel_crtc_state *crtc_state) 2972 { 2973 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 2974 int level = intel_ddi_dp_level(intel_dp); 2975 2976 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 2977 } 2978 2979 static void 2980 icl_set_signal_levels(struct intel_dp *intel_dp, 2981 const struct intel_crtc_state *crtc_state) 2982 { 2983 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 2984 int level = intel_ddi_dp_level(intel_dp); 2985 2986 icl_ddi_vswing_sequence(encoder, crtc_state, level); 2987 } 2988 2989 static void 2990 cnl_set_signal_levels(struct intel_dp *intel_dp, 2991 const struct intel_crtc_state *crtc_state) 2992 { 2993 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 2994 int level = intel_ddi_dp_level(intel_dp); 2995 2996 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 2997 } 2998 2999 static void 3000 bxt_set_signal_levels(struct intel_dp *intel_dp, 3001 const struct intel_crtc_state *crtc_state) 3002 { 3003 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3004 int level = intel_ddi_dp_level(intel_dp); 3005 3006 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3007 } 3008 3009 static void 3010 hsw_set_signal_levels(struct intel_dp *intel_dp, 3011 const struct intel_crtc_state *crtc_state) 3012 { 3013 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3014 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3015 int level = intel_ddi_dp_level(intel_dp); 3016 enum port port = encoder->port; 3017 u32 signal_levels; 3018 3019 signal_levels = DDI_BUF_TRANS_SELECT(level); 3020 3021 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n", 3022 signal_levels); 3023 3024 intel_dp->DP &= ~DDI_BUF_EMP_MASK; 3025 intel_dp->DP |= signal_levels; 3026 3027 if (IS_GEN9_BC(dev_priv)) 3028 skl_ddi_set_iboost(encoder, crtc_state, level); 3029 3030 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 3031 
intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 3032 } 3033 3034 static u32 icl_dpclka_cfgcr0_clk_off(struct drm_i915_private *dev_priv, 3035 enum phy phy) 3036 { 3037 if (IS_ROCKETLAKE(dev_priv)) { 3038 return RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3039 } else if (intel_phy_is_combo(dev_priv, phy)) { 3040 return ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3041 } else if (intel_phy_is_tc(dev_priv, phy)) { 3042 enum tc_port tc_port = intel_port_to_tc(dev_priv, 3043 (enum port)phy); 3044 3045 return ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port); 3046 } 3047 3048 return 0; 3049 } 3050 3051 static void dg1_map_plls_to_ports(struct intel_encoder *encoder, 3052 const struct intel_crtc_state *crtc_state) 3053 { 3054 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3055 struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3056 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3057 u32 val; 3058 3059 /* 3060 * If we fail this, something went very wrong: first 2 PLLs should be 3061 * used by first 2 phys and last 2 PLLs by last phys 3062 */ 3063 if (drm_WARN_ON(&dev_priv->drm, 3064 (pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) || 3065 (pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C))) 3066 return; 3067 3068 mutex_lock(&dev_priv->dpll.lock); 3069 3070 val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3071 drm_WARN_ON(&dev_priv->drm, 3072 (val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)) == 0); 3073 3074 val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3075 val |= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3076 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3077 intel_de_posting_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3078 3079 val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3080 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3081 3082 mutex_unlock(&dev_priv->dpll.lock); 3083 } 3084 3085 static void icl_map_plls_to_ports(struct intel_encoder *encoder, 3086 const struct intel_crtc_state *crtc_state) 3087 { 3088 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3089 struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3090 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3091 u32 val; 3092 3093 mutex_lock(&dev_priv->dpll.lock); 3094 3095 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3096 drm_WARN_ON(&dev_priv->drm, 3097 (val & icl_dpclka_cfgcr0_clk_off(dev_priv, phy)) == 0); 3098 3099 if (intel_phy_is_combo(dev_priv, phy)) { 3100 u32 mask, sel; 3101 3102 if (IS_ROCKETLAKE(dev_priv)) { 3103 mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3104 sel = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3105 } else { 3106 mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3107 sel = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3108 } 3109 3110 /* 3111 * Even though this register references DDIs, note that we 3112 * want to pass the PHY rather than the port (DDI). For 3113 * ICL, port=phy in all cases so it doesn't matter, but for 3114 * EHL the bspec notes the following: 3115 * 3116 * "DDID clock tied to DDIA clock, so DPCLKA_CFGCR0 DDIA 3117 * Clock Select chooses the PLL for both DDIA and DDID and 3118 * drives port A in all cases." 
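 * That is also why icl_dpclka_cfgcr0_clk_off() and the _SEL/_MASK macros
 * used in this function are keyed on the PHY rather than the DDI.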
3119 */ 3120 val &= ~mask; 3121 val |= sel; 3122 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3123 intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0); 3124 } 3125 3126 val &= ~icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3127 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3128 3129 mutex_unlock(&dev_priv->dpll.lock); 3130 } 3131 3132 static void dg1_unmap_plls_to_ports(struct intel_encoder *encoder) 3133 { 3134 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3135 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3136 3137 mutex_lock(&dev_priv->dpll.lock); 3138 3139 intel_de_rmw(dev_priv, DG1_DPCLKA_CFGCR0(phy), 0, 3140 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 3141 3142 mutex_unlock(&dev_priv->dpll.lock); 3143 } 3144 3145 static void icl_unmap_plls_to_ports(struct intel_encoder *encoder) 3146 { 3147 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3148 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3149 u32 val; 3150 3151 mutex_lock(&dev_priv->dpll.lock); 3152 3153 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3154 val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3155 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3156 3157 mutex_unlock(&dev_priv->dpll.lock); 3158 } 3159 3160 static void dg1_sanitize_port_clk_off(struct drm_i915_private *dev_priv, 3161 u32 port_mask, bool ddi_clk_needed) 3162 { 3163 enum port port; 3164 u32 val; 3165 3166 for_each_port_masked(port, port_mask) { 3167 enum phy phy = intel_port_to_phy(dev_priv, port); 3168 bool ddi_clk_off; 3169 3170 val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3171 ddi_clk_off = val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3172 3173 if (ddi_clk_needed == !ddi_clk_off) 3174 continue; 3175 3176 /* 3177 * Punt on the case now where clock is gated, but it would 3178 * be needed by the port. Something else is really broken then. 3179 */ 3180 if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed)) 3181 continue; 3182 3183 drm_notice(&dev_priv->drm, 3184 "PHY %c is disabled with an ungated DDI clock, gate it\n", 3185 phy_name(phy)); 3186 val |= DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3187 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3188 } 3189 } 3190 3191 static void icl_sanitize_port_clk_off(struct drm_i915_private *dev_priv, 3192 u32 port_mask, bool ddi_clk_needed) 3193 { 3194 enum port port; 3195 u32 val; 3196 3197 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3198 for_each_port_masked(port, port_mask) { 3199 enum phy phy = intel_port_to_phy(dev_priv, port); 3200 bool ddi_clk_off = val & icl_dpclka_cfgcr0_clk_off(dev_priv, 3201 phy); 3202 3203 if (ddi_clk_needed == !ddi_clk_off) 3204 continue; 3205 3206 /* 3207 * Punt on the case now where clock is gated, but it would 3208 * be needed by the port. Something else is really broken then. 3209 */ 3210 if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed)) 3211 continue; 3212 3213 drm_notice(&dev_priv->drm, 3214 "PHY %c is disabled/in DSI mode with an ungated DDI clock, gate it\n", 3215 phy_name(phy)); 3216 val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3217 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3218 } 3219 } 3220 3221 void icl_sanitize_encoder_pll_mapping(struct intel_encoder *encoder) 3222 { 3223 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3224 u32 port_mask; 3225 bool ddi_clk_needed; 3226 3227 /* 3228 * In case of DP MST, we sanitize the primary encoder only, not the 3229 * virtual ones. 
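 * The fake MST encoders share the primary encoder's DDI, so sanitizing
 * the primary is enough to cover their clock gating as well.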
3230 */ 3231 if (encoder->type == INTEL_OUTPUT_DP_MST) 3232 return; 3233 3234 if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) { 3235 u8 pipe_mask; 3236 bool is_mst; 3237 3238 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 3239 /* 3240 * In the unlikely case that BIOS enables DP in MST mode, just 3241 * warn since our MST HW readout is incomplete. 3242 */ 3243 if (drm_WARN_ON(&dev_priv->drm, is_mst)) 3244 return; 3245 } 3246 3247 port_mask = BIT(encoder->port); 3248 ddi_clk_needed = encoder->base.crtc; 3249 3250 if (encoder->type == INTEL_OUTPUT_DSI) { 3251 struct intel_encoder *other_encoder; 3252 3253 port_mask = intel_dsi_encoder_ports(encoder); 3254 /* 3255 * Sanity check that we haven't incorrectly registered another 3256 * encoder using any of the ports of this DSI encoder. 3257 */ 3258 for_each_intel_encoder(&dev_priv->drm, other_encoder) { 3259 if (other_encoder == encoder) 3260 continue; 3261 3262 if (drm_WARN_ON(&dev_priv->drm, 3263 port_mask & BIT(other_encoder->port))) 3264 return; 3265 } 3266 /* 3267 * For DSI we keep the ddi clocks gated 3268 * except during enable/disable sequence. 3269 */ 3270 ddi_clk_needed = false; 3271 } 3272 3273 if (IS_DG1(dev_priv)) 3274 dg1_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed); 3275 else 3276 icl_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed); 3277 } 3278 3279 static void intel_ddi_clk_select(struct intel_encoder *encoder, 3280 const struct intel_crtc_state *crtc_state) 3281 { 3282 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3283 enum port port = encoder->port; 3284 enum phy phy = intel_port_to_phy(dev_priv, port); 3285 u32 val; 3286 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3287 3288 if (drm_WARN_ON(&dev_priv->drm, !pll)) 3289 return; 3290 3291 mutex_lock(&dev_priv->dpll.lock); 3292 3293 if (INTEL_GEN(dev_priv) >= 11) { 3294 if (!intel_phy_is_combo(dev_priv, phy)) 3295 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3296 icl_pll_to_ddi_clk_sel(encoder, crtc_state)); 3297 else if (IS_JSL_EHL(dev_priv) && port >= PORT_C) 3298 /* 3299 * MG does not exist but the programming is required 3300 * to ungate DDIC and DDID 3301 */ 3302 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3303 DDI_CLK_SEL_MG); 3304 } else if (IS_CANNONLAKE(dev_priv)) { 3305 /* Configure DPCLKA_CFGCR0 to map the DPLL to the DDI. */ 3306 val = intel_de_read(dev_priv, DPCLKA_CFGCR0); 3307 val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port); 3308 val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port); 3309 intel_de_write(dev_priv, DPCLKA_CFGCR0, val); 3310 3311 /* 3312 * Configure DPCLKA_CFGCR0 to turn on the clock for the DDI. 3313 * This step and the step before must be done with separate 3314 * register writes. 
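 * (i.e. write the DDI_CLK_SEL field first, then clear DDI_CLK_OFF with a
 * second write; the two must not be folded into a single read-modify-write.)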
3315 */ 3316 val = intel_de_read(dev_priv, DPCLKA_CFGCR0); 3317 val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port); 3318 intel_de_write(dev_priv, DPCLKA_CFGCR0, val); 3319 } else if (IS_GEN9_BC(dev_priv)) { 3320 /* DDI -> PLL mapping */ 3321 val = intel_de_read(dev_priv, DPLL_CTRL2); 3322 3323 val &= ~(DPLL_CTRL2_DDI_CLK_OFF(port) | 3324 DPLL_CTRL2_DDI_CLK_SEL_MASK(port)); 3325 val |= (DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) | 3326 DPLL_CTRL2_DDI_SEL_OVERRIDE(port)); 3327 3328 intel_de_write(dev_priv, DPLL_CTRL2, val); 3329 3330 } else if (INTEL_GEN(dev_priv) < 9) { 3331 intel_de_write(dev_priv, PORT_CLK_SEL(port), 3332 hsw_pll_to_ddi_pll_sel(pll)); 3333 } 3334 3335 mutex_unlock(&dev_priv->dpll.lock); 3336 } 3337 3338 static void intel_ddi_clk_disable(struct intel_encoder *encoder) 3339 { 3340 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3341 enum port port = encoder->port; 3342 enum phy phy = intel_port_to_phy(dev_priv, port); 3343 3344 if (INTEL_GEN(dev_priv) >= 11) { 3345 if (!intel_phy_is_combo(dev_priv, phy) || 3346 (IS_JSL_EHL(dev_priv) && port >= PORT_C)) 3347 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3348 DDI_CLK_SEL_NONE); 3349 } else if (IS_CANNONLAKE(dev_priv)) { 3350 intel_de_write(dev_priv, DPCLKA_CFGCR0, 3351 intel_de_read(dev_priv, DPCLKA_CFGCR0) | DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 3352 } else if (IS_GEN9_BC(dev_priv)) { 3353 intel_de_write(dev_priv, DPLL_CTRL2, 3354 intel_de_read(dev_priv, DPLL_CTRL2) | DPLL_CTRL2_DDI_CLK_OFF(port)); 3355 } else if (INTEL_GEN(dev_priv) < 9) { 3356 intel_de_write(dev_priv, PORT_CLK_SEL(port), 3357 PORT_CLK_SEL_NONE); 3358 } 3359 } 3360 3361 static void 3362 icl_program_mg_dp_mode(struct intel_digital_port *dig_port, 3363 const struct intel_crtc_state *crtc_state) 3364 { 3365 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 3366 enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port); 3367 u32 ln0, ln1, pin_assignment; 3368 u8 width; 3369 3370 if (dig_port->tc_mode == TC_PORT_TBT_ALT) 3371 return; 3372 3373 if (INTEL_GEN(dev_priv) >= 12) { 3374 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3375 HIP_INDEX_VAL(tc_port, 0x0)); 3376 ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 3377 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3378 HIP_INDEX_VAL(tc_port, 0x1)); 3379 ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 3380 } else { 3381 ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port)); 3382 ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port)); 3383 } 3384 3385 ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 3386 ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 3387 3388 /* DPPATC */ 3389 pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port); 3390 width = crtc_state->lane_count; 3391 3392 switch (pin_assignment) { 3393 case 0x0: 3394 drm_WARN_ON(&dev_priv->drm, 3395 dig_port->tc_mode != TC_PORT_LEGACY); 3396 if (width == 1) { 3397 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3398 } else { 3399 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3400 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3401 } 3402 break; 3403 case 0x1: 3404 if (width == 4) { 3405 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3406 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3407 } 3408 break; 3409 case 0x2: 3410 if (width == 2) { 3411 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3412 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3413 } 3414 break; 3415 case 0x3: 3416 case 0x5: 3417 if (width == 1) { 3418 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 3419 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3420 } else { 3421 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3422 ln1 |= 
MG_DP_MODE_CFG_DP_X2_MODE; 3423 } 3424 break; 3425 case 0x4: 3426 case 0x6: 3427 if (width == 1) { 3428 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 3429 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3430 } else { 3431 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3432 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3433 } 3434 break; 3435 default: 3436 MISSING_CASE(pin_assignment); 3437 } 3438 3439 if (INTEL_GEN(dev_priv) >= 12) { 3440 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3441 HIP_INDEX_VAL(tc_port, 0x0)); 3442 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0); 3443 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3444 HIP_INDEX_VAL(tc_port, 0x1)); 3445 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1); 3446 } else { 3447 intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0); 3448 intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1); 3449 } 3450 } 3451 3452 static enum transcoder 3453 tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state) 3454 { 3455 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 3456 return crtc_state->mst_master_transcoder; 3457 else 3458 return crtc_state->cpu_transcoder; 3459 } 3460 3461 i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder, 3462 const struct intel_crtc_state *crtc_state) 3463 { 3464 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3465 3466 if (INTEL_GEN(dev_priv) >= 12) 3467 return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state)); 3468 else 3469 return DP_TP_CTL(encoder->port); 3470 } 3471 3472 i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder, 3473 const struct intel_crtc_state *crtc_state) 3474 { 3475 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3476 3477 if (INTEL_GEN(dev_priv) >= 12) 3478 return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state)); 3479 else 3480 return DP_TP_STATUS(encoder->port); 3481 } 3482 3483 static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp, 3484 const struct intel_crtc_state *crtc_state) 3485 { 3486 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3487 3488 if (!crtc_state->fec_enable) 3489 return; 3490 3491 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0) 3492 drm_dbg_kms(&i915->drm, 3493 "Failed to set FEC_READY in the sink\n"); 3494 } 3495 3496 static void intel_ddi_enable_fec(struct intel_encoder *encoder, 3497 const struct intel_crtc_state *crtc_state) 3498 { 3499 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3500 struct intel_dp *intel_dp; 3501 u32 val; 3502 3503 if (!crtc_state->fec_enable) 3504 return; 3505 3506 intel_dp = enc_to_intel_dp(encoder); 3507 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3508 val |= DP_TP_CTL_FEC_ENABLE; 3509 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3510 3511 if (intel_de_wait_for_set(dev_priv, 3512 dp_tp_status_reg(encoder, crtc_state), 3513 DP_TP_STATUS_FEC_ENABLE_LIVE, 1)) 3514 drm_err(&dev_priv->drm, 3515 "Timed out waiting for FEC Enable Status\n"); 3516 } 3517 3518 static void intel_ddi_disable_fec_state(struct intel_encoder *encoder, 3519 const struct intel_crtc_state *crtc_state) 3520 { 3521 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3522 struct intel_dp *intel_dp; 3523 u32 val; 3524 3525 if (!crtc_state->fec_enable) 3526 return; 3527 3528 intel_dp = enc_to_intel_dp(encoder); 3529 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3530 val &= ~DP_TP_CTL_FEC_ENABLE; 3531 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3532 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, 
crtc_state)); 3533 } 3534 3535 static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state, 3536 struct intel_encoder *encoder, 3537 const struct intel_crtc_state *crtc_state, 3538 const struct drm_connector_state *conn_state) 3539 { 3540 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3541 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3542 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3543 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3544 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 3545 int level = intel_ddi_dp_level(intel_dp); 3546 3547 intel_dp_set_link_params(intel_dp, 3548 crtc_state->port_clock, 3549 crtc_state->lane_count); 3550 3551 /* 3552 * 1. Enable Power Wells 3553 * 3554 * This was handled at the beginning of intel_atomic_commit_tail(), 3555 * before we called down into this function. 3556 */ 3557 3558 /* 2. Enable Panel Power if PPS is required */ 3559 intel_edp_panel_on(intel_dp); 3560 3561 /* 3562 * 3. For non-TBT Type-C ports, set FIA lane count 3563 * (DFLEXDPSP.DPX4TXLATC) 3564 * 3565 * This was done before tgl_ddi_pre_enable_dp by 3566 * hsw_crtc_enable()->intel_encoders_pre_pll_enable(). 3567 */ 3568 3569 /* 3570 * 4. Enable the port PLL. 3571 * 3572 * The PLL enabling itself was already done before this function by 3573 * hsw_crtc_enable()->intel_enable_shared_dpll(). We need only 3574 * configure the PLL to port mapping here. 3575 */ 3576 intel_ddi_clk_select(encoder, crtc_state); 3577 3578 /* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */ 3579 if (!intel_phy_is_tc(dev_priv, phy) || 3580 dig_port->tc_mode != TC_PORT_TBT_ALT) 3581 intel_display_power_get(dev_priv, 3582 dig_port->ddi_io_power_domain); 3583 3584 /* 6. Program DP_MODE */ 3585 icl_program_mg_dp_mode(dig_port, crtc_state); 3586 3587 /* 3588 * 7. The rest of the below are substeps under the bspec's "Enable and 3589 * Train Display Port" step. Note that steps that are specific to 3590 * MST will be handled by intel_mst_pre_enable_dp() before/after it 3591 * calls into this function. Also intel_mst_pre_enable_dp() only calls 3592 * us when active_mst_links==0, so any steps designated for "single 3593 * stream or multi-stream master transcoder" can just be performed 3594 * unconditionally here. 3595 */ 3596 3597 /* 3598 * 7.a Configure Transcoder Clock Select to direct the Port clock to the 3599 * Transcoder. 3600 */ 3601 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3602 3603 /* 3604 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST 3605 * Transport Select 3606 */ 3607 intel_ddi_config_transcoder_func(encoder, crtc_state); 3608 3609 /* 3610 * 7.c Configure & enable DP_TP_CTL with link training pattern 1 3611 * selected 3612 * 3613 * This will be handled by the intel_dp_start_link_train() farther 3614 * down this function. 3615 */ 3616 3617 /* 7.e Configure voltage swing and related IO settings */ 3618 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3619 3620 /* 3621 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up 3622 * the used lanes of the DDI. 3623 */ 3624 if (intel_phy_is_combo(dev_priv, phy)) { 3625 bool lane_reversal = 3626 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 3627 3628 intel_combo_phy_power_up_lanes(dev_priv, phy, false, 3629 crtc_state->lane_count, 3630 lane_reversal); 3631 } 3632 3633 /* 3634 * 7.g Configure and enable DDI_BUF_CTL 3635 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout 3636 * after 500 us. 
3637 * 3638 * We only configure what the register value will be here. Actual 3639 * enabling happens during link training farther down. 3640 */ 3641 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 3642 3643 if (!is_mst) 3644 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 3645 3646 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true); 3647 /* 3648 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit 3649 * in the FEC_CONFIGURATION register to 1 before initiating link 3650 * training 3651 */ 3652 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 3653 3654 /* 3655 * 7.i Follow DisplayPort specification training sequence (see notes for 3656 * failure handling) 3657 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle 3658 * Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent) 3659 * (timeout after 800 us) 3660 */ 3661 intel_dp_start_link_train(intel_dp, crtc_state); 3662 3663 /* 7.k Set DP_TP_CTL link training to Normal */ 3664 if (!is_trans_port_sync_mode(crtc_state)) 3665 intel_dp_stop_link_train(intel_dp, crtc_state); 3666 3667 /* 7.l Configure and enable FEC if needed */ 3668 intel_ddi_enable_fec(encoder, crtc_state); 3669 if (!crtc_state->bigjoiner) 3670 intel_dsc_enable(encoder, crtc_state); 3671 } 3672 3673 static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state, 3674 struct intel_encoder *encoder, 3675 const struct intel_crtc_state *crtc_state, 3676 const struct drm_connector_state *conn_state) 3677 { 3678 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3679 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3680 enum port port = encoder->port; 3681 enum phy phy = intel_port_to_phy(dev_priv, port); 3682 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3683 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 3684 int level = intel_ddi_dp_level(intel_dp); 3685 3686 if (INTEL_GEN(dev_priv) < 11) 3687 drm_WARN_ON(&dev_priv->drm, 3688 is_mst && (port == PORT_A || port == PORT_E)); 3689 else 3690 drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A); 3691 3692 intel_dp_set_link_params(intel_dp, 3693 crtc_state->port_clock, 3694 crtc_state->lane_count); 3695 3696 intel_edp_panel_on(intel_dp); 3697 3698 intel_ddi_clk_select(encoder, crtc_state); 3699 3700 if (!intel_phy_is_tc(dev_priv, phy) || 3701 dig_port->tc_mode != TC_PORT_TBT_ALT) 3702 intel_display_power_get(dev_priv, 3703 dig_port->ddi_io_power_domain); 3704 3705 icl_program_mg_dp_mode(dig_port, crtc_state); 3706 3707 if (INTEL_GEN(dev_priv) >= 11) 3708 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3709 else if (IS_CANNONLAKE(dev_priv)) 3710 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3711 else if (IS_GEN9_LP(dev_priv)) 3712 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3713 else 3714 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 3715 3716 if (intel_phy_is_combo(dev_priv, phy)) { 3717 bool lane_reversal = 3718 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 3719 3720 intel_combo_phy_power_up_lanes(dev_priv, phy, false, 3721 crtc_state->lane_count, 3722 lane_reversal); 3723 } 3724 3725 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 3726 if (!is_mst) 3727 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 3728 intel_dp_configure_protocol_converter(intel_dp); 3729 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, 3730 true); 3731 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 3732 intel_dp_start_link_train(intel_dp, crtc_state); 3733 if ((port != PORT_A || INTEL_GEN(dev_priv) >= 
9) && 3734 !is_trans_port_sync_mode(crtc_state)) 3735 intel_dp_stop_link_train(intel_dp, crtc_state); 3736 3737 intel_ddi_enable_fec(encoder, crtc_state); 3738 3739 if (!is_mst) 3740 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3741 3742 if (!crtc_state->bigjoiner) 3743 intel_dsc_enable(encoder, crtc_state); 3744 } 3745 3746 static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state, 3747 struct intel_encoder *encoder, 3748 const struct intel_crtc_state *crtc_state, 3749 const struct drm_connector_state *conn_state) 3750 { 3751 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3752 3753 if (INTEL_GEN(dev_priv) >= 12) 3754 tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 3755 else 3756 hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 3757 3758 /* For MST, the MSA is set after the Virtual Channel has been allocated, 3759 * from the MST encoder's pre_enable callback. 3760 */ 3761 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 3762 intel_ddi_set_dp_msa(crtc_state, conn_state); 3763 3764 intel_dp_set_m_n(crtc_state, M1_N1); 3765 } 3766 } 3767 3768 static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state, 3769 struct intel_encoder *encoder, 3770 const struct intel_crtc_state *crtc_state, 3771 const struct drm_connector_state *conn_state) 3772 { 3773 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3774 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 3775 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3776 int level = intel_ddi_hdmi_level(encoder, crtc_state); 3777 3778 intel_dp_dual_mode_set_tmds_output(intel_hdmi, true); 3779 intel_ddi_clk_select(encoder, crtc_state); 3780 3781 intel_display_power_get(dev_priv, dig_port->ddi_io_power_domain); 3782 3783 icl_program_mg_dp_mode(dig_port, crtc_state); 3784 3785 if (INTEL_GEN(dev_priv) >= 12) 3786 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3787 else if (INTEL_GEN(dev_priv) == 11) 3788 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3789 else if (IS_CANNONLAKE(dev_priv)) 3790 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3791 else if (IS_GEN9_LP(dev_priv)) 3792 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3793 else 3794 intel_prepare_hdmi_ddi_buffers(encoder, level); 3795 3796 if (IS_GEN9_BC(dev_priv)) 3797 skl_ddi_set_iboost(encoder, crtc_state, level); 3798 3799 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3800 3801 dig_port->set_infoframes(encoder, 3802 crtc_state->has_infoframe, 3803 crtc_state, conn_state); 3804 } 3805 3806 static void intel_ddi_pre_enable(struct intel_atomic_state *state, 3807 struct intel_encoder *encoder, 3808 const struct intel_crtc_state *crtc_state, 3809 const struct drm_connector_state *conn_state) 3810 { 3811 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 3812 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 3813 enum pipe pipe = crtc->pipe; 3814 3815 /* 3816 * When called from DP MST code: 3817 * - conn_state will be NULL 3818 * - encoder will be the main encoder (ie.
mst->primary) 3819 * - the main connector associated with this port 3820 * won't be active or linked to a crtc 3821 * - crtc_state will be the state of the first stream to 3822 * be activated on this port, and it may not be the same 3823 * stream that will be deactivated last, but each stream 3824 * should have a state that is identical when it comes to 3825 * the DP link parameters 3826 */ 3827 3828 drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder); 3829 3830 if (IS_DG1(dev_priv)) 3831 dg1_map_plls_to_ports(encoder, crtc_state); 3832 else if (INTEL_GEN(dev_priv) >= 11) 3833 icl_map_plls_to_ports(encoder, crtc_state); 3834 3835 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true); 3836 3837 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 3838 intel_ddi_pre_enable_hdmi(state, encoder, crtc_state, 3839 conn_state); 3840 } else { 3841 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3842 3843 intel_ddi_pre_enable_dp(state, encoder, crtc_state, 3844 conn_state); 3845 3846 /* FIXME precompute everything properly */ 3847 /* FIXME how do we turn infoframes off again? */ 3848 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 3849 dig_port->set_infoframes(encoder, 3850 crtc_state->has_infoframe, 3851 crtc_state, conn_state); 3852 } 3853 } 3854 3855 static void intel_disable_ddi_buf(struct intel_encoder *encoder, 3856 const struct intel_crtc_state *crtc_state) 3857 { 3858 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3859 enum port port = encoder->port; 3860 bool wait = false; 3861 u32 val; 3862 3863 val = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 3864 if (val & DDI_BUF_CTL_ENABLE) { 3865 val &= ~DDI_BUF_CTL_ENABLE; 3866 intel_de_write(dev_priv, DDI_BUF_CTL(port), val); 3867 wait = true; 3868 } 3869 3870 if (intel_crtc_has_dp_encoder(crtc_state)) { 3871 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3872 val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 3873 val |= DP_TP_CTL_LINK_TRAIN_PAT1; 3874 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3875 } 3876 3877 /* Disable FEC in DP Sink */ 3878 intel_ddi_disable_fec_state(encoder, crtc_state); 3879 3880 if (wait) 3881 intel_wait_ddi_buf_idle(dev_priv, port); 3882 } 3883 3884 static void intel_ddi_post_disable_dp(struct intel_atomic_state *state, 3885 struct intel_encoder *encoder, 3886 const struct intel_crtc_state *old_crtc_state, 3887 const struct drm_connector_state *old_conn_state) 3888 { 3889 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3890 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3891 struct intel_dp *intel_dp = &dig_port->dp; 3892 bool is_mst = intel_crtc_has_type(old_crtc_state, 3893 INTEL_OUTPUT_DP_MST); 3894 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3895 3896 if (!is_mst) 3897 intel_dp_set_infoframes(encoder, false, 3898 old_crtc_state, old_conn_state); 3899 3900 /* 3901 * Power down sink before disabling the port, otherwise we end 3902 * up getting interrupts from the sink on detecting link loss.
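 *
 * For DPCD 1.1+ sinks, intel_dp_set_power() below essentially performs
 * the DPCD write sketched here (DPCD 1.0 sinks lack the power state
 * register and are skipped):
 *
 *   drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D3);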
3903 */ 3904 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 3905 3906 if (INTEL_GEN(dev_priv) >= 12) { 3907 if (is_mst) { 3908 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder; 3909 u32 val; 3910 3911 val = intel_de_read(dev_priv, 3912 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3913 val &= ~(TGL_TRANS_DDI_PORT_MASK | 3914 TRANS_DDI_MODE_SELECT_MASK); 3915 intel_de_write(dev_priv, 3916 TRANS_DDI_FUNC_CTL(cpu_transcoder), 3917 val); 3918 } 3919 } else { 3920 if (!is_mst) 3921 intel_ddi_disable_pipe_clock(old_crtc_state); 3922 } 3923 3924 intel_disable_ddi_buf(encoder, old_crtc_state); 3925 3926 /* 3927 * From TGL spec: "If single stream or multi-stream master transcoder: 3928 * Configure Transcoder Clock select to direct no clock to the 3929 * transcoder" 3930 */ 3931 if (INTEL_GEN(dev_priv) >= 12) 3932 intel_ddi_disable_pipe_clock(old_crtc_state); 3933 3934 intel_edp_panel_vdd_on(intel_dp); 3935 intel_edp_panel_off(intel_dp); 3936 3937 if (!intel_phy_is_tc(dev_priv, phy) || 3938 dig_port->tc_mode != TC_PORT_TBT_ALT) 3939 intel_display_power_put_unchecked(dev_priv, 3940 dig_port->ddi_io_power_domain); 3941 3942 intel_ddi_clk_disable(encoder); 3943 } 3944 3945 static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state, 3946 struct intel_encoder *encoder, 3947 const struct intel_crtc_state *old_crtc_state, 3948 const struct drm_connector_state *old_conn_state) 3949 { 3950 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3951 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3952 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 3953 3954 dig_port->set_infoframes(encoder, false, 3955 old_crtc_state, old_conn_state); 3956 3957 intel_ddi_disable_pipe_clock(old_crtc_state); 3958 3959 intel_disable_ddi_buf(encoder, old_crtc_state); 3960 3961 intel_display_power_put_unchecked(dev_priv, 3962 dig_port->ddi_io_power_domain); 3963 3964 intel_ddi_clk_disable(encoder); 3965 3966 intel_dp_dual_mode_set_tmds_output(intel_hdmi, false); 3967 } 3968 3969 static void intel_ddi_post_disable(struct intel_atomic_state *state, 3970 struct intel_encoder *encoder, 3971 const struct intel_crtc_state *old_crtc_state, 3972 const struct drm_connector_state *old_conn_state) 3973 { 3974 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3975 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3976 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3977 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 3978 3979 if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) { 3980 intel_crtc_vblank_off(old_crtc_state); 3981 3982 intel_disable_pipe(old_crtc_state); 3983 3984 intel_ddi_disable_transcoder_func(old_crtc_state); 3985 3986 intel_dsc_disable(old_crtc_state); 3987 3988 if (INTEL_GEN(dev_priv) >= 9) 3989 skl_scaler_disable(old_crtc_state); 3990 else 3991 ilk_pfit_disable(old_crtc_state); 3992 } 3993 3994 if (old_crtc_state->bigjoiner_linked_crtc) { 3995 struct intel_atomic_state *state = 3996 to_intel_atomic_state(old_crtc_state->uapi.state); 3997 struct intel_crtc *slave = 3998 old_crtc_state->bigjoiner_linked_crtc; 3999 const struct intel_crtc_state *old_slave_crtc_state = 4000 intel_atomic_get_old_crtc_state(state, slave); 4001 4002 intel_crtc_vblank_off(old_slave_crtc_state); 4003 trace_intel_pipe_disable(slave); 4004 4005 intel_dsc_disable(old_slave_crtc_state); 4006 skl_scaler_disable(old_slave_crtc_state); 4007 } 4008 4009 /* 4010 * When called from DP MST code: 4011 * - old_conn_state will be NULL 4012 * - encoder will be the 
main encoder (ie. mst->primary) 4013 * - the main connector associated with this port 4014 * won't be active or linked to a crtc 4015 * - old_crtc_state will be the state of the last stream to 4016 * be deactivated on this port, and it may not be the same 4017 * stream that was activated last, but each stream 4018 * should have a state that is identical when it comes to 4019 * the DP link parameters 4020 */ 4021 4022 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 4023 intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state, 4024 old_conn_state); 4025 else 4026 intel_ddi_post_disable_dp(state, encoder, old_crtc_state, 4027 old_conn_state); 4028 4029 if (IS_DG1(dev_priv)) 4030 dg1_unmap_plls_to_ports(encoder); 4031 else if (INTEL_GEN(dev_priv) >= 11) 4032 icl_unmap_plls_to_ports(encoder); 4033 4034 if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port) 4035 intel_display_power_put_unchecked(dev_priv, 4036 intel_ddi_main_link_aux_domain(dig_port)); 4037 4038 if (is_tc_port) 4039 intel_tc_port_put_link(dig_port); 4040 } 4041 4042 void intel_ddi_fdi_post_disable(struct intel_atomic_state *state, 4043 struct intel_encoder *encoder, 4044 const struct intel_crtc_state *old_crtc_state, 4045 const struct drm_connector_state *old_conn_state) 4046 { 4047 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4048 u32 val; 4049 4050 /* 4051 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable) 4052 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN, 4053 * step 13 is the correct place for it. Step 18 is where it was 4054 * originally before the BUN. 4055 */ 4056 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4057 val &= ~FDI_RX_ENABLE; 4058 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4059 4060 intel_disable_ddi_buf(encoder, old_crtc_state); 4061 intel_ddi_clk_disable(encoder); 4062 4063 val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 4064 val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 4065 val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 4066 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val); 4067 4068 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4069 val &= ~FDI_PCDCLK; 4070 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4071 4072 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4073 val &= ~FDI_RX_PLL_ENABLE; 4074 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4075 } 4076 4077 static void trans_port_sync_stop_link_train(struct intel_atomic_state *state, 4078 struct intel_encoder *encoder, 4079 const struct intel_crtc_state *crtc_state) 4080 { 4081 const struct drm_connector_state *conn_state; 4082 struct drm_connector *conn; 4083 int i; 4084 4085 if (!crtc_state->sync_mode_slaves_mask) 4086 return; 4087 4088 for_each_new_connector_in_state(&state->base, conn, conn_state, i) { 4089 struct intel_encoder *slave_encoder = 4090 to_intel_encoder(conn_state->best_encoder); 4091 struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc); 4092 const struct intel_crtc_state *slave_crtc_state; 4093 4094 if (!slave_crtc) 4095 continue; 4096 4097 slave_crtc_state = 4098 intel_atomic_get_new_crtc_state(state, slave_crtc); 4099 4100 if (slave_crtc_state->master_transcoder != 4101 crtc_state->cpu_transcoder) 4102 continue; 4103 4104 intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder), 4105 slave_crtc_state); 4106 } 4107 4108 usleep_range(200, 400); 4109 4110 intel_dp_stop_link_train(enc_to_intel_dp(encoder), 4111 crtc_state); 4112 } 4113 4114 static void intel_enable_ddi_dp(struct intel_atomic_state
*state, 4115 struct intel_encoder *encoder, 4116 const struct intel_crtc_state *crtc_state, 4117 const struct drm_connector_state *conn_state) 4118 { 4119 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4120 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4121 enum port port = encoder->port; 4122 4123 if (port == PORT_A && INTEL_GEN(dev_priv) < 9) 4124 intel_dp_stop_link_train(intel_dp, crtc_state); 4125 4126 intel_edp_backlight_on(crtc_state, conn_state); 4127 intel_psr_enable(intel_dp, crtc_state, conn_state); 4128 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 4129 intel_edp_drrs_enable(intel_dp, crtc_state); 4130 4131 if (crtc_state->has_audio) 4132 intel_audio_codec_enable(encoder, crtc_state, conn_state); 4133 4134 trans_port_sync_stop_link_train(state, encoder, crtc_state); 4135 } 4136 4137 static i915_reg_t 4138 gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv, 4139 enum port port) 4140 { 4141 static const enum transcoder trans[] = { 4142 [PORT_A] = TRANSCODER_EDP, 4143 [PORT_B] = TRANSCODER_A, 4144 [PORT_C] = TRANSCODER_B, 4145 [PORT_D] = TRANSCODER_C, 4146 [PORT_E] = TRANSCODER_A, 4147 }; 4148 4149 drm_WARN_ON(&dev_priv->drm, INTEL_GEN(dev_priv) < 9); 4150 4151 if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E)) 4152 port = PORT_A; 4153 4154 return CHICKEN_TRANS(trans[port]); 4155 } 4156 4157 static void intel_enable_ddi_hdmi(struct intel_atomic_state *state, 4158 struct intel_encoder *encoder, 4159 const struct intel_crtc_state *crtc_state, 4160 const struct drm_connector_state *conn_state) 4161 { 4162 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4163 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4164 struct drm_connector *connector = conn_state->connector; 4165 enum port port = encoder->port; 4166 4167 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 4168 crtc_state->hdmi_high_tmds_clock_ratio, 4169 crtc_state->hdmi_scrambling)) 4170 drm_dbg_kms(&dev_priv->drm, 4171 "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n", 4172 connector->base.id, connector->name); 4173 4174 /* Display WA #1143: skl,kbl,cfl */ 4175 if (IS_GEN9_BC(dev_priv)) { 4176 /* 4177 * For some reason these chicken bits have been 4178 * stuffed into a transcoder register, even though 4179 * the bits affect a specific DDI port rather than 4180 * a specific transcoder. 4181 */ 4182 i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port); 4183 u32 val; 4184 4185 val = intel_de_read(dev_priv, reg); 4186 4187 if (port == PORT_E) 4188 val |= DDIE_TRAINING_OVERRIDE_ENABLE | 4189 DDIE_TRAINING_OVERRIDE_VALUE; 4190 else 4191 val |= DDI_TRAINING_OVERRIDE_ENABLE | 4192 DDI_TRAINING_OVERRIDE_VALUE; 4193 4194 intel_de_write(dev_priv, reg, val); 4195 intel_de_posting_read(dev_priv, reg); 4196 4197 udelay(1); 4198 4199 if (port == PORT_E) 4200 val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE | 4201 DDIE_TRAINING_OVERRIDE_VALUE); 4202 else 4203 val &= ~(DDI_TRAINING_OVERRIDE_ENABLE | 4204 DDI_TRAINING_OVERRIDE_VALUE); 4205 4206 intel_de_write(dev_priv, reg, val); 4207 } 4208 4209 /* In HDMI/DVI mode, the port width and swing/emphasis values 4210 * are ignored so nothing special needs to be done besides 4211 * enabling the port.
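 * (dig_port->saved_port_bits already carries the static bits captured at
 * encoder init time, e.g. DDI_BUF_PORT_REVERSAL and DDI_A_4_LANES, so
 * only DDI_BUF_CTL_ENABLE needs to be OR'ed in here.)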
4212 */ 4213 intel_de_write(dev_priv, DDI_BUF_CTL(port), 4214 dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE); 4215 4216 if (crtc_state->has_audio) 4217 intel_audio_codec_enable(encoder, crtc_state, conn_state); 4218 } 4219 4220 static void intel_enable_ddi(struct intel_atomic_state *state, 4221 struct intel_encoder *encoder, 4222 const struct intel_crtc_state *crtc_state, 4223 const struct drm_connector_state *conn_state) 4224 { 4225 drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder); 4226 4227 if (!crtc_state->bigjoiner_slave) 4228 intel_ddi_enable_transcoder_func(encoder, crtc_state); 4229 4230 intel_enable_pipe(crtc_state); 4231 4232 intel_crtc_vblank_on(crtc_state); 4233 4234 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 4235 intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state); 4236 else 4237 intel_enable_ddi_dp(state, encoder, crtc_state, conn_state); 4238 4239 /* Enable hdcp if it's desired */ 4240 if (conn_state->content_protection == 4241 DRM_MODE_CONTENT_PROTECTION_DESIRED) 4242 intel_hdcp_enable(to_intel_connector(conn_state->connector), 4243 crtc_state->cpu_transcoder, 4244 (u8)conn_state->hdcp_content_type); 4245 } 4246 4247 static void intel_disable_ddi_dp(struct intel_atomic_state *state, 4248 struct intel_encoder *encoder, 4249 const struct intel_crtc_state *old_crtc_state, 4250 const struct drm_connector_state *old_conn_state) 4251 { 4252 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4253 4254 intel_dp->link_trained = false; 4255 4256 if (old_crtc_state->has_audio) 4257 intel_audio_codec_disable(encoder, 4258 old_crtc_state, old_conn_state); 4259 4260 intel_edp_drrs_disable(intel_dp, old_crtc_state); 4261 intel_psr_disable(intel_dp, old_crtc_state); 4262 intel_edp_backlight_off(old_conn_state); 4263 /* Disable the decompression in DP Sink */ 4264 intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state, 4265 false); 4266 } 4267 4268 static void intel_disable_ddi_hdmi(struct intel_atomic_state *state, 4269 struct intel_encoder *encoder, 4270 const struct intel_crtc_state *old_crtc_state, 4271 const struct drm_connector_state *old_conn_state) 4272 { 4273 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 4274 struct drm_connector *connector = old_conn_state->connector; 4275 4276 if (old_crtc_state->has_audio) 4277 intel_audio_codec_disable(encoder, 4278 old_crtc_state, old_conn_state); 4279 4280 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 4281 false, false)) 4282 drm_dbg_kms(&i915->drm, 4283 "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n", 4284 connector->base.id, connector->name); 4285 } 4286 4287 static void intel_disable_ddi(struct intel_atomic_state *state, 4288 struct intel_encoder *encoder, 4289 const struct intel_crtc_state *old_crtc_state, 4290 const struct drm_connector_state *old_conn_state) 4291 { 4292 intel_hdcp_disable(to_intel_connector(old_conn_state->connector)); 4293 4294 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 4295 intel_disable_ddi_hdmi(state, encoder, old_crtc_state, 4296 old_conn_state); 4297 else 4298 intel_disable_ddi_dp(state, encoder, old_crtc_state, 4299 old_conn_state); 4300 } 4301 4302 static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state, 4303 struct intel_encoder *encoder, 4304 const struct intel_crtc_state *crtc_state, 4305 const struct drm_connector_state *conn_state) 4306 { 4307 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4308 4309 intel_ddi_set_dp_msa(crtc_state, conn_state); 4310 4311 
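/*
 * Fastset path: refresh PSR, infoframes, DRRS and the backlight
 * without a full modeset.
 */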
intel_psr_update(intel_dp, crtc_state, conn_state); 4312 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 4313 intel_edp_drrs_update(intel_dp, crtc_state); 4314 4315 intel_panel_update_backlight(state, encoder, crtc_state, conn_state); 4316 } 4317 4318 void intel_ddi_update_pipe(struct intel_atomic_state *state, 4319 struct intel_encoder *encoder, 4320 const struct intel_crtc_state *crtc_state, 4321 const struct drm_connector_state *conn_state) 4322 { 4323 4324 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) && 4325 !intel_encoder_is_mst(encoder)) 4326 intel_ddi_update_pipe_dp(state, encoder, crtc_state, 4327 conn_state); 4328 4329 intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state); 4330 } 4331 4332 static void 4333 intel_ddi_update_prepare(struct intel_atomic_state *state, 4334 struct intel_encoder *encoder, 4335 struct intel_crtc *crtc) 4336 { 4337 struct intel_crtc_state *crtc_state = 4338 crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL; 4339 int required_lanes = crtc_state ? crtc_state->lane_count : 1; 4340 4341 drm_WARN_ON(state->base.dev, crtc && crtc->active); 4342 4343 intel_tc_port_get_link(enc_to_dig_port(encoder), 4344 required_lanes); 4345 if (crtc_state && crtc_state->hw.active) 4346 intel_update_active_dpll(state, crtc, encoder); 4347 } 4348 4349 static void 4350 intel_ddi_update_complete(struct intel_atomic_state *state, 4351 struct intel_encoder *encoder, 4352 struct intel_crtc *crtc) 4353 { 4354 intel_tc_port_put_link(enc_to_dig_port(encoder)); 4355 } 4356 4357 static void 4358 intel_ddi_pre_pll_enable(struct intel_atomic_state *state, 4359 struct intel_encoder *encoder, 4360 const struct intel_crtc_state *crtc_state, 4361 const struct drm_connector_state *conn_state) 4362 { 4363 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4364 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4365 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 4366 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 4367 4368 if (is_tc_port) 4369 intel_tc_port_get_link(dig_port, crtc_state->lane_count); 4370 4371 if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) 4372 intel_display_power_get(dev_priv, 4373 intel_ddi_main_link_aux_domain(dig_port)); 4374 4375 if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT) 4376 /* 4377 * Program the lane count for static/dynamic connections on 4378 * Type-C ports. Skip this step for TBT. 
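 * (intel_tc_port_set_fia_lane_count() writes the FIA DFLEXDPMLE
 * lane-enable bits; TBT-alt links do not use them, hence the skip.)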
4379 */ 4380 intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count); 4381 else if (IS_GEN9_LP(dev_priv)) 4382 bxt_ddi_phy_set_lane_optim_mask(encoder, 4383 crtc_state->lane_lat_optim_mask); 4384 } 4385 4386 static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp, 4387 const struct intel_crtc_state *crtc_state) 4388 { 4389 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4390 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4391 enum port port = encoder->port; 4392 u32 dp_tp_ctl, ddi_buf_ctl; 4393 bool wait = false; 4394 4395 dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4396 4397 if (dp_tp_ctl & DP_TP_CTL_ENABLE) { 4398 ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 4399 if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) { 4400 intel_de_write(dev_priv, DDI_BUF_CTL(port), 4401 ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE); 4402 wait = true; 4403 } 4404 4405 dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 4406 dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1; 4407 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 4408 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4409 4410 if (wait) 4411 intel_wait_ddi_buf_idle(dev_priv, port); 4412 } 4413 4414 dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1; 4415 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 4416 dp_tp_ctl |= DP_TP_CTL_MODE_MST; 4417 } else { 4418 dp_tp_ctl |= DP_TP_CTL_MODE_SST; 4419 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 4420 dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE; 4421 } 4422 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 4423 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4424 4425 intel_dp->DP |= DDI_BUF_CTL_ENABLE; 4426 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 4427 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 4428 4429 intel_wait_ddi_buf_active(dev_priv, port); 4430 } 4431 4432 static void intel_ddi_set_link_train(struct intel_dp *intel_dp, 4433 const struct intel_crtc_state *crtc_state, 4434 u8 dp_train_pat) 4435 { 4436 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4437 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4438 u32 temp; 4439 4440 temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4441 4442 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; 4443 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 4444 case DP_TRAINING_PATTERN_DISABLE: 4445 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; 4446 break; 4447 case DP_TRAINING_PATTERN_1: 4448 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 4449 break; 4450 case DP_TRAINING_PATTERN_2: 4451 temp |= DP_TP_CTL_LINK_TRAIN_PAT2; 4452 break; 4453 case DP_TRAINING_PATTERN_3: 4454 temp |= DP_TP_CTL_LINK_TRAIN_PAT3; 4455 break; 4456 case DP_TRAINING_PATTERN_4: 4457 temp |= DP_TP_CTL_LINK_TRAIN_PAT4; 4458 break; 4459 } 4460 4461 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp); 4462 } 4463 4464 static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp, 4465 const struct intel_crtc_state *crtc_state) 4466 { 4467 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4468 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4469 enum port port = encoder->port; 4470 u32 val; 4471 4472 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4473 val &= ~DP_TP_CTL_LINK_TRAIN_MASK; 4474 val |= DP_TP_CTL_LINK_TRAIN_IDLE; 4475 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 
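/*
 * DP_TP_CTL is now emitting idle patterns; the DP_TP_STATUS idle-done
 * wait follows below, except on pre-TGL PORT_A (see the comment there).
 */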
4476 4477 /* 4478 * Until TGL on PORT_A we can have only eDP in SST mode. There the only 4479 * reason we need to set idle transmission mode is to work around a HW 4480 * issue where we enable the pipe while not in idle link-training mode. 4481 * In this case there is requirement to wait for a minimum number of 4482 * idle patterns to be sent. 4483 */ 4484 if (port == PORT_A && INTEL_GEN(dev_priv) < 12) 4485 return; 4486 4487 if (intel_de_wait_for_set(dev_priv, 4488 dp_tp_status_reg(encoder, crtc_state), 4489 DP_TP_STATUS_IDLE_DONE, 1)) 4490 drm_err(&dev_priv->drm, 4491 "Timed out waiting for DP idle patterns\n"); 4492 } 4493 4494 static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv, 4495 enum transcoder cpu_transcoder) 4496 { 4497 if (cpu_transcoder == TRANSCODER_EDP) 4498 return false; 4499 4500 if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO)) 4501 return false; 4502 4503 return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) & 4504 AUDIO_OUTPUT_ENABLE(cpu_transcoder); 4505 } 4506 4507 void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv, 4508 struct intel_crtc_state *crtc_state) 4509 { 4510 if (INTEL_GEN(dev_priv) >= 12 && crtc_state->port_clock > 594000) 4511 crtc_state->min_voltage_level = 2; 4512 else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000) 4513 crtc_state->min_voltage_level = 3; 4514 else if (INTEL_GEN(dev_priv) >= 11 && crtc_state->port_clock > 594000) 4515 crtc_state->min_voltage_level = 1; 4516 else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000) 4517 crtc_state->min_voltage_level = 2; 4518 } 4519 4520 static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv, 4521 enum transcoder cpu_transcoder) 4522 { 4523 u32 master_select; 4524 4525 if (INTEL_GEN(dev_priv) >= 11) { 4526 u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder)); 4527 4528 if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0) 4529 return INVALID_TRANSCODER; 4530 4531 master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2); 4532 } else { 4533 u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4534 4535 if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0) 4536 return INVALID_TRANSCODER; 4537 4538 master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl); 4539 } 4540 4541 if (master_select == 0) 4542 return TRANSCODER_EDP; 4543 else 4544 return master_select - 1; 4545 } 4546 4547 static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state) 4548 { 4549 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 4550 u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | 4551 BIT(TRANSCODER_C) | BIT(TRANSCODER_D); 4552 enum transcoder cpu_transcoder; 4553 4554 crtc_state->master_transcoder = 4555 bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder); 4556 4557 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { 4558 enum intel_display_power_domain power_domain; 4559 intel_wakeref_t trans_wakeref; 4560 4561 power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder); 4562 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 4563 power_domain); 4564 4565 if (!trans_wakeref) 4566 continue; 4567 4568 if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) == 4569 crtc_state->cpu_transcoder) 4570 crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder); 4571 4572 intel_display_power_put(dev_priv, power_domain, trans_wakeref); 4573 } 4574 4575 drm_WARN_ON(&dev_priv->drm, 4576 
crtc_state->master_transcoder != INVALID_TRANSCODER && 4577 crtc_state->sync_mode_slaves_mask); 4578 } 4579 4580 static void intel_ddi_read_func_ctl(struct intel_encoder *encoder, 4581 struct intel_crtc_state *pipe_config) 4582 { 4583 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4584 struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc); 4585 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 4586 u32 temp, flags = 0; 4587 4588 temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4589 if (temp & TRANS_DDI_PHSYNC) 4590 flags |= DRM_MODE_FLAG_PHSYNC; 4591 else 4592 flags |= DRM_MODE_FLAG_NHSYNC; 4593 if (temp & TRANS_DDI_PVSYNC) 4594 flags |= DRM_MODE_FLAG_PVSYNC; 4595 else 4596 flags |= DRM_MODE_FLAG_NVSYNC; 4597 4598 pipe_config->hw.adjusted_mode.flags |= flags; 4599 4600 switch (temp & TRANS_DDI_BPC_MASK) { 4601 case TRANS_DDI_BPC_6: 4602 pipe_config->pipe_bpp = 18; 4603 break; 4604 case TRANS_DDI_BPC_8: 4605 pipe_config->pipe_bpp = 24; 4606 break; 4607 case TRANS_DDI_BPC_10: 4608 pipe_config->pipe_bpp = 30; 4609 break; 4610 case TRANS_DDI_BPC_12: 4611 pipe_config->pipe_bpp = 36; 4612 break; 4613 default: 4614 break; 4615 } 4616 4617 switch (temp & TRANS_DDI_MODE_SELECT_MASK) { 4618 case TRANS_DDI_MODE_SELECT_HDMI: 4619 pipe_config->has_hdmi_sink = true; 4620 4621 pipe_config->infoframes.enable |= 4622 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4623 4624 if (pipe_config->infoframes.enable) 4625 pipe_config->has_infoframe = true; 4626 4627 if (temp & TRANS_DDI_HDMI_SCRAMBLING) 4628 pipe_config->hdmi_scrambling = true; 4629 if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE) 4630 pipe_config->hdmi_high_tmds_clock_ratio = true; 4631 fallthrough; 4632 case TRANS_DDI_MODE_SELECT_DVI: 4633 pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI); 4634 pipe_config->lane_count = 4; 4635 break; 4636 case TRANS_DDI_MODE_SELECT_FDI: 4637 pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG); 4638 break; 4639 case TRANS_DDI_MODE_SELECT_DP_SST: 4640 if (encoder->type == INTEL_OUTPUT_EDP) 4641 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 4642 else 4643 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 4644 pipe_config->lane_count = 4645 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 4646 intel_dp_get_m_n(intel_crtc, pipe_config); 4647 4648 if (INTEL_GEN(dev_priv) >= 11) { 4649 i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config); 4650 4651 pipe_config->fec_enable = 4652 intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE; 4653 4654 drm_dbg_kms(&dev_priv->drm, 4655 "[ENCODER:%d:%s] Fec status: %u\n", 4656 encoder->base.base.id, encoder->base.name, 4657 pipe_config->fec_enable); 4658 } 4659 4660 pipe_config->infoframes.enable |= 4661 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4662 4663 break; 4664 case TRANS_DDI_MODE_SELECT_DP_MST: 4665 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST); 4666 pipe_config->lane_count = 4667 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 4668 4669 if (INTEL_GEN(dev_priv) >= 12) 4670 pipe_config->mst_master_transcoder = 4671 REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp); 4672 4673 intel_dp_get_m_n(intel_crtc, pipe_config); 4674 4675 pipe_config->infoframes.enable |= 4676 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4677 break; 4678 default: 4679 break; 4680 } 4681 } 4682 4683 void intel_ddi_get_config(struct intel_encoder *encoder, 4684 struct intel_crtc_state *pipe_config) 4685 { 4686 struct drm_i915_private *dev_priv = 
to_i915(encoder->base.dev); 4687 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 4688 4689 /* XXX: DSI transcoder paranoia */ 4690 if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder))) 4691 return; 4692 4693 if (pipe_config->bigjoiner_slave) { 4694 /* read out pipe settings from master */ 4695 enum transcoder save = pipe_config->cpu_transcoder; 4696 4697 /* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */ 4698 WARN_ON(pipe_config->output_types); 4699 pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe; 4700 intel_ddi_read_func_ctl(encoder, pipe_config); 4701 pipe_config->cpu_transcoder = save; 4702 } else { 4703 intel_ddi_read_func_ctl(encoder, pipe_config); 4704 } 4705 4706 pipe_config->has_audio = 4707 intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder); 4708 4709 if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp && 4710 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) { 4711 /* 4712 * This is a big fat ugly hack. 4713 * 4714 * Some machines in UEFI boot mode provide us a VBT that has 18 4715 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons 4716 * unknown we fail to light up. Yet the same BIOS boots up with 4717 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as 4718 * max, not what it tells us to use. 4719 * 4720 * Note: This will still be broken if the eDP panel is not lit 4721 * up by the BIOS, and thus we can't get the mode at module 4722 * load. 4723 */ 4724 drm_dbg_kms(&dev_priv->drm, 4725 "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n", 4726 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp); 4727 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp; 4728 } 4729 4730 if (!pipe_config->bigjoiner_slave) 4731 intel_ddi_clock_get(encoder, pipe_config); 4732 4733 if (IS_GEN9_LP(dev_priv)) 4734 pipe_config->lane_lat_optim_mask = 4735 bxt_ddi_phy_get_lane_lat_optim_mask(encoder); 4736 4737 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 4738 4739 intel_hdmi_read_gcp_infoframe(encoder, pipe_config); 4740 4741 intel_read_infoframe(encoder, pipe_config, 4742 HDMI_INFOFRAME_TYPE_AVI, 4743 &pipe_config->infoframes.avi); 4744 intel_read_infoframe(encoder, pipe_config, 4745 HDMI_INFOFRAME_TYPE_SPD, 4746 &pipe_config->infoframes.spd); 4747 intel_read_infoframe(encoder, pipe_config, 4748 HDMI_INFOFRAME_TYPE_VENDOR, 4749 &pipe_config->infoframes.hdmi); 4750 intel_read_infoframe(encoder, pipe_config, 4751 HDMI_INFOFRAME_TYPE_DRM, 4752 &pipe_config->infoframes.drm); 4753 4754 if (INTEL_GEN(dev_priv) >= 8) 4755 bdw_get_trans_port_sync_config(pipe_config); 4756 4757 intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA); 4758 intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC); 4759 } 4760 4761 static void intel_ddi_sync_state(struct intel_encoder *encoder, 4762 const struct intel_crtc_state *crtc_state) 4763 { 4764 if (intel_crtc_has_dp_encoder(crtc_state)) 4765 intel_dp_sync_state(encoder, crtc_state); 4766 } 4767 4768 static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder, 4769 struct intel_crtc_state *crtc_state) 4770 { 4771 if (intel_crtc_has_dp_encoder(crtc_state)) 4772 return intel_dp_initial_fastset_check(encoder, crtc_state); 4773 4774 return true; 4775 } 4776 4777 static enum intel_output_type 4778 intel_ddi_compute_output_type(struct intel_encoder *encoder, 4779 struct intel_crtc_state *crtc_state, 4780 struct drm_connector_state *conn_state) 4781 { 4782 switch 
(conn_state->connector->connector_type) { 4783 case DRM_MODE_CONNECTOR_HDMIA: 4784 return INTEL_OUTPUT_HDMI; 4785 case DRM_MODE_CONNECTOR_eDP: 4786 return INTEL_OUTPUT_EDP; 4787 case DRM_MODE_CONNECTOR_DisplayPort: 4788 return INTEL_OUTPUT_DP; 4789 default: 4790 MISSING_CASE(conn_state->connector->connector_type); 4791 return INTEL_OUTPUT_UNUSED; 4792 } 4793 } 4794 4795 static int intel_ddi_compute_config(struct intel_encoder *encoder, 4796 struct intel_crtc_state *pipe_config, 4797 struct drm_connector_state *conn_state) 4798 { 4799 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 4800 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4801 enum port port = encoder->port; 4802 int ret; 4803 4804 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 4805 pipe_config->cpu_transcoder = TRANSCODER_EDP; 4806 4807 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) { 4808 ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state); 4809 } else { 4810 ret = intel_dp_compute_config(encoder, pipe_config, conn_state); 4811 } 4812 4813 if (ret) 4814 return ret; 4815 4816 if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A && 4817 pipe_config->cpu_transcoder == TRANSCODER_EDP) 4818 pipe_config->pch_pfit.force_thru = 4819 pipe_config->pch_pfit.enabled || 4820 pipe_config->crc_enabled; 4821 4822 if (IS_GEN9_LP(dev_priv)) 4823 pipe_config->lane_lat_optim_mask = 4824 bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 4825 4826 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 4827 4828 return 0; 4829 } 4830 4831 static bool mode_equal(const struct drm_display_mode *mode1, 4832 const struct drm_display_mode *mode2) 4833 { 4834 return drm_mode_match(mode1, mode2, 4835 DRM_MODE_MATCH_TIMINGS | 4836 DRM_MODE_MATCH_FLAGS | 4837 DRM_MODE_MATCH_3D_FLAGS) && 4838 mode1->clock == mode2->clock; /* we want an exact match */ 4839 } 4840 4841 static bool m_n_equal(const struct intel_link_m_n *m_n_1, 4842 const struct intel_link_m_n *m_n_2) 4843 { 4844 return m_n_1->tu == m_n_2->tu && 4845 m_n_1->gmch_m == m_n_2->gmch_m && 4846 m_n_1->gmch_n == m_n_2->gmch_n && 4847 m_n_1->link_m == m_n_2->link_m && 4848 m_n_1->link_n == m_n_2->link_n; 4849 } 4850 4851 static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1, 4852 const struct intel_crtc_state *crtc_state2) 4853 { 4854 return crtc_state1->hw.active && crtc_state2->hw.active && 4855 crtc_state1->output_types == crtc_state2->output_types && 4856 crtc_state1->output_format == crtc_state2->output_format && 4857 crtc_state1->lane_count == crtc_state2->lane_count && 4858 crtc_state1->port_clock == crtc_state2->port_clock && 4859 mode_equal(&crtc_state1->hw.adjusted_mode, 4860 &crtc_state2->hw.adjusted_mode) && 4861 m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n); 4862 } 4863 4864 static u8 4865 intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state, 4866 int tile_group_id) 4867 { 4868 struct drm_connector *connector; 4869 const struct drm_connector_state *conn_state; 4870 struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev); 4871 struct intel_atomic_state *state = 4872 to_intel_atomic_state(ref_crtc_state->uapi.state); 4873 u8 transcoders = 0; 4874 int i; 4875 4876 /* 4877 * We don't enable port sync on BDW due to missing w/as and 4878 * due to not having adjusted the modeset sequence appropriately. 
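 * (Note: the readout path, bdw_get_trans_port_sync_config(), still decodes
 * gen8 state; this function simply never creates new port sync configs
 * below gen9.)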
static u8
intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state,
				int tile_group_id)
{
	struct drm_connector *connector;
	const struct drm_connector_state *conn_state;
	struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(ref_crtc_state->uapi.state);
	u8 transcoders = 0;
	int i;

	/*
	 * We don't enable port sync on BDW due to missing workarounds and
	 * due to not having adjusted the modeset sequence appropriately.
	 */
	if (INTEL_GEN(dev_priv) < 9)
		return 0;

	if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP))
		return 0;

	for_each_new_connector_in_state(&state->base, connector, conn_state, i) {
		struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *crtc_state;

		if (!crtc)
			continue;

		if (!connector->has_tile ||
		    connector->tile_group->id != tile_group_id)
			continue;
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
		if (!crtcs_port_sync_compatible(ref_crtc_state, crtc_state))
			continue;
		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

static int intel_ddi_compute_config_late(struct intel_encoder *encoder,
					 struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = conn_state->connector;
	u8 port_sync_transcoders = 0;

	drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]",
		    encoder->base.base.id, encoder->base.name,
		    crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name);

	if (connector->has_tile)
		port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state,
									connector->tile_group->id);

	/*
	 * The EDP transcoder cannot be enslaved, so always make it
	 * the master when it is present.
	 */
	if (port_sync_transcoders & BIT(TRANSCODER_EDP))
		crtc_state->master_transcoder = TRANSCODER_EDP;
	else
		crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1;

	if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) {
		crtc_state->master_transcoder = INVALID_TRANSCODER;
		crtc_state->sync_mode_slaves_mask =
			port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder);
	}

	return 0;
}

static void intel_ddi_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));

	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(dig_port);
}

static const struct drm_encoder_funcs intel_ddi_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_ddi_encoder_destroy,
};

static struct intel_connector *
intel_ddi_init_dp_connector(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->dp.output_reg = DDI_BUF_CTL(port);
	dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain;
	dig_port->dp.set_link_train = intel_ddi_set_link_train;
	dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train;

	if (INTEL_GEN(dev_priv) >= 12)
		dig_port->dp.set_signal_levels = tgl_set_signal_levels;
	else if (INTEL_GEN(dev_priv) >= 11)
		dig_port->dp.set_signal_levels = icl_set_signal_levels;
	else if (IS_CANNONLAKE(dev_priv))
		dig_port->dp.set_signal_levels = cnl_set_signal_levels;
	else if (IS_GEN9_LP(dev_priv))
		dig_port->dp.set_signal_levels = bxt_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = hsw_set_signal_levels;

	dig_port->dp.voltage_max = intel_ddi_dp_voltage_max;
	dig_port->dp.preemph_max = intel_ddi_dp_preemph_max;

	if (!intel_dp_init_connector(dig_port, connector)) {
		kfree(connector);
		return NULL;
	}

	return connector;
}

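/*
 * Force a full modeset on a single CRTC by committing an atomic state with
 * only connectors_changed set. Used by the HDMI link reset path below when
 * the sink has lost its SCDC scrambling/clock-ratio configuration.
 */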
static int modeset_pipe(struct drm_crtc *crtc,
			struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_atomic_state *state;
	struct drm_crtc_state *crtc_state;
	int ret;

	state = drm_atomic_state_alloc(crtc->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = ctx;

	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto out;
	}

	crtc_state->connectors_changed = true;

	ret = drm_atomic_commit(state);
out:
	drm_atomic_state_put(state);

	return ret;
}

static int intel_hdmi_reset_link(struct intel_encoder *encoder,
				 struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder);
	struct intel_connector *connector = hdmi->attached_connector;
	struct i2c_adapter *adapter =
		intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus);
	struct drm_connector_state *conn_state;
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	u8 config;
	int ret;

	if (!connector || connector->base.status != connector_status_connected)
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	conn_state = connector->base.state;

	crtc = to_intel_crtc(conn_state->crtc);
	if (!crtc)
		return 0;

	ret = drm_modeset_lock(&crtc->base.mutex, ctx);
	if (ret)
		return ret;

	crtc_state = to_intel_crtc_state(crtc->base.state);

	drm_WARN_ON(&dev_priv->drm,
		    !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI));

	if (!crtc_state->hw.active)
		return 0;

	if (!crtc_state->hdmi_high_tmds_clock_ratio &&
	    !crtc_state->hdmi_scrambling)
		return 0;

	if (conn_state->commit &&
	    !try_wait_for_completion(&conn_state->commit->hw_done))
		return 0;

	ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n",
			ret);
		return 0;
	}

	if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) ==
	    crtc_state->hdmi_high_tmds_clock_ratio &&
	    !!(config & SCDC_SCRAMBLING_ENABLE) ==
	    crtc_state->hdmi_scrambling)
		return 0;

	/*
	 * HDMI 2.0 says that one should not send scrambled data
	 * prior to configuring the sink scrambling, and that
	 * TMDS clock/data transmission should be suspended when
	 * changing the TMDS clock rate in the sink. So let's
	 * just do a full modeset here, even though some sinks
	 * would be perfectly happy if we were to just reconfigure
	 * the SCDC settings on the fly.
	 */
	return modeset_pipe(&crtc->base, ctx);
}

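/*
 * Hotplug handler for DDI ports: run the generic hotplug handling first,
 * then, under the modeset locks, either re-check the HDMI TMDS/SCDC
 * configuration or retrain the DP link. Returns INTEL_HOTPLUG_RETRY for
 * connectors that may need another detection cycle (see below).
 */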
static enum intel_hotplug_state
intel_ddi_hotplug(struct intel_encoder *encoder,
		  struct intel_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(i915, encoder->port);
	bool is_tc = intel_phy_is_tc(i915, phy);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA)
			ret = intel_hdmi_reset_link(encoder, &ctx);
		else
			ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Unpowered type-c dongles can take some time to boot and become
	 * responsive, so give those dongles some time to power up and then
	 * retry the probe.
	 *
	 * On many platforms the HDMI live state signal is known to be
	 * unreliable, so we can't use it to detect if a sink is connected or
	 * not. Instead we detect if it's connected based on whether we can
	 * read the EDID or not. That in turn has a problem during disconnect,
	 * since the HPD interrupt may be raised before the DDC lines get
	 * disconnected (due to how the required length of DDC vs. HPD
	 * connector pins are specified) and so we'll still be able to get a
	 * valid EDID. To solve this, schedule another detection cycle if this
	 * time around we didn't detect any change in the sink's connection
	 * status.
	 *
	 * Type-c connectors which get their HPD signal deasserted then
	 * reasserted, without unplugging/replugging the sink from the
	 * connector, introduce a delay until the AUX channel communication
	 * becomes functional. Retry the detection for 5 seconds on type-c
	 * connectors to account for this delay.
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED &&
	    connector->hotplug_retries < (is_tc ? 5 : 1) &&
	    !dig_port->dp.is_mst)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

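/*
 * Live-state helpers: report whether a sink is physically present by
 * reading the platform-specific hotplug ISR bit for this port (SDEISR on
 * the PCH, DEISR/GEN8_DE_PORT_ISR on the north display engine).
 */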
static bool lpt_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool hsw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static bool bdw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit;
}

static struct intel_connector *
intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port)
{
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port);
	intel_hdmi_init_connector(dig_port, connector);

	return connector;
}

static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);

	if (dig_port->base.port != PORT_A)
		return false;

	if (dig_port->saved_port_bits & DDI_A_4_LANES)
		return false;

	/* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only
	 * supported configuration
	 */
	if (IS_GEN9_LP(dev_priv))
		return true;

	/* Cannonlake: Most SKUs don't support DDI_E, and the only one
	 * that does also has a full A/E split called DDI_F, which makes
	 * DDI_E useless. However, for this case let's trust the VBT info.
	 */
	if (IS_CANNONLAKE(dev_priv) &&
	    !intel_bios_is_port_present(dev_priv, PORT_E))
		return true;

	return false;
}

static int
intel_ddi_max_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;
	int max_lanes = 4;

	if (INTEL_GEN(dev_priv) >= 11)
		return max_lanes;

	if (port == PORT_A || port == PORT_E) {
		if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
			max_lanes = port == PORT_A ? 4 : 0;
		else
			/* Both A and E share 2 lanes */
			max_lanes = 2;
	}

	/*
	 * Some BIOS might fail to set this bit on port A if eDP
	 * wasn't lit up at boot. Force this bit set when needed
	 * so we use the proper lane count for our calculations.
	 */
	if (intel_ddi_a_force_4_lanes(dig_port)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Forcing DDI_A_4_LANES for port A\n");
		dig_port->saved_port_bits |= DDI_A_4_LANES;
		max_lanes = 4;
	}

	return max_lanes;
}

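/*
 * HTI (aka HDPORT) can claim PHYs for itself at boot. If the HDPORT state
 * says a PHY is in use for either DP or HDMI, intel_ddi_init() below skips
 * creating an encoder on the corresponding port.
 */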
static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy)
{
	return i915->hti_state & HDPORT_ENABLED &&
		(i915->hti_state & HDPORT_PHY_USED_DP(phy) ||
		 i915->hti_state & HDPORT_PHY_USED_HDMI(phy));
}

static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_TC1)
		return HPD_PORT_C + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_TC1)
		return HPD_PORT_TC1 + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (HAS_PCH_TGP(dev_priv))
		return tgl_hpd_pin(dev_priv, port);

	if (port >= PORT_TC1)
		return HPD_PORT_C + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_C)
		return HPD_PORT_TC1 + port - PORT_C;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port == PORT_D)
		return HPD_PORT_A;

	if (HAS_PCH_MCC(dev_priv))
		return icl_hpd_pin(dev_priv, port);

	return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port == PORT_F)
		return HPD_PORT_E;

	return HPD_PORT_A + port - PORT_A;
}

#define port_tc_name(port) ((port) - PORT_TC1 + '1')
#define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1')

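/*
 * intel_ddi_init - create the encoder and connector(s) for one DDI port.
 * Consults the VBT to decide whether to register DP and/or HDMI connectors,
 * wires up the encoder hooks, HPD pin and power domains, and performs
 * type-C specific setup where applicable.
 */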
void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *encoder;
	bool init_hdmi, init_dp;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	/*
	 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may
	 * have taken over some of the PHYs and made them unavailable to the
	 * driver. In that case we should skip initializing the corresponding
	 * outputs.
	 */
	if (hti_uses_phy(dev_priv, phy)) {
		drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n",
			    port_name(port), phy_name(phy));
		return;
	}

	init_hdmi = intel_bios_port_supports_dvi(dev_priv, port) ||
		intel_bios_port_supports_hdmi(dev_priv, port);
	init_dp = intel_bios_port_supports_dp(dev_priv, port);

	if (intel_bios_is_lspcon_present(dev_priv, port)) {
		/*
		 * An LSPCON device needs to be driven through the DP
		 * connector with a special detection sequence, so make
		 * sure DP is initialized before LSPCON.
		 */
		init_dp = true;
		init_hdmi = false;
		drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n",
			    port_name(port));
	}

	if (!init_dp && !init_hdmi) {
		drm_dbg_kms(&dev_priv->drm,
			    "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n",
			    port_name(port));
		return;
	}

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return;

	encoder = &dig_port->base;

	if (INTEL_GEN(dev_priv) >= 12) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv, port);

		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %s%c/PHY %s%c",
				 port >= PORT_TC1 ? "TC" : "",
				 port >= PORT_TC1 ? port_tc_name(port) : port_name(port),
				 tc_port != TC_PORT_NONE ? "TC" : "",
				 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy));
	} else if (INTEL_GEN(dev_priv) >= 11) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv, port);

		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %c%s/PHY %s%c",
				 port_name(port),
				 port >= PORT_C ? " (TC)" : "",
				 tc_port != TC_PORT_NONE ? "TC" : "",
				 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy));
	} else {
		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %c/PHY %c", port_name(port), phy_name(phy));
	}

	mutex_init(&dig_port->hdcp_mutex);
	dig_port->num_hdcp_streams = 0;

	encoder->hotplug = intel_ddi_hotplug;
	encoder->compute_output_type = intel_ddi_compute_output_type;
	encoder->compute_config = intel_ddi_compute_config;
	encoder->compute_config_late = intel_ddi_compute_config_late;
	encoder->enable = intel_enable_ddi;
	encoder->pre_pll_enable = intel_ddi_pre_pll_enable;
	encoder->pre_enable = intel_ddi_pre_enable;
	encoder->disable = intel_disable_ddi;
	encoder->post_disable = intel_ddi_post_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->get_hw_state = intel_ddi_get_hw_state;
	encoder->get_config = intel_ddi_get_config;
	encoder->sync_state = intel_ddi_sync_state;
	encoder->initial_fastset_check = intel_ddi_initial_fastset_check;
	encoder->suspend = intel_dp_encoder_suspend;
	encoder->shutdown = intel_dp_encoder_shutdown;
	encoder->get_power_domains = intel_ddi_get_power_domains;

	encoder->type = INTEL_OUTPUT_DDI;
	encoder->power_domain = intel_port_to_power_domain(port);
	encoder->port = port;
	encoder->cloneable = 0;
	encoder->pipe_mask = ~0;

	if (IS_DG1(dev_priv))
		encoder->hpd_pin = dg1_hpd_pin(dev_priv, port);
	else if (IS_ROCKETLAKE(dev_priv))
		encoder->hpd_pin = rkl_hpd_pin(dev_priv, port);
	else if (INTEL_GEN(dev_priv) >= 12)
		encoder->hpd_pin = tgl_hpd_pin(dev_priv, port);
	else if (IS_JSL_EHL(dev_priv))
		encoder->hpd_pin = ehl_hpd_pin(dev_priv, port);
	else if (IS_GEN(dev_priv, 11))
		encoder->hpd_pin = icl_hpd_pin(dev_priv, port);
	else if (IS_GEN(dev_priv, 10))
		encoder->hpd_pin = cnl_hpd_pin(dev_priv, port);
	else
		encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	if (INTEL_GEN(dev_priv) >= 11)
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& DDI_BUF_PORT_REVERSAL;
	else
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES);

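	/*
	 * DP defaults: intel_ddi_init_dp_connector() switches output_reg to
	 * DDI_BUF_CTL(port) if a DP connector actually gets created below.
	 */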
	dig_port->dp.output_reg = INVALID_MMIO_REG;
	dig_port->max_lanes = intel_ddi_max_lanes(dig_port);
	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);

	if (intel_phy_is_tc(dev_priv, phy)) {
		bool is_legacy =
			!intel_bios_port_supports_typec_usb(dev_priv, port) &&
			!intel_bios_port_supports_tbt(dev_priv, port);

		intel_tc_port_init(dig_port, is_legacy);

		encoder->update_prepare = intel_ddi_update_prepare;
		encoder->update_complete = intel_ddi_update_complete;
	}

	drm_WARN_ON(&dev_priv->drm, port > PORT_I);
	dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO +
					port - PORT_A;

	if (init_dp) {
		if (!intel_ddi_init_dp_connector(dig_port))
			goto err;

		dig_port->hpd_pulse = intel_dp_hpd_pulse;
	}

	/* In theory we don't need the encoder->type check, but leave it just in
	 * case we have some really bad VBTs... */
	if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) {
		if (!intel_ddi_init_hdmi_connector(dig_port))
			goto err;
	}

	if (INTEL_GEN(dev_priv) >= 11) {
		if (intel_phy_is_tc(dev_priv, phy))
			dig_port->connected = intel_tc_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else if (INTEL_GEN(dev_priv) >= 8) {
		if (port == PORT_A || IS_GEN9_LP(dev_priv))
			dig_port->connected = bdw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = hsw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	}

	intel_infoframe_init(dig_port);

	return;

err:
	drm_encoder_cleanup(&encoder->base);
	kfree(dig_port);
}