/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <drm/drm_scdc_helper.h>

#include "i915_drv.h"
#include "i915_trace.h"
#include "intel_audio.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_mst.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_dsi.h"
#include "intel_fifo_underrun.h"
#include "intel_gmbus.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_lspcon.h"
#include "intel_panel.h"
#include "intel_pps.h"
#include "intel_psr.h"
#include "intel_sprite.h"
#include "intel_tc.h"
#include "intel_vdsc.h"
#include "intel_vrr.h"

struct ddi_buf_trans {
	u32 trans1;	/* balance leg enable, de-emph level */
	u32 trans2;	/* vref sel, vswing */
	u8 i_boost;	/* SKL: I_boost; valid: 0x0, 0x1, 0x3, 0x7 */
};
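/*
 * Buffer translation table entries are indexed by voltage swing/pre-emphasis
 * combination; this maps an entry index back to the corresponding DP training
 * signal levels.
 */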
static const u8 index_to_dp_signal_levels[] = {
	[0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3,
	[4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0,
};

/* HDMI/DVI modes ignore everything but the last 2 items. So we share
 * them for both DP and FDI transports, allowing those ports to
 * automatically adapt to HDMI connections as well
 */
static const struct ddi_buf_trans hsw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0006000E, 0x0 },
	{ 0x00D75FFF, 0x0005000A, 0x0 },
	{ 0x00C30FFF, 0x00040006, 0x0 },
	{ 0x80AAAFFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x0005000A, 0x0 },
	{ 0x00D75FFF, 0x000C0004, 0x0 },
	{ 0x80C30FFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x00040006, 0x0 },
	{ 0x80D75FFF, 0x000B0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000F000A, 0x0 },
	{ 0x00C30FFF, 0x00060006, 0x0 },
	{ 0x00AAAFFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x000F000A, 0x0 },
	{ 0x00D75FFF, 0x00160004, 0x0 },
	{ 0x00C30FFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x00060006, 0x0 },
	{ 0x00D75FFF, 0x001E0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV d	db	*/
	{ 0x00FFFFFF, 0x0006000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00E79FFF, 0x000E000C, 0x0 },/* 1:	400	500	2	*/
	{ 0x00D75FFF, 0x0005000A, 0x0 },/* 2:	400	600	3.5	*/
	{ 0x00FFFFFF, 0x0005000A, 0x0 },/* 3:	600	600	0	*/
	{ 0x00E79FFF, 0x001D0007, 0x0 },/* 4:	600	750	2	*/
	{ 0x00D75FFF, 0x000C0004, 0x0 },/* 5:	600	900	3.5	*/
	{ 0x00FFFFFF, 0x00040006, 0x0 },/* 6:	800	800	0	*/
	{ 0x80E79FFF, 0x00030002, 0x0 },/* 7:	800	1000	2	*/
	{ 0x00FFFFFF, 0x00140005, 0x0 },/* 8:	850	850	0	*/
	{ 0x00FFFFFF, 0x000C0004, 0x0 },/* 9:	900	900	0	*/
	{ 0x00FFFFFF, 0x001C0003, 0x0 },/* 10:	950	950	0	*/
	{ 0x80FFFFFF, 0x00030002, 0x0 },/* 11:	1000	1000	0	*/
};

static const struct ddi_buf_trans bdw_ddi_translations_edp[] = {
	{ 0x00FFFFFF, 0x00000012, 0x0 },
	{ 0x00EBAFFF, 0x00020011, 0x0 },
	{ 0x00C71FFF, 0x0006000F, 0x0 },
	{ 0x00AAAFFF, 0x000E000A, 0x0 },
	{ 0x00FFFFFF, 0x00020011, 0x0 },
	{ 0x00DB6FFF, 0x0005000F, 0x0 },
	{ 0x00BEEFFF, 0x000A000C, 0x0 },
	{ 0x00FFFFFF, 0x0005000F, 0x0 },
	{ 0x00DB6FFF, 0x000A000C, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000E000A, 0x0 },
	{ 0x00BEFFFF, 0x00140006, 0x0 },
	{ 0x80B2CFFF, 0x001B0002, 0x0 },
	{ 0x00FFFFFF, 0x000E000A, 0x0 },
	{ 0x00DB6FFF, 0x00160005, 0x0 },
	{ 0x80C71FFF, 0x001A0002, 0x0 },
	{ 0x00F7DFFF, 0x00180004, 0x0 },
	{ 0x80D75FFF, 0x001B0002, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0001000E, 0x0 },
	{ 0x00D75FFF, 0x0004000A, 0x0 },
	{ 0x00C30FFF, 0x00070006, 0x0 },
	{ 0x00AAAFFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x0004000A, 0x0 },
	{ 0x00D75FFF, 0x00090004, 0x0 },
	{ 0x00C30FFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x00070006, 0x0 },
	{ 0x00D75FFF, 0x000C0000, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV df	db	*/
	{ 0x00FFFFFF, 0x0007000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00D75FFF, 0x000E000A, 0x0 },/* 1:	400	600	3.5	*/
	{ 0x00BEFFFF, 0x00140006, 0x0 },/* 2:	400	800	6	*/
	{ 0x00FFFFFF, 0x0009000D, 0x0 },/* 3:	450	450	0	*/
	{ 0x00FFFFFF, 0x000E000A, 0x0 },/* 4:	600	600	0	*/
	{ 0x00D7FFFF, 0x00140006, 0x0 },/* 5:	600	800	2.5	*/
	{ 0x80CB2FFF, 0x001B0002, 0x0 },/* 6:	600	1000	4.5	*/
	{ 0x00FFFFFF, 0x00140006, 0x0 },/* 7:	800	800	0	*/
	{ 0x80E79FFF, 0x001B0002, 0x0 },/* 8:	800	1000	2	*/
	{ 0x80FFFFFF, 0x001B0002, 0x0 },/* 9:	1000	1000	0	*/
};

/* Skylake H and S */
static const struct ddi_buf_trans skl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x000000DF, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake U */
static const struct ddi_buf_trans skl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x1 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_dp[] = {
	{ 0x00000018, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00000018, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake H and S */
static const struct ddi_buf_trans kbl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000097, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Kabylake U */
static const struct ddi_buf_trans kbl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00002016, 0x0000004F, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake Y */
static const struct ddi_buf_trans kbl_y_ddi_translations_dp[] = {
	{ 0x00001017, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x8000800F, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000004C, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/*
 * Skylake/Kabylake H and S
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00000018, 0x000000AB, 0x0 },
	{ 0x00007013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake U
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_u_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00002016, 0x000000AB, 0x0 },
	{ 0x00005013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake Y
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_y_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000AB, 0x0 },
	{ 0x00007011, 0x000000A4, 0x0 },
	{ 0x00009010, 0x000000DF, 0x0 },
	{ 0x00000018, 0x000000AA, 0x0 },
	{ 0x00006013, 0x000000A4, 0x0 },
	{ 0x00007011, 0x0000009D, 0x0 },
	{ 0x00000018, 0x000000A0, 0x0 },
	{ 0x00006012, 0x000000DF, 0x0 },
	{ 0x00000018, 0x0000008A, 0x0 },
};

/* Skylake/Kabylake U, H and S */
static const struct ddi_buf_trans skl_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000AC, 0x0 },
	{ 0x00005012, 0x0000009D, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00000018, 0x00000098, 0x0 },
	{ 0x00004013, 0x00000088, 0x0 },
	{ 0x80006012, 0x000000CD, 0x1 },
	{ 0x00000018, 0x000000DF, 0x0 },
	{ 0x80003015, 0x000000CD, 0x1 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x1 },
	{ 0x80000018, 0x000000C0, 0x1 },
};

/* Skylake/Kabylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00005012, 0x000000DF, 0x0 },
	{ 0x80007011, 0x000000CB, 0x3 },
	{ 0x00000018, 0x000000A4, 0x0 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x00004013, 0x00000080, 0x0 },
	{ 0x80006013, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000008A, 0x0 },
	{ 0x80003015, 0x000000C0, 0x3 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x3 },
	{ 0x80000018, 0x000000C0, 0x3 },
};

struct bxt_ddi_buf_trans {
	u8 margin;	/* swing value */
	u8 scale;	/* scale value */
	u8 enable;	/* scale enable */
	u8 deemphasis;
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_dp[] = {
				/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 78,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 104, 0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 154, 0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 116, 0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 154, 0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 154, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_edp[] = {
				/* Idx	NT mV diff	db  */
	{ 26, 0, 0, 128, },	/* 0:	200		0   */
	{ 38, 0, 0, 112, },	/* 1:	200		1.5 */
	{ 48, 0, 0, 96,  },	/* 2:	200		4   */
	{ 54, 0, 0, 69,  },	/* 3:	200		6   */
	{ 32, 0, 0, 128, },	/* 4:	250		0   */
	{ 48, 0, 0, 104, },	/* 5:	250		1.5 */
	{ 54, 0, 0, 85,  },	/* 6:	250		4   */
	{ 43, 0, 0, 128, },	/* 7:	300		0   */
	{ 54, 0, 0, 101, },	/* 8:	300		1.5 */
	{ 48, 0, 0, 128, },	/* 9:	300		0   */
};

/* BSpec has 2 recommended values - entries 0 and 8.
 * Using the entry with higher vswing.
 */
static const struct bxt_ddi_buf_trans bxt_ddi_translations_hdmi[] = {
				/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 52,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 52,  0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 42,  0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 77,  0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 77,  0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 102, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};
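/*
 * Combo PHY buffer translation values (CNL and later); as the field names
 * indicate, these are the swing select, N scalar and cursor/post-cursor
 * coefficients written to the per-lane PORT_TX_DW2/DW4/DW7 register fields.
 */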
struct cnl_ddi_buf_trans {
	u8 dw2_swing_sel;
	u8 dw7_n_scalar;
	u8 dw4_cursor_coeff;
	u8 dw4_post_cursor_2;
	u8 dw4_post_cursor_1;
};

/* Voltage Swing Programming for VccIO 0.85V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x66, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x66, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x70, 0x3C, 0x00, 0x03 },	/* 460   600      2.3   */
	{ 0xC, 0x75, 0x3C, 0x00, 0x03 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5C, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x69, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x76, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5E, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x69, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0xB, 0x79, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x76, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7D, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x61, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x61, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x68, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xC, 0x6E, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x4, 0x7F, 0x3A, 0x00, 0x05 },	/* 460   600      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 400   1050     8.4   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 550   1050     5.6   */
	{ 0x5, 0x76, 0x3E, 0x00, 0x01 },	/* 850   900      0.5   */
	{ 0x6, 0x7F, 0x36, 0x00, 0x09 },	/* 750   1050     2.9   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5B, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7C, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x70, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7C, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5E, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x5E, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x64, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xE, 0x6A, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};
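/*
 * Unlike the CNL tables above, which are selected based on the VccIO
 * voltage, the ICL and later combo PHY tables below are selected only by
 * output type and link rate.
 */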
/* icl_combo_phy_ddi_translations */
static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x0, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },	/* 200   300      3.5   */
	{ 0x9, 0x7F, 0x31, 0x00, 0x0E },	/* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 250   300      1.6   */
	{ 0x9, 0x7F, 0x35, 0x00, 0x0A },	/* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 300   300      0.0   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 300   350      1.3   */
	{ 0x9, 0x7F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_hdmi[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   ALS */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

static const struct cnl_ddi_buf_trans ehl_combo_phy_ddi_translations_dp[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x33, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x47, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x64, 0x34, 0x00, 0x0B },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 350   900      8.2   */
	{ 0xA, 0x46, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x64, 0x38, 0x00, 0x07 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x32, 0x00, 0x0D },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x38, 0x00, 0x07 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },	/* 200   300      3.5   */
	{ 0xA, 0x35, 0x36, 0x00, 0x09 },	/* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 250   300      1.6   */
	{ 0xA, 0x35, 0x35, 0x00, 0x0A },	/* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 300   300      0.0   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },	/* 300   350      1.3   */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   200      0.0   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   250      1.9   */
	{ 0x1, 0x7F, 0x3D, 0x00, 0x02 },	/* 200   300      3.5   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },	/* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   250      0.0   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   300      1.6   */
	{ 0xA, 0x35, 0x3A, 0x00, 0x05 },	/* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 300   300      0.0   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },	/* 300   350      1.3   */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_rbr_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x48, 0x35, 0x00, 0x0A },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x43, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x60, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x60, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_hbr2_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x48, 0x35, 0x00, 0x0A },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x43, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x60, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x58, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

struct icl_mg_phy_ddi_buf_trans {
	u32 cri_txdeemph_override_11_6;
	u32 cri_txdeemph_override_5_0;
	u32 cri_txdeemph_override_17_12;
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_rbr_hbr[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x21, 0x00, 0x00 },	/* 1              0   */
	{ 0x2B, 0x00, 0x08 },	/* 1              1   */
	{ 0x30, 0x00, 0x0F },	/* 1              2   */
	{ 0x31, 0x00, 0x03 },	/* 2              0   */
	{ 0x34, 0x00, 0x0B },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hbr2_hbr3[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x26, 0x00, 0x00 },	/* 1              0   */
	{ 0x2C, 0x00, 0x07 },	/* 1              1   */
	{ 0x33, 0x00, 0x0C },	/* 1              2   */
	{ 0x2E, 0x00, 0x00 },	/* 2              0   */
	{ 0x36, 0x00, 0x09 },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hdmi[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x1A, 0x0, 0x0 },	/* 1		400mV	0 dB */
	{ 0x20, 0x0, 0x0 },	/* 2		500mV	0 dB */
	{ 0x29, 0x0, 0x0 },	/* 3		650mV	0 dB */
	{ 0x32, 0x0, 0x0 },	/* 4		800mV	0 dB */
	{ 0x3F, 0x0, 0x0 },	/* 5		1000mV	0 dB */
	{ 0x3A, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x39, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x38, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x37, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x36, 0x0, 0x9 },	/* 10		Full	-3 dB */
};
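/*
 * Dekel PHY (TGL Type-C) buffer translation values: per-entry vswing,
 * pre-shoot and de-emphasis control words, as the field names indicate.
 */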
struct tgl_dkl_phy_ddi_buf_trans {
	u32 dkl_vswing_control;
	u32 dkl_preshoot_control;
	u32 dkl_de_emphasis_control;
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x18 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans_hbr2[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x19 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_hdmi_ddi_trans[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x7, 0x0, 0x0 },	/* 1		400mV	0 dB */
	{ 0x6, 0x0, 0x0 },	/* 2		500mV	0 dB */
	{ 0x4, 0x0, 0x0 },	/* 3		650mV	0 dB */
	{ 0x2, 0x0, 0x0 },	/* 4		800mV	0 dB */
	{ 0x0, 0x0, 0x0 },	/* 5		1000mV	0 dB */
	{ 0x0, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x0, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x0, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x0, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x0, 0x0, 0xA },	/* 10		Full	-3 dB */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x63, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7B, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_uy_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x60, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0xC, 0x7F, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xC, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x6F, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 500   900      5.1   */
	{ 0x6, 0x60, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/*
 * Cloned the HOBL entry to comply with the voltage and pre-emphasis entries
 * that DisplayPort specification requires
 */
static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_edp_hbr2_hobl[] = {
						/* VS	pre-emp	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	3	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	1	*/
};

static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x2F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2A, 0x00, 0x15 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6E, 0x3E, 0x00, 0x01 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr2_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x50, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xC, 0x61, 0x33, 0x00, 0x0C },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2E, 0x00, 0x11 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x5F, 0x38, 0x00, 0x07 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x5F, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7E, 0x36, 0x00, 0x09 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static bool is_hobl_buf_trans(const struct cnl_ddi_buf_trans *table)
{
	return table == tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
}
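/*
 * The helpers below select the proper translation table for the platform,
 * output type and link rate, returning the table and its length via
 * n_entries.
 */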
static const struct ddi_buf_trans *
bdw_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_edp);
		return bdw_ddi_translations_edp;
	} else {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return bdw_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_SKL_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp);
		return skl_y_ddi_translations_dp;
	} else if (IS_SKL_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp);
		return skl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_dp);
		return skl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
kbl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_y_ddi_translations_dp);
		return kbl_y_ddi_translations_dp;
	} else if (IS_KBL_ULT(dev_priv) ||
		   IS_CFL_ULT(dev_priv) ||
		   IS_CML_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_u_ddi_translations_dp);
		return kbl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(kbl_ddi_translations_dp);
		return kbl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (IS_SKL_ULX(dev_priv) ||
		    IS_KBL_ULX(dev_priv) ||
		    IS_CFL_ULX(dev_priv) ||
		    IS_CML_ULX(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp);
			return skl_y_ddi_translations_edp;
		} else if (IS_SKL_ULT(dev_priv) ||
			   IS_KBL_ULT(dev_priv) ||
			   IS_CFL_ULT(dev_priv) ||
			   IS_CML_ULT(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp);
			return skl_u_ddi_translations_edp;
		} else {
			*n_entries = ARRAY_SIZE(skl_ddi_translations_edp);
			return skl_ddi_translations_edp;
		}
	}

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv))
		return kbl_get_buf_trans_dp(encoder, n_entries);
	else
		return skl_get_buf_trans_dp(encoder, n_entries);
}

static const struct ddi_buf_trans *
skl_get_buf_trans_hdmi(struct drm_i915_private *dev_priv, int *n_entries)
{
	if (IS_SKL_ULX(dev_priv) ||
	    IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi);
		return skl_y_ddi_translations_hdmi;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_hdmi);
		return skl_ddi_translations_hdmi;
	}
}

static int skl_buf_trans_num_entries(enum port port, int n_entries)
{
	/* Only DDIA and DDIE can select the 10th register with DP */
	if (port == PORT_A || port == PORT_E)
		return min(n_entries, 10);
	else
		return min(n_entries, 9);
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			kbl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_SKYLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return bdw_ddi_translations_dp;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_edp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		return bdw_get_buf_trans_edp(encoder, n_entries);
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_fdi(struct drm_i915_private *dev_priv,
			    int *n_entries)
{
	if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_fdi);
		return bdw_ddi_translations_fdi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_fdi);
		return hsw_ddi_translations_fdi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_hdmi(struct intel_encoder *encoder,
			     int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		return skl_get_buf_trans_hdmi(dev_priv, n_entries);
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_hdmi);
		return bdw_ddi_translations_hdmi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_hdmi);
		return hsw_ddi_translations_hdmi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_dp);
	return bxt_ddi_translations_dp;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bxt_ddi_translations_edp);
		return bxt_ddi_translations_edp;
	}

	return bxt_get_buf_trans_dp(encoder, n_entries);
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_hdmi);
	return bxt_ddi_translations_hdmi;
}
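/*
 * On CNL the table additionally depends on the VccIO voltage reported in
 * CNL_PORT_COMP_DW3.
 */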
static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_85V);
		return cnl_ddi_translations_hdmi_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_95V);
		return cnl_ddi_translations_hdmi_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_1_05V);
		return cnl_ddi_translations_hdmi_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_85V);
		return cnl_ddi_translations_dp_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_95V);
		return cnl_ddi_translations_dp_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_1_05V);
		return cnl_ddi_translations_dp_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (dev_priv->vbt.edp.low_vswing) {
		if (voltage == VOLTAGE_INFO_0_85V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_85V);
			return cnl_ddi_translations_edp_0_85V;
		} else if (voltage == VOLTAGE_INFO_0_95V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_95V);
			return cnl_ddi_translations_edp_0_95V;
		} else if (voltage == VOLTAGE_INFO_1_05V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_1_05V);
			return cnl_ddi_translations_edp_1_05V;
		} else {
			*n_entries = 1; /* shut up gcc */
			MISSING_CASE(voltage);
		}
		return NULL;
	} else {
		return cnl_get_buf_trans_dp(encoder, n_entries);
	}
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (crtc_state->port_clock > 540000) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	} else if (IS_DG1(dev_priv) && crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_hbr2_hbr3);
		return dg1_combo_phy_ddi_translations_dp_hbr2_hbr3;
	} else if (IS_DG1(dev_priv)) {
		*n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_rbr_hbr);
		return dg1_combo_phy_ddi_translations_dp_rbr_hbr;
	}

	return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return icl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return icl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans_hdmi(struct intel_encoder *encoder,
			  const struct intel_crtc_state *crtc_state,
			  int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hdmi);
	return icl_mg_phy_ddi_translations_hdmi;
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans_dp(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hbr2_hbr3);
		return icl_mg_phy_ddi_translations_hbr2_hbr3;
	} else {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_rbr_hbr);
		return icl_mg_phy_ddi_translations_rbr_hbr;
	}
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans(struct intel_encoder *encoder,
		     const struct intel_crtc_state *crtc_state,
		     int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return icl_get_mg_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else
		return icl_get_mg_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(ehl_combo_phy_ddi_translations_dp);
	return ehl_combo_phy_ddi_translations_dp;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

	return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return ehl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return ehl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (crtc_state->port_clock > 270000) {
			*n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr2);
			return jsl_combo_phy_ddi_translations_edp_hbr2;
		} else {
			*n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr);
			return jsl_combo_phy_ddi_translations_edp_hbr;
		}
	}

	return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return jsl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return jsl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (crtc_state->port_clock > 270000) {
		if (IS_ROCKETLAKE(dev_priv)) {
			*n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr2_hbr3);
			return rkl_combo_phy_ddi_translations_dp_hbr2_hbr3;
		} else if (IS_TGL_U(dev_priv) || IS_TGL_Y(dev_priv)) {
			*n_entries = ARRAY_SIZE(tgl_uy_combo_phy_ddi_translations_dp_hbr2);
			return tgl_uy_combo_phy_ddi_translations_dp_hbr2;
		} else {
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr2);
			return tgl_combo_phy_ddi_translations_dp_hbr2;
		}
	} else {
		if (IS_ROCKETLAKE(dev_priv)) {
			*n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr);
			return rkl_combo_phy_ddi_translations_dp_hbr;
		} else {
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr);
			return tgl_combo_phy_ddi_translations_dp_hbr;
		}
	}
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	if (crtc_state->port_clock > 540000) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (dev_priv->vbt.edp.hobl && !intel_dp->hobl_failed) {
		*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_edp_hbr2_hobl);
		return tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
	} else if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

	return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return tgl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct tgl_dkl_phy_ddi_buf_trans *
tgl_get_dkl_buf_trans_hdmi(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(tgl_dkl_phy_hdmi_ddi_trans);
	return tgl_dkl_phy_hdmi_ddi_trans;
}

static const struct tgl_dkl_phy_ddi_buf_trans *
tgl_get_dkl_buf_trans_dp(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state,
			 int *n_entries)
{
	if (crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans_hbr2);
		return tgl_dkl_phy_dp_ddi_trans_hbr2;
	} else {
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans);
		return tgl_dkl_phy_dp_ddi_trans;
	}
}

static const struct tgl_dkl_phy_ddi_buf_trans *
tgl_get_dkl_buf_trans(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state,
		      int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else
		return tgl_get_dkl_buf_trans_dp(encoder, crtc_state, n_entries);
}

static int intel_ddi_hdmi_level(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int n_entries, level, default_entry;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
			tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		else
			tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		default_entry = n_entries - 1;
	} else if (INTEL_GEN(dev_priv) == 11) {
		if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		else
			icl_get_mg_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_CANNONLAKE(dev_priv)) {
		cnl_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_LP(dev_priv)) {
		bxt_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 8;
	} else if (IS_BROADWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 7;
	} else if (IS_HASWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 6;
	} else {
		drm_WARN(&dev_priv->drm, 1, "ddi translation table missing\n");
		return 0;
	}

	if (drm_WARN_ON_ONCE(&dev_priv->drm, n_entries == 0))
		return 0;

	level = intel_bios_hdmi_level_shift(encoder);
	if (level < 0)
		level = default_entry;

	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	return level;
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * DP/eDP/FDI use cases.
 */
static void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder,
					 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int i, n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv,
							       &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
							       &n_entries);
	else
		ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
							      &n_entries);

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_dp_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	for (i = 0; i < n_entries; i++) {
		intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i),
			       ddi_translations[i].trans1 | iboost_bit);
		intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i),
			       ddi_translations[i].trans2);
	}
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * HDMI/DVI use cases.
1508 */ 1509 static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder, 1510 int level) 1511 { 1512 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1513 u32 iboost_bit = 0; 1514 int n_entries; 1515 enum port port = encoder->port; 1516 const struct ddi_buf_trans *ddi_translations; 1517 1518 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 1519 1520 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 1521 return; 1522 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 1523 level = n_entries - 1; 1524 1525 /* If we're boosting the current, set bit 31 of trans1 */ 1526 if (IS_GEN9_BC(dev_priv) && intel_bios_hdmi_boost_level(encoder)) 1527 iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE; 1528 1529 /* Entry 9 is for HDMI: */ 1530 intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9), 1531 ddi_translations[level].trans1 | iboost_bit); 1532 intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9), 1533 ddi_translations[level].trans2); 1534 } 1535 1536 static void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv, 1537 enum port port) 1538 { 1539 if (IS_BROXTON(dev_priv)) { 1540 udelay(16); 1541 return; 1542 } 1543 1544 if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 1545 DDI_BUF_IS_IDLE), 8)) 1546 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n", 1547 port_name(port)); 1548 } 1549 1550 static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv, 1551 enum port port) 1552 { 1553 /* Wait > 518 usecs for DDI_BUF_CTL to be non-idle */ 1554 if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) { 1555 usleep_range(518, 1000); 1556 return; 1557 } 1558 1559 if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) & 1560 DDI_BUF_IS_IDLE), 500)) 1561 drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n", 1562 port_name(port)); 1563 } 1564 1565 static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll) 1566 { 1567 switch (pll->info->id) { 1568 case DPLL_ID_WRPLL1: 1569 return PORT_CLK_SEL_WRPLL1; 1570 case DPLL_ID_WRPLL2: 1571 return PORT_CLK_SEL_WRPLL2; 1572 case DPLL_ID_SPLL: 1573 return PORT_CLK_SEL_SPLL; 1574 case DPLL_ID_LCPLL_810: 1575 return PORT_CLK_SEL_LCPLL_810; 1576 case DPLL_ID_LCPLL_1350: 1577 return PORT_CLK_SEL_LCPLL_1350; 1578 case DPLL_ID_LCPLL_2700: 1579 return PORT_CLK_SEL_LCPLL_2700; 1580 default: 1581 MISSING_CASE(pll->info->id); 1582 return PORT_CLK_SEL_NONE; 1583 } 1584 } 1585 1586 static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder, 1587 const struct intel_crtc_state *crtc_state) 1588 { 1589 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1590 int clock = crtc_state->port_clock; 1591 const enum intel_dpll_id id = pll->info->id; 1592 1593 switch (id) { 1594 default: 1595 /* 1596 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used 1597 * here, so do warn if this gets passed in 1598 */ 1599 MISSING_CASE(id); 1600 return DDI_CLK_SEL_NONE; 1601 case DPLL_ID_ICL_TBTPLL: 1602 switch (clock) { 1603 case 162000: 1604 return DDI_CLK_SEL_TBT_162; 1605 case 270000: 1606 return DDI_CLK_SEL_TBT_270; 1607 case 540000: 1608 return DDI_CLK_SEL_TBT_540; 1609 case 810000: 1610 return DDI_CLK_SEL_TBT_810; 1611 default: 1612 MISSING_CASE(clock); 1613 return DDI_CLK_SEL_NONE; 1614 } 1615 case DPLL_ID_ICL_MGPLL1: 1616 case DPLL_ID_ICL_MGPLL2: 1617 case DPLL_ID_ICL_MGPLL3: 1618 case DPLL_ID_ICL_MGPLL4: 1619 case DPLL_ID_TGL_MGPLL5: 1620 case DPLL_ID_TGL_MGPLL6: 1621 return DDI_CLK_SEL_MG; 1622 } 1623 } 1624 1625 /* Starting with
Haswell, different DDI ports can work in FDI mode for 1626 * connection to the PCH-located connectors. For this, it is necessary to train 1627 * both the DDI port and PCH receiver for the desired DDI buffer settings. 1628 * 1629 * The recommended port to work in FDI mode is DDI E, which we use here. Also, 1630 * please note that when FDI mode is active on DDI E, it shares 2 lines with 1631 * DDI A (which is used for eDP) 1632 */ 1633 1634 void hsw_fdi_link_train(struct intel_encoder *encoder, 1635 const struct intel_crtc_state *crtc_state) 1636 { 1637 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1638 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1639 u32 temp, i, rx_ctl_val, ddi_pll_sel; 1640 1641 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 1642 1643 /* Set the FDI_RX_MISC pwrdn lanes and the 2 workarounds listed at the 1644 * mode set "sequence for CRT port" document: 1645 * - TP1 to TP2 time with the default value 1646 * - FDI delay to 90h 1647 * 1648 * WaFDIAutoLinkSetTimingOverrride:hsw 1649 */ 1650 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), 1651 FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2) | FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90); 1652 1653 /* Enable the PCH Receiver FDI PLL */ 1654 rx_ctl_val = dev_priv->fdi_rx_config | FDI_RX_ENHANCE_FRAME_ENABLE | 1655 FDI_RX_PLL_ENABLE | 1656 FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes); 1657 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1658 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1659 udelay(220); 1660 1661 /* Switch from Rawclk to PCDclk */ 1662 rx_ctl_val |= FDI_PCDCLK; 1663 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1664 1665 /* Configure Port Clock Select */ 1666 ddi_pll_sel = hsw_pll_to_ddi_pll_sel(crtc_state->shared_dpll); 1667 intel_de_write(dev_priv, PORT_CLK_SEL(PORT_E), ddi_pll_sel); 1668 drm_WARN_ON(&dev_priv->drm, ddi_pll_sel != PORT_CLK_SEL_SPLL); 1669 1670 /* Start the training iterating through available voltages and emphasis, 1671 * testing each value twice. */ 1672 for (i = 0; i < ARRAY_SIZE(hsw_ddi_translations_fdi) * 2; i++) { 1673 /* Configure DP_TP_CTL with auto-training */ 1674 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), 1675 DP_TP_CTL_FDI_AUTOTRAIN | 1676 DP_TP_CTL_ENHANCED_FRAME_ENABLE | 1677 DP_TP_CTL_LINK_TRAIN_PAT1 | 1678 DP_TP_CTL_ENABLE); 1679 1680 /* Configure and enable DDI_BUF_CTL for DDI E with next voltage. 
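* Each voltage/emphasis entry of the FDI translation table is tried twice, hence the DDI_BUF_TRANS_SELECT(i / 2) below.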
1681 * DDI E does not support port reversal, the functionality is 1682 * achieved on the PCH side in FDI_RX_CTL, so no need to set the 1683 * port reversal bit */ 1684 intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), 1685 DDI_BUF_CTL_ENABLE | ((crtc_state->fdi_lanes - 1) << 1) | DDI_BUF_TRANS_SELECT(i / 2)); 1686 intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1687 1688 udelay(600); 1689 1690 /* Program PCH FDI Receiver TU */ 1691 intel_de_write(dev_priv, FDI_RX_TUSIZE1(PIPE_A), TU_SIZE(64)); 1692 1693 /* Enable PCH FDI Receiver with auto-training */ 1694 rx_ctl_val |= FDI_RX_ENABLE | FDI_LINK_TRAIN_AUTO; 1695 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1696 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1697 1698 /* Wait for FDI receiver lane calibration */ 1699 udelay(30); 1700 1701 /* Unset FDI_RX_MISC pwrdn lanes */ 1702 temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1703 temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 1704 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp); 1705 intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1706 1707 /* Wait for FDI auto training time */ 1708 udelay(5); 1709 1710 temp = intel_de_read(dev_priv, DP_TP_STATUS(PORT_E)); 1711 if (temp & DP_TP_STATUS_AUTOTRAIN_DONE) { 1712 drm_dbg_kms(&dev_priv->drm, 1713 "FDI link training done on step %d\n", i); 1714 break; 1715 } 1716 1717 /* 1718 * Leave things enabled even if we failed to train FDI. 1719 * Results in less fireworks from the state checker. 1720 */ 1721 if (i == ARRAY_SIZE(hsw_ddi_translations_fdi) * 2 - 1) { 1722 drm_err(&dev_priv->drm, "FDI link training failed!\n"); 1723 break; 1724 } 1725 1726 rx_ctl_val &= ~FDI_RX_ENABLE; 1727 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val); 1728 intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A)); 1729 1730 temp = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1731 temp &= ~DDI_BUF_CTL_ENABLE; 1732 intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), temp); 1733 intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E)); 1734 1735 /* Disable DP_TP_CTL and FDI_RX_CTL and retry */ 1736 temp = intel_de_read(dev_priv, DP_TP_CTL(PORT_E)); 1737 temp &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 1738 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 1739 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), temp); 1740 intel_de_posting_read(dev_priv, DP_TP_CTL(PORT_E)); 1741 1742 intel_wait_ddi_buf_idle(dev_priv, PORT_E); 1743 1744 /* Reset FDI_RX_MISC pwrdn lanes */ 1745 temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1746 temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 1747 temp |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 1748 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp); 1749 intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A)); 1750 } 1751 1752 /* Enable normal pixel sending for FDI */ 1753 intel_de_write(dev_priv, DP_TP_CTL(PORT_E), 1754 DP_TP_CTL_FDI_AUTOTRAIN | 1755 DP_TP_CTL_LINK_TRAIN_NORMAL | 1756 DP_TP_CTL_ENHANCED_FRAME_ENABLE | 1757 DP_TP_CTL_ENABLE); 1758 } 1759 1760 static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder, 1761 const struct intel_crtc_state *crtc_state) 1762 { 1763 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 1764 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 1765 1766 intel_dp->DP = dig_port->saved_port_bits | 1767 DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0); 1768 intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count); 1769 } 1770 1771 static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv, 1772 enum port port) 1773 { 1774 
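/* Decode the TBT PLL selection programmed into DDI_CLK_SEL back into a port clock in kHz, e.g. DDI_CLK_SEL_TBT_270 -> 270000. */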
u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK; 1775 1776 switch (val) { 1777 case DDI_CLK_SEL_NONE: 1778 return 0; 1779 case DDI_CLK_SEL_TBT_162: 1780 return 162000; 1781 case DDI_CLK_SEL_TBT_270: 1782 return 270000; 1783 case DDI_CLK_SEL_TBT_540: 1784 return 540000; 1785 case DDI_CLK_SEL_TBT_810: 1786 return 810000; 1787 default: 1788 MISSING_CASE(val); 1789 return 0; 1790 } 1791 } 1792 1793 static void ddi_dotclock_get(struct intel_crtc_state *pipe_config) 1794 { 1795 int dotclock; 1796 1797 if (pipe_config->has_pch_encoder) 1798 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 1799 &pipe_config->fdi_m_n); 1800 else if (intel_crtc_has_dp_encoder(pipe_config)) 1801 dotclock = intel_dotclock_calculate(pipe_config->port_clock, 1802 &pipe_config->dp_m_n); 1803 else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24) 1804 dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp; 1805 else 1806 dotclock = pipe_config->port_clock; 1807 1808 if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 && 1809 !intel_crtc_has_dp_encoder(pipe_config)) 1810 dotclock *= 2; 1811 1812 if (pipe_config->pixel_multiplier) 1813 dotclock /= pipe_config->pixel_multiplier; 1814 1815 pipe_config->hw.adjusted_mode.crtc_clock = dotclock; 1816 } 1817 1818 static void intel_ddi_clock_get(struct intel_encoder *encoder, 1819 struct intel_crtc_state *pipe_config) 1820 { 1821 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 1822 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 1823 1824 if (intel_phy_is_tc(dev_priv, phy) && 1825 intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll) == 1826 DPLL_ID_ICL_TBTPLL) 1827 pipe_config->port_clock = icl_calc_tbt_pll_link(dev_priv, 1828 encoder->port); 1829 else 1830 pipe_config->port_clock = 1831 intel_dpll_get_freq(dev_priv, pipe_config->shared_dpll, 1832 &pipe_config->dpll_hw_state); 1833 1834 ddi_dotclock_get(pipe_config); 1835 } 1836 1837 void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state, 1838 const struct drm_connector_state *conn_state) 1839 { 1840 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1841 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1842 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1843 u32 temp; 1844 1845 if (!intel_crtc_has_dp_encoder(crtc_state)) 1846 return; 1847 1848 drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)); 1849 1850 temp = DP_MSA_MISC_SYNC_CLOCK; 1851 1852 switch (crtc_state->pipe_bpp) { 1853 case 18: 1854 temp |= DP_MSA_MISC_6_BPC; 1855 break; 1856 case 24: 1857 temp |= DP_MSA_MISC_8_BPC; 1858 break; 1859 case 30: 1860 temp |= DP_MSA_MISC_10_BPC; 1861 break; 1862 case 36: 1863 temp |= DP_MSA_MISC_12_BPC; 1864 break; 1865 default: 1866 MISSING_CASE(crtc_state->pipe_bpp); 1867 break; 1868 } 1869 1870 /* nonsense combination */ 1871 drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range && 1872 crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB); 1873 1874 if (crtc_state->limited_color_range) 1875 temp |= DP_MSA_MISC_COLOR_CEA_RGB; 1876 1877 /* 1878 * As per DP 1.2 spec section 2.3.4.3 while sending 1879 * YCBCR 444 signals we should program MSA MISC1/0 fields with 1880 * colorspace information. 
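* We currently always signal BT.709 colorimetry here (DP_MSA_MISC_COLOR_YCBCR_444_BT709 below).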
1881 */ 1882 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444) 1883 temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709; 1884 1885 /* 1886 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication 1887 * of Color Encoding Format and Content Color Gamut] while sending 1888 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields 1889 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format. 1890 */ 1891 if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) 1892 temp |= DP_MSA_MISC_COLOR_VSC_SDP; 1893 1894 intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp); 1895 } 1896 1897 static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder) 1898 { 1899 if (master_transcoder == TRANSCODER_EDP) 1900 return 0; 1901 else 1902 return master_transcoder + 1; 1903 } 1904 1905 /* 1906 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state. 1907 * 1908 * Only intended to be used by intel_ddi_enable_transcoder_func() and 1909 * intel_ddi_config_transcoder_func(). 1910 */ 1911 static u32 1912 intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder, 1913 const struct intel_crtc_state *crtc_state) 1914 { 1915 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 1916 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 1917 enum pipe pipe = crtc->pipe; 1918 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 1919 enum port port = encoder->port; 1920 u32 temp; 1921 1922 /* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */ 1923 temp = TRANS_DDI_FUNC_ENABLE; 1924 if (INTEL_GEN(dev_priv) >= 12) 1925 temp |= TGL_TRANS_DDI_SELECT_PORT(port); 1926 else 1927 temp |= TRANS_DDI_SELECT_PORT(port); 1928 1929 switch (crtc_state->pipe_bpp) { 1930 case 18: 1931 temp |= TRANS_DDI_BPC_6; 1932 break; 1933 case 24: 1934 temp |= TRANS_DDI_BPC_8; 1935 break; 1936 case 30: 1937 temp |= TRANS_DDI_BPC_10; 1938 break; 1939 case 36: 1940 temp |= TRANS_DDI_BPC_12; 1941 break; 1942 default: 1943 BUG(); 1944 } 1945 1946 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC) 1947 temp |= TRANS_DDI_PVSYNC; 1948 if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC) 1949 temp |= TRANS_DDI_PHSYNC; 1950 1951 if (cpu_transcoder == TRANSCODER_EDP) { 1952 switch (pipe) { 1953 case PIPE_A: 1954 /* On Haswell, can only use the always-on power well for 1955 * eDP when not using the panel fitter, and when not 1956 * using motion blur mitigation (which we don't 1957 * support). 
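* Hence, when the panel fitter is forced through on pipe A (force_thru), the on/off EDP input is selected below instead of the always-on one.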
*/ 1958 if (crtc_state->pch_pfit.force_thru) 1959 temp |= TRANS_DDI_EDP_INPUT_A_ONOFF; 1960 else 1961 temp |= TRANS_DDI_EDP_INPUT_A_ON; 1962 break; 1963 case PIPE_B: 1964 temp |= TRANS_DDI_EDP_INPUT_B_ONOFF; 1965 break; 1966 case PIPE_C: 1967 temp |= TRANS_DDI_EDP_INPUT_C_ONOFF; 1968 break; 1969 default: 1970 BUG(); 1971 break; 1972 } 1973 } 1974 1975 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 1976 if (crtc_state->has_hdmi_sink) 1977 temp |= TRANS_DDI_MODE_SELECT_HDMI; 1978 else 1979 temp |= TRANS_DDI_MODE_SELECT_DVI; 1980 1981 if (crtc_state->hdmi_scrambling) 1982 temp |= TRANS_DDI_HDMI_SCRAMBLING; 1983 if (crtc_state->hdmi_high_tmds_clock_ratio) 1984 temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE; 1985 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) { 1986 temp |= TRANS_DDI_MODE_SELECT_FDI; 1987 temp |= (crtc_state->fdi_lanes - 1) << 1; 1988 } else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 1989 temp |= TRANS_DDI_MODE_SELECT_DP_MST; 1990 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 1991 1992 if (INTEL_GEN(dev_priv) >= 12) { 1993 enum transcoder master; 1994 1995 master = crtc_state->mst_master_transcoder; 1996 drm_WARN_ON(&dev_priv->drm, 1997 master == INVALID_TRANSCODER); 1998 temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master); 1999 } 2000 } else { 2001 temp |= TRANS_DDI_MODE_SELECT_DP_SST; 2002 temp |= DDI_PORT_WIDTH(crtc_state->lane_count); 2003 } 2004 2005 if (IS_GEN_RANGE(dev_priv, 8, 10) && 2006 crtc_state->master_transcoder != INVALID_TRANSCODER) { 2007 u8 master_select = 2008 bdw_trans_port_sync_master_select(crtc_state->master_transcoder); 2009 2010 temp |= TRANS_DDI_PORT_SYNC_ENABLE | 2011 TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select); 2012 } 2013 2014 return temp; 2015 } 2016 2017 void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder, 2018 const struct intel_crtc_state *crtc_state) 2019 { 2020 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2021 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2022 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2023 2024 if (INTEL_GEN(dev_priv) >= 11) { 2025 enum transcoder master_transcoder = crtc_state->master_transcoder; 2026 u32 ctl2 = 0; 2027 2028 if (master_transcoder != INVALID_TRANSCODER) { 2029 u8 master_select = 2030 bdw_trans_port_sync_master_select(master_transcoder); 2031 2032 ctl2 |= PORT_SYNC_MODE_ENABLE | 2033 PORT_SYNC_MODE_MASTER_SELECT(master_select); 2034 } 2035 2036 intel_de_write(dev_priv, 2037 TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2); 2038 } 2039 2040 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), 2041 intel_ddi_transcoder_func_reg_val_get(encoder, 2042 crtc_state)); 2043 } 2044 2045 /* 2046 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable 2047 * bit. 
2048 */ 2049 static void 2050 intel_ddi_config_transcoder_func(struct intel_encoder *encoder, 2051 const struct intel_crtc_state *crtc_state) 2052 { 2053 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2054 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2055 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2056 u32 ctl; 2057 2058 ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state); 2059 ctl &= ~TRANS_DDI_FUNC_ENABLE; 2060 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 2061 } 2062 2063 void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state) 2064 { 2065 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2066 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2067 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2068 u32 ctl; 2069 2070 if (INTEL_GEN(dev_priv) >= 11) 2071 intel_de_write(dev_priv, 2072 TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0); 2073 2074 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2075 2076 drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING); 2077 2078 ctl &= ~TRANS_DDI_FUNC_ENABLE; 2079 2080 if (IS_GEN_RANGE(dev_priv, 8, 10)) 2081 ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE | 2082 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK); 2083 2084 if (INTEL_GEN(dev_priv) >= 12) { 2085 if (!intel_dp_mst_is_master_trans(crtc_state)) { 2086 ctl &= ~(TGL_TRANS_DDI_PORT_MASK | 2087 TRANS_DDI_MODE_SELECT_MASK); 2088 } 2089 } else { 2090 ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK); 2091 } 2092 2093 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl); 2094 2095 if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME && 2096 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 2097 drm_dbg_kms(&dev_priv->drm, 2098 "Quirk Increase DDI disabled time\n"); 2099 /* Quirk time at 100ms for reliable operation */ 2100 msleep(100); 2101 } 2102 } 2103 2104 int intel_ddi_toggle_hdcp_bits(struct intel_encoder *intel_encoder, 2105 enum transcoder cpu_transcoder, 2106 bool enable, u32 hdcp_mask) 2107 { 2108 struct drm_device *dev = intel_encoder->base.dev; 2109 struct drm_i915_private *dev_priv = to_i915(dev); 2110 intel_wakeref_t wakeref; 2111 int ret = 0; 2112 u32 tmp; 2113 2114 wakeref = intel_display_power_get_if_enabled(dev_priv, 2115 intel_encoder->power_domain); 2116 if (drm_WARN_ON(dev, !wakeref)) 2117 return -ENXIO; 2118 2119 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2120 if (enable) 2121 tmp |= hdcp_mask; 2122 else 2123 tmp &= ~hdcp_mask; 2124 intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp); 2125 intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref); 2126 return ret; 2127 } 2128 2129 bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector) 2130 { 2131 struct drm_device *dev = intel_connector->base.dev; 2132 struct drm_i915_private *dev_priv = to_i915(dev); 2133 struct intel_encoder *encoder = intel_attached_encoder(intel_connector); 2134 int type = intel_connector->base.connector_type; 2135 enum port port = encoder->port; 2136 enum transcoder cpu_transcoder; 2137 intel_wakeref_t wakeref; 2138 enum pipe pipe = 0; 2139 u32 tmp; 2140 bool ret; 2141 2142 wakeref = intel_display_power_get_if_enabled(dev_priv, 2143 encoder->power_domain); 2144 if (!wakeref) 2145 return false; 2146 2147 if (!encoder->get_hw_state(encoder, &pipe)) { 2148 ret = false; 2149 goto out; 2150 } 2151 2152 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 
2153 cpu_transcoder = TRANSCODER_EDP; 2154 else 2155 cpu_transcoder = (enum transcoder) pipe; 2156 2157 tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2158 2159 switch (tmp & TRANS_DDI_MODE_SELECT_MASK) { 2160 case TRANS_DDI_MODE_SELECT_HDMI: 2161 case TRANS_DDI_MODE_SELECT_DVI: 2162 ret = type == DRM_MODE_CONNECTOR_HDMIA; 2163 break; 2164 2165 case TRANS_DDI_MODE_SELECT_DP_SST: 2166 ret = type == DRM_MODE_CONNECTOR_eDP || 2167 type == DRM_MODE_CONNECTOR_DisplayPort; 2168 break; 2169 2170 case TRANS_DDI_MODE_SELECT_DP_MST: 2171 /* if the transcoder is in MST state then 2172 * connector isn't connected */ 2173 ret = false; 2174 break; 2175 2176 case TRANS_DDI_MODE_SELECT_FDI: 2177 ret = type == DRM_MODE_CONNECTOR_VGA; 2178 break; 2179 2180 default: 2181 ret = false; 2182 break; 2183 } 2184 2185 out: 2186 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 2187 2188 return ret; 2189 } 2190 2191 static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder, 2192 u8 *pipe_mask, bool *is_dp_mst) 2193 { 2194 struct drm_device *dev = encoder->base.dev; 2195 struct drm_i915_private *dev_priv = to_i915(dev); 2196 enum port port = encoder->port; 2197 intel_wakeref_t wakeref; 2198 enum pipe p; 2199 u32 tmp; 2200 u8 mst_pipe_mask; 2201 2202 *pipe_mask = 0; 2203 *is_dp_mst = false; 2204 2205 wakeref = intel_display_power_get_if_enabled(dev_priv, 2206 encoder->power_domain); 2207 if (!wakeref) 2208 return; 2209 2210 tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 2211 if (!(tmp & DDI_BUF_CTL_ENABLE)) 2212 goto out; 2213 2214 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) { 2215 tmp = intel_de_read(dev_priv, 2216 TRANS_DDI_FUNC_CTL(TRANSCODER_EDP)); 2217 2218 switch (tmp & TRANS_DDI_EDP_INPUT_MASK) { 2219 default: 2220 MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK); 2221 fallthrough; 2222 case TRANS_DDI_EDP_INPUT_A_ON: 2223 case TRANS_DDI_EDP_INPUT_A_ONOFF: 2224 *pipe_mask = BIT(PIPE_A); 2225 break; 2226 case TRANS_DDI_EDP_INPUT_B_ONOFF: 2227 *pipe_mask = BIT(PIPE_B); 2228 break; 2229 case TRANS_DDI_EDP_INPUT_C_ONOFF: 2230 *pipe_mask = BIT(PIPE_C); 2231 break; 2232 } 2233 2234 goto out; 2235 } 2236 2237 mst_pipe_mask = 0; 2238 for_each_pipe(dev_priv, p) { 2239 enum transcoder cpu_transcoder = (enum transcoder)p; 2240 unsigned int port_mask, ddi_select; 2241 intel_wakeref_t trans_wakeref; 2242 2243 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 2244 POWER_DOMAIN_TRANSCODER(cpu_transcoder)); 2245 if (!trans_wakeref) 2246 continue; 2247 2248 if (INTEL_GEN(dev_priv) >= 12) { 2249 port_mask = TGL_TRANS_DDI_PORT_MASK; 2250 ddi_select = TGL_TRANS_DDI_SELECT_PORT(port); 2251 } else { 2252 port_mask = TRANS_DDI_PORT_MASK; 2253 ddi_select = TRANS_DDI_SELECT_PORT(port); 2254 } 2255 2256 tmp = intel_de_read(dev_priv, 2257 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2258 intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder), 2259 trans_wakeref); 2260 2261 if ((tmp & port_mask) != ddi_select) 2262 continue; 2263 2264 if ((tmp & TRANS_DDI_MODE_SELECT_MASK) == 2265 TRANS_DDI_MODE_SELECT_DP_MST) 2266 mst_pipe_mask |= BIT(p); 2267 2268 *pipe_mask |= BIT(p); 2269 } 2270 2271 if (!*pipe_mask) 2272 drm_dbg_kms(&dev_priv->drm, 2273 "No pipe for [ENCODER:%d:%s] found\n", 2274 encoder->base.base.id, encoder->base.name); 2275 2276 if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) { 2277 drm_dbg_kms(&dev_priv->drm, 2278 "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n", 2279 encoder->base.base.id, encoder->base.name, 2280 *pipe_mask); 2281 
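/* Recover by keeping only the lowest enabled pipe; e.g. a pipe_mask of 0x6 (pipes B and C) collapses to BIT(PIPE_B). */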
*pipe_mask = BIT(ffs(*pipe_mask) - 1); 2282 } 2283 2284 if (mst_pipe_mask && mst_pipe_mask != *pipe_mask) 2285 drm_dbg_kms(&dev_priv->drm, 2286 "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n", 2287 encoder->base.base.id, encoder->base.name, 2288 *pipe_mask, mst_pipe_mask); 2289 else 2290 *is_dp_mst = mst_pipe_mask; 2291 2292 out: 2293 if (*pipe_mask && IS_GEN9_LP(dev_priv)) { 2294 tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port)); 2295 if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK | 2296 BXT_PHY_LANE_POWERDOWN_ACK | 2297 BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED) 2298 drm_err(&dev_priv->drm, 2299 "[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n", 2300 encoder->base.base.id, encoder->base.name, tmp); 2301 } 2302 2303 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 2304 } 2305 2306 bool intel_ddi_get_hw_state(struct intel_encoder *encoder, 2307 enum pipe *pipe) 2308 { 2309 u8 pipe_mask; 2310 bool is_mst; 2311 2312 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 2313 2314 if (is_mst || !pipe_mask) 2315 return false; 2316 2317 *pipe = ffs(pipe_mask) - 1; 2318 2319 return true; 2320 } 2321 2322 static enum intel_display_power_domain 2323 intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port) 2324 { 2325 /* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with 2326 * DC states enabled at the same time, while for driver initiated AUX 2327 * transfers we need the same AUX IOs to be powered but with DC states 2328 * disabled. Accordingly use the AUX power domain here which leaves DC 2329 * states enabled. 2330 * However, for non-A AUX ports the corresponding non-EDP transcoders 2331 * would have already enabled power well 2 and DC_OFF. This means we can 2332 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a 2333 * specific AUX_IO reference without powering up any extra wells. 2334 * Note that PSR is enabled only on Port A even though this function 2335 * returns the correct domain for other ports too. 2336 */ 2337 return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A : 2338 intel_aux_power_domain(dig_port); 2339 } 2340 2341 static void intel_ddi_get_power_domains(struct intel_encoder *encoder, 2342 struct intel_crtc_state *crtc_state) 2343 { 2344 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2345 struct intel_digital_port *dig_port; 2346 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2347 2348 /* 2349 * TODO: Add support for MST encoders. Atm, the following should never 2350 * happen since fake-MST encoders don't set their get_power_domains() 2351 * hook. 2352 */ 2353 if (drm_WARN_ON(&dev_priv->drm, 2354 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))) 2355 return; 2356 2357 dig_port = enc_to_dig_port(encoder); 2358 2359 if (!intel_phy_is_tc(dev_priv, phy) || 2360 dig_port->tc_mode != TC_PORT_TBT_ALT) { 2361 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2362 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2363 dig_port->ddi_io_power_domain); 2364 } 2365 2366 /* 2367 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC 2368 * ports. 
2369 */ 2370 if (intel_crtc_has_dp_encoder(crtc_state) || 2371 intel_phy_is_tc(dev_priv, phy)) { 2372 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 2373 dig_port->aux_wakeref = 2374 intel_display_power_get(dev_priv, 2375 intel_ddi_main_link_aux_domain(dig_port)); 2376 } 2377 } 2378 2379 void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder, 2380 const struct intel_crtc_state *crtc_state) 2381 { 2382 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2383 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2384 enum port port = encoder->port; 2385 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2386 2387 if (cpu_transcoder != TRANSCODER_EDP) { 2388 if (INTEL_GEN(dev_priv) >= 12) 2389 intel_de_write(dev_priv, 2390 TRANS_CLK_SEL(cpu_transcoder), 2391 TGL_TRANS_CLK_SEL_PORT(port)); 2392 else 2393 intel_de_write(dev_priv, 2394 TRANS_CLK_SEL(cpu_transcoder), 2395 TRANS_CLK_SEL_PORT(port)); 2396 } 2397 } 2398 2399 void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state) 2400 { 2401 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 2402 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder; 2403 2404 if (cpu_transcoder != TRANSCODER_EDP) { 2405 if (INTEL_GEN(dev_priv) >= 12) 2406 intel_de_write(dev_priv, 2407 TRANS_CLK_SEL(cpu_transcoder), 2408 TGL_TRANS_CLK_SEL_DISABLED); 2409 else 2410 intel_de_write(dev_priv, 2411 TRANS_CLK_SEL(cpu_transcoder), 2412 TRANS_CLK_SEL_DISABLED); 2413 } 2414 } 2415 2416 static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv, 2417 enum port port, u8 iboost) 2418 { 2419 u32 tmp; 2420 2421 tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0); 2422 tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port)); 2423 if (iboost) 2424 tmp |= iboost << BALANCE_LEG_SHIFT(port); 2425 else 2426 tmp |= BALANCE_LEG_DISABLE(port); 2427 intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp); 2428 } 2429 2430 static void skl_ddi_set_iboost(struct intel_encoder *encoder, 2431 const struct intel_crtc_state *crtc_state, 2432 int level) 2433 { 2434 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2435 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2436 u8 iboost; 2437 2438 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2439 iboost = intel_bios_hdmi_boost_level(encoder); 2440 else 2441 iboost = intel_bios_dp_boost_level(encoder); 2442 2443 if (iboost == 0) { 2444 const struct ddi_buf_trans *ddi_translations; 2445 int n_entries; 2446 2447 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2448 ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries); 2449 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2450 ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries); 2451 else 2452 ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries); 2453 2454 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2455 return; 2456 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2457 level = n_entries - 1; 2458 2459 iboost = ddi_translations[level].i_boost; 2460 } 2461 2462 /* Make sure that the requested I_boost is valid */ 2463 if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) { 2464 drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost); 2465 return; 2466 } 2467 2468 _skl_ddi_set_iboost(dev_priv, encoder->port, iboost); 2469 2470 if (encoder->port == PORT_A && dig_port->max_lanes == 4) 2471 _skl_ddi_set_iboost(dev_priv, PORT_E, iboost); 2472 } 2473 2474 static void 
bxt_ddi_vswing_sequence(struct intel_encoder *encoder, 2475 const struct intel_crtc_state *crtc_state, 2476 int level) 2477 { 2478 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2479 const struct bxt_ddi_buf_trans *ddi_translations; 2480 enum port port = encoder->port; 2481 int n_entries; 2482 2483 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2484 ddi_translations = bxt_get_buf_trans_hdmi(encoder, &n_entries); 2485 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2486 ddi_translations = bxt_get_buf_trans_edp(encoder, &n_entries); 2487 else 2488 ddi_translations = bxt_get_buf_trans_dp(encoder, &n_entries); 2489 2490 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2491 return; 2492 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2493 level = n_entries - 1; 2494 2495 bxt_ddi_phy_set_signal_level(dev_priv, port, 2496 ddi_translations[level].margin, 2497 ddi_translations[level].scale, 2498 ddi_translations[level].enable, 2499 ddi_translations[level].deemphasis); 2500 } 2501 2502 static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp, 2503 const struct intel_crtc_state *crtc_state) 2504 { 2505 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 2506 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2507 enum port port = encoder->port; 2508 enum phy phy = intel_port_to_phy(dev_priv, port); 2509 int n_entries; 2510 2511 if (INTEL_GEN(dev_priv) >= 12) { 2512 if (intel_phy_is_combo(dev_priv, phy)) 2513 tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2514 else 2515 tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 2516 } else if (INTEL_GEN(dev_priv) == 11) { 2517 if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 2518 jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2519 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 2520 ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2521 else if (intel_phy_is_combo(dev_priv, phy)) 2522 icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2523 else 2524 icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 2525 } else if (IS_CANNONLAKE(dev_priv)) { 2526 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2527 cnl_get_buf_trans_edp(encoder, &n_entries); 2528 else 2529 cnl_get_buf_trans_dp(encoder, &n_entries); 2530 } else if (IS_GEN9_LP(dev_priv)) { 2531 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2532 bxt_get_buf_trans_edp(encoder, &n_entries); 2533 else 2534 bxt_get_buf_trans_dp(encoder, &n_entries); 2535 } else { 2536 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2537 intel_ddi_get_buf_trans_edp(encoder, &n_entries); 2538 else 2539 intel_ddi_get_buf_trans_dp(encoder, &n_entries); 2540 } 2541 2542 if (drm_WARN_ON(&dev_priv->drm, n_entries < 1)) 2543 n_entries = 1; 2544 if (drm_WARN_ON(&dev_priv->drm, 2545 n_entries > ARRAY_SIZE(index_to_dp_signal_levels))) 2546 n_entries = ARRAY_SIZE(index_to_dp_signal_levels); 2547 2548 return index_to_dp_signal_levels[n_entries - 1] & 2549 DP_TRAIN_VOLTAGE_SWING_MASK; 2550 } 2551 2552 /* 2553 * We assume that the full set of pre-emphasis values can be 2554 * used on all DDI platforms. Should that change we need to 2555 * rethink this code. 
2556 */ 2557 static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp) 2558 { 2559 return DP_TRAIN_PRE_EMPH_LEVEL_3; 2560 } 2561 2562 static void cnl_ddi_vswing_program(struct intel_encoder *encoder, 2563 const struct intel_crtc_state *crtc_state, 2564 int level) 2565 { 2566 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2567 const struct cnl_ddi_buf_trans *ddi_translations; 2568 enum port port = encoder->port; 2569 int n_entries, ln; 2570 u32 val; 2571 2572 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2573 ddi_translations = cnl_get_buf_trans_hdmi(encoder, &n_entries); 2574 else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2575 ddi_translations = cnl_get_buf_trans_edp(encoder, &n_entries); 2576 else 2577 ddi_translations = cnl_get_buf_trans_dp(encoder, &n_entries); 2578 2579 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2580 return; 2581 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2582 level = n_entries - 1; 2583 2584 /* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */ 2585 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2586 val &= ~SCALING_MODE_SEL_MASK; 2587 val |= SCALING_MODE_SEL(2); 2588 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2589 2590 /* Program PORT_TX_DW2 */ 2591 val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port)); 2592 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 2593 RCOMP_SCALAR_MASK); 2594 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 2595 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 2596 /* Rcomp scalar is fixed as 0x98 for every table entry */ 2597 val |= RCOMP_SCALAR(0x98); 2598 intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val); 2599 2600 /* Program PORT_TX_DW4 */ 2601 /* We cannot write to GRP. It would overwrite individual loadgen */ 2602 for (ln = 0; ln < 4; ln++) { 2603 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 2604 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 2605 CURSOR_COEFF_MASK); 2606 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 2607 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 2608 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 2609 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 2610 } 2611 2612 /* Program PORT_TX_DW5 */ 2613 /* All DW5 values are fixed for every table entry */ 2614 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2615 val &= ~RTERM_SELECT_MASK; 2616 val |= RTERM_SELECT(6); 2617 val |= TAP3_DISABLE; 2618 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2619 2620 /* Program PORT_TX_DW7 */ 2621 val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port)); 2622 val &= ~N_SCALAR_MASK; 2623 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 2624 intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val); 2625 } 2626 2627 static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder, 2628 const struct intel_crtc_state *crtc_state, 2629 int level) 2630 { 2631 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2632 enum port port = encoder->port; 2633 int width, rate, ln; 2634 u32 val; 2635 2636 width = crtc_state->lane_count; 2637 rate = crtc_state->port_clock; 2638 2639 /* 2640 * 1. If port type is eDP or DP, 2641 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 2642 * else clear to 0b.
2643 */ 2644 val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port)); 2645 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2646 val &= ~COMMON_KEEPER_EN; 2647 else 2648 val |= COMMON_KEEPER_EN; 2649 intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val); 2650 2651 /* 2. Program loadgen select */ 2652 /* 2653 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 2654 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 2655 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 2656 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 2657 */ 2658 for (ln = 0; ln <= 3; ln++) { 2659 val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port)); 2660 val &= ~LOADGEN_SELECT; 2661 2662 if ((rate <= 600000 && width == 4 && ln >= 1) || 2663 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 2664 val |= LOADGEN_SELECT; 2665 } 2666 intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val); 2667 } 2668 2669 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 2670 val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5); 2671 val |= SUS_CLOCK_CONFIG; 2672 intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val); 2673 2674 /* 4. Clear training enable to change swing values */ 2675 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2676 val &= ~TX_TRAINING_EN; 2677 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2678 2679 /* 5. Program swing and de-emphasis */ 2680 cnl_ddi_vswing_program(encoder, crtc_state, level); 2681 2682 /* 6. Set training enable to trigger update */ 2683 val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port)); 2684 val |= TX_TRAINING_EN; 2685 intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val); 2686 } 2687 2688 static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder, 2689 const struct intel_crtc_state *crtc_state, 2690 int level) 2691 { 2692 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2693 const struct cnl_ddi_buf_trans *ddi_translations; 2694 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2695 int n_entries, ln; 2696 u32 val; 2697 2698 if (INTEL_GEN(dev_priv) >= 12) 2699 ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2700 else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE)) 2701 ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2702 else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE)) 2703 ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2704 else 2705 ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries); 2706 2707 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2708 return; 2709 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2710 level = n_entries - 1; 2711 2712 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) { 2713 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2714 2715 val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED; 2716 intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations); 2717 intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val, 2718 intel_dp->hobl_active ? 
val : 0); 2719 } 2720 2721 /* Set PORT_TX_DW5 */ 2722 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2723 val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK | 2724 TAP2_DISABLE | TAP3_DISABLE); 2725 val |= SCALING_MODE_SEL(0x2); 2726 val |= RTERM_SELECT(0x6); 2727 val |= TAP3_DISABLE; 2728 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2729 2730 /* Program PORT_TX_DW2 */ 2731 val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy)); 2732 val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK | 2733 RCOMP_SCALAR_MASK); 2734 val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel); 2735 val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel); 2736 /* Program Rcomp scalar for every table entry */ 2737 val |= RCOMP_SCALAR(0x98); 2738 intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val); 2739 2740 /* Program PORT_TX_DW4 */ 2741 /* We cannot write to GRP. It would overwrite individual loadgen. */ 2742 for (ln = 0; ln <= 3; ln++) { 2743 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 2744 val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK | 2745 CURSOR_COEFF_MASK); 2746 val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1); 2747 val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2); 2748 val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff); 2749 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 2750 } 2751 2752 /* Program PORT_TX_DW7 */ 2753 val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy)); 2754 val &= ~N_SCALAR_MASK; 2755 val |= N_SCALAR(ddi_translations[level].dw7_n_scalar); 2756 intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val); 2757 } 2758 2759 static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2760 const struct intel_crtc_state *crtc_state, 2761 int level) 2762 { 2763 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2764 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2765 int width, rate, ln; 2766 u32 val; 2767 2768 width = crtc_state->lane_count; 2769 rate = crtc_state->port_clock; 2770 2771 /* 2772 * 1. If port type is eDP or DP, 2773 * set PORT_PCS_DW1 cmnkeeper_enable to 1b, 2774 * else clear to 0b. 2775 */ 2776 val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy)); 2777 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 2778 val &= ~COMMON_KEEPER_EN; 2779 else 2780 val |= COMMON_KEEPER_EN; 2781 intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val); 2782 2783 /* 2. Program loadgen select */ 2784 /* 2785 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes 2786 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1) 2787 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0) 2788 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0) 2789 */ 2790 for (ln = 0; ln <= 3; ln++) { 2791 val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy)); 2792 val &= ~LOADGEN_SELECT; 2793 2794 if ((rate <= 600000 && width == 4 && ln >= 1) || 2795 (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) { 2796 val |= LOADGEN_SELECT; 2797 } 2798 intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val); 2799 } 2800 2801 /* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */ 2802 val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy)); 2803 val |= SUS_CLOCK_CONFIG; 2804 intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val); 2805 2806 /* 4. Clear training enable to change swing values */ 2807 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2808 val &= ~TX_TRAINING_EN; 2809 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2810 2811 /* 5. 
Program swing and de-emphasis */ 2812 icl_ddi_combo_vswing_program(encoder, crtc_state, level); 2813 2814 /* 6. Set training enable to trigger update */ 2815 val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy)); 2816 val |= TX_TRAINING_EN; 2817 intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val); 2818 } 2819 2820 static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2821 const struct intel_crtc_state *crtc_state, 2822 int level) 2823 { 2824 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2825 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 2826 const struct icl_mg_phy_ddi_buf_trans *ddi_translations; 2827 int n_entries, ln; 2828 u32 val; 2829 2830 ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries); 2831 2832 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2833 return; 2834 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2835 level = n_entries - 1; 2836 2837 /* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */ 2838 for (ln = 0; ln < 2; ln++) { 2839 val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port)); 2840 val &= ~CRI_USE_FS32; 2841 intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val); 2842 2843 val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port)); 2844 val &= ~CRI_USE_FS32; 2845 intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val); 2846 } 2847 2848 /* Program MG_TX_SWINGCTRL with values from vswing table */ 2849 for (ln = 0; ln < 2; ln++) { 2850 val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port)); 2851 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 2852 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 2853 ddi_translations[level].cri_txdeemph_override_17_12); 2854 intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val); 2855 2856 val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port)); 2857 val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK; 2858 val |= CRI_TXDEEMPH_OVERRIDE_17_12( 2859 ddi_translations[level].cri_txdeemph_override_17_12); 2860 intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val); 2861 } 2862 2863 /* Program MG_TX_DRVCTRL with values from vswing table */ 2864 for (ln = 0; ln < 2; ln++) { 2865 val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port)); 2866 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 2867 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 2868 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 2869 ddi_translations[level].cri_txdeemph_override_5_0) | 2870 CRI_TXDEEMPH_OVERRIDE_11_6( 2871 ddi_translations[level].cri_txdeemph_override_11_6) | 2872 CRI_TXDEEMPH_OVERRIDE_EN; 2873 intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val); 2874 2875 val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port)); 2876 val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK | 2877 CRI_TXDEEMPH_OVERRIDE_5_0_MASK); 2878 val |= CRI_TXDEEMPH_OVERRIDE_5_0( 2879 ddi_translations[level].cri_txdeemph_override_5_0) | 2880 CRI_TXDEEMPH_OVERRIDE_11_6( 2881 ddi_translations[level].cri_txdeemph_override_11_6) | 2882 CRI_TXDEEMPH_OVERRIDE_EN; 2883 intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val); 2884 2885 /* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */ 2886 } 2887 2888 /* 2889 * Program MG_CLKHUB<LN, port being used> with the value from the frequency table. 2890 * In case of Legacy mode on the MG PHY, both TX1 and TX2 are enabled, so use the 2891 * values from the table for which both TX1 and TX2 are enabled.
2892 */ 2893 for (ln = 0; ln < 2; ln++) { 2894 val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port)); 2895 if (crtc_state->port_clock < 300000) 2896 val |= CFG_LOW_RATE_LKREN_EN; 2897 else 2898 val &= ~CFG_LOW_RATE_LKREN_EN; 2899 intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val); 2900 } 2901 2902 /* Program the MG_TX_DCC<LN, port being used> based on the link frequency */ 2903 for (ln = 0; ln < 2; ln++) { 2904 val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port)); 2905 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 2906 if (crtc_state->port_clock <= 500000) { 2907 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 2908 } else { 2909 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 2910 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 2911 } 2912 intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val); 2913 2914 val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port)); 2915 val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK; 2916 if (crtc_state->port_clock <= 500000) { 2917 val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN; 2918 } else { 2919 val |= CFG_AMI_CK_DIV_OVERRIDE_EN | 2920 CFG_AMI_CK_DIV_OVERRIDE_VAL(1); 2921 } 2922 intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val); 2923 } 2924 2925 /* Program MG_TX_PISO_READLOAD with values from vswing table */ 2926 for (ln = 0; ln < 2; ln++) { 2927 val = intel_de_read(dev_priv, 2928 MG_TX1_PISO_READLOAD(ln, tc_port)); 2929 val |= CRI_CALCINIT; 2930 intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port), 2931 val); 2932 2933 val = intel_de_read(dev_priv, 2934 MG_TX2_PISO_READLOAD(ln, tc_port)); 2935 val |= CRI_CALCINIT; 2936 intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port), 2937 val); 2938 } 2939 } 2940 2941 static void icl_ddi_vswing_sequence(struct intel_encoder *encoder, 2942 const struct intel_crtc_state *crtc_state, 2943 int level) 2944 { 2945 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2946 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2947 2948 if (intel_phy_is_combo(dev_priv, phy)) 2949 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2950 else 2951 icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level); 2952 } 2953 2954 static void 2955 tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder, 2956 const struct intel_crtc_state *crtc_state, 2957 int level) 2958 { 2959 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2960 enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port); 2961 const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations; 2962 u32 val, dpcnt_mask, dpcnt_val; 2963 int n_entries, ln; 2964 2965 ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries); 2966 2967 if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations)) 2968 return; 2969 if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries)) 2970 level = n_entries - 1; 2971 2972 dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK | 2973 DKL_TX_DE_EMPAHSIS_COEFF_MASK | 2974 DKL_TX_VSWING_CONTROL_MASK); 2975 dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control); 2976 dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control); 2977 dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control); 2978 2979 for (ln = 0; ln < 2; ln++) { 2980 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2981 HIP_INDEX_VAL(tc_port, ln)); 2982 2983 intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0); 2984 2985 /* All the registers are RMW */ 2986 val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port)); 2987 val &= ~dpcnt_mask; 2988 val |= dpcnt_val; 2989 intel_de_write(dev_priv, 
DKL_TX_DPCNTL0(tc_port), val); 2990 2991 val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port)); 2992 val &= ~dpcnt_mask; 2993 val |= dpcnt_val; 2994 intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val); 2995 2996 val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port)); 2997 val &= ~DKL_TX_DP20BITMODE; 2998 intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val); 2999 } 3000 } 3001 3002 static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder, 3003 const struct intel_crtc_state *crtc_state, 3004 int level) 3005 { 3006 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3007 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3008 3009 if (intel_phy_is_combo(dev_priv, phy)) 3010 icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level); 3011 else 3012 tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level); 3013 } 3014 3015 static int translate_signal_level(struct intel_dp *intel_dp, 3016 u8 signal_levels) 3017 { 3018 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3019 int i; 3020 3021 for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) { 3022 if (index_to_dp_signal_levels[i] == signal_levels) 3023 return i; 3024 } 3025 3026 drm_WARN(&i915->drm, 1, 3027 "Unsupported voltage swing/pre-emphasis level: 0x%x\n", 3028 signal_levels); 3029 3030 return 0; 3031 } 3032 3033 static int intel_ddi_dp_level(struct intel_dp *intel_dp) 3034 { 3035 u8 train_set = intel_dp->train_set[0]; 3036 u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK | 3037 DP_TRAIN_PRE_EMPHASIS_MASK); 3038 3039 return translate_signal_level(intel_dp, signal_levels); 3040 } 3041 3042 static void 3043 tgl_set_signal_levels(struct intel_dp *intel_dp, 3044 const struct intel_crtc_state *crtc_state) 3045 { 3046 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3047 int level = intel_ddi_dp_level(intel_dp); 3048 3049 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3050 } 3051 3052 static void 3053 icl_set_signal_levels(struct intel_dp *intel_dp, 3054 const struct intel_crtc_state *crtc_state) 3055 { 3056 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3057 int level = intel_ddi_dp_level(intel_dp); 3058 3059 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3060 } 3061 3062 static void 3063 cnl_set_signal_levels(struct intel_dp *intel_dp, 3064 const struct intel_crtc_state *crtc_state) 3065 { 3066 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3067 int level = intel_ddi_dp_level(intel_dp); 3068 3069 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3070 } 3071 3072 static void 3073 bxt_set_signal_levels(struct intel_dp *intel_dp, 3074 const struct intel_crtc_state *crtc_state) 3075 { 3076 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3077 int level = intel_ddi_dp_level(intel_dp); 3078 3079 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3080 } 3081 3082 static void 3083 hsw_set_signal_levels(struct intel_dp *intel_dp, 3084 const struct intel_crtc_state *crtc_state) 3085 { 3086 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3087 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3088 int level = intel_ddi_dp_level(intel_dp); 3089 enum port port = encoder->port; 3090 u32 signal_levels; 3091 3092 signal_levels = DDI_BUF_TRANS_SELECT(level); 3093 3094 drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n", 3095 signal_levels); 3096 3097 intel_dp->DP &= ~DDI_BUF_EMP_MASK; 3098 intel_dp->DP |= signal_levels; 3099 3100 if (IS_GEN9_BC(dev_priv)) 3101 
skl_ddi_set_iboost(encoder, crtc_state, level); 3102 3103 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 3104 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 3105 } 3106 3107 static u32 icl_dpclka_cfgcr0_clk_off(struct drm_i915_private *dev_priv, 3108 enum phy phy) 3109 { 3110 if (IS_ROCKETLAKE(dev_priv)) { 3111 return RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3112 } else if (intel_phy_is_combo(dev_priv, phy)) { 3113 return ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3114 } else if (intel_phy_is_tc(dev_priv, phy)) { 3115 enum tc_port tc_port = intel_port_to_tc(dev_priv, 3116 (enum port)phy); 3117 3118 return ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port); 3119 } 3120 3121 return 0; 3122 } 3123 3124 static void dg1_map_plls_to_ports(struct intel_encoder *encoder, 3125 const struct intel_crtc_state *crtc_state) 3126 { 3127 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3128 struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3129 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3130 u32 val; 3131 3132 /* 3133 * If we fail this, something went very wrong: first 2 PLLs should be 3134 * used by first 2 phys and last 2 PLLs by last phys 3135 */ 3136 if (drm_WARN_ON(&dev_priv->drm, 3137 (pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) || 3138 (pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C))) 3139 return; 3140 3141 mutex_lock(&dev_priv->dpll.lock); 3142 3143 val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3144 drm_WARN_ON(&dev_priv->drm, 3145 (val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)) == 0); 3146 3147 val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3148 val |= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3149 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3150 intel_de_posting_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3151 3152 val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3153 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3154 3155 mutex_unlock(&dev_priv->dpll.lock); 3156 } 3157 3158 static void icl_map_plls_to_ports(struct intel_encoder *encoder, 3159 const struct intel_crtc_state *crtc_state) 3160 { 3161 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3162 struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3163 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3164 u32 val; 3165 3166 mutex_lock(&dev_priv->dpll.lock); 3167 3168 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3169 drm_WARN_ON(&dev_priv->drm, 3170 (val & icl_dpclka_cfgcr0_clk_off(dev_priv, phy)) == 0); 3171 3172 if (intel_phy_is_combo(dev_priv, phy)) { 3173 u32 mask, sel; 3174 3175 if (IS_ROCKETLAKE(dev_priv)) { 3176 mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3177 sel = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3178 } else { 3179 mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy); 3180 sel = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy); 3181 } 3182 3183 /* 3184 * Even though this register references DDIs, note that we 3185 * want to pass the PHY rather than the port (DDI). For 3186 * ICL, port=phy in all cases so it doesn't matter, but for 3187 * EHL the bspec notes the following: 3188 * 3189 * "DDID clock tied to DDIA clock, so DPCLKA_CFGCR0 DDIA 3190 * Clock Select chooses the PLL for both DDIA and DDID and 3191 * drives port A in all cases." 
3192 */ 3193 val &= ~mask; 3194 val |= sel; 3195 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3196 intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0); 3197 } 3198 3199 val &= ~icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3200 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3201 3202 mutex_unlock(&dev_priv->dpll.lock); 3203 } 3204 3205 static void dg1_unmap_plls_to_ports(struct intel_encoder *encoder) 3206 { 3207 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3208 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3209 3210 mutex_lock(&dev_priv->dpll.lock); 3211 3212 intel_de_rmw(dev_priv, DG1_DPCLKA_CFGCR0(phy), 0, 3213 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 3214 3215 mutex_unlock(&dev_priv->dpll.lock); 3216 } 3217 3218 static void icl_unmap_plls_to_ports(struct intel_encoder *encoder) 3219 { 3220 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3221 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3222 u32 val; 3223 3224 mutex_lock(&dev_priv->dpll.lock); 3225 3226 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3227 val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3228 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3229 3230 mutex_unlock(&dev_priv->dpll.lock); 3231 } 3232 3233 static void dg1_sanitize_port_clk_off(struct drm_i915_private *dev_priv, 3234 u32 port_mask, bool ddi_clk_needed) 3235 { 3236 enum port port; 3237 u32 val; 3238 3239 for_each_port_masked(port, port_mask) { 3240 enum phy phy = intel_port_to_phy(dev_priv, port); 3241 bool ddi_clk_off; 3242 3243 val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy)); 3244 ddi_clk_off = val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3245 3246 if (ddi_clk_needed == !ddi_clk_off) 3247 continue; 3248 3249 /* 3250 * Punt on the case now where clock is gated, but it would 3251 * be needed by the port. Something else is really broken then. 3252 */ 3253 if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed)) 3254 continue; 3255 3256 drm_notice(&dev_priv->drm, 3257 "PHY %c is disabled with an ungated DDI clock, gate it\n", 3258 phy_name(phy)); 3259 val |= DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy); 3260 intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val); 3261 } 3262 } 3263 3264 static void icl_sanitize_port_clk_off(struct drm_i915_private *dev_priv, 3265 u32 port_mask, bool ddi_clk_needed) 3266 { 3267 enum port port; 3268 u32 val; 3269 3270 val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0); 3271 for_each_port_masked(port, port_mask) { 3272 enum phy phy = intel_port_to_phy(dev_priv, port); 3273 bool ddi_clk_off = val & icl_dpclka_cfgcr0_clk_off(dev_priv, 3274 phy); 3275 3276 if (ddi_clk_needed == !ddi_clk_off) 3277 continue; 3278 3279 /* 3280 * Punt on the case now where clock is gated, but it would 3281 * be needed by the port. Something else is really broken then. 3282 */ 3283 if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed)) 3284 continue; 3285 3286 drm_notice(&dev_priv->drm, 3287 "PHY %c is disabled/in DSI mode with an ungated DDI clock, gate it\n", 3288 phy_name(phy)); 3289 val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy); 3290 intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val); 3291 } 3292 } 3293 3294 void icl_sanitize_encoder_pll_mapping(struct intel_encoder *encoder) 3295 { 3296 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3297 u32 port_mask; 3298 bool ddi_clk_needed; 3299 3300 /* 3301 * In case of DP MST, we sanitize the primary encoder only, not the 3302 * virtual ones. 
3303 */ 3304 if (encoder->type == INTEL_OUTPUT_DP_MST) 3305 return; 3306 3307 if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) { 3308 u8 pipe_mask; 3309 bool is_mst; 3310 3311 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 3312 /* 3313 * In the unlikely case that BIOS enables DP in MST mode, just 3314 * warn since our MST HW readout is incomplete. 3315 */ 3316 if (drm_WARN_ON(&dev_priv->drm, is_mst)) 3317 return; 3318 } 3319 3320 port_mask = BIT(encoder->port); 3321 ddi_clk_needed = encoder->base.crtc; 3322 3323 if (encoder->type == INTEL_OUTPUT_DSI) { 3324 struct intel_encoder *other_encoder; 3325 3326 port_mask = intel_dsi_encoder_ports(encoder); 3327 /* 3328 * Sanity check that we haven't incorrectly registered another 3329 * encoder using any of the ports of this DSI encoder. 3330 */ 3331 for_each_intel_encoder(&dev_priv->drm, other_encoder) { 3332 if (other_encoder == encoder) 3333 continue; 3334 3335 if (drm_WARN_ON(&dev_priv->drm, 3336 port_mask & BIT(other_encoder->port))) 3337 return; 3338 } 3339 /* 3340 * For DSI we keep the ddi clocks gated 3341 * except during enable/disable sequence. 3342 */ 3343 ddi_clk_needed = false; 3344 } 3345 3346 if (IS_DG1(dev_priv)) 3347 dg1_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed); 3348 else 3349 icl_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed); 3350 } 3351 3352 static void intel_ddi_clk_select(struct intel_encoder *encoder, 3353 const struct intel_crtc_state *crtc_state) 3354 { 3355 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3356 enum port port = encoder->port; 3357 enum phy phy = intel_port_to_phy(dev_priv, port); 3358 u32 val; 3359 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 3360 3361 if (drm_WARN_ON(&dev_priv->drm, !pll)) 3362 return; 3363 3364 mutex_lock(&dev_priv->dpll.lock); 3365 3366 if (INTEL_GEN(dev_priv) >= 11) { 3367 if (!intel_phy_is_combo(dev_priv, phy)) 3368 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3369 icl_pll_to_ddi_clk_sel(encoder, crtc_state)); 3370 else if (IS_JSL_EHL(dev_priv) && port >= PORT_C) 3371 /* 3372 * MG does not exist but the programming is required 3373 * to ungate DDIC and DDID 3374 */ 3375 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3376 DDI_CLK_SEL_MG); 3377 } else if (IS_CANNONLAKE(dev_priv)) { 3378 /* Configure DPCLKA_CFGCR0 to map the DPLL to the DDI. */ 3379 val = intel_de_read(dev_priv, DPCLKA_CFGCR0); 3380 val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port); 3381 val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port); 3382 intel_de_write(dev_priv, DPCLKA_CFGCR0, val); 3383 3384 /* 3385 * Configure DPCLKA_CFGCR0 to turn on the clock for the DDI. 3386 * This step and the step before must be done with separate 3387 * register writes. 
3388 */ 3389 val = intel_de_read(dev_priv, DPCLKA_CFGCR0); 3390 val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port); 3391 intel_de_write(dev_priv, DPCLKA_CFGCR0, val); 3392 } else if (IS_GEN9_BC(dev_priv)) { 3393 /* DDI -> PLL mapping */ 3394 val = intel_de_read(dev_priv, DPLL_CTRL2); 3395 3396 val &= ~(DPLL_CTRL2_DDI_CLK_OFF(port) | 3397 DPLL_CTRL2_DDI_CLK_SEL_MASK(port)); 3398 val |= (DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) | 3399 DPLL_CTRL2_DDI_SEL_OVERRIDE(port)); 3400 3401 intel_de_write(dev_priv, DPLL_CTRL2, val); 3402 3403 } else if (INTEL_GEN(dev_priv) < 9) { 3404 intel_de_write(dev_priv, PORT_CLK_SEL(port), 3405 hsw_pll_to_ddi_pll_sel(pll)); 3406 } 3407 3408 mutex_unlock(&dev_priv->dpll.lock); 3409 } 3410 3411 static void intel_ddi_clk_disable(struct intel_encoder *encoder) 3412 { 3413 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3414 enum port port = encoder->port; 3415 enum phy phy = intel_port_to_phy(dev_priv, port); 3416 3417 if (INTEL_GEN(dev_priv) >= 11) { 3418 if (!intel_phy_is_combo(dev_priv, phy) || 3419 (IS_JSL_EHL(dev_priv) && port >= PORT_C)) 3420 intel_de_write(dev_priv, DDI_CLK_SEL(port), 3421 DDI_CLK_SEL_NONE); 3422 } else if (IS_CANNONLAKE(dev_priv)) { 3423 intel_de_write(dev_priv, DPCLKA_CFGCR0, 3424 intel_de_read(dev_priv, DPCLKA_CFGCR0) | DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 3425 } else if (IS_GEN9_BC(dev_priv)) { 3426 intel_de_write(dev_priv, DPLL_CTRL2, 3427 intel_de_read(dev_priv, DPLL_CTRL2) | DPLL_CTRL2_DDI_CLK_OFF(port)); 3428 } else if (INTEL_GEN(dev_priv) < 9) { 3429 intel_de_write(dev_priv, PORT_CLK_SEL(port), 3430 PORT_CLK_SEL_NONE); 3431 } 3432 } 3433 3434 static void 3435 icl_program_mg_dp_mode(struct intel_digital_port *dig_port, 3436 const struct intel_crtc_state *crtc_state) 3437 { 3438 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 3439 enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port); 3440 u32 ln0, ln1, pin_assignment; 3441 u8 width; 3442 3443 if (dig_port->tc_mode == TC_PORT_TBT_ALT) 3444 return; 3445 3446 if (INTEL_GEN(dev_priv) >= 12) { 3447 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3448 HIP_INDEX_VAL(tc_port, 0x0)); 3449 ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 3450 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3451 HIP_INDEX_VAL(tc_port, 0x1)); 3452 ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 3453 } else { 3454 ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port)); 3455 ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port)); 3456 } 3457 3458 ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 3459 ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 3460 3461 /* DPPATC */ 3462 pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port); 3463 width = crtc_state->lane_count; 3464 3465 switch (pin_assignment) { 3466 case 0x0: 3467 drm_WARN_ON(&dev_priv->drm, 3468 dig_port->tc_mode != TC_PORT_LEGACY); 3469 if (width == 1) { 3470 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3471 } else { 3472 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3473 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3474 } 3475 break; 3476 case 0x1: 3477 if (width == 4) { 3478 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3479 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3480 } 3481 break; 3482 case 0x2: 3483 if (width == 2) { 3484 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3485 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3486 } 3487 break; 3488 case 0x3: 3489 case 0x5: 3490 if (width == 1) { 3491 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 3492 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3493 } else { 3494 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3495 ln1 |= 
MG_DP_MODE_CFG_DP_X2_MODE; 3496 } 3497 break; 3498 case 0x4: 3499 case 0x6: 3500 if (width == 1) { 3501 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 3502 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 3503 } else { 3504 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 3505 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 3506 } 3507 break; 3508 default: 3509 MISSING_CASE(pin_assignment); 3510 } 3511 3512 if (INTEL_GEN(dev_priv) >= 12) { 3513 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3514 HIP_INDEX_VAL(tc_port, 0x0)); 3515 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0); 3516 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 3517 HIP_INDEX_VAL(tc_port, 0x1)); 3518 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1); 3519 } else { 3520 intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0); 3521 intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1); 3522 } 3523 } 3524 3525 static enum transcoder 3526 tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state) 3527 { 3528 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 3529 return crtc_state->mst_master_transcoder; 3530 else 3531 return crtc_state->cpu_transcoder; 3532 } 3533 3534 i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder, 3535 const struct intel_crtc_state *crtc_state) 3536 { 3537 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3538 3539 if (INTEL_GEN(dev_priv) >= 12) 3540 return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state)); 3541 else 3542 return DP_TP_CTL(encoder->port); 3543 } 3544 3545 i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder, 3546 const struct intel_crtc_state *crtc_state) 3547 { 3548 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3549 3550 if (INTEL_GEN(dev_priv) >= 12) 3551 return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state)); 3552 else 3553 return DP_TP_STATUS(encoder->port); 3554 } 3555 3556 static void intel_dp_sink_set_msa_timing_par_ignore_state(struct intel_dp *intel_dp, 3557 const struct intel_crtc_state *crtc_state, 3558 bool enable) 3559 { 3560 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3561 3562 if (!crtc_state->vrr.enable) 3563 return; 3564 3565 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_DOWNSPREAD_CTRL, 3566 enable ? DP_MSA_TIMING_PAR_IGNORE_EN : 0) <= 0) 3567 drm_dbg_kms(&i915->drm, 3568 "Failed to set MSA_TIMING_PAR_IGNORE %s in the sink\n", 3569 enable ? 
"enable" : "disable"); 3570 } 3571 3572 static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp, 3573 const struct intel_crtc_state *crtc_state) 3574 { 3575 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 3576 3577 if (!crtc_state->fec_enable) 3578 return; 3579 3580 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0) 3581 drm_dbg_kms(&i915->drm, 3582 "Failed to set FEC_READY in the sink\n"); 3583 } 3584 3585 static void intel_ddi_enable_fec(struct intel_encoder *encoder, 3586 const struct intel_crtc_state *crtc_state) 3587 { 3588 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3589 struct intel_dp *intel_dp; 3590 u32 val; 3591 3592 if (!crtc_state->fec_enable) 3593 return; 3594 3595 intel_dp = enc_to_intel_dp(encoder); 3596 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3597 val |= DP_TP_CTL_FEC_ENABLE; 3598 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3599 } 3600 3601 static void intel_ddi_disable_fec_state(struct intel_encoder *encoder, 3602 const struct intel_crtc_state *crtc_state) 3603 { 3604 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3605 struct intel_dp *intel_dp; 3606 u32 val; 3607 3608 if (!crtc_state->fec_enable) 3609 return; 3610 3611 intel_dp = enc_to_intel_dp(encoder); 3612 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3613 val &= ~DP_TP_CTL_FEC_ENABLE; 3614 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3615 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3616 } 3617 3618 static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state, 3619 struct intel_encoder *encoder, 3620 const struct intel_crtc_state *crtc_state, 3621 const struct drm_connector_state *conn_state) 3622 { 3623 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3624 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3625 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3626 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3627 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 3628 int level = intel_ddi_dp_level(intel_dp); 3629 3630 intel_dp_set_link_params(intel_dp, 3631 crtc_state->port_clock, 3632 crtc_state->lane_count); 3633 3634 /* 3635 * 1. Enable Power Wells 3636 * 3637 * This was handled at the beginning of intel_atomic_commit_tail(), 3638 * before we called down into this function. 3639 */ 3640 3641 /* 2. Enable Panel Power if PPS is required */ 3642 intel_pps_on(intel_dp); 3643 3644 /* 3645 * 3. For non-TBT Type-C ports, set FIA lane count 3646 * (DFLEXDPSP.DPX4TXLATC) 3647 * 3648 * This was done before tgl_ddi_pre_enable_dp by 3649 * hsw_crtc_enable()->intel_encoders_pre_pll_enable(). 3650 */ 3651 3652 /* 3653 * 4. Enable the port PLL. 3654 * 3655 * The PLL enabling itself was already done before this function by 3656 * hsw_crtc_enable()->intel_enable_shared_dpll(). We need only 3657 * configure the PLL to port mapping here. 3658 */ 3659 intel_ddi_clk_select(encoder, crtc_state); 3660 3661 /* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */ 3662 if (!intel_phy_is_tc(dev_priv, phy) || 3663 dig_port->tc_mode != TC_PORT_TBT_ALT) { 3664 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 3665 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 3666 dig_port->ddi_io_power_domain); 3667 } 3668 3669 /* 6. Program DP_MODE */ 3670 icl_program_mg_dp_mode(dig_port, crtc_state); 3671 3672 /* 3673 * 7. 
The rest of the below are substeps under the bspec's "Enable and 3674 * Train Display Port" step. Note that steps that are specific to 3675 * MST will be handled by intel_mst_pre_enable_dp() before/after it 3676 * calls into this function. Also intel_mst_pre_enable_dp() only calls 3677 * us when active_mst_links==0, so any steps designated for "single 3678 * stream or multi-stream master transcoder" can just be performed 3679 * unconditionally here. 3680 */ 3681 3682 /* 3683 * 7.a Configure Transcoder Clock Select to direct the Port clock to the 3684 * Transcoder. 3685 */ 3686 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3687 3688 /* 3689 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST 3690 * Transport Select 3691 */ 3692 intel_ddi_config_transcoder_func(encoder, crtc_state); 3693 3694 /* 3695 * 7.c Configure & enable DP_TP_CTL with link training pattern 1 3696 * selected 3697 * 3698 * This will be handled by the intel_dp_start_link_train() farther 3699 * down this function. 3700 */ 3701 3702 /* 7.e Configure voltage swing and related IO settings */ 3703 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3704 3705 /* 3706 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up 3707 * the used lanes of the DDI. 3708 */ 3709 if (intel_phy_is_combo(dev_priv, phy)) { 3710 bool lane_reversal = 3711 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 3712 3713 intel_combo_phy_power_up_lanes(dev_priv, phy, false, 3714 crtc_state->lane_count, 3715 lane_reversal); 3716 } 3717 3718 /* 3719 * 7.g Configure and enable DDI_BUF_CTL 3720 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout 3721 * after 500 us. 3722 * 3723 * We only configure what the register value will be here. Actual 3724 * enabling happens during link training farther down. 
3725 */ 3726 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 3727 3728 if (!is_mst) 3729 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 3730 3731 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 3732 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true); 3733 /* 3734 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit 3735 * in the FEC_CONFIGURATION register to 1 before initiating link 3736 * training 3737 */ 3738 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 3739 3740 intel_dp_check_frl_training(intel_dp); 3741 intel_dp_pcon_dsc_configure(intel_dp, crtc_state); 3742 3743 /* 3744 * 7.i Follow DisplayPort specification training sequence (see notes for 3745 * failure handling) 3746 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle 3747 * Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent) 3748 * (timeout after 800 us) 3749 */ 3750 intel_dp_start_link_train(intel_dp, crtc_state); 3751 3752 /* 7.k Set DP_TP_CTL link training to Normal */ 3753 if (!is_trans_port_sync_mode(crtc_state)) 3754 intel_dp_stop_link_train(intel_dp, crtc_state); 3755 3756 /* 7.l Configure and enable FEC if needed */ 3757 intel_ddi_enable_fec(encoder, crtc_state); 3758 if (!crtc_state->bigjoiner) 3759 intel_dsc_enable(encoder, crtc_state); 3760 } 3761 3762 static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state, 3763 struct intel_encoder *encoder, 3764 const struct intel_crtc_state *crtc_state, 3765 const struct drm_connector_state *conn_state) 3766 { 3767 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3768 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3769 enum port port = encoder->port; 3770 enum phy phy = intel_port_to_phy(dev_priv, port); 3771 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3772 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 3773 int level = intel_ddi_dp_level(intel_dp); 3774 3775 if (INTEL_GEN(dev_priv) < 11) 3776 drm_WARN_ON(&dev_priv->drm, 3777 is_mst && (port == PORT_A || port == PORT_E)); 3778 else 3779 drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A); 3780 3781 intel_dp_set_link_params(intel_dp, 3782 crtc_state->port_clock, 3783 crtc_state->lane_count); 3784 3785 intel_pps_on(intel_dp); 3786 3787 intel_ddi_clk_select(encoder, crtc_state); 3788 3789 if (!intel_phy_is_tc(dev_priv, phy) || 3790 dig_port->tc_mode != TC_PORT_TBT_ALT) { 3791 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 3792 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 3793 dig_port->ddi_io_power_domain); 3794 } 3795 3796 icl_program_mg_dp_mode(dig_port, crtc_state); 3797 3798 if (INTEL_GEN(dev_priv) >= 11) 3799 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3800 else if (IS_CANNONLAKE(dev_priv)) 3801 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3802 else if (IS_GEN9_LP(dev_priv)) 3803 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3804 else 3805 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 3806 3807 if (intel_phy_is_combo(dev_priv, phy)) { 3808 bool lane_reversal = 3809 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 3810 3811 intel_combo_phy_power_up_lanes(dev_priv, phy, false, 3812 crtc_state->lane_count, 3813 lane_reversal); 3814 } 3815 3816 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 3817 if (!is_mst) 3818 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 3819 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 3820 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, 3821 true); 3822 
intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 3823 intel_dp_start_link_train(intel_dp, crtc_state); 3824 if ((port != PORT_A || INTEL_GEN(dev_priv) >= 9) && 3825 !is_trans_port_sync_mode(crtc_state)) 3826 intel_dp_stop_link_train(intel_dp, crtc_state); 3827 3828 intel_ddi_enable_fec(encoder, crtc_state); 3829 3830 if (!is_mst) 3831 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3832 3833 if (!crtc_state->bigjoiner) 3834 intel_dsc_enable(encoder, crtc_state); 3835 } 3836 3837 static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state, 3838 struct intel_encoder *encoder, 3839 const struct intel_crtc_state *crtc_state, 3840 const struct drm_connector_state *conn_state) 3841 { 3842 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3843 3844 if (INTEL_GEN(dev_priv) >= 12) 3845 tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 3846 else 3847 hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 3848 3849 /* For MST, the MSA parameters are set after the Virtual Channel has been 3850 * allocated, from the MST encoder's pre_enable callback. 3851 */ 3852 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 3853 intel_ddi_set_dp_msa(crtc_state, conn_state); 3854 3855 intel_dp_set_m_n(crtc_state, M1_N1); 3856 } 3857 } 3858 3859 static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state, 3860 struct intel_encoder *encoder, 3861 const struct intel_crtc_state *crtc_state, 3862 const struct drm_connector_state *conn_state) 3863 { 3864 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3865 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 3866 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3867 int level = intel_ddi_hdmi_level(encoder, crtc_state); 3868 3869 intel_dp_dual_mode_set_tmds_output(intel_hdmi, true); 3870 intel_ddi_clk_select(encoder, crtc_state); 3871 3872 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 3873 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 3874 dig_port->ddi_io_power_domain); 3875 3876 icl_program_mg_dp_mode(dig_port, crtc_state); 3877 3878 if (INTEL_GEN(dev_priv) >= 12) 3879 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3880 else if (INTEL_GEN(dev_priv) == 11) 3881 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3882 else if (IS_CANNONLAKE(dev_priv)) 3883 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3884 else if (IS_GEN9_LP(dev_priv)) 3885 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3886 else 3887 intel_prepare_hdmi_ddi_buffers(encoder, level); 3888 3889 if (IS_GEN9_BC(dev_priv)) 3890 skl_ddi_set_iboost(encoder, crtc_state, level); 3891 3892 intel_ddi_enable_pipe_clock(encoder, crtc_state); 3893 3894 dig_port->set_infoframes(encoder, 3895 crtc_state->has_infoframe, 3896 crtc_state, conn_state); 3897 } 3898 3899 static void intel_ddi_pre_enable(struct intel_atomic_state *state, 3900 struct intel_encoder *encoder, 3901 const struct intel_crtc_state *crtc_state, 3902 const struct drm_connector_state *conn_state) 3903 { 3904 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 3905 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 3906 enum pipe pipe = crtc->pipe; 3907 3908 /* 3909 * When called from DP MST code: 3910 * - conn_state will be NULL 3911 * - encoder will be the main encoder (ie.
mst->primary) 3912 * - the main connector associated with this port 3913 * won't be active or linked to a crtc 3914 * - crtc_state will be the state of the first stream to 3915 * be activated on this port, and it may not be the same 3916 * stream that will be deactivated last, but each stream 3917 * should have a state that is identical when it comes to 3918 * the DP link parameters 3919 */ 3920 3921 drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder); 3922 3923 if (IS_DG1(dev_priv)) 3924 dg1_map_plls_to_ports(encoder, crtc_state); 3925 else if (INTEL_GEN(dev_priv) >= 11) 3926 icl_map_plls_to_ports(encoder, crtc_state); 3927 3928 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true); 3929 3930 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 3931 intel_ddi_pre_enable_hdmi(state, encoder, crtc_state, 3932 conn_state); 3933 } else { 3934 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3935 3936 intel_ddi_pre_enable_dp(state, encoder, crtc_state, 3937 conn_state); 3938 3939 /* FIXME precompute everything properly */ 3940 /* FIXME how do we turn infoframes off again? */ 3941 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 3942 dig_port->set_infoframes(encoder, 3943 crtc_state->has_infoframe, 3944 crtc_state, conn_state); 3945 } 3946 } 3947 3948 static void intel_disable_ddi_buf(struct intel_encoder *encoder, 3949 const struct intel_crtc_state *crtc_state) 3950 { 3951 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3952 enum port port = encoder->port; 3953 bool wait = false; 3954 u32 val; 3955 3956 val = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 3957 if (val & DDI_BUF_CTL_ENABLE) { 3958 val &= ~DDI_BUF_CTL_ENABLE; 3959 intel_de_write(dev_priv, DDI_BUF_CTL(port), val); 3960 wait = true; 3961 } 3962 3963 if (intel_crtc_has_dp_encoder(crtc_state)) { 3964 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3965 val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 3966 val |= DP_TP_CTL_LINK_TRAIN_PAT1; 3967 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3968 } 3969 3970 /* Disable FEC in DP Sink */ 3971 intel_ddi_disable_fec_state(encoder, crtc_state); 3972 3973 if (wait) 3974 intel_wait_ddi_buf_idle(dev_priv, port); 3975 } 3976 3977 static void intel_ddi_post_disable_dp(struct intel_atomic_state *state, 3978 struct intel_encoder *encoder, 3979 const struct intel_crtc_state *old_crtc_state, 3980 const struct drm_connector_state *old_conn_state) 3981 { 3982 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3983 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3984 struct intel_dp *intel_dp = &dig_port->dp; 3985 bool is_mst = intel_crtc_has_type(old_crtc_state, 3986 INTEL_OUTPUT_DP_MST); 3987 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3988 3989 if (!is_mst) 3990 intel_dp_set_infoframes(encoder, false, 3991 old_crtc_state, old_conn_state); 3992 3993 /* 3994 * Power down sink before disabling the port, otherwise we end 3995 * up getting interrupts from the sink on detecting link loss.
3996 */ 3997 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 3998 3999 if (INTEL_GEN(dev_priv) >= 12) { 4000 if (is_mst) { 4001 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder; 4002 u32 val; 4003 4004 val = intel_de_read(dev_priv, 4005 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4006 val &= ~(TGL_TRANS_DDI_PORT_MASK | 4007 TRANS_DDI_MODE_SELECT_MASK); 4008 intel_de_write(dev_priv, 4009 TRANS_DDI_FUNC_CTL(cpu_transcoder), 4010 val); 4011 } 4012 } else { 4013 if (!is_mst) 4014 intel_ddi_disable_pipe_clock(old_crtc_state); 4015 } 4016 4017 intel_disable_ddi_buf(encoder, old_crtc_state); 4018 4019 /* 4020 * From TGL spec: "If single stream or multi-stream master transcoder: 4021 * Configure Transcoder Clock select to direct no clock to the 4022 * transcoder" 4023 */ 4024 if (INTEL_GEN(dev_priv) >= 12) 4025 intel_ddi_disable_pipe_clock(old_crtc_state); 4026 4027 intel_pps_vdd_on(intel_dp); 4028 intel_pps_off(intel_dp); 4029 4030 if (!intel_phy_is_tc(dev_priv, phy) || 4031 dig_port->tc_mode != TC_PORT_TBT_ALT) 4032 intel_display_power_put(dev_priv, 4033 dig_port->ddi_io_power_domain, 4034 fetch_and_zero(&dig_port->ddi_io_wakeref)); 4035 4036 intel_ddi_clk_disable(encoder); 4037 } 4038 4039 static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state, 4040 struct intel_encoder *encoder, 4041 const struct intel_crtc_state *old_crtc_state, 4042 const struct drm_connector_state *old_conn_state) 4043 { 4044 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4045 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4046 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 4047 4048 dig_port->set_infoframes(encoder, false, 4049 old_crtc_state, old_conn_state); 4050 4051 intel_ddi_disable_pipe_clock(old_crtc_state); 4052 4053 intel_disable_ddi_buf(encoder, old_crtc_state); 4054 4055 intel_display_power_put(dev_priv, 4056 dig_port->ddi_io_power_domain, 4057 fetch_and_zero(&dig_port->ddi_io_wakeref)); 4058 4059 intel_ddi_clk_disable(encoder); 4060 4061 intel_dp_dual_mode_set_tmds_output(intel_hdmi, false); 4062 } 4063 4064 static void intel_ddi_post_disable(struct intel_atomic_state *state, 4065 struct intel_encoder *encoder, 4066 const struct intel_crtc_state *old_crtc_state, 4067 const struct drm_connector_state *old_conn_state) 4068 { 4069 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4070 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4071 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 4072 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 4073 4074 if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) { 4075 intel_crtc_vblank_off(old_crtc_state); 4076 4077 intel_disable_pipe(old_crtc_state); 4078 4079 intel_vrr_disable(old_crtc_state); 4080 4081 intel_ddi_disable_transcoder_func(old_crtc_state); 4082 4083 intel_dsc_disable(old_crtc_state); 4084 4085 if (INTEL_GEN(dev_priv) >= 9) 4086 skl_scaler_disable(old_crtc_state); 4087 else 4088 ilk_pfit_disable(old_crtc_state); 4089 } 4090 4091 if (old_crtc_state->bigjoiner_linked_crtc) { 4092 struct intel_atomic_state *state = 4093 to_intel_atomic_state(old_crtc_state->uapi.state); 4094 struct intel_crtc *slave = 4095 old_crtc_state->bigjoiner_linked_crtc; 4096 const struct intel_crtc_state *old_slave_crtc_state = 4097 intel_atomic_get_old_crtc_state(state, slave); 4098 4099 intel_crtc_vblank_off(old_slave_crtc_state); 4100 trace_intel_pipe_disable(slave); 4101 4102 intel_dsc_disable(old_slave_crtc_state); 4103 skl_scaler_disable(old_slave_crtc_state); 4104 } 4105 
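/* The bigjoiner slave pipe shares this encoder, so its vblank, DSC and scaler were torn down above together with the master's. */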
4106 /* 4107 * When called from DP MST code: 4108 * - old_conn_state will be NULL 4109 * - encoder will be the main encoder (ie. mst->primary) 4110 * - the main connector associated with this port 4111 * won't be active or linked to a crtc 4112 * - old_crtc_state will be the state of the last stream to 4113 * be deactivated on this port, and it may not be the same 4114 * stream that was activated last, but each stream 4115 * should have a state that is identical when it comes to 4116 * the DP link parameters 4117 */ 4118 4119 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 4120 intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state, 4121 old_conn_state); 4122 else 4123 intel_ddi_post_disable_dp(state, encoder, old_crtc_state, 4124 old_conn_state); 4125 4126 if (IS_DG1(dev_priv)) 4127 dg1_unmap_plls_to_ports(encoder); 4128 else if (INTEL_GEN(dev_priv) >= 11) 4129 icl_unmap_plls_to_ports(encoder); 4130 4131 if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port) 4132 intel_display_power_put(dev_priv, 4133 intel_ddi_main_link_aux_domain(dig_port), 4134 fetch_and_zero(&dig_port->aux_wakeref)); 4135 4136 if (is_tc_port) 4137 intel_tc_port_put_link(dig_port); 4138 } 4139 4140 void intel_ddi_fdi_post_disable(struct intel_atomic_state *state, 4141 struct intel_encoder *encoder, 4142 const struct intel_crtc_state *old_crtc_state, 4143 const struct drm_connector_state *old_conn_state) 4144 { 4145 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4146 u32 val; 4147 4148 /* 4149 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable) 4150 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN, 4151 * step 13 is the correct place for it. Step 18 is where it was 4152 * originally before the BUN. 4153 */ 4154 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4155 val &= ~FDI_RX_ENABLE; 4156 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4157 4158 intel_disable_ddi_buf(encoder, old_crtc_state); 4159 intel_ddi_clk_disable(encoder); 4160 4161 val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 4162 val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 4163 val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 4164 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val); 4165 4166 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4167 val &= ~FDI_PCDCLK; 4168 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4169 4170 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 4171 val &= ~FDI_RX_PLL_ENABLE; 4172 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 4173 } 4174 4175 static void trans_port_sync_stop_link_train(struct intel_atomic_state *state, 4176 struct intel_encoder *encoder, 4177 const struct intel_crtc_state *crtc_state) 4178 { 4179 const struct drm_connector_state *conn_state; 4180 struct drm_connector *conn; 4181 int i; 4182 4183 if (!crtc_state->sync_mode_slaves_mask) 4184 return; 4185 4186 for_each_new_connector_in_state(&state->base, conn, conn_state, i) { 4187 struct intel_encoder *slave_encoder = 4188 to_intel_encoder(conn_state->best_encoder); 4189 struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc); 4190 const struct intel_crtc_state *slave_crtc_state; 4191 4192 if (!slave_crtc) 4193 continue; 4194 4195 slave_crtc_state = 4196 intel_atomic_get_new_crtc_state(state, slave_crtc); 4197 4198 if (slave_crtc_state->master_transcoder != 4199 crtc_state->cpu_transcoder) 4200 continue; 4201 4202 intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder), 4203 slave_crtc_state); 4204 } 4205 4206 usleep_range(200, 400); 4207
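/* With every slave transcoder out of the training pattern, take the port sync master itself out of it as well. */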
4208 intel_dp_stop_link_train(enc_to_intel_dp(encoder), 4209 crtc_state); 4210 } 4211 4212 static void intel_enable_ddi_dp(struct intel_atomic_state *state, 4213 struct intel_encoder *encoder, 4214 const struct intel_crtc_state *crtc_state, 4215 const struct drm_connector_state *conn_state) 4216 { 4217 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4218 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4219 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4220 enum port port = encoder->port; 4221 4222 if (port == PORT_A && INTEL_GEN(dev_priv) < 9) 4223 intel_dp_stop_link_train(intel_dp, crtc_state); 4224 4225 intel_edp_backlight_on(crtc_state, conn_state); 4226 intel_psr_enable(intel_dp, crtc_state, conn_state); 4227 4228 if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink) 4229 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 4230 4231 intel_edp_drrs_enable(intel_dp, crtc_state); 4232 4233 if (crtc_state->has_audio) 4234 intel_audio_codec_enable(encoder, crtc_state, conn_state); 4235 4236 trans_port_sync_stop_link_train(state, encoder, crtc_state); 4237 } 4238 4239 static i915_reg_t 4240 gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv, 4241 enum port port) 4242 { 4243 static const enum transcoder trans[] = { 4244 [PORT_A] = TRANSCODER_EDP, 4245 [PORT_B] = TRANSCODER_A, 4246 [PORT_C] = TRANSCODER_B, 4247 [PORT_D] = TRANSCODER_C, 4248 [PORT_E] = TRANSCODER_A, 4249 }; 4250 4251 drm_WARN_ON(&dev_priv->drm, INTEL_GEN(dev_priv) < 9); 4252 4253 if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E)) 4254 port = PORT_A; 4255 4256 return CHICKEN_TRANS(trans[port]); 4257 } 4258 4259 static void intel_enable_ddi_hdmi(struct intel_atomic_state *state, 4260 struct intel_encoder *encoder, 4261 const struct intel_crtc_state *crtc_state, 4262 const struct drm_connector_state *conn_state) 4263 { 4264 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4265 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4266 struct drm_connector *connector = conn_state->connector; 4267 enum port port = encoder->port; 4268 4269 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 4270 crtc_state->hdmi_high_tmds_clock_ratio, 4271 crtc_state->hdmi_scrambling)) 4272 drm_dbg_kms(&dev_priv->drm, 4273 "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n", 4274 connector->base.id, connector->name); 4275 4276 /* Display WA #1143: skl,kbl,cfl */ 4277 if (IS_GEN9_BC(dev_priv)) { 4278 /* 4279 * For some reason these chicken bits have been 4280 * stuffed into a transcoder register, even though 4281 * the bits affect a specific DDI port rather than 4282 * a specific transcoder.
4283 */ 4284 i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port); 4285 u32 val; 4286 4287 val = intel_de_read(dev_priv, reg); 4288 4289 if (port == PORT_E) 4290 val |= DDIE_TRAINING_OVERRIDE_ENABLE | 4291 DDIE_TRAINING_OVERRIDE_VALUE; 4292 else 4293 val |= DDI_TRAINING_OVERRIDE_ENABLE | 4294 DDI_TRAINING_OVERRIDE_VALUE; 4295 4296 intel_de_write(dev_priv, reg, val); 4297 intel_de_posting_read(dev_priv, reg); 4298 4299 udelay(1); 4300 4301 if (port == PORT_E) 4302 val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE | 4303 DDIE_TRAINING_OVERRIDE_VALUE); 4304 else 4305 val &= ~(DDI_TRAINING_OVERRIDE_ENABLE | 4306 DDI_TRAINING_OVERRIDE_VALUE); 4307 4308 intel_de_write(dev_priv, reg, val); 4309 } 4310 4311 /* In HDMI/DVI mode, the port width, and swing/emphasis values 4312 * are ignored so nothing special needs to be done besides 4313 * enabling the port. 4314 */ 4315 intel_de_write(dev_priv, DDI_BUF_CTL(port), 4316 dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE); 4317 4318 if (crtc_state->has_audio) 4319 intel_audio_codec_enable(encoder, crtc_state, conn_state); 4320 } 4321 4322 static void intel_enable_ddi(struct intel_atomic_state *state, 4323 struct intel_encoder *encoder, 4324 const struct intel_crtc_state *crtc_state, 4325 const struct drm_connector_state *conn_state) 4326 { 4327 drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder); 4328 4329 if (!crtc_state->bigjoiner_slave) 4330 intel_ddi_enable_transcoder_func(encoder, crtc_state); 4331 4332 intel_vrr_enable(encoder, crtc_state); 4333 4334 intel_enable_pipe(crtc_state); 4335 4336 intel_crtc_vblank_on(crtc_state); 4337 4338 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 4339 intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state); 4340 else 4341 intel_enable_ddi_dp(state, encoder, crtc_state, conn_state); 4342 4343 /* Enable hdcp if it's desired */ 4344 if (conn_state->content_protection == 4345 DRM_MODE_CONTENT_PROTECTION_DESIRED) 4346 intel_hdcp_enable(to_intel_connector(conn_state->connector), 4347 crtc_state, 4348 (u8)conn_state->hdcp_content_type); 4349 } 4350 4351 static void intel_disable_ddi_dp(struct intel_atomic_state *state, 4352 struct intel_encoder *encoder, 4353 const struct intel_crtc_state *old_crtc_state, 4354 const struct drm_connector_state *old_conn_state) 4355 { 4356 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4357 4358 intel_dp->link_trained = false; 4359 4360 if (old_crtc_state->has_audio) 4361 intel_audio_codec_disable(encoder, 4362 old_crtc_state, old_conn_state); 4363 4364 intel_edp_drrs_disable(intel_dp, old_crtc_state); 4365 intel_psr_disable(intel_dp, old_crtc_state); 4366 intel_edp_backlight_off(old_conn_state); 4367 /* Disable the decompression in DP Sink */ 4368 intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state, 4369 false); 4370 /* Disable Ignore_MSA bit in DP Sink */ 4371 intel_dp_sink_set_msa_timing_par_ignore_state(intel_dp, old_crtc_state, 4372 false); 4373 } 4374 4375 static void intel_disable_ddi_hdmi(struct intel_atomic_state *state, 4376 struct intel_encoder *encoder, 4377 const struct intel_crtc_state *old_crtc_state, 4378 const struct drm_connector_state *old_conn_state) 4379 { 4380 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 4381 struct drm_connector *connector = old_conn_state->connector; 4382 4383 if (old_crtc_state->has_audio) 4384 intel_audio_codec_disable(encoder, 4385 old_crtc_state, old_conn_state); 4386 4387 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 4388 false, false)) 4389 drm_dbg_kms(&i915->drm, 4390 
"[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n", 4391 connector->base.id, connector->name); 4392 } 4393 4394 static void intel_disable_ddi(struct intel_atomic_state *state, 4395 struct intel_encoder *encoder, 4396 const struct intel_crtc_state *old_crtc_state, 4397 const struct drm_connector_state *old_conn_state) 4398 { 4399 intel_hdcp_disable(to_intel_connector(old_conn_state->connector)); 4400 4401 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 4402 intel_disable_ddi_hdmi(state, encoder, old_crtc_state, 4403 old_conn_state); 4404 else 4405 intel_disable_ddi_dp(state, encoder, old_crtc_state, 4406 old_conn_state); 4407 } 4408 4409 static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state, 4410 struct intel_encoder *encoder, 4411 const struct intel_crtc_state *crtc_state, 4412 const struct drm_connector_state *conn_state) 4413 { 4414 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 4415 4416 intel_ddi_set_dp_msa(crtc_state, conn_state); 4417 4418 intel_psr_update(intel_dp, crtc_state, conn_state); 4419 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 4420 intel_edp_drrs_update(intel_dp, crtc_state); 4421 4422 intel_panel_update_backlight(state, encoder, crtc_state, conn_state); 4423 } 4424 4425 void intel_ddi_update_pipe(struct intel_atomic_state *state, 4426 struct intel_encoder *encoder, 4427 const struct intel_crtc_state *crtc_state, 4428 const struct drm_connector_state *conn_state) 4429 { 4430 4431 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) && 4432 !intel_encoder_is_mst(encoder)) 4433 intel_ddi_update_pipe_dp(state, encoder, crtc_state, 4434 conn_state); 4435 4436 intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state); 4437 } 4438 4439 static void 4440 intel_ddi_update_prepare(struct intel_atomic_state *state, 4441 struct intel_encoder *encoder, 4442 struct intel_crtc *crtc) 4443 { 4444 struct intel_crtc_state *crtc_state = 4445 crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL; 4446 int required_lanes = crtc_state ? crtc_state->lane_count : 1; 4447 4448 drm_WARN_ON(state->base.dev, crtc && crtc->active); 4449 4450 intel_tc_port_get_link(enc_to_dig_port(encoder), 4451 required_lanes); 4452 if (crtc_state && crtc_state->hw.active) 4453 intel_update_active_dpll(state, crtc, encoder); 4454 } 4455 4456 static void 4457 intel_ddi_update_complete(struct intel_atomic_state *state, 4458 struct intel_encoder *encoder, 4459 struct intel_crtc *crtc) 4460 { 4461 intel_tc_port_put_link(enc_to_dig_port(encoder)); 4462 } 4463 4464 static void 4465 intel_ddi_pre_pll_enable(struct intel_atomic_state *state, 4466 struct intel_encoder *encoder, 4467 const struct intel_crtc_state *crtc_state, 4468 const struct drm_connector_state *conn_state) 4469 { 4470 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4471 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4472 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 4473 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 4474 4475 if (is_tc_port) 4476 intel_tc_port_get_link(dig_port, crtc_state->lane_count); 4477 4478 if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) { 4479 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 4480 dig_port->aux_wakeref = 4481 intel_display_power_get(dev_priv, 4482 intel_ddi_main_link_aux_domain(dig_port)); 4483 } 4484 4485 if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT) 4486 /* 4487 * Program the lane count for static/dynamic connections on 4488 * Type-C ports. 
Skip this step for TBT. 4489 */ 4490 intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count); 4491 else if (IS_GEN9_LP(dev_priv)) 4492 bxt_ddi_phy_set_lane_optim_mask(encoder, 4493 crtc_state->lane_lat_optim_mask); 4494 } 4495 4496 static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp, 4497 const struct intel_crtc_state *crtc_state) 4498 { 4499 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4500 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4501 enum port port = encoder->port; 4502 u32 dp_tp_ctl, ddi_buf_ctl; 4503 bool wait = false; 4504 4505 dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4506 4507 if (dp_tp_ctl & DP_TP_CTL_ENABLE) { 4508 ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 4509 if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) { 4510 intel_de_write(dev_priv, DDI_BUF_CTL(port), 4511 ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE); 4512 wait = true; 4513 } 4514 4515 dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 4516 dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1; 4517 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 4518 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4519 4520 if (wait) 4521 intel_wait_ddi_buf_idle(dev_priv, port); 4522 } 4523 4524 dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1; 4525 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 4526 dp_tp_ctl |= DP_TP_CTL_MODE_MST; 4527 } else { 4528 dp_tp_ctl |= DP_TP_CTL_MODE_SST; 4529 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 4530 dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE; 4531 } 4532 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 4533 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4534 4535 intel_dp->DP |= DDI_BUF_CTL_ENABLE; 4536 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 4537 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 4538 4539 intel_wait_ddi_buf_active(dev_priv, port); 4540 } 4541 4542 static void intel_ddi_set_link_train(struct intel_dp *intel_dp, 4543 const struct intel_crtc_state *crtc_state, 4544 u8 dp_train_pat) 4545 { 4546 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4547 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4548 u32 temp; 4549 4550 temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4551 4552 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; 4553 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 4554 case DP_TRAINING_PATTERN_DISABLE: 4555 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; 4556 break; 4557 case DP_TRAINING_PATTERN_1: 4558 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 4559 break; 4560 case DP_TRAINING_PATTERN_2: 4561 temp |= DP_TP_CTL_LINK_TRAIN_PAT2; 4562 break; 4563 case DP_TRAINING_PATTERN_3: 4564 temp |= DP_TP_CTL_LINK_TRAIN_PAT3; 4565 break; 4566 case DP_TRAINING_PATTERN_4: 4567 temp |= DP_TP_CTL_LINK_TRAIN_PAT4; 4568 break; 4569 } 4570 4571 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp); 4572 } 4573 4574 static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp, 4575 const struct intel_crtc_state *crtc_state) 4576 { 4577 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4578 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4579 enum port port = encoder->port; 4580 u32 val; 4581 4582 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 4583 val &= ~DP_TP_CTL_LINK_TRAIN_MASK; 4584 val |= DP_TP_CTL_LINK_TRAIN_IDLE; 4585 intel_de_write(dev_priv, 
dp_tp_ctl_reg(encoder, crtc_state), val); 4586 4587 /* 4588 * Until TGL on PORT_A we can have only eDP in SST mode. There the only 4589 * reason we need to set idle transmission mode is to work around a HW 4590 * issue where we enable the pipe while not in idle link-training mode. 4591 * In this case there is requirement to wait for a minimum number of 4592 * idle patterns to be sent. 4593 */ 4594 if (port == PORT_A && INTEL_GEN(dev_priv) < 12) 4595 return; 4596 4597 if (intel_de_wait_for_set(dev_priv, 4598 dp_tp_status_reg(encoder, crtc_state), 4599 DP_TP_STATUS_IDLE_DONE, 1)) 4600 drm_err(&dev_priv->drm, 4601 "Timed out waiting for DP idle patterns\n"); 4602 } 4603 4604 static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv, 4605 enum transcoder cpu_transcoder) 4606 { 4607 if (cpu_transcoder == TRANSCODER_EDP) 4608 return false; 4609 4610 if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO)) 4611 return false; 4612 4613 return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) & 4614 AUDIO_OUTPUT_ENABLE(cpu_transcoder); 4615 } 4616 4617 void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv, 4618 struct intel_crtc_state *crtc_state) 4619 { 4620 if (INTEL_GEN(dev_priv) >= 12 && crtc_state->port_clock > 594000) 4621 crtc_state->min_voltage_level = 2; 4622 else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000) 4623 crtc_state->min_voltage_level = 3; 4624 else if (INTEL_GEN(dev_priv) >= 11 && crtc_state->port_clock > 594000) 4625 crtc_state->min_voltage_level = 1; 4626 else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000) 4627 crtc_state->min_voltage_level = 2; 4628 } 4629 4630 static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv, 4631 enum transcoder cpu_transcoder) 4632 { 4633 u32 master_select; 4634 4635 if (INTEL_GEN(dev_priv) >= 11) { 4636 u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder)); 4637 4638 if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0) 4639 return INVALID_TRANSCODER; 4640 4641 master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2); 4642 } else { 4643 u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4644 4645 if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0) 4646 return INVALID_TRANSCODER; 4647 4648 master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl); 4649 } 4650 4651 if (master_select == 0) 4652 return TRANSCODER_EDP; 4653 else 4654 return master_select - 1; 4655 } 4656 4657 static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state) 4658 { 4659 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 4660 u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | 4661 BIT(TRANSCODER_C) | BIT(TRANSCODER_D); 4662 enum transcoder cpu_transcoder; 4663 4664 crtc_state->master_transcoder = 4665 bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder); 4666 4667 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { 4668 enum intel_display_power_domain power_domain; 4669 intel_wakeref_t trans_wakeref; 4670 4671 power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder); 4672 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 4673 power_domain); 4674 4675 if (!trans_wakeref) 4676 continue; 4677 4678 if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) == 4679 crtc_state->cpu_transcoder) 4680 crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder); 4681 4682 intel_display_power_put(dev_priv, power_domain, trans_wakeref); 4683 } 4684 
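/* Sanity check the readout: a transcoder can be a port sync master or a slave, but never both at the same time. */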
4685 drm_WARN_ON(&dev_priv->drm, 4686 crtc_state->master_transcoder != INVALID_TRANSCODER && 4687 crtc_state->sync_mode_slaves_mask); 4688 } 4689 4690 static void intel_ddi_read_func_ctl(struct intel_encoder *encoder, 4691 struct intel_crtc_state *pipe_config) 4692 { 4693 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4694 struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc); 4695 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 4696 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 4697 u32 temp, flags = 0; 4698 4699 temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 4700 if (temp & TRANS_DDI_PHSYNC) 4701 flags |= DRM_MODE_FLAG_PHSYNC; 4702 else 4703 flags |= DRM_MODE_FLAG_NHSYNC; 4704 if (temp & TRANS_DDI_PVSYNC) 4705 flags |= DRM_MODE_FLAG_PVSYNC; 4706 else 4707 flags |= DRM_MODE_FLAG_NVSYNC; 4708 4709 pipe_config->hw.adjusted_mode.flags |= flags; 4710 4711 switch (temp & TRANS_DDI_BPC_MASK) { 4712 case TRANS_DDI_BPC_6: 4713 pipe_config->pipe_bpp = 18; 4714 break; 4715 case TRANS_DDI_BPC_8: 4716 pipe_config->pipe_bpp = 24; 4717 break; 4718 case TRANS_DDI_BPC_10: 4719 pipe_config->pipe_bpp = 30; 4720 break; 4721 case TRANS_DDI_BPC_12: 4722 pipe_config->pipe_bpp = 36; 4723 break; 4724 default: 4725 break; 4726 } 4727 4728 switch (temp & TRANS_DDI_MODE_SELECT_MASK) { 4729 case TRANS_DDI_MODE_SELECT_HDMI: 4730 pipe_config->has_hdmi_sink = true; 4731 4732 pipe_config->infoframes.enable |= 4733 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4734 4735 if (pipe_config->infoframes.enable) 4736 pipe_config->has_infoframe = true; 4737 4738 if (temp & TRANS_DDI_HDMI_SCRAMBLING) 4739 pipe_config->hdmi_scrambling = true; 4740 if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE) 4741 pipe_config->hdmi_high_tmds_clock_ratio = true; 4742 fallthrough; 4743 case TRANS_DDI_MODE_SELECT_DVI: 4744 pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI); 4745 pipe_config->lane_count = 4; 4746 break; 4747 case TRANS_DDI_MODE_SELECT_FDI: 4748 pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG); 4749 break; 4750 case TRANS_DDI_MODE_SELECT_DP_SST: 4751 if (encoder->type == INTEL_OUTPUT_EDP) 4752 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 4753 else 4754 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 4755 pipe_config->lane_count = 4756 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 4757 intel_dp_get_m_n(intel_crtc, pipe_config); 4758 4759 if (INTEL_GEN(dev_priv) >= 11) { 4760 i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config); 4761 4762 pipe_config->fec_enable = 4763 intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE; 4764 4765 drm_dbg_kms(&dev_priv->drm, 4766 "[ENCODER:%d:%s] Fec status: %u\n", 4767 encoder->base.base.id, encoder->base.name, 4768 pipe_config->fec_enable); 4769 } 4770 4771 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 4772 pipe_config->infoframes.enable |= 4773 intel_lspcon_infoframes_enabled(encoder, pipe_config); 4774 else 4775 pipe_config->infoframes.enable |= 4776 intel_hdmi_infoframes_enabled(encoder, pipe_config); 4777 break; 4778 case TRANS_DDI_MODE_SELECT_DP_MST: 4779 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST); 4780 pipe_config->lane_count = 4781 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 4782 4783 if (INTEL_GEN(dev_priv) >= 12) 4784 pipe_config->mst_master_transcoder = 4785 REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp); 4786 4787 intel_dp_get_m_n(intel_crtc, pipe_config); 4788 4789 pipe_config->infoframes.enable |= 4790 
intel_hdmi_infoframes_enabled(encoder, pipe_config); 4791 break; 4792 default: 4793 break; 4794 } 4795 } 4796 4797 void intel_ddi_get_config(struct intel_encoder *encoder, 4798 struct intel_crtc_state *pipe_config) 4799 { 4800 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4801 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 4802 4803 /* XXX: DSI transcoder paranoia */ 4804 if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder))) 4805 return; 4806 4807 if (pipe_config->bigjoiner_slave) { 4808 /* read out pipe settings from master */ 4809 enum transcoder save = pipe_config->cpu_transcoder; 4810 4811 /* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */ 4812 WARN_ON(pipe_config->output_types); 4813 pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe; 4814 intel_ddi_read_func_ctl(encoder, pipe_config); 4815 pipe_config->cpu_transcoder = save; 4816 } else { 4817 intel_ddi_read_func_ctl(encoder, pipe_config); 4818 } 4819 4820 pipe_config->has_audio = 4821 intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder); 4822 4823 if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp && 4824 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) { 4825 /* 4826 * This is a big fat ugly hack. 4827 * 4828 * Some machines in UEFI boot mode provide us a VBT that has 18 4829 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons 4830 * unknown we fail to light up. Yet the same BIOS boots up with 4831 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as 4832 * max, not what it tells us to use. 4833 * 4834 * Note: This will still be broken if the eDP panel is not lit 4835 * up by the BIOS, and thus we can't get the mode at module 4836 * load. 4837 */ 4838 drm_dbg_kms(&dev_priv->drm, 4839 "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n", 4840 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp); 4841 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp; 4842 } 4843 4844 if (!pipe_config->bigjoiner_slave) 4845 intel_ddi_clock_get(encoder, pipe_config); 4846 4847 if (IS_GEN9_LP(dev_priv)) 4848 pipe_config->lane_lat_optim_mask = 4849 bxt_ddi_phy_get_lane_lat_optim_mask(encoder); 4850 4851 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 4852 4853 intel_hdmi_read_gcp_infoframe(encoder, pipe_config); 4854 4855 intel_read_infoframe(encoder, pipe_config, 4856 HDMI_INFOFRAME_TYPE_AVI, 4857 &pipe_config->infoframes.avi); 4858 intel_read_infoframe(encoder, pipe_config, 4859 HDMI_INFOFRAME_TYPE_SPD, 4860 &pipe_config->infoframes.spd); 4861 intel_read_infoframe(encoder, pipe_config, 4862 HDMI_INFOFRAME_TYPE_VENDOR, 4863 &pipe_config->infoframes.hdmi); 4864 intel_read_infoframe(encoder, pipe_config, 4865 HDMI_INFOFRAME_TYPE_DRM, 4866 &pipe_config->infoframes.drm); 4867 4868 if (INTEL_GEN(dev_priv) >= 8) 4869 bdw_get_trans_port_sync_config(pipe_config); 4870 4871 intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA); 4872 intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC); 4873 } 4874 4875 static void intel_ddi_sync_state(struct intel_encoder *encoder, 4876 const struct intel_crtc_state *crtc_state) 4877 { 4878 if (intel_crtc_has_dp_encoder(crtc_state)) 4879 intel_dp_sync_state(encoder, crtc_state); 4880 } 4881 4882 static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder, 4883 struct intel_crtc_state *crtc_state) 4884 { 4885 if (intel_crtc_has_dp_encoder(crtc_state)) 4886 return intel_dp_initial_fastset_check(encoder, crtc_state); 4887 4888 
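/* Non-DP outputs have no extra encoder state to cross-check for the initial fastset. */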
return true; 4889 } 4890 4891 static enum intel_output_type 4892 intel_ddi_compute_output_type(struct intel_encoder *encoder, 4893 struct intel_crtc_state *crtc_state, 4894 struct drm_connector_state *conn_state) 4895 { 4896 switch (conn_state->connector->connector_type) { 4897 case DRM_MODE_CONNECTOR_HDMIA: 4898 return INTEL_OUTPUT_HDMI; 4899 case DRM_MODE_CONNECTOR_eDP: 4900 return INTEL_OUTPUT_EDP; 4901 case DRM_MODE_CONNECTOR_DisplayPort: 4902 return INTEL_OUTPUT_DP; 4903 default: 4904 MISSING_CASE(conn_state->connector->connector_type); 4905 return INTEL_OUTPUT_UNUSED; 4906 } 4907 } 4908 4909 static int intel_ddi_compute_config(struct intel_encoder *encoder, 4910 struct intel_crtc_state *pipe_config, 4911 struct drm_connector_state *conn_state) 4912 { 4913 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 4914 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4915 enum port port = encoder->port; 4916 int ret; 4917 4918 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 4919 pipe_config->cpu_transcoder = TRANSCODER_EDP; 4920 4921 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) { 4922 ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state); 4923 } else { 4924 ret = intel_dp_compute_config(encoder, pipe_config, conn_state); 4925 } 4926 4927 if (ret) 4928 return ret; 4929 4930 if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A && 4931 pipe_config->cpu_transcoder == TRANSCODER_EDP) 4932 pipe_config->pch_pfit.force_thru = 4933 pipe_config->pch_pfit.enabled || 4934 pipe_config->crc_enabled; 4935 4936 if (IS_GEN9_LP(dev_priv)) 4937 pipe_config->lane_lat_optim_mask = 4938 bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 4939 4940 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 4941 4942 return 0; 4943 } 4944 4945 static bool mode_equal(const struct drm_display_mode *mode1, 4946 const struct drm_display_mode *mode2) 4947 { 4948 return drm_mode_match(mode1, mode2, 4949 DRM_MODE_MATCH_TIMINGS | 4950 DRM_MODE_MATCH_FLAGS | 4951 DRM_MODE_MATCH_3D_FLAGS) && 4952 mode1->clock == mode2->clock; /* we want an exact match */ 4953 } 4954 4955 static bool m_n_equal(const struct intel_link_m_n *m_n_1, 4956 const struct intel_link_m_n *m_n_2) 4957 { 4958 return m_n_1->tu == m_n_2->tu && 4959 m_n_1->gmch_m == m_n_2->gmch_m && 4960 m_n_1->gmch_n == m_n_2->gmch_n && 4961 m_n_1->link_m == m_n_2->link_m && 4962 m_n_1->link_n == m_n_2->link_n; 4963 } 4964 4965 static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1, 4966 const struct intel_crtc_state *crtc_state2) 4967 { 4968 return crtc_state1->hw.active && crtc_state2->hw.active && 4969 crtc_state1->output_types == crtc_state2->output_types && 4970 crtc_state1->output_format == crtc_state2->output_format && 4971 crtc_state1->lane_count == crtc_state2->lane_count && 4972 crtc_state1->port_clock == crtc_state2->port_clock && 4973 mode_equal(&crtc_state1->hw.adjusted_mode, 4974 &crtc_state2->hw.adjusted_mode) && 4975 m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n); 4976 } 4977 4978 static u8 4979 intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state, 4980 int tile_group_id) 4981 { 4982 struct drm_connector *connector; 4983 const struct drm_connector_state *conn_state; 4984 struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev); 4985 struct intel_atomic_state *state = 4986 to_intel_atomic_state(ref_crtc_state->uapi.state); 4987 u8 transcoders = 0; 4988 int i; 4989 4990 /* 4991 * We don't enable port sync on 
BDW due to missing workarounds and 4992 * due to not having adjusted the modeset sequence appropriately. 4993 */ 4994 if (INTEL_GEN(dev_priv) < 9) 4995 return 0; 4996 4997 if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP)) 4998 return 0; 4999 5000 for_each_new_connector_in_state(&state->base, connector, conn_state, i) { 5001 struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc); 5002 const struct intel_crtc_state *crtc_state; 5003 5004 if (!crtc) 5005 continue; 5006 5007 if (!connector->has_tile || 5008 connector->tile_group->id != 5009 tile_group_id) 5010 continue; 5011 crtc_state = intel_atomic_get_new_crtc_state(state, 5012 crtc); 5013 if (!crtcs_port_sync_compatible(ref_crtc_state, 5014 crtc_state)) 5015 continue; 5016 transcoders |= BIT(crtc_state->cpu_transcoder); 5017 } 5018 5019 return transcoders; 5020 } 5021 5022 static int intel_ddi_compute_config_late(struct intel_encoder *encoder, 5023 struct intel_crtc_state *crtc_state, 5024 struct drm_connector_state *conn_state) 5025 { 5026 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 5027 struct drm_connector *connector = conn_state->connector; 5028 u8 port_sync_transcoders = 0; 5029 5030 drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]", 5031 encoder->base.base.id, encoder->base.name, 5032 crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name); 5033 5034 if (connector->has_tile) 5035 port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state, 5036 connector->tile_group->id); 5037 5038 /* 5039 * eDP transcoders cannot be enslaved, 5040 * so always make them a master when present 5041 */ 5042 if (port_sync_transcoders & BIT(TRANSCODER_EDP)) 5043 crtc_state->master_transcoder = TRANSCODER_EDP; 5044 else 5045 crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1; 5046 5047 if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) { 5048 crtc_state->master_transcoder = INVALID_TRANSCODER; 5049 crtc_state->sync_mode_slaves_mask = 5050 port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder); 5051 } 5052 5053 return 0; 5054 } 5055 5056 static void intel_ddi_encoder_destroy(struct drm_encoder *encoder) 5057 { 5058 struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder)); 5059 5060 intel_dp_encoder_flush_work(encoder); 5061 5062 drm_encoder_cleanup(encoder); 5063 if (dig_port) 5064 kfree(dig_port->hdcp_port_data.streams); 5065 kfree(dig_port); 5066 } 5067 5068 static const struct drm_encoder_funcs intel_ddi_funcs = { 5069 .reset = intel_dp_encoder_reset, 5070 .destroy = intel_ddi_encoder_destroy, 5071 }; 5072 5073 static struct intel_connector * 5074 intel_ddi_init_dp_connector(struct intel_digital_port *dig_port) 5075 { 5076 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 5077 struct intel_connector *connector; 5078 enum port port = dig_port->base.port; 5079 5080 connector = intel_connector_alloc(); 5081 if (!connector) 5082 return NULL; 5083 5084 dig_port->dp.output_reg = DDI_BUF_CTL(port); 5085 dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain; 5086 dig_port->dp.set_link_train = intel_ddi_set_link_train; 5087 dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train; 5088 5089 if (INTEL_GEN(dev_priv) >= 12) 5090 dig_port->dp.set_signal_levels = tgl_set_signal_levels; 5091 else if (INTEL_GEN(dev_priv) >= 11) 5092 dig_port->dp.set_signal_levels = icl_set_signal_levels; 5093 else if (IS_CANNONLAKE(dev_priv)) 5094 dig_port->dp.set_signal_levels = cnl_set_signal_levels; 5095 else if (IS_GEN9_LP(dev_priv)) 5096
dig_port->dp.set_signal_levels = bxt_set_signal_levels; 5097 else 5098 dig_port->dp.set_signal_levels = hsw_set_signal_levels; 5099 5100 dig_port->dp.voltage_max = intel_ddi_dp_voltage_max; 5101 dig_port->dp.preemph_max = intel_ddi_dp_preemph_max; 5102 5103 if (!intel_dp_init_connector(dig_port, connector)) { 5104 kfree(connector); 5105 return NULL; 5106 } 5107 5108 return connector; 5109 } 5110 5111 static int modeset_pipe(struct drm_crtc *crtc, 5112 struct drm_modeset_acquire_ctx *ctx) 5113 { 5114 struct drm_atomic_state *state; 5115 struct drm_crtc_state *crtc_state; 5116 int ret; 5117 5118 state = drm_atomic_state_alloc(crtc->dev); 5119 if (!state) 5120 return -ENOMEM; 5121 5122 state->acquire_ctx = ctx; 5123 5124 crtc_state = drm_atomic_get_crtc_state(state, crtc); 5125 if (IS_ERR(crtc_state)) { 5126 ret = PTR_ERR(crtc_state); 5127 goto out; 5128 } 5129 5130 crtc_state->connectors_changed = true; 5131 5132 ret = drm_atomic_commit(state); 5133 out: 5134 drm_atomic_state_put(state); 5135 5136 return ret; 5137 } 5138 5139 static int intel_hdmi_reset_link(struct intel_encoder *encoder, 5140 struct drm_modeset_acquire_ctx *ctx) 5141 { 5142 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5143 struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder); 5144 struct intel_connector *connector = hdmi->attached_connector; 5145 struct i2c_adapter *adapter = 5146 intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus); 5147 struct drm_connector_state *conn_state; 5148 struct intel_crtc_state *crtc_state; 5149 struct intel_crtc *crtc; 5150 u8 config; 5151 int ret; 5152 5153 if (!connector || connector->base.status != connector_status_connected) 5154 return 0; 5155 5156 ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex, 5157 ctx); 5158 if (ret) 5159 return ret; 5160 5161 conn_state = connector->base.state; 5162 5163 crtc = to_intel_crtc(conn_state->crtc); 5164 if (!crtc) 5165 return 0; 5166 5167 ret = drm_modeset_lock(&crtc->base.mutex, ctx); 5168 if (ret) 5169 return ret; 5170 5171 crtc_state = to_intel_crtc_state(crtc->base.state); 5172 5173 drm_WARN_ON(&dev_priv->drm, 5174 !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)); 5175 5176 if (!crtc_state->hw.active) 5177 return 0; 5178 5179 if (!crtc_state->hdmi_high_tmds_clock_ratio && 5180 !crtc_state->hdmi_scrambling) 5181 return 0; 5182 5183 if (conn_state->commit && 5184 !try_wait_for_completion(&conn_state->commit->hw_done)) 5185 return 0; 5186 5187 ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config); 5188 if (ret < 0) { 5189 drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n", 5190 ret); 5191 return 0; 5192 } 5193 5194 if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) == 5195 crtc_state->hdmi_high_tmds_clock_ratio && 5196 !!(config & SCDC_SCRAMBLING_ENABLE) == 5197 crtc_state->hdmi_scrambling) 5198 return 0; 5199 5200 /* 5201 * HDMI 2.0 says that one should not send scrambled data 5202 * prior to configuring the sink scrambling, and that 5203 * TMDS clock/data transmission should be suspended when 5204 * changing the TMDS clock rate in the sink. So let's 5205 * just do a full modeset here, even though some sinks 5206 * would be perfectly happy if we were to just reconfigure 5207 * the SCDC settings on the fly.
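* (A full modeset re-applies the sink's SCDC scrambling and TMDS clock-ratio settings as part of the HDMI enable sequence, so no separate SCDC write is needed here.)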
5208 */ 5209 return modeset_pipe(&crtc->base, ctx); 5210 } 5211 5212 static enum intel_hotplug_state 5213 intel_ddi_hotplug(struct intel_encoder *encoder, 5214 struct intel_connector *connector) 5215 { 5216 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 5217 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5218 struct intel_dp *intel_dp = &dig_port->dp; 5219 enum phy phy = intel_port_to_phy(i915, encoder->port); 5220 bool is_tc = intel_phy_is_tc(i915, phy); 5221 struct drm_modeset_acquire_ctx ctx; 5222 enum intel_hotplug_state state; 5223 int ret; 5224 5225 if (intel_dp->compliance.test_active && 5226 intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) { 5227 intel_dp_phy_test(encoder); 5228 /* just do the PHY test and nothing else */ 5229 return INTEL_HOTPLUG_UNCHANGED; 5230 } 5231 5232 state = intel_encoder_hotplug(encoder, connector); 5233 5234 drm_modeset_acquire_init(&ctx, 0); 5235 5236 for (;;) { 5237 if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA) 5238 ret = intel_hdmi_reset_link(encoder, &ctx); 5239 else 5240 ret = intel_dp_retrain_link(encoder, &ctx); 5241 5242 if (ret == -EDEADLK) { 5243 drm_modeset_backoff(&ctx); 5244 continue; 5245 } 5246 5247 break; 5248 } 5249 5250 drm_modeset_drop_locks(&ctx); 5251 drm_modeset_acquire_fini(&ctx); 5252 drm_WARN(encoder->base.dev, ret, 5253 "Acquiring modeset locks failed with %i\n", ret); 5254 5255 /* 5256 * Unpowered type-c dongles can take some time to boot and become 5257 * responsive, so give those dongles some time to power up 5258 * and then retry the probe. 5259 * 5260 * On many platforms the HDMI live state signal is known to be 5261 * unreliable, so we can't use it to detect if a sink is connected or 5262 * not. Instead we detect if it's connected based on whether we can 5263 * read the EDID or not. That in turn has a problem during disconnect, 5264 * since the HPD interrupt may be raised before the DDC lines get 5265 * disconnected (due to how the required length of DDC vs. HPD 5266 * connector pins are specified) and so we'll still be able to get a 5267 * valid EDID. To solve this, schedule another detection cycle if this 5268 * time around we didn't detect any change in the sink's connection 5269 * status. 5270 * 5271 * Type-c connectors which get their HPD signal deasserted then 5272 * reasserted, without unplugging/replugging the sink from the 5273 * connector, introduce a delay until the AUX channel communication 5274 * becomes functional. Retry the detection for 5 seconds on type-c 5275 * connectors to account for this delay. 5276 */ 5277 if (state == INTEL_HOTPLUG_UNCHANGED && 5278 connector->hotplug_retries < (is_tc ?
5 : 1) && 5279 !dig_port->dp.is_mst) 5280 state = INTEL_HOTPLUG_RETRY; 5281 5282 return state; 5283 } 5284 5285 static bool lpt_digital_port_connected(struct intel_encoder *encoder) 5286 { 5287 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5288 u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin]; 5289 5290 return intel_de_read(dev_priv, SDEISR) & bit; 5291 } 5292 5293 static bool hsw_digital_port_connected(struct intel_encoder *encoder) 5294 { 5295 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5296 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin]; 5297 5298 return intel_de_read(dev_priv, DEISR) & bit; 5299 } 5300 5301 static bool bdw_digital_port_connected(struct intel_encoder *encoder) 5302 { 5303 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 5304 u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin]; 5305 5306 return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit; 5307 } 5308 5309 static struct intel_connector * 5310 intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port) 5311 { 5312 struct intel_connector *connector; 5313 enum port port = dig_port->base.port; 5314 5315 connector = intel_connector_alloc(); 5316 if (!connector) 5317 return NULL; 5318 5319 dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port); 5320 intel_hdmi_init_connector(dig_port, connector); 5321 5322 return connector; 5323 } 5324 5325 static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port) 5326 { 5327 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 5328 5329 if (dig_port->base.port != PORT_A) 5330 return false; 5331 5332 if (dig_port->saved_port_bits & DDI_A_4_LANES) 5333 return false; 5334 5335 /* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only 5336 * supported configuration 5337 */ 5338 if (IS_GEN9_LP(dev_priv)) 5339 return true; 5340 5341 /* Cannonlake: Most SKUs don't support DDI_E, and the only 5342 * one that does also has a full A/E split called 5343 * DDI_F, which makes DDI_E useless. However, for this 5344 * case let's trust the VBT info. 5345 */ 5346 if (IS_CANNONLAKE(dev_priv) && 5347 !intel_bios_is_port_present(dev_priv, PORT_E)) 5348 return true; 5349 5350 return false; 5351 } 5352 5353 static int 5354 intel_ddi_max_lanes(struct intel_digital_port *dig_port) 5355 { 5356 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 5357 enum port port = dig_port->base.port; 5358 int max_lanes = 4; 5359 5360 if (INTEL_GEN(dev_priv) >= 11) 5361 return max_lanes; 5362 5363 if (port == PORT_A || port == PORT_E) { 5364 if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES) 5365 max_lanes = port == PORT_A ? 4 : 0; 5366 else 5367 /* Both A and E share 2 lanes */ 5368 max_lanes = 2; 5369 } 5370 5371 /* 5372 * Some BIOSes might fail to set this bit on port A if eDP 5373 * wasn't lit up at boot. Force this bit set when needed 5374 * so we use the proper lane count for our calculations.
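* (The platform-specific cases where the override applies are encoded in intel_ddi_a_force_4_lanes() above.)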
5375 */ 5376 if (intel_ddi_a_force_4_lanes(dig_port)) { 5377 drm_dbg_kms(&dev_priv->drm, 5378 "Forcing DDI_A_4_LANES for port A\n"); 5379 dig_port->saved_port_bits |= DDI_A_4_LANES; 5380 max_lanes = 4; 5381 } 5382 5383 return max_lanes; 5384 } 5385 5386 static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy) 5387 { 5388 return i915->hti_state & HDPORT_ENABLED && 5389 i915->hti_state & HDPORT_DDI_USED(phy); 5390 } 5391 5392 static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv, 5393 enum port port) 5394 { 5395 if (port >= PORT_TC1) 5396 return HPD_PORT_C + port - PORT_TC1; 5397 else 5398 return HPD_PORT_A + port - PORT_A; 5399 } 5400 5401 static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv, 5402 enum port port) 5403 { 5404 if (port >= PORT_TC1) 5405 return HPD_PORT_TC1 + port - PORT_TC1; 5406 else 5407 return HPD_PORT_A + port - PORT_A; 5408 } 5409 5410 static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv, 5411 enum port port) 5412 { 5413 if (HAS_PCH_TGP(dev_priv)) 5414 return tgl_hpd_pin(dev_priv, port); 5415 5416 if (port >= PORT_TC1) 5417 return HPD_PORT_C + port - PORT_TC1; 5418 else 5419 return HPD_PORT_A + port - PORT_A; 5420 } 5421 5422 static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv, 5423 enum port port) 5424 { 5425 if (port >= PORT_C) 5426 return HPD_PORT_TC1 + port - PORT_C; 5427 else 5428 return HPD_PORT_A + port - PORT_A; 5429 } 5430 5431 static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv, 5432 enum port port) 5433 { 5434 if (port == PORT_D) 5435 return HPD_PORT_A; 5436 5437 if (HAS_PCH_MCC(dev_priv)) 5438 return icl_hpd_pin(dev_priv, port); 5439 5440 return HPD_PORT_A + port - PORT_A; 5441 } 5442 5443 static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv, 5444 enum port port) 5445 { 5446 if (port == PORT_F) 5447 return HPD_PORT_E; 5448 5449 return HPD_PORT_A + port - PORT_A; 5450 } 5451 5452 #define port_tc_name(port) ((port) - PORT_TC1 + '1') 5453 #define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1') 5454 5455 void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port) 5456 { 5457 struct intel_digital_port *dig_port; 5458 struct intel_encoder *encoder; 5459 bool init_hdmi, init_dp; 5460 enum phy phy = intel_port_to_phy(dev_priv, port); 5461 5462 /* 5463 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may 5464 * have taken over some of the PHYs and made them unavailable to the 5465 * driver. In that case we should skip initializing the corresponding 5466 * outputs. 5467 */ 5468 if (hti_uses_phy(dev_priv, phy)) { 5469 drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n", 5470 port_name(port), phy_name(phy)); 5471 return; 5472 } 5473 5474 init_hdmi = intel_bios_port_supports_dvi(dev_priv, port) || 5475 intel_bios_port_supports_hdmi(dev_priv, port); 5476 init_dp = intel_bios_port_supports_dp(dev_priv, port); 5477 5478 if (intel_bios_is_lspcon_present(dev_priv, port)) { 5479 /* 5480 * Lspcon device needs to be driven with DP connector 5481 * with special detection sequence. So make sure DP 5482 * is initialized before lspcon. 
5483 */ 5484 init_dp = true; 5485 init_hdmi = false; 5486 drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n", 5487 port_name(port)); 5488 } 5489 5490 if (!init_dp && !init_hdmi) { 5491 drm_dbg_kms(&dev_priv->drm, 5492 "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n", 5493 port_name(port)); 5494 return; 5495 } 5496 5497 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL); 5498 if (!dig_port) 5499 return; 5500 5501 encoder = &dig_port->base; 5502 5503 if (INTEL_GEN(dev_priv) >= 12) { 5504 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 5505 5506 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 5507 DRM_MODE_ENCODER_TMDS, 5508 "DDI %s%c/PHY %s%c", 5509 port >= PORT_TC1 ? "TC" : "", 5510 port >= PORT_TC1 ? port_tc_name(port) : port_name(port), 5511 tc_port != TC_PORT_NONE ? "TC" : "", 5512 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 5513 } else if (INTEL_GEN(dev_priv) >= 11) { 5514 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 5515 5516 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 5517 DRM_MODE_ENCODER_TMDS, 5518 "DDI %c%s/PHY %s%c", 5519 port_name(port), 5520 port >= PORT_C ? " (TC)" : "", 5521 tc_port != TC_PORT_NONE ? "TC" : "", 5522 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 5523 } else { 5524 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 5525 DRM_MODE_ENCODER_TMDS, 5526 "DDI %c/PHY %c", port_name(port), phy_name(phy)); 5527 } 5528 5529 mutex_init(&dig_port->hdcp_mutex); 5530 dig_port->num_hdcp_streams = 0; 5531 5532 encoder->hotplug = intel_ddi_hotplug; 5533 encoder->compute_output_type = intel_ddi_compute_output_type; 5534 encoder->compute_config = intel_ddi_compute_config; 5535 encoder->compute_config_late = intel_ddi_compute_config_late; 5536 encoder->enable = intel_enable_ddi; 5537 encoder->pre_pll_enable = intel_ddi_pre_pll_enable; 5538 encoder->pre_enable = intel_ddi_pre_enable; 5539 encoder->disable = intel_disable_ddi; 5540 encoder->post_disable = intel_ddi_post_disable; 5541 encoder->update_pipe = intel_ddi_update_pipe; 5542 encoder->get_hw_state = intel_ddi_get_hw_state; 5543 encoder->get_config = intel_ddi_get_config; 5544 encoder->sync_state = intel_ddi_sync_state; 5545 encoder->initial_fastset_check = intel_ddi_initial_fastset_check; 5546 encoder->suspend = intel_dp_encoder_suspend; 5547 encoder->shutdown = intel_dp_encoder_shutdown; 5548 encoder->get_power_domains = intel_ddi_get_power_domains; 5549 5550 encoder->type = INTEL_OUTPUT_DDI; 5551 encoder->power_domain = intel_port_to_power_domain(port); 5552 encoder->port = port; 5553 encoder->cloneable = 0; 5554 encoder->pipe_mask = ~0; 5555 5556 if (IS_DG1(dev_priv)) 5557 encoder->hpd_pin = dg1_hpd_pin(dev_priv, port); 5558 else if (IS_ROCKETLAKE(dev_priv)) 5559 encoder->hpd_pin = rkl_hpd_pin(dev_priv, port); 5560 else if (INTEL_GEN(dev_priv) >= 12) 5561 encoder->hpd_pin = tgl_hpd_pin(dev_priv, port); 5562 else if (IS_JSL_EHL(dev_priv)) 5563 encoder->hpd_pin = ehl_hpd_pin(dev_priv, port); 5564 else if (IS_GEN(dev_priv, 11)) 5565 encoder->hpd_pin = icl_hpd_pin(dev_priv, port); 5566 else if (IS_GEN(dev_priv, 10)) 5567 encoder->hpd_pin = cnl_hpd_pin(dev_priv, port); 5568 else 5569 encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port); 5570 5571 if (INTEL_GEN(dev_priv) >= 11) 5572 dig_port->saved_port_bits = 5573 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 5574 & DDI_BUF_PORT_REVERSAL; 5575 else 5576 dig_port->saved_port_bits = 5577 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 5578 & 
(DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES); 5579 5580 dig_port->dp.output_reg = INVALID_MMIO_REG; 5581 dig_port->max_lanes = intel_ddi_max_lanes(dig_port); 5582 dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port); 5583 5584 if (intel_phy_is_tc(dev_priv, phy)) { 5585 bool is_legacy = 5586 !intel_bios_port_supports_typec_usb(dev_priv, port) && 5587 !intel_bios_port_supports_tbt(dev_priv, port); 5588 5589 intel_tc_port_init(dig_port, is_legacy); 5590 5591 encoder->update_prepare = intel_ddi_update_prepare; 5592 encoder->update_complete = intel_ddi_update_complete; 5593 } 5594 5595 drm_WARN_ON(&dev_priv->drm, port > PORT_I); 5596 dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO + 5597 port - PORT_A; 5598 5599 if (init_dp) { 5600 if (!intel_ddi_init_dp_connector(dig_port)) 5601 goto err; 5602 5603 dig_port->hpd_pulse = intel_dp_hpd_pulse; 5604 } 5605 5606 /* In theory we don't need the encoder->type check, but leave it just in 5607 * case we have some really bad VBTs... */ 5608 if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) { 5609 if (!intel_ddi_init_hdmi_connector(dig_port)) 5610 goto err; 5611 } 5612 5613 if (INTEL_GEN(dev_priv) >= 11) { 5614 if (intel_phy_is_tc(dev_priv, phy)) 5615 dig_port->connected = intel_tc_port_connected; 5616 else 5617 dig_port->connected = lpt_digital_port_connected; 5618 } else if (INTEL_GEN(dev_priv) >= 8) { 5619 if (port == PORT_A || IS_GEN9_LP(dev_priv)) 5620 dig_port->connected = bdw_digital_port_connected; 5621 else 5622 dig_port->connected = lpt_digital_port_connected; 5623 } else { 5624 if (port == PORT_A) 5625 dig_port->connected = hsw_digital_port_connected; 5626 else 5627 dig_port->connected = lpt_digital_port_connected; 5628 } 5629 5630 intel_infoframe_init(dig_port); 5631 5632 return; 5633 5634 err: 5635 drm_encoder_cleanup(&encoder->base); 5636 kfree(dig_port); 5637 } 5638
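/*
 * Illustrative sketch only, not part of the driver: a minimal example of
 * how a platform's output-setup path might register DDI ports with
 * intel_ddi_init() above. The real call sites live in the platform output
 * setup code and the port list is platform-specific; the function name
 * below is hypothetical.
 */
#if 0
static void example_setup_ddi_outputs(struct drm_i915_private *dev_priv)
{
	/*
	 * intel_ddi_init() checks VBT and HTI state itself and silently
	 * skips ports that should not be brought up.
	 */
	intel_ddi_init(dev_priv, PORT_A);
	intel_ddi_init(dev_priv, PORT_B);
	intel_ddi_init(dev_priv, PORT_C);
}
#endif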