1 /* 2 * Copyright 2012-15 Advanced Micro Devices, Inc. 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice shall be included in 12 * all copies or substantial portions of the Software. 13 * 14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR 18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 20 * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: AMD
 *
 */

#include "dm_services.h"

#include "resource.h"
#include "include/irq_service_interface.h"
#include "link_encoder.h"
#include "stream_encoder.h"
#include "opp.h"
#include "timing_generator.h"
#include "transform.h"
#include "dpp.h"
#include "core_types.h"
#include "set_mode_types.h"
#include "virtual/virtual_stream_encoder.h"

#include "dce80/dce80_resource.h"
#include "dce100/dce100_resource.h"
#include "dce110/dce110_resource.h"
#include "dce112/dce112_resource.h"
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
#include "dcn10/dcn10_resource.h"
#endif
#include "dce120/dce120_resource.h"

/*
 * resource_parse_asic_id() - map a hardware ASIC id to a display-engine version.
 * @asic_id: chip family and internal revision reported by the hardware.
 *
 * Returns the DCE/DCN version enum used to pick a resource pool implementation,
 * or DCE_VERSION_UNKNOWN for unrecognized families. FAMILY_KV and FAMILY_VI
 * are further split by internal revision (e.g. Tonga/Fiji vs. Polaris).
 */
enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
{
	enum dce_version dc_version = DCE_VERSION_UNKNOWN;

	switch (asic_id.chip_family) {

	case FAMILY_CI:
		dc_version = DCE_VERSION_8_0;
		break;
	case FAMILY_KV:
		/* Kalindi/Bhavani/Godavari revisions are DCE 8.3; other KV is 8.1 */
		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
			dc_version = DCE_VERSION_8_3;
		else
			dc_version = DCE_VERSION_8_1;
		break;
	case FAMILY_CZ:
		dc_version = DCE_VERSION_11_0;
		break;

	case FAMILY_VI:
		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
		    ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
			dc_version = DCE_VERSION_10_0;
			break;
		}
		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
		    ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
		    ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
			dc_version = DCE_VERSION_11_2;
		}
		break;
	case FAMILY_AI:
		dc_version = DCE_VERSION_12_0;
		break;
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
	case FAMILY_RV:
		dc_version = DCN_VERSION_1_0;
		break;
#endif
	default:
		dc_version = DCE_VERSION_UNKNOWN;
		break;
	}
	return dc_version;
}

struct resource_pool *dc_create_resource_pool(
		struct dc *dc,
		int num_virtual_links,
		enum dce_version dc_version,
		struct hw_asic_id asic_id)
{
	struct resource_pool *res_pool = NULL;

	/* Dispatch to the per-generation resource pool constructor. */
	switch (dc_version) {
	case DCE_VERSION_8_0:
		res_pool = dce80_create_resource_pool(
			num_virtual_links, dc);
		break;
	case DCE_VERSION_8_1:
		res_pool = dce81_create_resource_pool(
			num_virtual_links, dc);
		break;
	case DCE_VERSION_8_3:
		res_pool = dce83_create_resource_pool(
			num_virtual_links, dc);
		break;
	case DCE_VERSION_10_0:
		res_pool = dce100_create_resource_pool(
				num_virtual_links, dc);
		break;
	case DCE_VERSION_11_0:
		res_pool = dce110_create_resource_pool(
			num_virtual_links, dc, asic_id);
		break;
	case DCE_VERSION_11_2:
		res_pool = dce112_create_resource_pool(
			num_virtual_links, dc);
		break;
	case DCE_VERSION_12_0:
		res_pool = dce120_create_resource_pool(
				num_virtual_links, dc);
		break;

#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
	case DCN_VERSION_1_0:
		res_pool = dcn10_create_resource_pool(
				num_virtual_links, dc);
		break;
#endif

	default:
		/* Unknown version: caller gets NULL. */
		break;
	}

	/* Seed the pool's reference clock from the VBIOS crystal frequency. */
	if (res_pool != NULL) {
		struct dc_firmware_info fw_info = { { 0 } };

		if (dc->ctx->dc_bios->funcs->get_firmware_info(
				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
			res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
		} else
			ASSERT_CRITICAL(false);
	}

	return res_pool;
}

/*
 * dc_destroy_resource_pool() - tear down the pool and hwseq owned by @dc.
 * Safe to call with a NULL dc or a dc whose pool was never created.
 */
void dc_destroy_resource_pool(struct dc *dc)
{
	if (dc) {
		if (dc->res_pool)
			dc->res_pool->funcs->destroy(&dc->res_pool);

		/* kfree(NULL) is a no-op, so no guard needed. */
		kfree(dc->hwseq);
	}
}

/*
 * update_num_audio() - derive audio capabilities from DCE strap settings.
 * @straps:      raw strap values read from hardware.
 * @num_audio:   in/out; clamped to 1 when straps report no multi-stream support.
 * @aud_support: out; DP audio is always available, HDMI audio depends on straps.
 */
static void update_num_audio(
	const struct resource_straps *straps,
	unsigned int *num_audio,
	struct audio_support *aud_support)
{
	aud_support->dp_audio = true;
	aud_support->hdmi_audio_native = false;
	aud_support->hdmi_audio_on_dongle = false;

	if (straps->hdmi_disable == 0) {
		/* NOTE(review): bit 1 of dc_pinstraps_audio presumably means
		 * "HDMI audio present" — confirm against register spec. */
		if (straps->dc_pinstraps_audio & 0x2) {
			aud_support->hdmi_audio_on_dongle = true;
			aud_support->hdmi_audio_native = true;
		}
	}

	switch (straps->audio_stream_number) {
	case 0: /* multi streams supported */
		break;
	case 1: /* multi streams not supported */
		*num_audio = 1;
		break;
	default:
		DC_ERR("DC: unexpected audio fuse!\n");
	}
}

/*
 * resource_construct() - populate a resource pool with audio endpoints,
 * stream encoders (hardware + virtual) and the hardware sequencer.
 * @num_virtual_links: number of virtual stream encoders to append.
 * @dc:                owning dc instance; receives hwseq and dynamic_audio cap.
 * @pool:              pool to fill; counts are reset here.
 * @create_funcs:      per-generation constructor callbacks.
 *
 * Returns false if an audio endpoint or virtual stream encoder cannot be
 * created; the caller is expected to destroy the partially built pool.
 */
bool resource_construct(
	unsigned int num_virtual_links,
	struct dc *dc,
	struct resource_pool *pool,
	const struct resource_create_funcs *create_funcs)
{
	struct dc_context *ctx = dc->ctx;
	const struct resource_caps *caps = pool->res_cap;
	int i;
	unsigned int num_audio = caps->num_audio;
	struct resource_straps straps = {0};

	if (create_funcs->read_dce_straps)
		create_funcs->read_dce_straps(dc->ctx, &straps);

	pool->audio_count = 0;
	if (create_funcs->create_audio) {
		/* find the total number of streams available via the
		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
		 * registers (one for each pin) starting from pin 1
		 * up to the max number of audio pins.
		 * We stop on the first pin where
		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
		 */
		update_num_audio(&straps, &num_audio, &pool->audio_support);
		for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
			struct audio *aud = create_funcs->create_audio(ctx, i);

			if (aud == NULL) {
				DC_ERR("DC: failed to create audio!\n");
				return false;
			}

			/* First invalid endpoint terminates enumeration (HW contract). */
			if (!aud->funcs->endpoint_valid(aud)) {
				aud->funcs->destroy(&aud);
				break;
			}

			pool->audios[i] = aud;
			pool->audio_count++;
		}
	}

	pool->stream_enc_count = 0;
	if (create_funcs->create_stream_encoder) {
		for (i = 0; i < caps->num_stream_encoder; i++) {
			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
			/* NOTE(review): on failure this only logs, then still
			 * increments stream_enc_count, leaving a counted NULL
			 * entry in stream_enc[] — verify callers tolerate that
			 * or consider returning false here like the audio path. */
			if (pool->stream_enc[i] == NULL)
				DC_ERR("DC: failed to create stream_encoder!\n");
			pool->stream_enc_count++;
		}
	}
	/* Allow dynamic audio routing when encoders outnumber audio endpoints. */
	dc->caps.dynamic_audio = false;
	if (pool->audio_count < pool->stream_enc_count) {
		dc->caps.dynamic_audio = true;
	}
	/* Append virtual stream encoders after the hardware ones. */
	for (i = 0; i < num_virtual_links; i++) {
		pool->stream_enc[pool->stream_enc_count] =
			virtual_stream_encoder_create(
					ctx, ctx->dc_bios);
		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
			DC_ERR("DC: failed to create stream_encoder!\n");
			return false;
		}
		pool->stream_enc_count++;
	}

	dc->hwseq = create_funcs->create_hwseq(ctx);

	return true;
}

/*
 * resource_unreference_clock_source() - drop one reference on @clock_source.
 * Decrements the per-pool ref count for the matching clock source, or the
 * dedicated DP clock source counter when it matches.
 */
void resource_unreference_clock_source(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct clock_source *clock_source)
{
	int i;

	for (i = 0; i < pool->clk_src_count; i++) {
		if (pool->clock_sources[i] != clock_source)
			continue;

		res_ctx->clock_source_ref_count[i]--;

		break;
	}

	if (pool->dp_clock_source == clock_source)
		res_ctx->dp_clock_source_ref_count--;
}

/*
 * resource_reference_clock_source() - take one reference on @clock_source.
 * Mirror of resource_unreference_clock_source().
 */
void resource_reference_clock_source(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct clock_source *clock_source)
{
	int i;
	for (i = 0; i <
			pool->clk_src_count; i++) {
		if (pool->clock_sources[i] != clock_source)
			continue;

		res_ctx->clock_source_ref_count[i]++;
		break;
	}

	if (pool->dp_clock_source == clock_source)
		res_ctx->dp_clock_source_ref_count++;
}

/*
 * resource_are_streams_timing_synchronizable() - can two streams be driven
 * from the same timing?
 *
 * True only when both streams agree on total/addressable dimensions and
 * pixel clock. A phy pixel clock mismatch is tolerated only when BOTH
 * streams are DP (DP retimes on its own link clock).
 */
bool resource_are_streams_timing_synchronizable(
	struct dc_stream_state *stream1,
	struct dc_stream_state *stream2)
{
	if (stream1->timing.h_total != stream2->timing.h_total)
		return false;

	if (stream1->timing.v_total != stream2->timing.v_total)
		return false;

	if (stream1->timing.h_addressable
				!= stream2->timing.h_addressable)
		return false;

	if (stream1->timing.v_addressable
				!= stream2->timing.v_addressable)
		return false;

	if (stream1->timing.pix_clk_khz
				!= stream2->timing.pix_clk_khz)
		return false;

	if (stream1->phy_pix_clk != stream2->phy_pix_clk
			&& (!dc_is_dp_signal(stream1->signal)
			|| !dc_is_dp_signal(stream2->signal)))
		return false;

	return true;
}

/*
 * is_sharable_clk_src() - can @pipe share the clock source already used by
 * @pipe_with_clk_src?
 *
 * Sharing is rejected for virtual/DP owners, for mixed HDMI/DVI pairs,
 * and when the two streams' timings are not synchronizable.
 */
static bool is_sharable_clk_src(
	const struct pipe_ctx *pipe_with_clk_src,
	const struct pipe_ctx *pipe)
{
	if (pipe_with_clk_src->clock_source == NULL)
		return false;

	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
		return false;

	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
		return false;

	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
			&& dc_is_dvi_signal(pipe->stream->signal))
		return false;

	if (dc_is_hdmi_signal(pipe->stream->signal)
			&& dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
		return false;

	if (!resource_are_streams_timing_synchronizable(
			pipe_with_clk_src->stream, pipe->stream))
		return false;

	return true;
}

/*
 * resource_find_used_clk_src_for_sharing() - find an in-use clock source
 * that @pipe_ctx may share, or NULL if none qualifies.
 */
struct clock_source *resource_find_used_clk_src_for_sharing(
		struct resource_context *res_ctx,
		struct pipe_ctx *pipe_ctx)
{
	int i;

	for (i = 0; i < MAX_PIPES; i++) {
		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
			return res_ctx->pipe_ctx[i].clock_source;
	}

	return NULL;
}

/*
 * convert_pixel_format_to_dalsurface() - translate a surface pixel format
 * into the internal DAL pixel format used by the scaler.
 * Unhandled formats map to PIXEL_FORMAT_UNKNOWN.
 */
static enum pixel_format convert_pixel_format_to_dalsurface(
		enum surface_pixel_format surface_pixel_format)
{
	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;

	switch (surface_pixel_format) {
	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
		dal_pixel_format = PIXEL_FORMAT_INDEX8;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
		dal_pixel_format = PIXEL_FORMAT_RGB565;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		dal_pixel_format = PIXEL_FORMAT_RGB565;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
		dal_pixel_format = PIXEL_FORMAT_FP16;
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
		dal_pixel_format = PIXEL_FORMAT_420BPP8;
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
		dal_pixel_format = PIXEL_FORMAT_420BPP10;
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
	default:
		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
		break;
	}
	return dal_pixel_format;
}

/* Swap width<->height and x<->y in place (used for 90/270 rotation). */
static void rect_swap_helper(struct rect *rect)
{
	uint32_t temp = 0;

	temp = rect->height;
	rect->height = rect->width;
	rect->width = temp;

	temp = rect->x;
	rect->x = rect->y;
	rect->y = temp;
}

/*
 * calculate_viewport() - compute the surface viewport (and chroma viewport
 * for 4:2:0 formats) for a pipe from the intersection of the stream source
 * rect and the plane clip rect, handling rotation and pipe splitting.
 *
 * All scaling math is integer; chroma offsets lost to the /2 rounding are
 * compensated later via data->inits.h_c / v_c.
 */
static void calculate_viewport(struct pipe_ctx *pipe_ctx)
{
	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
	const struct dc_stream_state *stream = pipe_ctx->stream;
	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
	struct rect surf_src = plane_state->src_rect;
	struct rect clip = { 0 };
	/* Chroma plane is half-resolution for 4:2:0 formats. */
	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
	bool pri_split = pipe_ctx->bottom_pipe &&
			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
	bool sec_split = pipe_ctx->top_pipe &&
			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;

	/* Stereo 3D layouts never split the viewport horizontally. */
	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
		pri_split = false;
		sec_split = false;
	}

	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
		rect_swap_helper(&surf_src);

	/* The actual clip is an intersection between stream
	 * source and surface clip
	 */
	clip.x = stream->src.x > plane_state->clip_rect.x ?
			stream->src.x : plane_state->clip_rect.x;

	clip.width = stream->src.x + stream->src.width <
			plane_state->clip_rect.x + plane_state->clip_rect.width ?
			stream->src.x + stream->src.width - clip.x :
			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;

	clip.y = stream->src.y > plane_state->clip_rect.y ?
			stream->src.y : plane_state->clip_rect.y;

	clip.height = stream->src.y + stream->src.height <
			plane_state->clip_rect.y + plane_state->clip_rect.height ?
			stream->src.y + stream->src.height - clip.y :
			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;

	/* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
	 * num_pixels = clip.num_pix * scl_ratio
	 */
	data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
			surf_src.width / plane_state->dst_rect.width;
	data->viewport.width = clip.width *
			surf_src.width / plane_state->dst_rect.width;

	data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
			surf_src.height / plane_state->dst_rect.height;
	data->viewport.height = clip.height *
			surf_src.height / plane_state->dst_rect.height;

	/* Round down, compensate in init */
	data->viewport_c.x = data->viewport.x / vpc_div;
	data->viewport_c.y = data->viewport.y / vpc_div;
	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
			dal_fixed31_32_half : dal_fixed31_32_zero;
	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
			dal_fixed31_32_half : dal_fixed31_32_zero;
	/* Round up, assume original video size always even dimensions */
	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;

	/* Handle hsplit */
	if (pri_split || sec_split) {
		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
					(plane_state->rotation == ROTATION_ANGLE_180);

		if (plane_state->rotation == ROTATION_ANGLE_90
				|| plane_state->rotation == ROTATION_ANGLE_270)
			/* Secondary_pipe XOR Rotation_270 */
			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;

		if (right_view) {
			data->viewport.width /= 2;
			data->viewport_c.width /= 2;
			data->viewport.x += data->viewport.width;
			data->viewport_c.x += data->viewport_c.width;
			/* Ceil offset pipe */
			data->viewport.width += data->viewport.width % 2;
			data->viewport_c.width += data->viewport_c.width % 2;
		} else {
			data->viewport.width /= 2;
			data->viewport_c.width /= 2;
		}
	}

	if (plane_state->rotation == ROTATION_ANGLE_90 ||
			plane_state->rotation == ROTATION_ANGLE_270) {
		rect_swap_helper(&data->viewport_c);
		rect_swap_helper(&data->viewport);
	}
}

/*
 * calculate_recout() - compute the rectangle-out (destination rectangle on
 * the stream) for a pipe, clipped to the stream destination, and the
 * offset (recout_skip) from the unclipped recout origin used later when
 * computing scaler inits.
 */
static void calculate_recout(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
{
	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
	const struct dc_stream_state *stream = pipe_ctx->stream;
	struct rect surf_src = plane_state->src_rect;
	struct rect surf_clip = plane_state->clip_rect;
	int recout_full_x, recout_full_y;

	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
		pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
		rect_swap_helper(&surf_src);

	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
	if (stream->src.x < surf_clip.x)
		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
			- stream->src.x) * stream->dst.width
						/ stream->src.width;

	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
			stream->dst.width / stream->src.width;
	/* Clamp recout to the right edge of the stream destination. */
	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
			stream->dst.x + stream->dst.width)
		pipe_ctx->plane_res.scl_data.recout.width =
			stream->dst.x + stream->dst.width
						- pipe_ctx->plane_res.scl_data.recout.x;

	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
	if (stream->src.y < surf_clip.y)
		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
			- stream->src.y) * stream->dst.height
						/ stream->src.height;

	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
			stream->dst.height / stream->src.height;
	/* Clamp recout to the bottom edge of the stream destination. */
	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
			stream->dst.y + stream->dst.height)
		pipe_ctx->plane_res.scl_data.recout.height =
			stream->dst.y + stream->dst.height
						- pipe_ctx->plane_res.scl_data.recout.y;

	/* Handle h & vsplit */
	if (pipe_ctx->top_pipe && pipe_ctx->top_pipe->plane_state ==
		pipe_ctx->plane_state) {
		/* Secondary pipe of a split: take the second half. */
		if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
			pipe_ctx->plane_res.scl_data.recout.height /= 2;
			pipe_ctx->plane_res.scl_data.recout.y += pipe_ctx->plane_res.scl_data.recout.height;
			/* Floor primary pipe, ceil 2ndary pipe */
			pipe_ctx->plane_res.scl_data.recout.height += pipe_ctx->plane_res.scl_data.recout.height % 2;
		} else {
			pipe_ctx->plane_res.scl_data.recout.width /= 2;
			pipe_ctx->plane_res.scl_data.recout.x += pipe_ctx->plane_res.scl_data.recout.width;
			pipe_ctx->plane_res.scl_data.recout.width += pipe_ctx->plane_res.scl_data.recout.width % 2;
		}
	} else if (pipe_ctx->bottom_pipe &&
			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state) {
		/* Primary pipe of a split: keep the first half. */
		if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
			pipe_ctx->plane_res.scl_data.recout.height /= 2;
		else
			pipe_ctx->plane_res.scl_data.recout.width /= 2;
	}

	/* Unclipped recout offset = stream dst offset + ((surf dst offset - stream surf_src offset)
	 *			* 1/ stream scaling ratio) - (surf surf_src offset * 1/ full scl
	 *			ratio)
	 */
	recout_full_x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
					* stream->dst.width / stream->src.width -
			surf_src.x * plane_state->dst_rect.width / surf_src.width
					* stream->dst.width / stream->src.width;
	recout_full_y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
					* stream->dst.height / stream->src.height -
			surf_src.y * plane_state->dst_rect.height / surf_src.height
					* stream->dst.height / stream->src.height;

	recout_skip->width = pipe_ctx->plane_res.scl_data.recout.x - recout_full_x;
	recout_skip->height = pipe_ctx->plane_res.scl_data.recout.y - recout_full_y;
}

/*
 * calculate_scaling_ratios() - compute horizontal/vertical (and chroma)
 * scaling ratios in 31.32 fixed point, combining the plane's surface->dst
 * scale with the stream's src->dst scale and 3D view format doubling.
 */
static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
{
	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
	const struct dc_stream_state *stream = pipe_ctx->stream;
	struct rect surf_src = plane_state->src_rect;
	const int in_w = stream->src.width;
	const int in_h = stream->src.height;
	const int out_w = stream->dst.width;
	const int out_h = stream->dst.height;

	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
		rect_swap_helper(&surf_src);

	pipe_ctx->plane_res.scl_data.ratios.horz = dal_fixed31_32_from_fraction(
					surf_src.width,
					plane_state->dst_rect.width);
	pipe_ctx->plane_res.scl_data.ratios.vert = dal_fixed31_32_from_fraction(
					surf_src.height,
					plane_state->dst_rect.height);

	/* Side-by-side / top-and-bottom 3D pack two views into one frame. */
	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;

	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);

	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;

	/* Chroma samples at half rate for 4:2:0. */
	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
	}
}

/*
 * calculate_inits_and_adj_vp() - compute scaler filter initial phases and
 * make final viewport adjustments for filter taps and non-zero offsets.
 */
static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
{
	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
	struct rect src = pipe_ctx->plane_state->src_rect;
	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
			|| data->format == PIXEL_FORMAT_420BPP10) ?
			2 : 1;

	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
		rect_swap_helper(&src);
		rect_swap_helper(&data->viewport_c);
		rect_swap_helper(&data->viewport);
	}

	/*
	 * Init calculated according to formula:
	 *	init = (scaling_ratio + number_of_taps + 1) / 2
	 *	init_bot = init + scaling_ratio
	 *	init_c = init + truncated_vp_c_offset(from calculate viewport)
	 */
	data->inits.h = dal_fixed31_32_div_int(
			dal_fixed31_32_add_int(data->ratios.horz, data->taps.h_taps + 1), 2);

	data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_div_int(
			dal_fixed31_32_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2));

	data->inits.v = dal_fixed31_32_div_int(
			dal_fixed31_32_add_int(data->ratios.vert, data->taps.v_taps + 1), 2);

	data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_div_int(
			dal_fixed31_32_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2));


	/* Adjust for viewport end clip-off */
	if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
		int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
		int int_part = dal_fixed31_32_floor(
				dal_fixed31_32_sub(data->inits.h, data->ratios.horz));

		int_part = int_part > 0 ? int_part : 0;
		data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
	}
	if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
		int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
		int int_part = dal_fixed31_32_floor(
				dal_fixed31_32_sub(data->inits.v, data->ratios.vert));

		int_part = int_part > 0 ? int_part : 0;
		data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
	}
	if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
		int vp_clip = (src.x + src.width) / vpc_div -
				data->viewport_c.width - data->viewport_c.x;
		int int_part = dal_fixed31_32_floor(
				dal_fixed31_32_sub(data->inits.h_c, data->ratios.horz_c));

		int_part = int_part > 0 ? int_part : 0;
		data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
	}
	if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
		int vp_clip = (src.y + src.height) / vpc_div -
				data->viewport_c.height - data->viewport_c.y;
		int int_part = dal_fixed31_32_floor(
				dal_fixed31_32_sub(data->inits.v_c, data->ratios.vert_c));

		int_part = int_part > 0 ? int_part : 0;
		data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
	}

	/* Adjust for non-0 viewport offset */
	if (data->viewport.x) {
		int int_part;

		data->inits.h = dal_fixed31_32_add(data->inits.h, dal_fixed31_32_mul_int(
				data->ratios.horz, recout_skip->width));
		int_part = dal_fixed31_32_floor(data->inits.h) - data->viewport.x;
		if (int_part < data->taps.h_taps) {
			int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
						(data->taps.h_taps - int_part) : data->viewport.x;
			data->viewport.x -= int_adj;
			data->viewport.width += int_adj;
			int_part += int_adj;
		} else if (int_part > data->taps.h_taps) {
			data->viewport.x += int_part - data->taps.h_taps;
			data->viewport.width -= int_part - data->taps.h_taps;
			int_part = data->taps.h_taps;
		}
		/* Keep only the fractional part, then re-add the integer part. */
		data->inits.h.value &= 0xffffffff;
		data->inits.h = dal_fixed31_32_add_int(data->inits.h, int_part);
	}

	if (data->viewport_c.x) {
		int int_part;

		data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_mul_int(
				data->ratios.horz_c, recout_skip->width));
		int_part = dal_fixed31_32_floor(data->inits.h_c) - data->viewport_c.x;
		if (int_part < data->taps.h_taps_c) {
			int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
					(data->taps.h_taps_c - int_part) : data->viewport_c.x;
			data->viewport_c.x -= int_adj;
			data->viewport_c.width += int_adj;
			int_part += int_adj;
		} else if (int_part > data->taps.h_taps_c) {
			data->viewport_c.x += int_part - data->taps.h_taps_c;
			data->viewport_c.width -= int_part - data->taps.h_taps_c;
			int_part = data->taps.h_taps_c;
		}
		data->inits.h_c.value &= 0xffffffff;
		data->inits.h_c = dal_fixed31_32_add_int(data->inits.h_c, int_part);
	}

	if (data->viewport.y) {
		int int_part;

		data->inits.v = dal_fixed31_32_add(data->inits.v, dal_fixed31_32_mul_int(
				data->ratios.vert, recout_skip->height));
		int_part = dal_fixed31_32_floor(data->inits.v) - data->viewport.y;
		if (int_part < data->taps.v_taps) {
			int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
						(data->taps.v_taps - int_part) : data->viewport.y;
			data->viewport.y -= int_adj;
			data->viewport.height += int_adj;
			int_part += int_adj;
		} else if (int_part > data->taps.v_taps) {
			data->viewport.y += int_part - data->taps.v_taps;
			data->viewport.height -= int_part - data->taps.v_taps;
			int_part = data->taps.v_taps;
		}
		data->inits.v.value &= 0xffffffff;
		data->inits.v = dal_fixed31_32_add_int(data->inits.v, int_part);
	}

	if (data->viewport_c.y) {
		int int_part;

		data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_mul_int(
				data->ratios.vert_c, recout_skip->height));
		int_part = dal_fixed31_32_floor(data->inits.v_c) - data->viewport_c.y;
		if (int_part < data->taps.v_taps_c) {
			int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
					(data->taps.v_taps_c - int_part) : data->viewport_c.y;
			data->viewport_c.y -= int_adj;
			data->viewport_c.height += int_adj;
			int_part += int_adj;
		} else if (int_part > data->taps.v_taps_c) {
			data->viewport_c.y += int_part - data->taps.v_taps_c;
			data->viewport_c.height -= int_part - data->taps.v_taps_c;
			int_part = data->taps.v_taps_c;
		}
		data->inits.v_c.value &= 0xffffffff;
		data->inits.v_c = dal_fixed31_32_add_int(data->inits.v_c, int_part);
	}

	/* Interlaced inits based on final vert inits */
	data->inits.v_bot = dal_fixed31_32_add(data->inits.v, data->ratios.vert);
	data->inits.v_c_bot = dal_fixed31_32_add(data->inits.v_c, data->ratios.vert_c);

	/* Undo the temporary swap done at the top of this function. */
	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
		rect_swap_helper(&data->viewport_c);
		rect_swap_helper(&data->viewport);
	}
}

/*
 * resource_build_scaling_params() - compute the full set of scaler
 * parameters (format, ratios, viewport, recout, taps, inits) for a pipe.
 * Returns false when the viewport is too small or no valid tap
 * configuration exists.
 */
bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
{
	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
	struct view
recout_skip = { 0 }; 829 bool res = false; 830 831 /* Important: scaling ratio calculation requires pixel format, 832 * lb depth calculation requires recout and taps require scaling ratios. 833 * Inits require viewport, taps, ratios and recout of split pipe 834 */ 835 pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface( 836 pipe_ctx->plane_state->format); 837 838 calculate_scaling_ratios(pipe_ctx); 839 840 calculate_viewport(pipe_ctx); 841 842 if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16) 843 return false; 844 845 calculate_recout(pipe_ctx, &recout_skip); 846 847 /** 848 * Setting line buffer pixel depth to 24bpp yields banding 849 * on certain displays, such as the Sharp 4k 850 */ 851 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP; 852 853 pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable; 854 pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable; 855 856 /* Taps calculations */ 857 if (pipe_ctx->plane_res.xfm != NULL) 858 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps( 859 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality); 860 861 if (pipe_ctx->plane_res.dpp != NULL) 862 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps( 863 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality); 864 865 if (!res) { 866 /* Try 24 bpp linebuffer */ 867 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP; 868 869 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps( 870 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality); 871 872 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps( 873 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality); 874 } 875 876 if (res) 877 /* May need to re-check lb size after this in some obscure 
scenario */ 878 calculate_inits_and_adj_vp(pipe_ctx, &recout_skip); 879 880 dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER, 881 "%s: Viewport:\nheight:%d width:%d x:%d " 882 "y:%d\n dst_rect:\nheight:%d width:%d x:%d " 883 "y:%d\n", 884 __func__, 885 pipe_ctx->plane_res.scl_data.viewport.height, 886 pipe_ctx->plane_res.scl_data.viewport.width, 887 pipe_ctx->plane_res.scl_data.viewport.x, 888 pipe_ctx->plane_res.scl_data.viewport.y, 889 plane_state->dst_rect.height, 890 plane_state->dst_rect.width, 891 plane_state->dst_rect.x, 892 plane_state->dst_rect.y); 893 894 return res; 895 } 896 897 898 enum dc_status resource_build_scaling_params_for_context( 899 const struct dc *dc, 900 struct dc_state *context) 901 { 902 int i; 903 904 for (i = 0; i < MAX_PIPES; i++) { 905 if (context->res_ctx.pipe_ctx[i].plane_state != NULL && 906 context->res_ctx.pipe_ctx[i].stream != NULL) 907 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i])) 908 return DC_FAIL_SCALING; 909 } 910 911 return DC_OK; 912 } 913 914 struct pipe_ctx *find_idle_secondary_pipe( 915 struct resource_context *res_ctx, 916 const struct resource_pool *pool) 917 { 918 int i; 919 struct pipe_ctx *secondary_pipe = NULL; 920 921 /* 922 * search backwards for the second pipe to keep pipe 923 * assignment more consistent 924 */ 925 926 for (i = pool->pipe_count - 1; i >= 0; i--) { 927 if (res_ctx->pipe_ctx[i].stream == NULL) { 928 secondary_pipe = &res_ctx->pipe_ctx[i]; 929 secondary_pipe->pipe_idx = i; 930 break; 931 } 932 } 933 934 935 return secondary_pipe; 936 } 937 938 struct pipe_ctx *resource_get_head_pipe_for_stream( 939 struct resource_context *res_ctx, 940 struct dc_stream_state *stream) 941 { 942 int i; 943 for (i = 0; i < MAX_PIPES; i++) { 944 if (res_ctx->pipe_ctx[i].stream == stream && 945 !res_ctx->pipe_ctx[i].top_pipe) { 946 return &res_ctx->pipe_ctx[i]; 947 break; 948 } 949 } 950 return NULL; 951 } 952 953 static struct pipe_ctx *resource_get_tail_pipe_for_stream( 954 struct 
resource_context *res_ctx, 955 struct dc_stream_state *stream) 956 { 957 struct pipe_ctx *head_pipe, *tail_pipe; 958 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream); 959 960 if (!head_pipe) 961 return NULL; 962 963 tail_pipe = head_pipe->bottom_pipe; 964 965 while (tail_pipe) { 966 head_pipe = tail_pipe; 967 tail_pipe = tail_pipe->bottom_pipe; 968 } 969 970 return head_pipe; 971 } 972 973 /* 974 * A free_pipe for a stream is defined here as a pipe 975 * that has no surface attached yet 976 */ 977 static struct pipe_ctx *acquire_free_pipe_for_stream( 978 struct dc_state *context, 979 const struct resource_pool *pool, 980 struct dc_stream_state *stream) 981 { 982 int i; 983 struct resource_context *res_ctx = &context->res_ctx; 984 985 struct pipe_ctx *head_pipe = NULL; 986 987 /* Find head pipe, which has the back end set up*/ 988 989 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream); 990 991 if (!head_pipe) 992 ASSERT(0); 993 994 if (!head_pipe->plane_state) 995 return head_pipe; 996 997 /* Re-use pipe already acquired for this stream if available*/ 998 for (i = pool->pipe_count - 1; i >= 0; i--) { 999 if (res_ctx->pipe_ctx[i].stream == stream && 1000 !res_ctx->pipe_ctx[i].plane_state) { 1001 return &res_ctx->pipe_ctx[i]; 1002 } 1003 } 1004 1005 /* 1006 * At this point we have no re-useable pipe for this stream and we need 1007 * to acquire an idle one to satisfy the request 1008 */ 1009 1010 if (!pool->funcs->acquire_idle_pipe_for_layer) 1011 return NULL; 1012 1013 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream); 1014 1015 } 1016 1017 #if defined(CONFIG_DRM_AMD_DC_DCN1_0) 1018 static int acquire_first_split_pipe( 1019 struct resource_context *res_ctx, 1020 const struct resource_pool *pool, 1021 struct dc_stream_state *stream) 1022 { 1023 int i; 1024 1025 for (i = 0; i < pool->pipe_count; i++) { 1026 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i]; 1027 1028 if (pipe_ctx->top_pipe && 1029 
pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) { 1030 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe; 1031 if (pipe_ctx->bottom_pipe) 1032 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe; 1033 1034 memset(pipe_ctx, 0, sizeof(*pipe_ctx)); 1035 pipe_ctx->stream_res.tg = pool->timing_generators[i]; 1036 pipe_ctx->plane_res.hubp = pool->hubps[i]; 1037 pipe_ctx->plane_res.ipp = pool->ipps[i]; 1038 pipe_ctx->plane_res.dpp = pool->dpps[i]; 1039 pipe_ctx->stream_res.opp = pool->opps[i]; 1040 pipe_ctx->pipe_idx = i; 1041 1042 pipe_ctx->stream = stream; 1043 return i; 1044 } 1045 } 1046 return -1; 1047 } 1048 #endif 1049 1050 bool dc_add_plane_to_context( 1051 const struct dc *dc, 1052 struct dc_stream_state *stream, 1053 struct dc_plane_state *plane_state, 1054 struct dc_state *context) 1055 { 1056 int i; 1057 struct resource_pool *pool = dc->res_pool; 1058 struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe; 1059 struct dc_stream_status *stream_status = NULL; 1060 1061 for (i = 0; i < context->stream_count; i++) 1062 if (context->streams[i] == stream) { 1063 stream_status = &context->stream_status[i]; 1064 break; 1065 } 1066 if (stream_status == NULL) { 1067 dm_error("Existing stream not found; failed to attach surface!\n"); 1068 return false; 1069 } 1070 1071 1072 if (stream_status->plane_count == MAX_SURFACE_NUM) { 1073 dm_error("Surface: can not attach plane_state %p! 
Maximum is: %d\n", 1074 plane_state, MAX_SURFACE_NUM); 1075 return false; 1076 } 1077 1078 head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream); 1079 1080 if (!head_pipe) { 1081 dm_error("Head pipe not found for stream_state %p !\n", stream); 1082 return false; 1083 } 1084 1085 free_pipe = acquire_free_pipe_for_stream(context, pool, stream); 1086 1087 #if defined(CONFIG_DRM_AMD_DC_DCN1_0) 1088 if (!free_pipe) { 1089 int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream); 1090 if (pipe_idx >= 0) 1091 free_pipe = &context->res_ctx.pipe_ctx[pipe_idx]; 1092 } 1093 #endif 1094 if (!free_pipe) 1095 return false; 1096 1097 /* retain new surfaces */ 1098 dc_plane_state_retain(plane_state); 1099 free_pipe->plane_state = plane_state; 1100 1101 if (head_pipe != free_pipe) { 1102 1103 tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream); 1104 ASSERT(tail_pipe); 1105 1106 free_pipe->stream_res.tg = tail_pipe->stream_res.tg; 1107 free_pipe->stream_res.opp = tail_pipe->stream_res.opp; 1108 free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc; 1109 free_pipe->stream_res.audio = tail_pipe->stream_res.audio; 1110 free_pipe->clock_source = tail_pipe->clock_source; 1111 free_pipe->top_pipe = tail_pipe; 1112 tail_pipe->bottom_pipe = free_pipe; 1113 } 1114 1115 /* assign new surfaces*/ 1116 stream_status->plane_states[stream_status->plane_count] = plane_state; 1117 1118 stream_status->plane_count++; 1119 1120 return true; 1121 } 1122 1123 bool dc_remove_plane_from_context( 1124 const struct dc *dc, 1125 struct dc_stream_state *stream, 1126 struct dc_plane_state *plane_state, 1127 struct dc_state *context) 1128 { 1129 int i; 1130 struct dc_stream_status *stream_status = NULL; 1131 struct resource_pool *pool = dc->res_pool; 1132 1133 for (i = 0; i < context->stream_count; i++) 1134 if (context->streams[i] == stream) { 1135 stream_status = &context->stream_status[i]; 1136 break; 1137 } 1138 1139 if (stream_status == 
NULL) { 1140 dm_error("Existing stream not found; failed to remove plane.\n"); 1141 return false; 1142 } 1143 1144 /* release pipe for plane*/ 1145 for (i = pool->pipe_count - 1; i >= 0; i--) { 1146 struct pipe_ctx *pipe_ctx; 1147 1148 if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) { 1149 pipe_ctx = &context->res_ctx.pipe_ctx[i]; 1150 1151 if (pipe_ctx->top_pipe) 1152 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe; 1153 1154 /* Second condition is to avoid setting NULL to top pipe 1155 * of tail pipe making it look like head pipe in subsequent 1156 * deletes 1157 */ 1158 if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe) 1159 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe; 1160 1161 /* 1162 * For head pipe detach surfaces from pipe for tail 1163 * pipe just zero it out 1164 */ 1165 if (!pipe_ctx->top_pipe) { 1166 pipe_ctx->plane_state = NULL; 1167 pipe_ctx->bottom_pipe = NULL; 1168 } else { 1169 memset(pipe_ctx, 0, sizeof(*pipe_ctx)); 1170 } 1171 } 1172 } 1173 1174 1175 for (i = 0; i < stream_status->plane_count; i++) { 1176 if (stream_status->plane_states[i] == plane_state) { 1177 1178 dc_plane_state_release(stream_status->plane_states[i]); 1179 break; 1180 } 1181 } 1182 1183 if (i == stream_status->plane_count) { 1184 dm_error("Existing plane_state not found; failed to detach it!\n"); 1185 return false; 1186 } 1187 1188 stream_status->plane_count--; 1189 1190 /* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */ 1191 for (; i < stream_status->plane_count; i++) 1192 stream_status->plane_states[i] = stream_status->plane_states[i + 1]; 1193 1194 stream_status->plane_states[stream_status->plane_count] = NULL; 1195 1196 return true; 1197 } 1198 1199 bool dc_rem_all_planes_for_stream( 1200 const struct dc *dc, 1201 struct dc_stream_state *stream, 1202 struct dc_state *context) 1203 { 1204 int i, old_plane_count; 1205 struct dc_stream_status *stream_status = NULL; 1206 struct dc_plane_state 
*del_planes[MAX_SURFACE_NUM] = { 0 }; 1207 1208 for (i = 0; i < context->stream_count; i++) 1209 if (context->streams[i] == stream) { 1210 stream_status = &context->stream_status[i]; 1211 break; 1212 } 1213 1214 if (stream_status == NULL) { 1215 dm_error("Existing stream %p not found!\n", stream); 1216 return false; 1217 } 1218 1219 old_plane_count = stream_status->plane_count; 1220 1221 for (i = 0; i < old_plane_count; i++) 1222 del_planes[i] = stream_status->plane_states[i]; 1223 1224 for (i = 0; i < old_plane_count; i++) 1225 if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context)) 1226 return false; 1227 1228 return true; 1229 } 1230 1231 static bool add_all_planes_for_stream( 1232 const struct dc *dc, 1233 struct dc_stream_state *stream, 1234 const struct dc_validation_set set[], 1235 int set_count, 1236 struct dc_state *context) 1237 { 1238 int i, j; 1239 1240 for (i = 0; i < set_count; i++) 1241 if (set[i].stream == stream) 1242 break; 1243 1244 if (i == set_count) { 1245 dm_error("Stream %p not found in set!\n", stream); 1246 return false; 1247 } 1248 1249 for (j = 0; j < set[i].plane_count; j++) 1250 if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context)) 1251 return false; 1252 1253 return true; 1254 } 1255 1256 bool dc_add_all_planes_for_stream( 1257 const struct dc *dc, 1258 struct dc_stream_state *stream, 1259 struct dc_plane_state * const *plane_states, 1260 int plane_count, 1261 struct dc_state *context) 1262 { 1263 struct dc_validation_set set; 1264 int i; 1265 1266 set.stream = stream; 1267 set.plane_count = plane_count; 1268 1269 for (i = 0; i < plane_count; i++) 1270 set.plane_states[i] = plane_states[i]; 1271 1272 return add_all_planes_for_stream(dc, stream, &set, 1, context); 1273 } 1274 1275 1276 1277 static bool is_timing_changed(struct dc_stream_state *cur_stream, 1278 struct dc_stream_state *new_stream) 1279 { 1280 if (cur_stream == NULL) 1281 return true; 1282 1283 /* If sink pointer changed, it means this 
is a hotplug, we should do 1284 * full hw setting. 1285 */ 1286 if (cur_stream->sink != new_stream->sink) 1287 return true; 1288 1289 /* If output color space is changed, need to reprogram info frames */ 1290 if (cur_stream->output_color_space != new_stream->output_color_space) 1291 return true; 1292 1293 return memcmp( 1294 &cur_stream->timing, 1295 &new_stream->timing, 1296 sizeof(struct dc_crtc_timing)) != 0; 1297 } 1298 1299 static bool are_stream_backends_same( 1300 struct dc_stream_state *stream_a, struct dc_stream_state *stream_b) 1301 { 1302 if (stream_a == stream_b) 1303 return true; 1304 1305 if (stream_a == NULL || stream_b == NULL) 1306 return false; 1307 1308 if (is_timing_changed(stream_a, stream_b)) 1309 return false; 1310 1311 return true; 1312 } 1313 1314 bool dc_is_stream_unchanged( 1315 struct dc_stream_state *old_stream, struct dc_stream_state *stream) 1316 { 1317 1318 if (!are_stream_backends_same(old_stream, stream)) 1319 return false; 1320 1321 return true; 1322 } 1323 1324 /* Maximum TMDS single link pixel clock 165MHz */ 1325 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000 1326 1327 static void update_stream_engine_usage( 1328 struct resource_context *res_ctx, 1329 const struct resource_pool *pool, 1330 struct stream_encoder *stream_enc, 1331 bool acquired) 1332 { 1333 int i; 1334 1335 for (i = 0; i < pool->stream_enc_count; i++) { 1336 if (pool->stream_enc[i] == stream_enc) 1337 res_ctx->is_stream_enc_acquired[i] = acquired; 1338 } 1339 } 1340 1341 /* TODO: release audio object */ 1342 void update_audio_usage( 1343 struct resource_context *res_ctx, 1344 const struct resource_pool *pool, 1345 struct audio *audio, 1346 bool acquired) 1347 { 1348 int i; 1349 for (i = 0; i < pool->audio_count; i++) { 1350 if (pool->audios[i] == audio) 1351 res_ctx->is_audio_acquired[i] = acquired; 1352 } 1353 } 1354 1355 static int acquire_first_free_pipe( 1356 struct resource_context *res_ctx, 1357 const struct resource_pool *pool, 1358 struct dc_stream_state 
*stream) 1359 { 1360 int i; 1361 1362 for (i = 0; i < pool->pipe_count; i++) { 1363 if (!res_ctx->pipe_ctx[i].stream) { 1364 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i]; 1365 1366 pipe_ctx->stream_res.tg = pool->timing_generators[i]; 1367 pipe_ctx->plane_res.mi = pool->mis[i]; 1368 pipe_ctx->plane_res.hubp = pool->hubps[i]; 1369 pipe_ctx->plane_res.ipp = pool->ipps[i]; 1370 pipe_ctx->plane_res.xfm = pool->transforms[i]; 1371 pipe_ctx->plane_res.dpp = pool->dpps[i]; 1372 pipe_ctx->stream_res.opp = pool->opps[i]; 1373 pipe_ctx->pipe_idx = i; 1374 1375 1376 pipe_ctx->stream = stream; 1377 return i; 1378 } 1379 } 1380 return -1; 1381 } 1382 1383 static struct stream_encoder *find_first_free_match_stream_enc_for_link( 1384 struct resource_context *res_ctx, 1385 const struct resource_pool *pool, 1386 struct dc_stream_state *stream) 1387 { 1388 int i; 1389 int j = -1; 1390 struct dc_link *link = stream->sink->link; 1391 1392 for (i = 0; i < pool->stream_enc_count; i++) { 1393 if (!res_ctx->is_stream_enc_acquired[i] && 1394 pool->stream_enc[i]) { 1395 /* Store first available for MST second display 1396 * in daisy chain use case */ 1397 j = i; 1398 if (pool->stream_enc[i]->id == 1399 link->link_enc->preferred_engine) 1400 return pool->stream_enc[i]; 1401 } 1402 } 1403 1404 /* 1405 * below can happen in cases when stream encoder is acquired: 1406 * 1) for second MST display in chain, so preferred engine already 1407 * acquired; 1408 * 2) for another link, which preferred engine already acquired by any 1409 * MST configuration. 1410 * 1411 * If signal is of DP type and preferred engine not found, return last available 1412 * 1413 * TODO - This is just a patch up and a generic solution is 1414 * required for non DP connectors. 
1415 */ 1416 1417 if (j >= 0 && dc_is_dp_signal(stream->signal)) 1418 return pool->stream_enc[j]; 1419 1420 return NULL; 1421 } 1422 1423 static struct audio *find_first_free_audio( 1424 struct resource_context *res_ctx, 1425 const struct resource_pool *pool) 1426 { 1427 int i; 1428 for (i = 0; i < pool->audio_count; i++) { 1429 if ((res_ctx->is_audio_acquired[i] == false) && (res_ctx->is_stream_enc_acquired[i] == true)) { 1430 return pool->audios[i]; 1431 } 1432 } 1433 /*not found the matching one, first come first serve*/ 1434 for (i = 0; i < pool->audio_count; i++) { 1435 if (res_ctx->is_audio_acquired[i] == false) { 1436 return pool->audios[i]; 1437 } 1438 } 1439 return 0; 1440 } 1441 1442 bool resource_is_stream_unchanged( 1443 struct dc_state *old_context, struct dc_stream_state *stream) 1444 { 1445 int i; 1446 1447 for (i = 0; i < old_context->stream_count; i++) { 1448 struct dc_stream_state *old_stream = old_context->streams[i]; 1449 1450 if (are_stream_backends_same(old_stream, stream)) 1451 return true; 1452 } 1453 1454 return false; 1455 } 1456 1457 enum dc_status dc_add_stream_to_ctx( 1458 struct dc *dc, 1459 struct dc_state *new_ctx, 1460 struct dc_stream_state *stream) 1461 { 1462 struct dc_context *dc_ctx = dc->ctx; 1463 enum dc_status res; 1464 1465 if (new_ctx->stream_count >= dc->res_pool->pipe_count) { 1466 DC_ERROR("Max streams reached, can add stream %p !\n", stream); 1467 return DC_ERROR_UNEXPECTED; 1468 } 1469 1470 new_ctx->streams[new_ctx->stream_count] = stream; 1471 dc_stream_retain(stream); 1472 new_ctx->stream_count++; 1473 1474 res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream); 1475 if (res != DC_OK) 1476 DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res); 1477 1478 return res; 1479 } 1480 1481 bool dc_remove_stream_from_ctx( 1482 struct dc *dc, 1483 struct dc_state *new_ctx, 1484 struct dc_stream_state *stream) 1485 { 1486 int i; 1487 struct dc_context *dc_ctx = dc->ctx; 1488 struct pipe_ctx 
*del_pipe = NULL; 1489 1490 /* Release primary pipe */ 1491 for (i = 0; i < MAX_PIPES; i++) { 1492 if (new_ctx->res_ctx.pipe_ctx[i].stream == stream && 1493 !new_ctx->res_ctx.pipe_ctx[i].top_pipe) { 1494 del_pipe = &new_ctx->res_ctx.pipe_ctx[i]; 1495 1496 ASSERT(del_pipe->stream_res.stream_enc); 1497 update_stream_engine_usage( 1498 &new_ctx->res_ctx, 1499 dc->res_pool, 1500 del_pipe->stream_res.stream_enc, 1501 false); 1502 1503 if (del_pipe->stream_res.audio) 1504 update_audio_usage( 1505 &new_ctx->res_ctx, 1506 dc->res_pool, 1507 del_pipe->stream_res.audio, 1508 false); 1509 1510 resource_unreference_clock_source(&new_ctx->res_ctx, 1511 dc->res_pool, 1512 del_pipe->clock_source); 1513 1514 memset(del_pipe, 0, sizeof(*del_pipe)); 1515 1516 break; 1517 } 1518 } 1519 1520 if (!del_pipe) { 1521 DC_ERROR("Pipe not found for stream %p !\n", stream); 1522 return DC_ERROR_UNEXPECTED; 1523 } 1524 1525 for (i = 0; i < new_ctx->stream_count; i++) 1526 if (new_ctx->streams[i] == stream) 1527 break; 1528 1529 if (new_ctx->streams[i] != stream) { 1530 DC_ERROR("Context doesn't have stream %p !\n", stream); 1531 return DC_ERROR_UNEXPECTED; 1532 } 1533 1534 dc_stream_release(new_ctx->streams[i]); 1535 new_ctx->stream_count--; 1536 1537 /* Trim back arrays */ 1538 for (; i < new_ctx->stream_count; i++) { 1539 new_ctx->streams[i] = new_ctx->streams[i + 1]; 1540 new_ctx->stream_status[i] = new_ctx->stream_status[i + 1]; 1541 } 1542 1543 new_ctx->streams[new_ctx->stream_count] = NULL; 1544 memset( 1545 &new_ctx->stream_status[new_ctx->stream_count], 1546 0, 1547 sizeof(new_ctx->stream_status[0])); 1548 1549 return DC_OK; 1550 } 1551 1552 static void copy_pipe_ctx( 1553 const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx) 1554 { 1555 struct dc_plane_state *plane_state = to_pipe_ctx->plane_state; 1556 struct dc_stream_state *stream = to_pipe_ctx->stream; 1557 1558 *to_pipe_ctx = *from_pipe_ctx; 1559 to_pipe_ctx->stream = stream; 1560 if (plane_state != NULL) 1561 
to_pipe_ctx->plane_state = plane_state; 1562 } 1563 1564 static struct dc_stream_state *find_pll_sharable_stream( 1565 struct dc_stream_state *stream_needs_pll, 1566 struct dc_state *context) 1567 { 1568 int i; 1569 1570 for (i = 0; i < context->stream_count; i++) { 1571 struct dc_stream_state *stream_has_pll = context->streams[i]; 1572 1573 /* We are looking for non dp, non virtual stream */ 1574 if (resource_are_streams_timing_synchronizable( 1575 stream_needs_pll, stream_has_pll) 1576 && !dc_is_dp_signal(stream_has_pll->signal) 1577 && stream_has_pll->sink->link->connector_signal 1578 != SIGNAL_TYPE_VIRTUAL) 1579 return stream_has_pll; 1580 1581 } 1582 1583 return NULL; 1584 } 1585 1586 static int get_norm_pix_clk(const struct dc_crtc_timing *timing) 1587 { 1588 uint32_t pix_clk = timing->pix_clk_khz; 1589 uint32_t normalized_pix_clk = pix_clk; 1590 1591 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420) 1592 pix_clk /= 2; 1593 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) { 1594 switch (timing->display_color_depth) { 1595 case COLOR_DEPTH_888: 1596 normalized_pix_clk = pix_clk; 1597 break; 1598 case COLOR_DEPTH_101010: 1599 normalized_pix_clk = (pix_clk * 30) / 24; 1600 break; 1601 case COLOR_DEPTH_121212: 1602 normalized_pix_clk = (pix_clk * 36) / 24; 1603 break; 1604 case COLOR_DEPTH_161616: 1605 normalized_pix_clk = (pix_clk * 48) / 24; 1606 break; 1607 default: 1608 ASSERT(0); 1609 break; 1610 } 1611 } 1612 return normalized_pix_clk; 1613 } 1614 1615 static void calculate_phy_pix_clks(struct dc_stream_state *stream) 1616 { 1617 /* update actual pixel clock on all streams */ 1618 if (dc_is_hdmi_signal(stream->signal)) 1619 stream->phy_pix_clk = get_norm_pix_clk( 1620 &stream->timing); 1621 else 1622 stream->phy_pix_clk = 1623 stream->timing.pix_clk_khz; 1624 } 1625 1626 enum dc_status resource_map_pool_resources( 1627 const struct dc *dc, 1628 struct dc_state *context, 1629 struct dc_stream_state *stream) 1630 { 1631 const struct resource_pool 
*pool = dc->res_pool; 1632 int i; 1633 struct dc_context *dc_ctx = dc->ctx; 1634 struct pipe_ctx *pipe_ctx = NULL; 1635 int pipe_idx = -1; 1636 1637 /* TODO Check if this is needed */ 1638 /*if (!resource_is_stream_unchanged(old_context, stream)) { 1639 if (stream != NULL && old_context->streams[i] != NULL) { 1640 stream->bit_depth_params = 1641 old_context->streams[i]->bit_depth_params; 1642 stream->clamping = old_context->streams[i]->clamping; 1643 continue; 1644 } 1645 } 1646 */ 1647 1648 /* acquire new resources */ 1649 pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream); 1650 1651 if (pipe_idx < 0) 1652 pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream); 1653 1654 if (pipe_idx < 0) 1655 return DC_NO_CONTROLLER_RESOURCE; 1656 1657 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx]; 1658 1659 pipe_ctx->stream_res.stream_enc = 1660 find_first_free_match_stream_enc_for_link( 1661 &context->res_ctx, pool, stream); 1662 1663 if (!pipe_ctx->stream_res.stream_enc) 1664 return DC_NO_STREAM_ENG_RESOURCE; 1665 1666 update_stream_engine_usage( 1667 &context->res_ctx, pool, 1668 pipe_ctx->stream_res.stream_enc, 1669 true); 1670 1671 /* TODO: Add check if ASIC support and EDID audio */ 1672 if (!stream->sink->converter_disable_audio && 1673 dc_is_audio_capable_signal(pipe_ctx->stream->signal) && 1674 stream->audio_info.mode_count) { 1675 pipe_ctx->stream_res.audio = find_first_free_audio( 1676 &context->res_ctx, pool); 1677 1678 /* 1679 * Audio assigned in order first come first get. 
1680 * There are asics which has number of audio 1681 * resources less then number of pipes 1682 */ 1683 if (pipe_ctx->stream_res.audio) 1684 update_audio_usage(&context->res_ctx, pool, 1685 pipe_ctx->stream_res.audio, true); 1686 } 1687 1688 for (i = 0; i < context->stream_count; i++) 1689 if (context->streams[i] == stream) { 1690 context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst; 1691 context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id; 1692 return DC_OK; 1693 } 1694 1695 DC_ERROR("Stream %p not found in new ctx!\n", stream); 1696 return DC_ERROR_UNEXPECTED; 1697 } 1698 1699 /* first stream in the context is used to populate the rest */ 1700 void validate_guaranteed_copy_streams( 1701 struct dc_state *context, 1702 int max_streams) 1703 { 1704 int i; 1705 1706 for (i = 1; i < max_streams; i++) { 1707 context->streams[i] = context->streams[0]; 1708 1709 copy_pipe_ctx(&context->res_ctx.pipe_ctx[0], 1710 &context->res_ctx.pipe_ctx[i]); 1711 context->res_ctx.pipe_ctx[i].stream = 1712 context->res_ctx.pipe_ctx[0].stream; 1713 1714 dc_stream_retain(context->streams[i]); 1715 context->stream_count++; 1716 } 1717 } 1718 1719 void dc_resource_state_copy_construct_current( 1720 const struct dc *dc, 1721 struct dc_state *dst_ctx) 1722 { 1723 dc_resource_state_copy_construct(dc->current_state, dst_ctx); 1724 } 1725 1726 1727 void dc_resource_state_construct( 1728 const struct dc *dc, 1729 struct dc_state *dst_ctx) 1730 { 1731 dst_ctx->dis_clk = dc->res_pool->display_clock; 1732 } 1733 1734 enum dc_status dc_validate_global_state( 1735 struct dc *dc, 1736 struct dc_state *new_ctx) 1737 { 1738 enum dc_status result = DC_ERROR_UNEXPECTED; 1739 int i, j; 1740 1741 if (dc->res_pool->funcs->validate_global) { 1742 result = dc->res_pool->funcs->validate_global(dc, new_ctx); 1743 if (result != DC_OK) 1744 return result; 1745 } 1746 1747 for (i = 0; new_ctx && i < new_ctx->stream_count; i++) { 1748 struct dc_stream_state *stream = 
new_ctx->streams[i]; 1749 1750 for (j = 0; j < dc->res_pool->pipe_count; j++) { 1751 struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j]; 1752 1753 if (pipe_ctx->stream != stream) 1754 continue; 1755 1756 /* Switch to dp clock source only if there is 1757 * no non dp stream that shares the same timing 1758 * with the dp stream. 1759 */ 1760 if (dc_is_dp_signal(pipe_ctx->stream->signal) && 1761 !find_pll_sharable_stream(stream, new_ctx)) { 1762 1763 resource_unreference_clock_source( 1764 &new_ctx->res_ctx, 1765 dc->res_pool, 1766 pipe_ctx->clock_source); 1767 1768 pipe_ctx->clock_source = dc->res_pool->dp_clock_source; 1769 resource_reference_clock_source( 1770 &new_ctx->res_ctx, 1771 dc->res_pool, 1772 pipe_ctx->clock_source); 1773 } 1774 } 1775 } 1776 1777 result = resource_build_scaling_params_for_context(dc, new_ctx); 1778 1779 if (result == DC_OK) 1780 if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx)) 1781 result = DC_FAIL_BANDWIDTH_VALIDATE; 1782 1783 return result; 1784 } 1785 1786 static void patch_gamut_packet_checksum( 1787 struct encoder_info_packet *gamut_packet) 1788 { 1789 /* For gamut we recalc checksum */ 1790 if (gamut_packet->valid) { 1791 uint8_t chk_sum = 0; 1792 uint8_t *ptr; 1793 uint8_t i; 1794 1795 /*start of the Gamut data. 
*/ 1796 ptr = &gamut_packet->sb[3]; 1797 1798 for (i = 0; i <= gamut_packet->sb[1]; i++) 1799 chk_sum += ptr[i]; 1800 1801 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum); 1802 } 1803 } 1804 1805 static void set_avi_info_frame( 1806 struct encoder_info_packet *info_packet, 1807 struct pipe_ctx *pipe_ctx) 1808 { 1809 struct dc_stream_state *stream = pipe_ctx->stream; 1810 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN; 1811 struct info_frame info_frame = { {0} }; 1812 uint32_t pixel_encoding = 0; 1813 enum scanning_type scan_type = SCANNING_TYPE_NODATA; 1814 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA; 1815 bool itc = false; 1816 uint8_t itc_value = 0; 1817 uint8_t cn0_cn1 = 0; 1818 unsigned int cn0_cn1_value = 0; 1819 uint8_t *check_sum = NULL; 1820 uint8_t byte_index = 0; 1821 union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi; 1822 union display_content_support support = {0}; 1823 unsigned int vic = pipe_ctx->stream->timing.vic; 1824 enum dc_timing_3d_format format; 1825 1826 color_space = pipe_ctx->stream->output_color_space; 1827 if (color_space == COLOR_SPACE_UNKNOWN) 1828 color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ? 
1829 COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709; 1830 1831 /* Initialize header */ 1832 hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI; 1833 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall 1834 * not be used in HDMI 2.0 (Section 10.1) */ 1835 hdmi_info->bits.header.version = 2; 1836 hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE; 1837 1838 /* 1839 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built 1840 * according to HDMI 2.0 spec (Section 10.1) 1841 */ 1842 1843 switch (stream->timing.pixel_encoding) { 1844 case PIXEL_ENCODING_YCBCR422: 1845 pixel_encoding = 1; 1846 break; 1847 1848 case PIXEL_ENCODING_YCBCR444: 1849 pixel_encoding = 2; 1850 break; 1851 case PIXEL_ENCODING_YCBCR420: 1852 pixel_encoding = 3; 1853 break; 1854 1855 case PIXEL_ENCODING_RGB: 1856 default: 1857 pixel_encoding = 0; 1858 } 1859 1860 /* Y0_Y1_Y2 : The pixel encoding */ 1861 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */ 1862 hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding; 1863 1864 /* A0 = 1 Active Format Information valid */ 1865 hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID; 1866 1867 /* B0, B1 = 3; Bar info data is valid */ 1868 hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID; 1869 1870 hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM; 1871 1872 /* S0, S1 : Underscan / Overscan */ 1873 /* TODO: un-hardcode scan type */ 1874 scan_type = SCANNING_TYPE_UNDERSCAN; 1875 hdmi_info->bits.S0_S1 = scan_type; 1876 1877 /* C0, C1 : Colorimetry */ 1878 if (color_space == COLOR_SPACE_YCBCR709 || 1879 color_space == COLOR_SPACE_YCBCR709_LIMITED) 1880 hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709; 1881 else if (color_space == COLOR_SPACE_YCBCR601 || 1882 color_space == COLOR_SPACE_YCBCR601_LIMITED) 1883 hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601; 1884 else { 1885 hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA; 1886 } 1887 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE || 1888 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE || 1889 
color_space == COLOR_SPACE_2020_YCBCR) { 1890 hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR; 1891 hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED; 1892 } else if (color_space == COLOR_SPACE_ADOBERGB) { 1893 hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB; 1894 hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED; 1895 } 1896 1897 /* TODO: un-hardcode aspect ratio */ 1898 aspect = stream->timing.aspect_ratio; 1899 1900 switch (aspect) { 1901 case ASPECT_RATIO_4_3: 1902 case ASPECT_RATIO_16_9: 1903 hdmi_info->bits.M0_M1 = aspect; 1904 break; 1905 1906 case ASPECT_RATIO_NO_DATA: 1907 case ASPECT_RATIO_64_27: 1908 case ASPECT_RATIO_256_135: 1909 default: 1910 hdmi_info->bits.M0_M1 = 0; 1911 } 1912 1913 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */ 1914 hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE; 1915 1916 /* TODO: un-hardcode cn0_cn1 and itc */ 1917 1918 cn0_cn1 = 0; 1919 cn0_cn1_value = 0; 1920 1921 itc = true; 1922 itc_value = 1; 1923 1924 support = stream->sink->edid_caps.content_support; 1925 1926 if (itc) { 1927 if (!support.bits.valid_content_type) { 1928 cn0_cn1_value = 0; 1929 } else { 1930 if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) { 1931 if (support.bits.graphics_content == 1) { 1932 cn0_cn1_value = 0; 1933 } 1934 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) { 1935 if (support.bits.photo_content == 1) { 1936 cn0_cn1_value = 1; 1937 } else { 1938 cn0_cn1_value = 0; 1939 itc_value = 0; 1940 } 1941 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) { 1942 if (support.bits.cinema_content == 1) { 1943 cn0_cn1_value = 2; 1944 } else { 1945 cn0_cn1_value = 0; 1946 itc_value = 0; 1947 } 1948 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) { 1949 if (support.bits.game_content == 1) { 1950 cn0_cn1_value = 3; 1951 } else { 1952 cn0_cn1_value = 0; 1953 itc_value = 0; 1954 } 1955 } 1956 } 1957 hdmi_info->bits.CN0_CN1 = cn0_cn1_value; 1958 hdmi_info->bits.ITC = itc_value; 1959 } 1960 1961 /* TODO : We should handle YCC 
quantization */
	/* but we do not have matrix calculation */
	if (stream->sink->edid_caps.qs_bit == 1 &&
			stream->sink->edid_caps.qy_bit == 1) {
		/* Sink advertises selectable RGB (QS) and YCC (QY) quantization
		 * ranges, so signal the range explicitly in the AVI InfoFrame. */
		if (color_space == COLOR_SPACE_SRGB ||
			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
			hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
			hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
		} else {
			hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
		}
	} else {
		/* Sink cannot select a range: leave RGB at the default range
		 * implied by the video format. */
		hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
		hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
	}

	///VIC
	format = stream->timing.timing_3d_format;
	/*todo, add 3DStereo support*/
	if (format != TIMING_3D_FORMAT_NONE) {
		// Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
		switch (pipe_ctx->stream->timing.hdmi_vic) {
		case 1:
			vic = 95;
			break;
		case 2:
			vic = 94;
			break;
		case 3:
			vic = 93;
			break;
		case 4:
			vic = 98;
			break;
		default:
			break;
		}
	}
	hdmi_info->bits.VIC0_VIC7 = vic;

	/* pixel repetition
	 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition start from 1 */
	hdmi_info->bits.PR0_PR3 = 0;

	/* Bar Info
	 * barTop: Line Number of End of Top Bar.
	 * barBottom: Line Number of Start of Bottom Bar.
	 * barLeft: Pixel Number of End of Left Bar.
	 * barRight: Pixel Number of Start of Right Bar.
	 *
	 * NOTE(review): the "+ 1" makes bar_bottom/bar_right point one past
	 * the last border line/pixel ("start of" semantics) — confirm against
	 * the CTA-861 bar-info definition. */
	hdmi_info->bits.bar_top = stream->timing.v_border_top;
	hdmi_info->bits.bar_bottom = (stream->timing.v_total
			- stream->timing.v_border_bottom + 1);
	hdmi_info->bits.bar_left = stream->timing.h_border_left;
	hdmi_info->bits.bar_right = (stream->timing.h_total
			- stream->timing.h_border_right + 1);

	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
	check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];

	/* Seed with header bytes: type + (length + version?) — the "+ 2"
	 * presumably accounts for the version/length header bytes; the loop
	 * below adds the payload bytes sb[1..HDMI_AVI_INFOFRAME_SIZE]. */
	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;

	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
		*check_sum += hdmi_info->packet_raw_data.sb[byte_index];

	/* one byte complement */
	*check_sum = (uint8_t) (0x100 - *check_sum);

	/* Store in hw_path_mode */
	info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
	info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
	info_packet->hb2 = hdmi_info->packet_raw_data.hb2;

	for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
				info_packet_hdmi.packet_raw_data.sb); byte_index++)
		info_packet->sb[byte_index] = info_frame.avi_info_packet.
				info_packet_hdmi.packet_raw_data.sb[byte_index];

	info_packet->valid = true;
}

/*
 * Build the HDMI Vendor-Specific InfoFrame (VSIF).
 * Sent only when the timing is a 3D stereo format or an HDMI-VIC
 * (4K) extended-resolution mode; otherwise the packet stays invalid
 * and nothing is transmitted.
 */
static void set_vendor_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
{
	uint32_t length = 0;
	bool hdmi_vic_mode = false;
	uint8_t checksum = 0;
	uint32_t i = 0;
	enum dc_timing_3d_format format;
	// Can be different depending on packet content /*todo*/
	// unsigned int length = pPathMode->dolbyVision ? 24 : 5;

	info_packet->valid = false;

	format = stream->timing.timing_3d_format;
	if (stream->view_format == VIEW_3D_FORMAT_NONE)
		format = TIMING_3D_FORMAT_NONE;

	/* Can be different depending on packet content */
	length = 5;

	/* HDMI-VIC mode: a nonzero HDMI VIC with a 4K-or-larger total
	 * raster selects the extended-resolution VSIF form. */
	if (stream->timing.hdmi_vic != 0
			&& stream->timing.h_total >= 3840
			&& stream->timing.v_total >= 2160)
		hdmi_vic_mode = true;

	/* According to HDMI 1.4a CTS, VSIF should be sent
	 * for both 3D stereo and HDMI VIC modes.
	 * For all other modes, there is no VSIF sent.  */

	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
		return;

	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
	info_packet->sb[1] = 0x03;
	info_packet->sb[2] = 0x0C;
	info_packet->sb[3] = 0x00;

	/*PB4: 5 lower bytes = 0 (reserved). 3 higher bits = HDMI_Video_Format.
	 * The value for HDMI_Video_Format are:
	 * 0x0 (0b000) - No additional HDMI video format is presented in this
	 * packet
	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
	 * parameter follows
	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
	 * potentially 3D_Ext_Data follows
	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
	if (format != TIMING_3D_FORMAT_NONE)
		info_packet->sb[4] = (2 << 5);
	else if (hdmi_vic_mode)
		info_packet->sb[4] = (1 << 5);

	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bites = 0 (reserved). 4 higher bits = 3D_Structure.
	 * The value for 3D_Structure are:
	 * 0x0 - Frame Packing
	 * 0x1 - Field Alternative
	 * 0x2 - Line Alternative
	 * 0x3 - Side-by-Side (full)
	 * 0x4 - L + depth
	 * 0x5 - L + depth + graphics + graphics-depth
	 * 0x6 - Top-and-Bottom
	 * 0x7 - Reserved for future use
	 * 0x8 - Side-by-Side (Half)
	 * 0x9..0xE - Reserved for future use
	 * 0xF - Not used */
	switch (format) {
	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
		info_packet->sb[5] = (0x0 << 4);
		break;

	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
	case TIMING_3D_FORMAT_SBS_SW_PACKED:
		info_packet->sb[5] = (0x8 << 4);
		/* Side-by-Side (Half) carries an extra 3D_Ext_Data byte. */
		length = 6;
		break;

	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
	case TIMING_3D_FORMAT_TB_SW_PACKED:
		info_packet->sb[5] = (0x6 << 4);
		break;

	default:
		break;
	}

	/*PB5: If PB4 is set to 0x1 (extended resolution format)
	 * fill PB5 with the correct HDMI VIC code */
	if (hdmi_vic_mode)
		info_packet->sb[5] = stream->timing.hdmi_vic;

	/* Header */
	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
	info_packet->hb1 = 0x01; /* Version */

	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
	info_packet->hb2 = (uint8_t) (length);

	/* Calculate checksum over the three header bytes plus the
	 * 'length' payload bytes; PB0 is the one-byte complement. */
	checksum = 0;
	checksum += info_packet->hb0;
	checksum += info_packet->hb1;
	checksum += info_packet->hb2;

	for (i = 1; i <= length; i++)
		checksum += info_packet->sb[i];

	info_packet->sb[0] = (uint8_t) (0x100 - checksum);

	info_packet->valid = true;
}

/*
 * Build the Source Product Descriptor (SPD) packet used to advertise
 * FreeSync capability/state and the min/max refresh-rate range.
 * HDMI uses an InfoFrame header, DP uses an SDP header; the payload
 * layout is shared.  No-op when FreeSync is not supported.
 */
static void set_spd_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
{
	/* SPD info packet for FreeSync */

	unsigned char checksum = 0;
	unsigned int idx, payload_size = 0;

	/* Check if Freesync is supported. Return if false. If true,
	 * set the corresponding bit in the info packet
	 */
	if (stream->freesync_ctx.supported == false)
		return;

	if (dc_is_hdmi_signal(stream->signal)) {

		/* HEADER */

		/* HB0 = Packet Type = 0x83 (Source Product
		 * Descriptor InfoFrame)
		 */
		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;

		/* HB1 = Version = 0x01 */
		info_packet->hb1 = 0x01;

		/* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
		info_packet->hb2 = 0x08;

		payload_size = 0x08;

	} else if (dc_is_dp_signal(stream->signal)) {

		/* HEADER */

		/* HB0 = Secondary-data Packet ID = 0 - Only non-zero
		 * when used to associate audio related info packets
		 */
		info_packet->hb0 = 0x00;

		/* HB1 = Packet Type = 0x83 (Source Product
		 * Descriptor InfoFrame)
		 */
		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;

		/* HB2 = [Bits 7:0 = Least significant eight bits -
		 * For INFOFRAME, the value must be 1Bh]
		 */
		info_packet->hb2 = 0x1B;

		/* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
		 * [Bits 1:0 = Most significant two bits = 0x00]
		 */
		info_packet->hb3 = 0x04;

		/* DP checksum covers sb[1..0x1B]; sb[9..27] are zeroed
		 * below so only PB1-PB8 contribute. */
		payload_size = 0x1B;
	}

	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
	info_packet->sb[1] = 0x1A;

	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
	info_packet->sb[2] = 0x00;

	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
	info_packet->sb[3] = 0x00;

	/* PB4 = Reserved */
	info_packet->sb[4] = 0x00;

	/* PB5 = Reserved */
	info_packet->sb[5] = 0x00;

	/* PB6 = [Bits 7:3 = Reserved] */
	info_packet->sb[6] = 0x00;

	if (stream->freesync_ctx.supported == true)
		/* PB6 = [Bit 0 = FreeSync Supported] */
		info_packet->sb[6] |= 0x01;

	if (stream->freesync_ctx.enabled == true)
		/* PB6 = [Bit 1 = FreeSync Enabled] */
		info_packet->sb[6] |= 0x02;

	if (stream->freesync_ctx.active == true)
		/* PB6 = [Bit 2 = FreeSync Active] */
		info_packet->sb[6] |= 0x04;

	/* PB7 = FreeSync Minimum refresh rate (Hz) */
	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
			min_refresh_in_micro_hz / 1000000);

	/* PB8 = FreeSync Maximum refresh rate (Hz)
	 *
	 * Note: We do not use the maximum capable refresh rate
	 * of the panel, because we should never go above the field
	 * rate of the mode timing set.
	 */
	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
			nominal_refresh_in_micro_hz / 1000000);

	/* PB9 - PB27 = Reserved */
	for (idx = 9; idx <= 27; idx++)
		info_packet->sb[idx] = 0x00;

	/* Calculate checksum */
	checksum += info_packet->hb0;
	checksum += info_packet->hb1;
	checksum += info_packet->hb2;
	checksum += info_packet->hb3;

	for (idx = 1; idx <= payload_size; idx++)
		checksum += info_packet->sb[idx];

	/* PB0 = Checksum (one byte complement) */
	info_packet->sb[0] = (unsigned char) (0x100 - checksum);

	info_packet->valid = true;
}

/*
 * Build the HDR static-metadata packet (HDMI Dynamic Range and
 * Mastering InfoFrame / DP HDR metadata SDP) from the plane's
 * hdr_static_ctx.  No-op when no plane is given or the plane does
 * not carry HDR metadata.
 */
static void set_hdr_static_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_plane_state *plane_state,
		struct dc_stream_state *stream)
{
	/* 'i' is the next payload byte to write; the header form (and thus
	 * the payload start offset) differs between HDMI and DP. */
	uint16_t i = 0;
	enum signal_type signal = stream->signal;
	struct dc_hdr_static_metadata hdr_metadata;
	uint32_t data;

	if (!plane_state)
		return;

	hdr_metadata = plane_state->hdr_static_ctx;

	if (!hdr_metadata.hdr_supported)
		return;

	if (dc_is_hdmi_signal(signal)) {
		info_packet->valid = true;

		/* 0x87 = DRM InfoFrame type, version 1, length 0x1A (26). */
		info_packet->hb0 = 0x87;
		info_packet->hb1 = 0x01;
		info_packet->hb2 = 0x1A;
		i = 1;
	} else if (dc_is_dp_signal(signal)) {
		info_packet->valid = true;

		/* DP SDP header: packet type 0x87 in HB1;
		 * NOTE(review): HB2/HB3 presumably encode SDP length/revision
		 * per the DP spec — confirm against VESA DP SDP definition. */
		info_packet->hb0 = 0x00;
		info_packet->hb1 = 0x87;
		info_packet->hb2 = 0x1D;
		info_packet->hb3 = (0x13 << 2);
		i = 2;
	}

	/* EOTF byte: 0x02 presumably selects SMPTE ST 2084 (PQ) when the
	 * surface is HDR — confirm against CTA-861-G EOTF codes. */
	data = hdr_metadata.is_hdr;
	info_packet->sb[i++] = data ? 0x02 : 0x00;
	info_packet->sb[i++] = 0x00;

	/* Display primaries and white point, little-endian 16-bit each.
	 * The "/ 2" rescales from the source units into the packet's
	 * 0.00002-per-LSB units — TODO confirm scaling factor. */
	data = hdr_metadata.chromaticity_green_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_green_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_blue_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_blue_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_red_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_red_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_white_point_x / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.chromaticity_white_point_y / 2;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	/* Mastering luminance and content light levels. */
	data = hdr_metadata.max_luminance;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.min_luminance;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.maximum_content_light_level;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	data = hdr_metadata.maximum_frame_average_light_level;
	info_packet->sb[i++] = data & 0xFF;
	info_packet->sb[i++] = (data & 0xFF00) >> 8;

	if (dc_is_hdmi_signal(signal)) {
		/* HDMI InfoFrame checksum over header + payload;
		 * stored in sb[0] as the one-byte complement. */
		uint32_t checksum = 0;

		checksum += info_packet->hb0;
		checksum += info_packet->hb1;
		checksum += info_packet->hb2;

		for (i = 1; i <= info_packet->hb2; i++)
			checksum += info_packet->sb[i];

		info_packet->sb[0] = 0x100 - checksum;
	} else if (dc_is_dp_signal(signal)) {
		/* DP has no InfoFrame checksum; sb[0]/sb[1] presumably carry
		 * version (0x01) and length (0x1A = 26) — TODO confirm. */
		info_packet->sb[0] = 0x01;
		info_packet->sb[1] = 0x1A;
	}
}

/*
 * Build the DP Video Stream Configuration (VSC) SDP.  Currently only
 * emitted for PSR-enabled links (packet revision 2); all other cases
 * leave the packet invalid so nothing is sent.
 */
static void set_vsc_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
{
	unsigned int vscPacketRevision = 0;
	unsigned int i;

	if (stream->sink->link->psr_enabled) {
		vscPacketRevision = 2;
	}

	/* VSC packet not needed based on the features
	 * supported by this DP display
	 */
	if (vscPacketRevision == 0)
		return;

	if (vscPacketRevision == 0x2) {
		/* Secondary-data Packet ID = 0*/
		info_packet->hb0 = 0x00;
		/* 07h - Packet Type Value indicating Video
		 * Stream Configuration packet
		 */
		info_packet->hb1 = 0x07;
		/* 02h = VSC SDP supporting 3D stereo and PSR
		 * (applies to eDP v1.3 or higher).
		 */
		info_packet->hb2 = 0x02;
		/* 08h = VSC packet supporting 3D stereo + PSR
		 * (HB2 = 02h).
		 */
		info_packet->hb3 = 0x08;

		/* Payload is all zero for this revision. */
		for (i = 0; i < 28; i++)
			info_packet->sb[i] = 0;

		info_packet->valid = true;
	}

	/*TODO: stereo 3D support and extend pixel encoding colorimetry*/
}

/*
 * Release every stream (and each stream's plane references) held by
 * the given state.  The state itself is not freed here.
 */
void dc_resource_state_destruct(struct dc_state *context)
{
	int i, j;

	for (i = 0; i < context->stream_count; i++) {
		for (j = 0; j < context->stream_status[i].plane_count; j++)
			dc_plane_state_release(
					context->stream_status[i].plane_states[j]);

		context->stream_status[i].plane_count = 0;
		dc_stream_release(context->streams[i]);
		context->streams[i] = NULL;
	}
}

/*
 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
 * by the src_ctx
 */
void dc_resource_state_copy_construct(
		const struct dc_state *src_ctx,
		struct dc_state *dst_ctx)
{
	int i, j;
	/* Preserve dst_ctx's own refcount across the struct copy below. */
	struct kref refcount = dst_ctx->refcount;

	*dst_ctx = *src_ctx;

	/* Re-point intra-state pipe links (top/bottom) at dst_ctx's own
	 * pipe array instead of src_ctx's. */
	for (i = 0; i < MAX_PIPES; i++) {
		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];

		if (cur_pipe->top_pipe)
			cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];

		if (cur_pipe->bottom_pipe)
			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];

	}

	for (i = 0; i < dst_ctx->stream_count; i++) {
		dc_stream_retain(dst_ctx->streams[i]);
		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
			dc_plane_state_retain(
				dst_ctx->stream_status[i].plane_states[j]);
	}

	/* context refcount should not be overridden */
	dst_ctx->refcount = refcount;

}

/*
 * Return the first clock source in the pool with no active references,
 * or NULL if all are in use.
 */
struct clock_source *dc_resource_find_first_free_pll(
		struct resource_context *res_ctx,
		const struct resource_pool *pool)
{
	int i;

	for (i = 0; i < pool->clk_src_count; ++i) {
		if (res_ctx->clock_source_ref_count[i] == 0)
			return
				pool->clock_sources[i];
	}

	return NULL;
}

/*
 * Populate the stream encoder's info-frame set for this pipe.
 * All packets start invalid; only those relevant to the signal type
 * (HDMI vs DP) are built.
 */
void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
{
	enum signal_type signal = SIGNAL_TYPE_NONE;
	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;

	/* default all packets to invalid */
	info->avi.valid = false;
	info->gamut.valid = false;
	info->vendor.valid = false;
	info->spd.valid = false;
	info->hdrsmd.valid = false;
	info->vsc.valid = false;

	signal = pipe_ctx->stream->signal;

	/* HDMi and DP have different info packets*/
	if (dc_is_hdmi_signal(signal)) {
		set_avi_info_frame(&info->avi, pipe_ctx);

		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);

		set_spd_info_packet(&info->spd, pipe_ctx->stream);

		set_hdr_static_info_packet(&info->hdrsmd,
				pipe_ctx->plane_state, pipe_ctx->stream);

	} else if (dc_is_dp_signal(signal)) {
		/* DP has no AVI/vendor InfoFrame; VSC replaces them. */
		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);

		set_spd_info_packet(&info->spd, pipe_ctx->stream);

		set_hdr_static_info_packet(&info->hdrsmd,
				pipe_ctx->plane_state, pipe_ctx->stream);
	}

	patch_gamut_packet_checksum(&info->gamut);
}

/*
 * Assign a clock source (PLL) to the stream's head pipe: DP/virtual
 * streams use the dedicated DP clock source; others try to share an
 * already-used PLL (unless sharing is disabled) before falling back to
 * the first free one.  The chosen source's refcount is incremented.
 *
 * Returns DC_OK, DC_ERROR_UNEXPECTED if the stream has no head pipe,
 * or DC_NO_CLOCK_SOURCE_RESOURCE if no PLL is available.
 */
enum dc_status resource_map_clock_resources(
		const struct dc *dc,
		struct dc_state *context,
		struct dc_stream_state *stream)
{
	/* acquire new resources */
	const struct resource_pool *pool = dc->res_pool;
	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
				&context->res_ctx, stream);

	if (!pipe_ctx)
		return DC_ERROR_UNEXPECTED;

	if (dc_is_dp_signal(pipe_ctx->stream->signal)
		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
		pipe_ctx->clock_source = pool->dp_clock_source;
	else {
		pipe_ctx->clock_source = NULL;

		if (!dc->config.disable_disp_pll_sharing)
			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
				&context->res_ctx,
				pipe_ctx);

		if (pipe_ctx->clock_source == NULL)
			pipe_ctx->clock_source =
				dc_resource_find_first_free_pll(
					&context->res_ctx,
					pool);
	}

	if (pipe_ctx->clock_source == NULL)
		return DC_NO_CLOCK_SOURCE_RESOURCE;

	resource_reference_clock_source(
		&context->res_ctx, pool,
		pipe_ctx->clock_source);

	return DC_OK;
}

/*
 * Note: We need to disable output if clock sources change,
 * since bios does optimization and doesn't apply if changing
 * PHY when not already disabled.
 */
bool pipe_need_reprogram(
		struct pipe_ctx *pipe_ctx_old,
		struct pipe_ctx *pipe_ctx)
{
	/* Old pipe carried no stream: nothing to reprogram. */
	if (!pipe_ctx_old->stream)
		return false;

	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
		return true;

	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
		return true;

	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
		return true;

	/* Clock source moved while the stream itself changed. */
	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
			&& pipe_ctx_old->stream != pipe_ctx->stream)
		return true;

	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
		return true;

	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
		return true;


	return false;
}

/*
 * Translate the stream's dither option into FMT truncation / spatial
 * dither / temporal (frame-modulation) flags for the formatter block.
 * DITHER_OPTION_DEFAULT picks a spatial depth from the stream's color
 * depth; DITHER_OPTION_DISABLE leaves all flags clear.
 */
void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
		struct bit_depth_reduction_params *fmt_bit_depth)
{
	enum dc_dither_option option = stream->dither_option;
	enum dc_pixel_encoding pixel_encoding =
			stream->timing.pixel_encoding;

	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));

	if (option == DITHER_OPTION_DEFAULT) {
		switch (stream->timing.display_color_depth) {
		case COLOR_DEPTH_666:
			option = DITHER_OPTION_SPATIAL6;
			break;
		case COLOR_DEPTH_888:
			option = DITHER_OPTION_SPATIAL8;
			break;
		case COLOR_DEPTH_101010:
			option = DITHER_OPTION_SPATIAL10;
			break;
		default:
			option = DITHER_OPTION_DISABLE;
		}
	}

	if (option == DITHER_OPTION_DISABLE)
		return;

	/* Truncation: TRUNCATE_DEPTH 0/1/2 selects 6/8/10-bit output. */
	if (option == DITHER_OPTION_TRUN6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
	} else if (option == DITHER_OPTION_TRUN8 ||
			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
			option == DITHER_OPTION_TRUN8_FM6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
	} else if (option == DITHER_OPTION_TRUN10        ||
			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
			option == DITHER_OPTION_TRUN10_FM8        ||
			option == DITHER_OPTION_TRUN10_FM6        ||
			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
	}

	/* special case - Formatter can only reduce by 4 bits at most.
	 * When reducing from 12 to 6 bits,
	 * HW recommends we use trunc with round mode
	 * (if we did nothing, trunc to 10 bits would be used)
	 * note that any 12->10 bit reduction is ignored prior to DCE8,
	 * as the input was 10 bits.
	 */
	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
			option == DITHER_OPTION_SPATIAL6 ||
			option == DITHER_OPTION_FM6) {
		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
	}

	/* spatial dither
	 * note that spatial modes 1-3 are never used
	 */
	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
			option == DITHER_OPTION_SPATIAL6 ||
			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
			option == DITHER_OPTION_TRUN8_SPATIAL6) {
		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
		fmt_bit_depth->flags.RGB_RANDOM =
				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
			option == DITHER_OPTION_SPATIAL8 ||
			option == DITHER_OPTION_SPATIAL8_FM6        ||
			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
		fmt_bit_depth->flags.RGB_RANDOM =
				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
			option == DITHER_OPTION_SPATIAL10 ||
			option == DITHER_OPTION_SPATIAL10_FM8 ||
			option == DITHER_OPTION_SPATIAL10_FM6) {
		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
		fmt_bit_depth->flags.RGB_RANDOM =
				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
	}

	/* Plain SPATIAL modes use a fixed pattern; all other spatial
	 * variants randomize per frame. */
	if (option == DITHER_OPTION_SPATIAL6 ||
			option == DITHER_OPTION_SPATIAL8 ||
			option == DITHER_OPTION_SPATIAL10) {
		fmt_bit_depth->flags.FRAME_RANDOM = 0;
	} else {
		fmt_bit_depth->flags.FRAME_RANDOM = 1;
	}

	//////////////////////
	//// temporal dither
	//////////////////////
	if (option == DITHER_OPTION_FM6           ||
			option == DITHER_OPTION_SPATIAL8_FM6     ||
			option == DITHER_OPTION_SPATIAL10_FM6     ||
			option == DITHER_OPTION_TRUN10_FM6     ||
			option == DITHER_OPTION_TRUN8_FM6      ||
			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
	} else if (option == DITHER_OPTION_FM8        ||
			option == DITHER_OPTION_SPATIAL10_FM8  ||
			option == DITHER_OPTION_TRUN10_FM8) {
		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
	} else if (option == DITHER_OPTION_FM10) {
		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
	}

	fmt_bit_depth->pixel_encoding = pixel_encoding;
}

/*
 * Validate a stream against the first timing generator, the link
 * encoder, and the link's mode-timing limits.
 * Returns true when all checks pass.
 */
bool dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
{
	struct dc *core_dc = dc;
	struct dc_link *link = stream->sink->link;
	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
	enum dc_status res = DC_OK;

	calculate_phy_pix_clks(stream);

	if (!tg->funcs->validate_timing(tg, &stream->timing))
		res = DC_FAIL_CONTROLLER_VALIDATE;

	if (res == DC_OK)
		if (!link->link_enc->funcs->validate_output_with_stream(
				link->link_enc, stream))
			res = DC_FAIL_ENC_VALIDATE;

	/* TODO: validate audio ASIC caps, encoder */

	if (res == DC_OK)
		res = dc_link_validate_mode_timing(stream,
		      link,
		      &stream->timing);

	return res == DC_OK;
}

/*
 * Validate a plane via the pool's hook when one exists; pools without
 * a validate_plane hook accept every plane.
 */
bool dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
{
	/* TODO For now validates pixel format only */
	if (dc->res_pool->funcs->validate_plane)
		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps) == DC_OK;

	return true;
}