1 /*
2  * Copyright 2012-15 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 #include "dm_services.h"
26 
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
39 
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
46 #endif
47 #include "dce120/dce120_resource.h"
48 
49 #define DC_LOGGER_INIT(logger)
50 
51 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
52 {
53 	enum dce_version dc_version = DCE_VERSION_UNKNOWN;
54 	switch (asic_id.chip_family) {
55 
56 	case FAMILY_CI:
57 		dc_version = DCE_VERSION_8_0;
58 		break;
59 	case FAMILY_KV:
60 		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
61 		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
62 		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
63 			dc_version = DCE_VERSION_8_3;
64 		else
65 			dc_version = DCE_VERSION_8_1;
66 		break;
67 	case FAMILY_CZ:
68 		dc_version = DCE_VERSION_11_0;
69 		break;
70 
71 	case FAMILY_VI:
72 		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
73 				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
74 			dc_version = DCE_VERSION_10_0;
75 			break;
76 		}
77 		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
78 				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
79 				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
80 			dc_version = DCE_VERSION_11_2;
81 		}
82 		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
83 			dc_version = DCE_VERSION_11_22;
84 		break;
85 	case FAMILY_AI:
86 		dc_version = DCE_VERSION_12_0;
87 		break;
88 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
89 	case FAMILY_RV:
90 		dc_version = DCN_VERSION_1_0;
91 		break;
92 #endif
93 	default:
94 		dc_version = DCE_VERSION_UNKNOWN;
95 		break;
96 	}
97 	return dc_version;
98 }
99 
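/*
 * The version returned by resource_parse_asic_id() selects which flavor of
 * resource pool dc_create_resource_pool() instantiates below; for example,
 * FAMILY_CZ resolves to DCE_VERSION_11_0 and therefore
 * dce110_create_resource_pool(), while a Polaris revision of FAMILY_VI
 * resolves to DCE_VERSION_11_2 and dce112_create_resource_pool().
 */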
100 struct resource_pool *dc_create_resource_pool(
101 				struct dc  *dc,
102 				int num_virtual_links,
103 				enum dce_version dc_version,
104 				struct hw_asic_id asic_id)
105 {
106 	struct resource_pool *res_pool = NULL;
107 
108 	switch (dc_version) {
109 	case DCE_VERSION_8_0:
110 		res_pool = dce80_create_resource_pool(
111 			num_virtual_links, dc);
112 		break;
113 	case DCE_VERSION_8_1:
114 		res_pool = dce81_create_resource_pool(
115 			num_virtual_links, dc);
116 		break;
117 	case DCE_VERSION_8_3:
118 		res_pool = dce83_create_resource_pool(
119 			num_virtual_links, dc);
120 		break;
121 	case DCE_VERSION_10_0:
122 		res_pool = dce100_create_resource_pool(
123 				num_virtual_links, dc);
124 		break;
125 	case DCE_VERSION_11_0:
126 		res_pool = dce110_create_resource_pool(
127 			num_virtual_links, dc, asic_id);
128 		break;
129 	case DCE_VERSION_11_2:
130 	case DCE_VERSION_11_22:
131 		res_pool = dce112_create_resource_pool(
132 			num_virtual_links, dc);
133 		break;
134 	case DCE_VERSION_12_0:
135 		res_pool = dce120_create_resource_pool(
136 			num_virtual_links, dc);
137 		break;
138 
139 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
140 	case DCN_VERSION_1_0:
141 		res_pool = dcn10_create_resource_pool(
142 				num_virtual_links, dc);
143 		break;
144 #endif
145 
146 
147 	default:
148 		break;
149 	}
	if (res_pool != NULL) {
		struct dc_firmware_info fw_info = { { 0 } };

		if (dc->ctx->dc_bios->funcs->get_firmware_info(
				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK)
			res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
		else
			ASSERT_CRITICAL(false);
	}
159 
160 	return res_pool;
161 }
162 
163 void dc_destroy_resource_pool(struct dc  *dc)
164 {
165 	if (dc) {
166 		if (dc->res_pool)
167 			dc->res_pool->funcs->destroy(&dc->res_pool);
168 
169 		kfree(dc->hwseq);
170 	}
171 }
172 
173 static void update_num_audio(
174 	const struct resource_straps *straps,
175 	unsigned int *num_audio,
176 	struct audio_support *aud_support)
177 {
178 	aud_support->dp_audio = true;
179 	aud_support->hdmi_audio_native = false;
180 	aud_support->hdmi_audio_on_dongle = false;
181 
182 	if (straps->hdmi_disable == 0) {
183 		if (straps->dc_pinstraps_audio & 0x2) {
184 			aud_support->hdmi_audio_on_dongle = true;
185 			aud_support->hdmi_audio_native = true;
186 		}
187 	}
188 
189 	switch (straps->audio_stream_number) {
190 	case 0: /* multi streams supported */
191 		break;
192 	case 1: /* multi streams not supported */
193 		*num_audio = 1;
194 		break;
195 	default:
196 		DC_ERR("DC: unexpected audio fuse!\n");
197 	}
198 }
199 
200 bool resource_construct(
201 	unsigned int num_virtual_links,
202 	struct dc  *dc,
203 	struct resource_pool *pool,
204 	const struct resource_create_funcs *create_funcs)
205 {
206 	struct dc_context *ctx = dc->ctx;
207 	const struct resource_caps *caps = pool->res_cap;
208 	int i;
209 	unsigned int num_audio = caps->num_audio;
210 	struct resource_straps straps = {0};
211 
212 	if (create_funcs->read_dce_straps)
213 		create_funcs->read_dce_straps(dc->ctx, &straps);
214 
215 	pool->audio_count = 0;
216 	if (create_funcs->create_audio) {
217 		/* find the total number of streams available via the
218 		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
219 		 * registers (one for each pin) starting from pin 1
220 		 * up to the max number of audio pins.
221 		 * We stop on the first pin where
222 		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
223 		 */
224 		update_num_audio(&straps, &num_audio, &pool->audio_support);
225 		for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
226 			struct audio *aud = create_funcs->create_audio(ctx, i);
227 
228 			if (aud == NULL) {
229 				DC_ERR("DC: failed to create audio!\n");
230 				return false;
231 			}
232 
233 			if (!aud->funcs->endpoint_valid(aud)) {
234 				aud->funcs->destroy(&aud);
235 				break;
236 			}
237 
238 			pool->audios[i] = aud;
239 			pool->audio_count++;
240 		}
241 	}
242 
243 	pool->stream_enc_count = 0;
244 	if (create_funcs->create_stream_encoder) {
245 		for (i = 0; i < caps->num_stream_encoder; i++) {
246 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
247 			if (pool->stream_enc[i] == NULL)
248 				DC_ERR("DC: failed to create stream_encoder!\n");
249 			pool->stream_enc_count++;
250 		}
251 	}
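
	/*
	 * When there are fewer audio endpoints than stream encoders, not every
	 * stream can own an audio endpoint at the same time; the dynamic_audio
	 * cap presumably signals that audio resources must be reassigned
	 * between streams at runtime.
	 */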
	dc->caps.dynamic_audio = false;
	if (pool->audio_count < pool->stream_enc_count)
		dc->caps.dynamic_audio = true;
256 	for (i = 0; i < num_virtual_links; i++) {
257 		pool->stream_enc[pool->stream_enc_count] =
258 			virtual_stream_encoder_create(
259 					ctx, ctx->dc_bios);
260 		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
261 			DC_ERR("DC: failed to create stream_encoder!\n");
262 			return false;
263 		}
264 		pool->stream_enc_count++;
265 	}
266 
267 	dc->hwseq = create_funcs->create_hwseq(ctx);
268 
269 	return true;
270 }
271 
272 
273 void resource_unreference_clock_source(
274 		struct resource_context *res_ctx,
275 		const struct resource_pool *pool,
276 		struct clock_source *clock_source)
277 {
278 	int i;
279 
280 	for (i = 0; i < pool->clk_src_count; i++) {
281 		if (pool->clock_sources[i] != clock_source)
282 			continue;
283 
284 		res_ctx->clock_source_ref_count[i]--;
285 
286 		break;
287 	}
288 
289 	if (pool->dp_clock_source == clock_source)
290 		res_ctx->dp_clock_source_ref_count--;
291 }
292 
293 void resource_reference_clock_source(
294 		struct resource_context *res_ctx,
295 		const struct resource_pool *pool,
296 		struct clock_source *clock_source)
297 {
	int i;

	for (i = 0; i < pool->clk_src_count; i++) {
300 		if (pool->clock_sources[i] != clock_source)
301 			continue;
302 
303 		res_ctx->clock_source_ref_count[i]++;
304 		break;
305 	}
306 
307 	if (pool->dp_clock_source == clock_source)
308 		res_ctx->dp_clock_source_ref_count++;
309 }
310 
311 bool resource_are_streams_timing_synchronizable(
312 	struct dc_stream_state *stream1,
313 	struct dc_stream_state *stream2)
314 {
315 	if (stream1->timing.h_total != stream2->timing.h_total)
316 		return false;
317 
318 	if (stream1->timing.v_total != stream2->timing.v_total)
319 		return false;
320 
321 	if (stream1->timing.h_addressable
322 				!= stream2->timing.h_addressable)
323 		return false;
324 
325 	if (stream1->timing.v_addressable
326 				!= stream2->timing.v_addressable)
327 		return false;
328 
329 	if (stream1->timing.pix_clk_khz
330 				!= stream2->timing.pix_clk_khz)
331 		return false;
332 
333 	if (stream1->phy_pix_clk != stream2->phy_pix_clk
334 			&& (!dc_is_dp_signal(stream1->signal)
335 			|| !dc_is_dp_signal(stream2->signal)))
336 		return false;
337 
338 	return true;
339 }
340 
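/*
 * A clock source can only be shared between pipes whose streams are neither
 * DP (DP uses the dedicated DP clock source) nor virtual, do not mix HDMI
 * with DVI, and have timings that resource_are_streams_timing_synchronizable()
 * accepts.
 */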
341 static bool is_sharable_clk_src(
342 	const struct pipe_ctx *pipe_with_clk_src,
343 	const struct pipe_ctx *pipe)
344 {
345 	if (pipe_with_clk_src->clock_source == NULL)
346 		return false;
347 
348 	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
349 		return false;
350 
351 	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
352 		return false;
353 
354 	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
355 			&& dc_is_dvi_signal(pipe->stream->signal))
356 		return false;
357 
358 	if (dc_is_hdmi_signal(pipe->stream->signal)
359 			&& dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
360 		return false;
361 
362 	if (!resource_are_streams_timing_synchronizable(
363 			pipe_with_clk_src->stream, pipe->stream))
364 		return false;
365 
366 	return true;
367 }
368 
369 struct clock_source *resource_find_used_clk_src_for_sharing(
370 					struct resource_context *res_ctx,
371 					struct pipe_ctx *pipe_ctx)
372 {
373 	int i;
374 
375 	for (i = 0; i < MAX_PIPES; i++) {
376 		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
377 			return res_ctx->pipe_ctx[i].clock_source;
378 	}
379 
380 	return NULL;
381 }
382 
383 static enum pixel_format convert_pixel_format_to_dalsurface(
384 		enum surface_pixel_format surface_pixel_format)
385 {
386 	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
387 
388 	switch (surface_pixel_format) {
389 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
390 		dal_pixel_format = PIXEL_FORMAT_INDEX8;
391 		break;
392 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
393 		dal_pixel_format = PIXEL_FORMAT_RGB565;
394 		break;
395 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
396 		dal_pixel_format = PIXEL_FORMAT_RGB565;
397 		break;
398 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
399 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
400 		break;
401 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
402 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
403 		break;
404 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
405 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
406 		break;
407 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
408 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
409 		break;
410 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
411 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
412 		break;
413 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
414 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
415 		dal_pixel_format = PIXEL_FORMAT_FP16;
416 		break;
417 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
418 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
419 		dal_pixel_format = PIXEL_FORMAT_420BPP8;
420 		break;
421 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
422 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
423 		dal_pixel_format = PIXEL_FORMAT_420BPP10;
424 		break;
425 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
426 	default:
427 		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
428 		break;
429 	}
430 	return dal_pixel_format;
431 }
432 
433 static void rect_swap_helper(struct rect *rect)
434 {
435 	swap(rect->height, rect->width);
436 	swap(rect->x, rect->y);
437 }
438 
439 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
440 {
441 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
442 	const struct dc_stream_state *stream = pipe_ctx->stream;
443 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
444 	struct rect surf_src = plane_state->src_rect;
445 	struct rect clip = { 0 };
446 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
447 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
448 	bool pri_split = pipe_ctx->bottom_pipe &&
449 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
450 	bool sec_split = pipe_ctx->top_pipe &&
451 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
452 
453 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
454 		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
455 		pri_split = false;
456 		sec_split = false;
457 	}
458 
459 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
460 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
461 		rect_swap_helper(&surf_src);
462 
463 	/* The actual clip is an intersection between stream
464 	 * source and surface clip
465 	 */
466 	clip.x = stream->src.x > plane_state->clip_rect.x ?
467 			stream->src.x : plane_state->clip_rect.x;
468 
469 	clip.width = stream->src.x + stream->src.width <
470 			plane_state->clip_rect.x + plane_state->clip_rect.width ?
471 			stream->src.x + stream->src.width - clip.x :
			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x;
473 
474 	clip.y = stream->src.y > plane_state->clip_rect.y ?
475 			stream->src.y : plane_state->clip_rect.y;
476 
477 	clip.height = stream->src.y + stream->src.height <
478 			plane_state->clip_rect.y + plane_state->clip_rect.height ?
479 			stream->src.y + stream->src.height - clip.y :
			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y;
481 
	/* offset = surf_src.ofs + (clip.ofs - plane_state->dst_rect.ofs) * scl_ratio
	 * num_pixels = clip.num_pix * scl_ratio
	 */
485 	data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
486 			surf_src.width / plane_state->dst_rect.width;
487 	data->viewport.width = clip.width *
488 			surf_src.width / plane_state->dst_rect.width;
489 
490 	data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
491 			surf_src.height / plane_state->dst_rect.height;
492 	data->viewport.height = clip.height *
493 			surf_src.height / plane_state->dst_rect.height;
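
	/*
	 * Worked example with hypothetical numbers: a 3840 pixel wide src_rect
	 * mapped onto a 1920 pixel wide dst_rect (2:1 downscale) with
	 * clip.width = 960 gives viewport.width = 960 * 3840 / 1920 = 1920,
	 * i.e. the viewport covers exactly the source pixels that feed the
	 * clipped output region.
	 */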
494 
495 	/* Round down, compensate in init */
496 	data->viewport_c.x = data->viewport.x / vpc_div;
497 	data->viewport_c.y = data->viewport.y / vpc_div;
498 	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
499 			dc_fixpt_half : dc_fixpt_zero;
500 	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
501 			dc_fixpt_half : dc_fixpt_zero;
502 	/* Round up, assume original video size always even dimensions */
503 	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
504 	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
505 
506 	/* Handle hsplit */
507 	if (sec_split) {
508 		data->viewport.x +=  data->viewport.width / 2;
509 		data->viewport_c.x +=  data->viewport_c.width / 2;
510 		/* Ceil offset pipe */
511 		data->viewport.width = (data->viewport.width + 1) / 2;
512 		data->viewport_c.width = (data->viewport_c.width + 1) / 2;
513 	} else if (pri_split) {
514 		data->viewport.width /= 2;
515 		data->viewport_c.width /= 2;
516 	}
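
	/*
	 * Hypothetical hsplit example: with viewport.width = 3841 the primary
	 * pipe keeps 3841 / 2 = 1920 pixels while the secondary pipe is offset
	 * by 1920 and gets the ceiling, (3841 + 1) / 2 = 1921 pixels, so the
	 * two halves still cover the full width.
	 */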
517 
518 	if (plane_state->rotation == ROTATION_ANGLE_90 ||
519 			plane_state->rotation == ROTATION_ANGLE_270) {
520 		rect_swap_helper(&data->viewport_c);
521 		rect_swap_helper(&data->viewport);
522 	}
523 }
524 
525 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
526 {
527 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
528 	const struct dc_stream_state *stream = pipe_ctx->stream;
529 	struct rect surf_src = plane_state->src_rect;
530 	struct rect surf_clip = plane_state->clip_rect;
531 	bool pri_split = pipe_ctx->bottom_pipe &&
532 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
533 	bool sec_split = pipe_ctx->top_pipe &&
534 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
535 	bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
536 
537 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
538 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
539 		rect_swap_helper(&surf_src);
540 
541 	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
542 	if (stream->src.x < surf_clip.x)
543 		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
544 			- stream->src.x) * stream->dst.width
545 						/ stream->src.width;
546 
547 	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
548 			stream->dst.width / stream->src.width;
549 	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
550 			stream->dst.x + stream->dst.width)
551 		pipe_ctx->plane_res.scl_data.recout.width =
552 			stream->dst.x + stream->dst.width
553 						- pipe_ctx->plane_res.scl_data.recout.x;
554 
555 	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
556 	if (stream->src.y < surf_clip.y)
557 		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
558 			- stream->src.y) * stream->dst.height
559 						/ stream->src.height;
560 
561 	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
562 			stream->dst.height / stream->src.height;
563 	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
564 			stream->dst.y + stream->dst.height)
565 		pipe_ctx->plane_res.scl_data.recout.height =
566 			stream->dst.y + stream->dst.height
567 						- pipe_ctx->plane_res.scl_data.recout.y;
568 
569 	/* Handle h & vsplit */
570 	if (sec_split && top_bottom_split) {
571 		pipe_ctx->plane_res.scl_data.recout.y +=
572 				pipe_ctx->plane_res.scl_data.recout.height / 2;
		/* Floor primary pipe, ceil secondary pipe */
574 		pipe_ctx->plane_res.scl_data.recout.height =
575 				(pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
576 	} else if (pri_split && top_bottom_split)
577 		pipe_ctx->plane_res.scl_data.recout.height /= 2;
578 	else if (pri_split || sec_split) {
579 		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
580 		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
581 					(plane_state->rotation == ROTATION_ANGLE_180);
582 
583 		if (plane_state->rotation == ROTATION_ANGLE_90
584 				|| plane_state->rotation == ROTATION_ANGLE_270)
585 			/* Secondary_pipe XOR Rotation_270 */
586 			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
587 
588 		if (right_view) {
589 			pipe_ctx->plane_res.scl_data.recout.x +=
590 					pipe_ctx->plane_res.scl_data.recout.width / 2;
591 			/* Ceil offset pipe */
592 			pipe_ctx->plane_res.scl_data.recout.width =
593 					(pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
594 		} else {
595 			pipe_ctx->plane_res.scl_data.recout.width /= 2;
596 		}
597 	}
	/* Unclipped recout offset = stream dst offset + ((surf dst offset - stream src offset)
	 *			* 1 / stream scaling ratio) - (surf src offset * 1 / full scl
	 *			ratio)
	 */
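	/*
	 * For illustration, with hypothetical numbers: a 1920 wide stream src
	 * scaled to a 3840 wide stream dst (2x), stream->src.x = 0,
	 * plane_state->dst_rect.x = 100 and surf_src.x = 0 gives
	 * recout_full->x = stream->dst.x + (100 - 0) * 3840 / 1920 - 0
	 *                = stream->dst.x + 200.
	 */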
602 	recout_full->x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
603 					* stream->dst.width / stream->src.width -
604 			surf_src.x * plane_state->dst_rect.width / surf_src.width
605 					* stream->dst.width / stream->src.width;
606 	recout_full->y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
607 					* stream->dst.height / stream->src.height -
608 			surf_src.y * plane_state->dst_rect.height / surf_src.height
609 					* stream->dst.height / stream->src.height;
610 
611 	recout_full->width = plane_state->dst_rect.width
612 					* stream->dst.width / stream->src.width;
613 	recout_full->height = plane_state->dst_rect.height
614 					* stream->dst.height / stream->src.height;
615 }
616 
617 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
618 {
619 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
620 	const struct dc_stream_state *stream = pipe_ctx->stream;
621 	struct rect surf_src = plane_state->src_rect;
622 	const int in_w = stream->src.width;
623 	const int in_h = stream->src.height;
624 	const int out_w = stream->dst.width;
625 	const int out_h = stream->dst.height;
626 
627 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
628 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
629 		rect_swap_helper(&surf_src);
630 
631 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
632 					surf_src.width,
633 					plane_state->dst_rect.width);
634 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
635 					surf_src.height,
636 					plane_state->dst_rect.height);
637 
638 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
639 		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
640 	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
641 		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
642 
643 	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
644 		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
645 	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
646 		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
647 
648 	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
649 	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
650 
651 	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
652 			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
653 		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
654 		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
655 	}
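
	/*
	 * A ratio greater than 1 means downscaling, e.g. a 3840 wide src_rect
	 * on a 1920 wide dst_rect with 1:1 stream scaling gives horz = 2.0.
	 * The ratios are truncated to 19 fractional bits below, presumably to
	 * match the precision of the scaler hardware registers.
	 */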
656 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
657 			pipe_ctx->plane_res.scl_data.ratios.horz, 19);
658 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
659 			pipe_ctx->plane_res.scl_data.ratios.vert, 19);
660 	pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
661 			pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
662 	pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
663 			pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
664 }
665 
666 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
667 {
668 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
669 	struct rect src = pipe_ctx->plane_state->src_rect;
670 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
671 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
672 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
673 
674 	/*
675 	 * Need to calculate the scan direction for viewport to make adjustments
676 	 */
677 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
678 		flip_vert_scan_dir = true;
679 		flip_horz_scan_dir = true;
680 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
681 		flip_vert_scan_dir = true;
682 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
683 		flip_horz_scan_dir = true;
684 
685 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
686 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
687 		rect_swap_helper(&src);
688 		rect_swap_helper(&data->viewport_c);
689 		rect_swap_helper(&data->viewport);
690 	} else if (pipe_ctx->plane_state->horizontal_mirror)
691 			flip_horz_scan_dir = !flip_horz_scan_dir;
692 
693 	/*
694 	 * Init calculated according to formula:
695 	 * 	init = (scaling_ratio + number_of_taps + 1) / 2
696 	 * 	init_bot = init + scaling_ratio
697 	 * 	init_c = init + truncated_vp_c_offset(from calculate viewport)
698 	 */
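	/*
	 * For example, with a hypothetical ratio of 1.0 and 4 taps:
	 * init = (1.0 + 4 + 1) / 2 = 3 and init_bot = init + ratio = 4.
	 */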
699 	data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
700 			dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
701 
702 	data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
703 			dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
704 
705 	data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
706 			dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
707 
708 	data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
709 			dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
710 
711 	if (!flip_horz_scan_dir) {
712 		/* Adjust for viewport end clip-off */
713 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
714 			int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
715 			int int_part = dc_fixpt_floor(
716 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
717 
718 			int_part = int_part > 0 ? int_part : 0;
719 			data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
720 		}
721 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
722 			int vp_clip = (src.x + src.width) / vpc_div -
723 					data->viewport_c.width - data->viewport_c.x;
724 			int int_part = dc_fixpt_floor(
725 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
726 
727 			int_part = int_part > 0 ? int_part : 0;
728 			data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
729 		}
730 
731 		/* Adjust for non-0 viewport offset */
732 		if (data->viewport.x) {
733 			int int_part;
734 
735 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
736 					data->ratios.horz, data->recout.x - recout_full->x));
737 			int_part = dc_fixpt_floor(data->inits.h) - data->viewport.x;
738 			if (int_part < data->taps.h_taps) {
739 				int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
740 							(data->taps.h_taps - int_part) : data->viewport.x;
741 				data->viewport.x -= int_adj;
742 				data->viewport.width += int_adj;
743 				int_part += int_adj;
744 			} else if (int_part > data->taps.h_taps) {
745 				data->viewport.x += int_part - data->taps.h_taps;
746 				data->viewport.width -= int_part - data->taps.h_taps;
747 				int_part = data->taps.h_taps;
748 			}
749 			data->inits.h.value &= 0xffffffff;
750 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
751 		}
752 
753 		if (data->viewport_c.x) {
754 			int int_part;
755 
756 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
757 					data->ratios.horz_c, data->recout.x - recout_full->x));
758 			int_part = dc_fixpt_floor(data->inits.h_c) - data->viewport_c.x;
759 			if (int_part < data->taps.h_taps_c) {
760 				int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
761 						(data->taps.h_taps_c - int_part) : data->viewport_c.x;
762 				data->viewport_c.x -= int_adj;
763 				data->viewport_c.width += int_adj;
764 				int_part += int_adj;
765 			} else if (int_part > data->taps.h_taps_c) {
766 				data->viewport_c.x += int_part - data->taps.h_taps_c;
767 				data->viewport_c.width -= int_part - data->taps.h_taps_c;
768 				int_part = data->taps.h_taps_c;
769 			}
770 			data->inits.h_c.value &= 0xffffffff;
771 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
772 		}
773 	} else {
774 		/* Adjust for non-0 viewport offset */
775 		if (data->viewport.x) {
776 			int int_part = dc_fixpt_floor(
777 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
778 
779 			int_part = int_part > 0 ? int_part : 0;
780 			data->viewport.width += int_part < data->viewport.x ? int_part : data->viewport.x;
781 			data->viewport.x -= int_part < data->viewport.x ? int_part : data->viewport.x;
782 		}
783 		if (data->viewport_c.x) {
784 			int int_part = dc_fixpt_floor(
785 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
786 
787 			int_part = int_part > 0 ? int_part : 0;
788 			data->viewport_c.width += int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
789 			data->viewport_c.x -= int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
790 		}
791 
792 		/* Adjust for viewport end clip-off */
793 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
794 			int int_part;
795 			int end_offset = src.x + src.width
796 					- data->viewport.x - data->viewport.width;
797 
798 			/*
799 			 * this is init if vp had no offset, keep in mind this is from the
800 			 * right side of vp due to scan direction
801 			 */
802 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
803 					data->ratios.horz, data->recout.x - recout_full->x));
804 			/*
805 			 * this is the difference between first pixel of viewport available to read
			 * and init position, taking into account scan direction
807 			 */
808 			int_part = dc_fixpt_floor(data->inits.h) - end_offset;
809 			if (int_part < data->taps.h_taps) {
810 				int int_adj = end_offset >= (data->taps.h_taps - int_part) ?
811 							(data->taps.h_taps - int_part) : end_offset;
812 				data->viewport.width += int_adj;
813 				int_part += int_adj;
814 			} else if (int_part > data->taps.h_taps) {
815 				data->viewport.width += int_part - data->taps.h_taps;
816 				int_part = data->taps.h_taps;
817 			}
818 			data->inits.h.value &= 0xffffffff;
819 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
820 		}
821 
822 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
823 			int int_part;
824 			int end_offset = (src.x + src.width) / vpc_div
825 					- data->viewport_c.x - data->viewport_c.width;
826 
827 			/*
828 			 * this is init if vp had no offset, keep in mind this is from the
829 			 * right side of vp due to scan direction
830 			 */
831 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
832 					data->ratios.horz_c, data->recout.x - recout_full->x));
833 			/*
834 			 * this is the difference between first pixel of viewport available to read
			 * and init position, taking into account scan direction
836 			 */
837 			int_part = dc_fixpt_floor(data->inits.h_c) - end_offset;
838 			if (int_part < data->taps.h_taps_c) {
839 				int int_adj = end_offset >= (data->taps.h_taps_c - int_part) ?
840 							(data->taps.h_taps_c - int_part) : end_offset;
841 				data->viewport_c.width += int_adj;
842 				int_part += int_adj;
843 			} else if (int_part > data->taps.h_taps_c) {
844 				data->viewport_c.width += int_part - data->taps.h_taps_c;
845 				int_part = data->taps.h_taps_c;
846 			}
847 			data->inits.h_c.value &= 0xffffffff;
848 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
849 		}
850 
851 	}
852 	if (!flip_vert_scan_dir) {
853 		/* Adjust for viewport end clip-off */
854 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
855 			int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
856 			int int_part = dc_fixpt_floor(
857 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
858 
859 			int_part = int_part > 0 ? int_part : 0;
860 			data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
861 		}
862 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
863 			int vp_clip = (src.y + src.height) / vpc_div -
864 					data->viewport_c.height - data->viewport_c.y;
865 			int int_part = dc_fixpt_floor(
866 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
867 
868 			int_part = int_part > 0 ? int_part : 0;
869 			data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
870 		}
871 
872 		/* Adjust for non-0 viewport offset */
873 		if (data->viewport.y) {
874 			int int_part;
875 
876 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
877 					data->ratios.vert, data->recout.y - recout_full->y));
878 			int_part = dc_fixpt_floor(data->inits.v) - data->viewport.y;
879 			if (int_part < data->taps.v_taps) {
880 				int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
881 							(data->taps.v_taps - int_part) : data->viewport.y;
882 				data->viewport.y -= int_adj;
883 				data->viewport.height += int_adj;
884 				int_part += int_adj;
885 			} else if (int_part > data->taps.v_taps) {
886 				data->viewport.y += int_part - data->taps.v_taps;
887 				data->viewport.height -= int_part - data->taps.v_taps;
888 				int_part = data->taps.v_taps;
889 			}
890 			data->inits.v.value &= 0xffffffff;
891 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
892 		}
893 
894 		if (data->viewport_c.y) {
895 			int int_part;
896 
897 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
898 					data->ratios.vert_c, data->recout.y - recout_full->y));
899 			int_part = dc_fixpt_floor(data->inits.v_c) - data->viewport_c.y;
900 			if (int_part < data->taps.v_taps_c) {
901 				int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
902 						(data->taps.v_taps_c - int_part) : data->viewport_c.y;
903 				data->viewport_c.y -= int_adj;
904 				data->viewport_c.height += int_adj;
905 				int_part += int_adj;
906 			} else if (int_part > data->taps.v_taps_c) {
907 				data->viewport_c.y += int_part - data->taps.v_taps_c;
908 				data->viewport_c.height -= int_part - data->taps.v_taps_c;
909 				int_part = data->taps.v_taps_c;
910 			}
911 			data->inits.v_c.value &= 0xffffffff;
912 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
913 		}
914 	} else {
915 		/* Adjust for non-0 viewport offset */
916 		if (data->viewport.y) {
917 			int int_part = dc_fixpt_floor(
918 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
919 
920 			int_part = int_part > 0 ? int_part : 0;
921 			data->viewport.height += int_part < data->viewport.y ? int_part : data->viewport.y;
922 			data->viewport.y -= int_part < data->viewport.y ? int_part : data->viewport.y;
923 		}
924 		if (data->viewport_c.y) {
925 			int int_part = dc_fixpt_floor(
926 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
927 
928 			int_part = int_part > 0 ? int_part : 0;
929 			data->viewport_c.height += int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
930 			data->viewport_c.y -= int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
931 		}
932 
933 		/* Adjust for viewport end clip-off */
934 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
935 			int int_part;
936 			int end_offset = src.y + src.height
937 					- data->viewport.y - data->viewport.height;
938 
939 			/*
940 			 * this is init if vp had no offset, keep in mind this is from the
941 			 * right side of vp due to scan direction
942 			 */
943 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
944 					data->ratios.vert, data->recout.y - recout_full->y));
945 			/*
946 			 * this is the difference between first pixel of viewport available to read
947 			 * and init position, taking into account scan direction
948 			 */
949 			int_part = dc_fixpt_floor(data->inits.v) - end_offset;
950 			if (int_part < data->taps.v_taps) {
951 				int int_adj = end_offset >= (data->taps.v_taps - int_part) ?
952 							(data->taps.v_taps - int_part) : end_offset;
953 				data->viewport.height += int_adj;
954 				int_part += int_adj;
955 			} else if (int_part > data->taps.v_taps) {
956 				data->viewport.height += int_part - data->taps.v_taps;
957 				int_part = data->taps.v_taps;
958 			}
959 			data->inits.v.value &= 0xffffffff;
960 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
961 		}
962 
963 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
964 			int int_part;
965 			int end_offset = (src.y + src.height) / vpc_div
966 					- data->viewport_c.y - data->viewport_c.height;
967 
968 			/*
969 			 * this is init if vp had no offset, keep in mind this is from the
970 			 * right side of vp due to scan direction
971 			 */
972 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
973 					data->ratios.vert_c, data->recout.y - recout_full->y));
974 			/*
975 			 * this is the difference between first pixel of viewport available to read
976 			 * and init position, taking into account scan direction
977 			 */
978 			int_part = dc_fixpt_floor(data->inits.v_c) - end_offset;
979 			if (int_part < data->taps.v_taps_c) {
980 				int int_adj = end_offset >= (data->taps.v_taps_c - int_part) ?
981 							(data->taps.v_taps_c - int_part) : end_offset;
982 				data->viewport_c.height += int_adj;
983 				int_part += int_adj;
984 			} else if (int_part > data->taps.v_taps_c) {
985 				data->viewport_c.height += int_part - data->taps.v_taps_c;
986 				int_part = data->taps.v_taps_c;
987 			}
988 			data->inits.v_c.value &= 0xffffffff;
989 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
990 		}
991 	}
992 
993 	/* Interlaced inits based on final vert inits */
994 	data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
995 	data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
996 
997 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
998 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
999 		rect_swap_helper(&data->viewport_c);
1000 		rect_swap_helper(&data->viewport);
1001 	}
1002 }
1003 
1004 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
1005 {
1006 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1007 	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
1008 	struct rect recout_full = { 0 };
1009 	bool res = false;
1010 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
1011 	/* Important: scaling ratio calculation requires pixel format,
1012 	 * lb depth calculation requires recout and taps require scaling ratios.
1013 	 * Inits require viewport, taps, ratios and recout of split pipe
1014 	 */
1015 	pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
1016 			pipe_ctx->plane_state->format);
1017 
1018 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1019 		pipe_ctx->stream->dst.height *= 2;
1020 
1021 	calculate_scaling_ratios(pipe_ctx);
1022 
1023 	calculate_viewport(pipe_ctx);
1024 
1025 	if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
1026 		return false;
1027 
1028 	calculate_recout(pipe_ctx, &recout_full);
1029 
1030 	/**
1031 	 * Setting line buffer pixel depth to 24bpp yields banding
1032 	 * on certain displays, such as the Sharp 4k
1033 	 */
1034 	pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
1035 
1036 	pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
1037 	pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
1038 
1039 	pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
1040 	pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
1041 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1042 		pipe_ctx->plane_res.scl_data.v_active *= 2;
1043 
1044 
1045 	/* Taps calculations */
1046 	if (pipe_ctx->plane_res.xfm != NULL)
1047 		res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1048 				pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1049 
1050 	if (pipe_ctx->plane_res.dpp != NULL)
1051 		res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1052 				pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1053 	if (!res) {
1054 		/* Try 24 bpp linebuffer */
1055 		pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
1056 
1057 		if (pipe_ctx->plane_res.xfm != NULL)
1058 			res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1059 					pipe_ctx->plane_res.xfm,
1060 					&pipe_ctx->plane_res.scl_data,
1061 					&plane_state->scaling_quality);
1062 
1063 		if (pipe_ctx->plane_res.dpp != NULL)
1064 			res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1065 					pipe_ctx->plane_res.dpp,
1066 					&pipe_ctx->plane_res.scl_data,
1067 					&plane_state->scaling_quality);
1068 	}
1069 
1070 	if (res)
1071 		/* May need to re-check lb size after this in some obscure scenario */
1072 		calculate_inits_and_adj_vp(pipe_ctx, &recout_full);
1073 
1074 	DC_LOG_SCALER(
1075 				"%s: Viewport:\nheight:%d width:%d x:%d "
1076 				"y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1077 				"y:%d\n",
1078 				__func__,
1079 				pipe_ctx->plane_res.scl_data.viewport.height,
1080 				pipe_ctx->plane_res.scl_data.viewport.width,
1081 				pipe_ctx->plane_res.scl_data.viewport.x,
1082 				pipe_ctx->plane_res.scl_data.viewport.y,
1083 				plane_state->dst_rect.height,
1084 				plane_state->dst_rect.width,
1085 				plane_state->dst_rect.x,
1086 				plane_state->dst_rect.y);
1087 
1088 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1089 		pipe_ctx->stream->dst.height /= 2;
1090 
1091 	return res;
1092 }
1093 
1094 
1095 enum dc_status resource_build_scaling_params_for_context(
1096 	const struct dc  *dc,
1097 	struct dc_state *context)
1098 {
1099 	int i;
1100 
1101 	for (i = 0; i < MAX_PIPES; i++) {
1102 		if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1103 				context->res_ctx.pipe_ctx[i].stream != NULL)
1104 			if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1105 				return DC_FAIL_SCALING;
1106 	}
1107 
1108 	return DC_OK;
1109 }
1110 
1111 struct pipe_ctx *find_idle_secondary_pipe(
1112 		struct resource_context *res_ctx,
1113 		const struct resource_pool *pool)
1114 {
1115 	int i;
1116 	struct pipe_ctx *secondary_pipe = NULL;
1117 
1118 	/*
1119 	 * search backwards for the second pipe to keep pipe
1120 	 * assignment more consistent
1121 	 */
1122 
1123 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1124 		if (res_ctx->pipe_ctx[i].stream == NULL) {
1125 			secondary_pipe = &res_ctx->pipe_ctx[i];
1126 			secondary_pipe->pipe_idx = i;
1127 			break;
1128 		}
1129 	}
1130 
1131 
1132 	return secondary_pipe;
1133 }
1134 
1135 struct pipe_ctx *resource_get_head_pipe_for_stream(
1136 		struct resource_context *res_ctx,
1137 		struct dc_stream_state *stream)
1138 {
1139 	int i;
1140 	for (i = 0; i < MAX_PIPES; i++) {
		if (res_ctx->pipe_ctx[i].stream == stream &&
				!res_ctx->pipe_ctx[i].top_pipe)
			return &res_ctx->pipe_ctx[i];
1146 	}
1147 	return NULL;
1148 }
1149 
1150 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
1151 		struct resource_context *res_ctx,
1152 		struct dc_stream_state *stream)
1153 {
	struct pipe_ctx *head_pipe, *tail_pipe;

	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1156 
1157 	if (!head_pipe)
1158 		return NULL;
1159 
1160 	tail_pipe = head_pipe->bottom_pipe;
1161 
1162 	while (tail_pipe) {
1163 		head_pipe = tail_pipe;
1164 		tail_pipe = tail_pipe->bottom_pipe;
1165 	}
1166 
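	/* head_pipe now points at the last pipe in the bottom_pipe chain, i.e. the tail */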
1167 	return head_pipe;
1168 }
1169 
1170 /*
1171  * A free_pipe for a stream is defined here as a pipe
1172  * that has no surface attached yet
1173  */
1174 static struct pipe_ctx *acquire_free_pipe_for_stream(
1175 		struct dc_state *context,
1176 		const struct resource_pool *pool,
1177 		struct dc_stream_state *stream)
1178 {
1179 	int i;
1180 	struct resource_context *res_ctx = &context->res_ctx;
1181 
1182 	struct pipe_ctx *head_pipe = NULL;
1183 
1184 	/* Find head pipe, which has the back end set up*/
1185 
1186 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1187 
1188 	if (!head_pipe) {
1189 		ASSERT(0);
1190 		return NULL;
1191 	}
1192 
1193 	if (!head_pipe->plane_state)
1194 		return head_pipe;
1195 
1196 	/* Re-use pipe already acquired for this stream if available*/
1197 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1198 		if (res_ctx->pipe_ctx[i].stream == stream &&
1199 				!res_ctx->pipe_ctx[i].plane_state) {
1200 			return &res_ctx->pipe_ctx[i];
1201 		}
1202 	}
1203 
	/*
	 * At this point we have no reusable pipe for this stream and we need
	 * to acquire an idle one to satisfy the request
	 */
1208 
1209 	if (!pool->funcs->acquire_idle_pipe_for_layer)
1210 		return NULL;
1211 
1212 	return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1213 
1214 }
1215 
1216 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
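/*
 * Reclaim a pipe that currently serves as the bottom half of a split: unlink
 * it from its split chain, reset it, rebind its per-index resources and
 * attach it to the new stream. Returns the pipe index, or -1 if no split
 * pipe is available.
 */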
1217 static int acquire_first_split_pipe(
1218 		struct resource_context *res_ctx,
1219 		const struct resource_pool *pool,
1220 		struct dc_stream_state *stream)
1221 {
1222 	int i;
1223 
1224 	for (i = 0; i < pool->pipe_count; i++) {
1225 		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1226 
1227 		if (pipe_ctx->top_pipe &&
1228 				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1229 			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1230 			if (pipe_ctx->bottom_pipe)
1231 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1232 
1233 			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1234 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1235 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1236 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1237 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1238 			pipe_ctx->stream_res.opp = pool->opps[i];
1239 			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1240 			pipe_ctx->pipe_idx = i;
1241 
1242 			pipe_ctx->stream = stream;
1243 			return i;
1244 		}
1245 	}
1246 	return -1;
1247 }
1248 #endif
1249 
1250 bool dc_add_plane_to_context(
1251 		const struct dc *dc,
1252 		struct dc_stream_state *stream,
1253 		struct dc_plane_state *plane_state,
1254 		struct dc_state *context)
1255 {
1256 	int i;
1257 	struct resource_pool *pool = dc->res_pool;
1258 	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1259 	struct dc_stream_status *stream_status = NULL;
1260 
1261 	for (i = 0; i < context->stream_count; i++)
1262 		if (context->streams[i] == stream) {
1263 			stream_status = &context->stream_status[i];
1264 			break;
1265 		}
1266 	if (stream_status == NULL) {
1267 		dm_error("Existing stream not found; failed to attach surface!\n");
1268 		return false;
1269 	}
1270 
1271 
1272 	if (stream_status->plane_count == MAX_SURFACE_NUM) {
		dm_error("Surface: cannot attach plane_state %p! Maximum is: %d\n",
				plane_state, MAX_SURFACE_NUM);
1275 		return false;
1276 	}
1277 
1278 	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1279 
1280 	if (!head_pipe) {
1281 		dm_error("Head pipe not found for stream_state %p !\n", stream);
1282 		return false;
1283 	}
1284 
1285 	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1286 
1287 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1288 	if (!free_pipe) {
1289 		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1290 		if (pipe_idx >= 0)
1291 			free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1292 	}
1293 #endif
1294 	if (!free_pipe)
1295 		return false;
1296 
1297 	/* retain new surfaces */
1298 	dc_plane_state_retain(plane_state);
1299 	free_pipe->plane_state = plane_state;
1300 
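	/*
	 * If the acquired pipe is not the head pipe, append it as the new tail
	 * of the chain and let it inherit the stream level resources (timing
	 * generator, ABM, OPP, stream encoder, audio and clock source) from
	 * the current tail.
	 */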
1301 	if (head_pipe != free_pipe) {
1302 
1303 		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1304 		ASSERT(tail_pipe);
1305 
1306 		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1307 		free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1308 		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1309 		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1310 		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1311 		free_pipe->clock_source = tail_pipe->clock_source;
1312 		free_pipe->top_pipe = tail_pipe;
1313 		tail_pipe->bottom_pipe = free_pipe;
1314 	}
1315 
1316 	/* assign new surfaces*/
1317 	stream_status->plane_states[stream_status->plane_count] = plane_state;
1318 
1319 	stream_status->plane_count++;
1320 
1321 	return true;
1322 }
1323 
1324 bool dc_remove_plane_from_context(
1325 		const struct dc *dc,
1326 		struct dc_stream_state *stream,
1327 		struct dc_plane_state *plane_state,
1328 		struct dc_state *context)
1329 {
1330 	int i;
1331 	struct dc_stream_status *stream_status = NULL;
1332 	struct resource_pool *pool = dc->res_pool;
1333 
1334 	for (i = 0; i < context->stream_count; i++)
1335 		if (context->streams[i] == stream) {
1336 			stream_status = &context->stream_status[i];
1337 			break;
1338 		}
1339 
1340 	if (stream_status == NULL) {
1341 		dm_error("Existing stream not found; failed to remove plane.\n");
1342 		return false;
1343 	}
1344 
1345 	/* release pipe for plane*/
1346 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1347 		struct pipe_ctx *pipe_ctx;
1348 
1349 		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1350 			pipe_ctx = &context->res_ctx.pipe_ctx[i];
1351 
1352 			if (pipe_ctx->top_pipe)
1353 				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1354 
			/* The second condition avoids setting the tail pipe's
			 * top_pipe to NULL, which would make it look like a
			 * head pipe in subsequent deletes
			 */
1359 			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1360 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1361 
			/*
			 * For a head pipe, detach the surface from the pipe;
			 * for a tail pipe, just zero the whole pipe out
			 */
			if (!pipe_ctx->top_pipe) {
				pipe_ctx->plane_state = NULL;
				pipe_ctx->bottom_pipe = NULL;
			} else {
				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
			}
1372 		}
1373 	}
1374 
1375 
1376 	for (i = 0; i < stream_status->plane_count; i++) {
1377 		if (stream_status->plane_states[i] == plane_state) {
1378 
1379 			dc_plane_state_release(stream_status->plane_states[i]);
1380 			break;
1381 		}
1382 	}
1383 
1384 	if (i == stream_status->plane_count) {
1385 		dm_error("Existing plane_state not found; failed to detach it!\n");
1386 		return false;
1387 	}
1388 
1389 	stream_status->plane_count--;
1390 
1391 	/* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1392 	for (; i < stream_status->plane_count; i++)
1393 		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1394 
1395 	stream_status->plane_states[stream_status->plane_count] = NULL;
1396 
1397 	return true;
1398 }
1399 
1400 bool dc_rem_all_planes_for_stream(
1401 		const struct dc *dc,
1402 		struct dc_stream_state *stream,
1403 		struct dc_state *context)
1404 {
1405 	int i, old_plane_count;
1406 	struct dc_stream_status *stream_status = NULL;
1407 	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1408 
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
			break;
		}
1414 
1415 	if (stream_status == NULL) {
1416 		dm_error("Existing stream %p not found!\n", stream);
1417 		return false;
1418 	}
1419 
1420 	old_plane_count = stream_status->plane_count;
1421 
1422 	for (i = 0; i < old_plane_count; i++)
1423 		del_planes[i] = stream_status->plane_states[i];
1424 
1425 	for (i = 0; i < old_plane_count; i++)
1426 		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1427 			return false;
1428 
1429 	return true;
1430 }
1431 
1432 static bool add_all_planes_for_stream(
1433 		const struct dc *dc,
1434 		struct dc_stream_state *stream,
1435 		const struct dc_validation_set set[],
1436 		int set_count,
1437 		struct dc_state *context)
1438 {
1439 	int i, j;
1440 
1441 	for (i = 0; i < set_count; i++)
1442 		if (set[i].stream == stream)
1443 			break;
1444 
1445 	if (i == set_count) {
1446 		dm_error("Stream %p not found in set!\n", stream);
1447 		return false;
1448 	}
1449 
1450 	for (j = 0; j < set[i].plane_count; j++)
1451 		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1452 			return false;
1453 
1454 	return true;
1455 }
1456 
1457 bool dc_add_all_planes_for_stream(
1458 		const struct dc *dc,
1459 		struct dc_stream_state *stream,
1460 		struct dc_plane_state * const *plane_states,
1461 		int plane_count,
1462 		struct dc_state *context)
1463 {
1464 	struct dc_validation_set set;
1465 	int i;
1466 
1467 	set.stream = stream;
1468 	set.plane_count = plane_count;
1469 
1470 	for (i = 0; i < plane_count; i++)
1471 		set.plane_states[i] = plane_states[i];
1472 
1473 	return add_all_planes_for_stream(dc, stream, &set, 1, context);
1474 }
1475 
1476 
1477 static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1478 	struct dc_stream_state *new_stream)
1479 {
1480 	if (cur_stream == NULL)
1481 		return true;
1482 
1483 	if (memcmp(&cur_stream->hdr_static_metadata,
1484 			&new_stream->hdr_static_metadata,
1485 			sizeof(struct dc_info_packet)) != 0)
1486 		return true;
1487 
1488 	return false;
1489 }
1490 
1491 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1492 		struct dc_stream_state *new_stream)
1493 {
1494 	if (cur_stream == NULL)
1495 		return true;
1496 
	/* If the sink pointer changed, this is a hotplug and we should do a
	 * full HW setup.
	 */
1500 	if (cur_stream->sink != new_stream->sink)
1501 		return true;
1502 
1503 	/* If output color space is changed, need to reprogram info frames */
1504 	if (cur_stream->output_color_space != new_stream->output_color_space)
1505 		return true;
1506 
1507 	return memcmp(
1508 		&cur_stream->timing,
1509 		&new_stream->timing,
1510 		sizeof(struct dc_crtc_timing)) != 0;
1511 }
1512 
1513 static bool are_stream_backends_same(
1514 	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1515 {
1516 	if (stream_a == stream_b)
1517 		return true;
1518 
1519 	if (stream_a == NULL || stream_b == NULL)
1520 		return false;
1521 
1522 	if (is_timing_changed(stream_a, stream_b))
1523 		return false;
1524 
1525 	if (is_hdr_static_meta_changed(stream_a, stream_b))
1526 		return false;
1527 
1528 	return true;
1529 }
1530 
1531 bool dc_is_stream_unchanged(
1532 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1533 {
1534 
1535 	if (!are_stream_backends_same(old_stream, stream))
1536 		return false;
1537 
1538 	return true;
1539 }
1540 
1541 bool dc_is_stream_scaling_unchanged(
1542 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1543 {
1544 	if (old_stream == stream)
1545 		return true;
1546 
1547 	if (old_stream == NULL || stream == NULL)
1548 		return false;
1549 
1550 	if (memcmp(&old_stream->src,
1551 			&stream->src,
1552 			sizeof(struct rect)) != 0)
1553 		return false;
1554 
1555 	if (memcmp(&old_stream->dst,
1556 			&stream->dst,
1557 			sizeof(struct rect)) != 0)
1558 		return false;
1559 
1560 	return true;
1561 }
1562 
1563 static void update_stream_engine_usage(
1564 		struct resource_context *res_ctx,
1565 		const struct resource_pool *pool,
1566 		struct stream_encoder *stream_enc,
1567 		bool acquired)
1568 {
1569 	int i;
1570 
1571 	for (i = 0; i < pool->stream_enc_count; i++) {
1572 		if (pool->stream_enc[i] == stream_enc)
1573 			res_ctx->is_stream_enc_acquired[i] = acquired;
1574 	}
1575 }
1576 
1577 /* TODO: release audio object */
1578 void update_audio_usage(
1579 		struct resource_context *res_ctx,
1580 		const struct resource_pool *pool,
1581 		struct audio *audio,
1582 		bool acquired)
1583 {
1584 	int i;
1585 	for (i = 0; i < pool->audio_count; i++) {
1586 		if (pool->audios[i] == audio)
1587 			res_ctx->is_audio_acquired[i] = acquired;
1588 	}
1589 }
1590 
1591 static int acquire_first_free_pipe(
1592 		struct resource_context *res_ctx,
1593 		const struct resource_pool *pool,
1594 		struct dc_stream_state *stream)
1595 {
1596 	int i;
1597 
1598 	for (i = 0; i < pool->pipe_count; i++) {
1599 		if (!res_ctx->pipe_ctx[i].stream) {
1600 			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1601 
1602 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1603 			pipe_ctx->plane_res.mi = pool->mis[i];
1604 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1605 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1606 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1607 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1608 			pipe_ctx->stream_res.opp = pool->opps[i];
1609 			if (pool->dpps[i])
1610 				pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1611 			pipe_ctx->pipe_idx = i;
1612 
1613 
1614 			pipe_ctx->stream = stream;
1615 			return i;
1616 		}
1617 	}
1618 	return -1;
1619 }
1620 
1621 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1622 		struct resource_context *res_ctx,
1623 		const struct resource_pool *pool,
1624 		struct dc_stream_state *stream)
1625 {
1626 	int i;
1627 	int j = -1;
1628 	struct dc_link *link = stream->sink->link;
1629 
1630 	for (i = 0; i < pool->stream_enc_count; i++) {
1631 		if (!res_ctx->is_stream_enc_acquired[i] &&
1632 				pool->stream_enc[i]) {
			/* Remember an available encoder as a fallback for the
			 * second MST display in a daisy chain use case.
			 */
1635 			j = i;
1636 			if (pool->stream_enc[i]->id ==
1637 					link->link_enc->preferred_engine)
1638 				return pool->stream_enc[i];
1639 		}
1640 	}
1641 
1642 	/*
1643 	 * below can happen in cases when stream encoder is acquired:
1644 	 * 1) for second MST display in chain, so preferred engine already
1645 	 * acquired;
1646 	 * 2) for another link, which preferred engine already acquired by any
1647 	 * MST configuration.
1648 	 *
1649 	 * If signal is of DP type and preferred engine not found, return last available
1650 	 *
1651 	 * TODO - This is just a patch up and a generic solution is
1652 	 * required for non DP connectors.
1653 	 */
1654 
1655 	if (j >= 0 && dc_is_dp_signal(stream->signal))
1656 		return pool->stream_enc[j];
1657 
1658 	return NULL;
1659 }
1660 
1661 static struct audio *find_first_free_audio(
1662 		struct resource_context *res_ctx,
1663 		const struct resource_pool *pool,
1664 		enum engine_id id)
1665 {
1666 	int i;
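	/* The audio instance index is expected to line up with the stream
	 * engine id when enough endpoints exist; the fallback loop below
	 * covers ASICs with fewer audio endpoints than pipes.
	 */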
	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i] &&
				res_ctx->is_stream_enc_acquired[i]) {
			/* There are enough audio endpoints; pick the one
			 * whose instance matches the stream engine id.
			 */
			if (id != i)
				continue;

			return pool->audios[i];
		}
	}
	/* No match found; fall back to first come, first served. */
	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i])
			return pool->audios[i];
	}
	return NULL;
1683 }
1684 
1685 bool resource_is_stream_unchanged(
1686 	struct dc_state *old_context, struct dc_stream_state *stream)
1687 {
1688 	int i;
1689 
1690 	for (i = 0; i < old_context->stream_count; i++) {
1691 		struct dc_stream_state *old_stream = old_context->streams[i];
1692 
1693 		if (are_stream_backends_same(old_stream, stream))
1694 				return true;
1695 	}
1696 
1697 	return false;
1698 }
1699 
1700 enum dc_status dc_add_stream_to_ctx(
1701 		struct dc *dc,
1702 		struct dc_state *new_ctx,
1703 		struct dc_stream_state *stream)
1704 {
1705 	struct dc_context *dc_ctx = dc->ctx;
1706 	enum dc_status res;
1707 
1708 	if (new_ctx->stream_count >= dc->res_pool->pipe_count) {
		DC_ERROR("Max streams reached, can't add stream %p !\n", stream);
1710 		return DC_ERROR_UNEXPECTED;
1711 	}
1712 
1713 	new_ctx->streams[new_ctx->stream_count] = stream;
1714 	dc_stream_retain(stream);
1715 	new_ctx->stream_count++;
1716 
1717 	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1718 	if (res != DC_OK)
1719 		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1720 
1721 	return res;
1722 }
1723 
1724 enum dc_status dc_remove_stream_from_ctx(
1725 			struct dc *dc,
1726 			struct dc_state *new_ctx,
1727 			struct dc_stream_state *stream)
1728 {
1729 	int i;
1730 	struct dc_context *dc_ctx = dc->ctx;
1731 	struct pipe_ctx *del_pipe = NULL;
1732 
1733 	/* Release primary pipe */
1734 	for (i = 0; i < MAX_PIPES; i++) {
1735 		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1736 				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1737 			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1738 
1739 			ASSERT(del_pipe->stream_res.stream_enc);
1740 			update_stream_engine_usage(
1741 					&new_ctx->res_ctx,
					dc->res_pool,
1743 					del_pipe->stream_res.stream_enc,
1744 					false);
1745 
1746 			if (del_pipe->stream_res.audio)
1747 				update_audio_usage(
1748 					&new_ctx->res_ctx,
1749 					dc->res_pool,
1750 					del_pipe->stream_res.audio,
1751 					false);
1752 
1753 			resource_unreference_clock_source(&new_ctx->res_ctx,
1754 							  dc->res_pool,
1755 							  del_pipe->clock_source);
1756 
1757 			if (dc->res_pool->funcs->remove_stream_from_ctx)
1758 				dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1759 
1760 			memset(del_pipe, 0, sizeof(*del_pipe));
1761 
1762 			break;
1763 		}
1764 	}
1765 
1766 	if (!del_pipe) {
1767 		DC_ERROR("Pipe not found for stream %p !\n", stream);
1768 		return DC_ERROR_UNEXPECTED;
1769 	}
1770 
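	/* The pipe has been released; now drop the stream from the context's
	 * stream list and compact the arrays.
	 */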
1771 	for (i = 0; i < new_ctx->stream_count; i++)
1772 		if (new_ctx->streams[i] == stream)
1773 			break;
1774 
1775 	if (new_ctx->streams[i] != stream) {
1776 		DC_ERROR("Context doesn't have stream %p !\n", stream);
1777 		return DC_ERROR_UNEXPECTED;
1778 	}
1779 
1780 	dc_stream_release(new_ctx->streams[i]);
1781 	new_ctx->stream_count--;
1782 
1783 	/* Trim back arrays */
1784 	for (; i < new_ctx->stream_count; i++) {
1785 		new_ctx->streams[i] = new_ctx->streams[i + 1];
1786 		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1787 	}
1788 
1789 	new_ctx->streams[new_ctx->stream_count] = NULL;
1790 	memset(
1791 			&new_ctx->stream_status[new_ctx->stream_count],
1792 			0,
1793 			sizeof(new_ctx->stream_status[0]));
1794 
1795 	return DC_OK;
1796 }
1797 
1798 static struct dc_stream_state *find_pll_sharable_stream(
1799 		struct dc_stream_state *stream_needs_pll,
1800 		struct dc_state *context)
1801 {
1802 	int i;
1803 
1804 	for (i = 0; i < context->stream_count; i++) {
1805 		struct dc_stream_state *stream_has_pll = context->streams[i];
1806 
		/* We are looking for a non-DP, non-virtual stream */
1808 		if (resource_are_streams_timing_synchronizable(
1809 			stream_needs_pll, stream_has_pll)
1810 			&& !dc_is_dp_signal(stream_has_pll->signal)
1811 			&& stream_has_pll->sink->link->connector_signal
1812 			!= SIGNAL_TYPE_VIRTUAL)
1813 			return stream_has_pll;
1814 
1815 	}
1816 
1817 	return NULL;
1818 }
1819 
1820 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1821 {
1822 	uint32_t pix_clk = timing->pix_clk_khz;
1823 	uint32_t normalized_pix_clk = pix_clk;
1824 
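	/* Scale the pixel clock by the color depth relative to 24 bpp, e.g.
	 * a 10 bpc stream at 297000 kHz normalizes to 297000 * 30 / 24 =
	 * 371250 kHz. YCbCr 4:2:0 halves the clock first and YCbCr 4:2:2 is
	 * left unscaled.
	 */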
1825 	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1826 		pix_clk /= 2;
1827 	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1828 		switch (timing->display_color_depth) {
1829 		case COLOR_DEPTH_888:
1830 			normalized_pix_clk = pix_clk;
1831 			break;
1832 		case COLOR_DEPTH_101010:
1833 			normalized_pix_clk = (pix_clk * 30) / 24;
1834 			break;
		case COLOR_DEPTH_121212:
			normalized_pix_clk = (pix_clk * 36) / 24;
			break;
		case COLOR_DEPTH_161616:
			normalized_pix_clk = (pix_clk * 48) / 24;
			break;
		default:
			ASSERT(0);
			break;
		}
1845 	}
1846 	return normalized_pix_clk;
1847 }
1848 
1849 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1850 {
1851 	/* update actual pixel clock on all streams */
1852 	if (dc_is_hdmi_signal(stream->signal))
1853 		stream->phy_pix_clk = get_norm_pix_clk(
1854 			&stream->timing);
1855 	else
1856 		stream->phy_pix_clk =
1857 			stream->timing.pix_clk_khz;
1858 }
1859 
1860 enum dc_status resource_map_pool_resources(
1861 		const struct dc  *dc,
1862 		struct dc_state *context,
1863 		struct dc_stream_state *stream)
1864 {
1865 	const struct resource_pool *pool = dc->res_pool;
1866 	int i;
1867 	struct dc_context *dc_ctx = dc->ctx;
1868 	struct pipe_ctx *pipe_ctx = NULL;
1869 	int pipe_idx = -1;
1870 
1871 	/* TODO Check if this is needed */
1872 	/*if (!resource_is_stream_unchanged(old_context, stream)) {
1873 			if (stream != NULL && old_context->streams[i] != NULL) {
1874 				stream->bit_depth_params =
1875 						old_context->streams[i]->bit_depth_params;
1876 				stream->clamping = old_context->streams[i]->clamping;
1877 				continue;
1878 			}
1879 		}
1880 	*/
1881 
1882 	/* acquire new resources */
1883 	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1884 
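	/* DCN only: if no whole pipe is free, try to take half of an already
	 * split pipe (see acquire_first_split_pipe).
	 */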
1885 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1886 	if (pipe_idx < 0)
1887 		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1888 #endif
1889 
1890 	if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
1891 		return DC_NO_CONTROLLER_RESOURCE;
1892 
1893 	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1894 
1895 	pipe_ctx->stream_res.stream_enc =
1896 		find_first_free_match_stream_enc_for_link(
1897 			&context->res_ctx, pool, stream);
1898 
1899 	if (!pipe_ctx->stream_res.stream_enc)
1900 		return DC_NO_STREAM_ENG_RESOURCE;
1901 
1902 	update_stream_engine_usage(
1903 		&context->res_ctx, pool,
1904 		pipe_ctx->stream_res.stream_enc,
1905 		true);
1906 
1907 	/* TODO: Add check if ASIC support and EDID audio */
1908 	if (!stream->sink->converter_disable_audio &&
1909 	    dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1910 	    stream->audio_info.mode_count) {
		pipe_ctx->stream_res.audio = find_first_free_audio(
				&context->res_ctx, pool,
				pipe_ctx->stream_res.stream_enc->id);
1913 
1914 		/*
1915 		 * Audio assigned in order first come first get.
1916 		 * There are asics which has number of audio
1917 		 * resources less then number of pipes
1918 		 */
1919 		if (pipe_ctx->stream_res.audio)
1920 			update_audio_usage(&context->res_ctx, pool,
1921 					   pipe_ctx->stream_res.audio, true);
1922 	}
1923 
1924 	/* Add ABM to the resource if on EDP */
1925 	if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
1926 		pipe_ctx->stream_res.abm = pool->abm;
1927 
1928 	for (i = 0; i < context->stream_count; i++)
1929 		if (context->streams[i] == stream) {
1930 			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1931 			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1932 			return DC_OK;
1933 		}
1934 
1935 	DC_ERROR("Stream %p not found in new ctx!\n", stream);
1936 	return DC_ERROR_UNEXPECTED;
1937 }
1938 
1939 void dc_resource_state_copy_construct_current(
1940 		const struct dc *dc,
1941 		struct dc_state *dst_ctx)
1942 {
1943 	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
1944 }
1945 
1946 
1947 void dc_resource_state_construct(
1948 		const struct dc *dc,
1949 		struct dc_state *dst_ctx)
1950 {
1951 	dst_ctx->dis_clk = dc->res_pool->dccg;
1952 }
1953 
1954 enum dc_status dc_validate_global_state(
1955 		struct dc *dc,
1956 		struct dc_state *new_ctx)
1957 {
1958 	enum dc_status result = DC_ERROR_UNEXPECTED;
1959 	int i, j;
1960 
1961 	if (!new_ctx)
1962 		return DC_ERROR_UNEXPECTED;
1963 
1964 	if (dc->res_pool->funcs->validate_global) {
1965 		result = dc->res_pool->funcs->validate_global(dc, new_ctx);
1966 		if (result != DC_OK)
1967 			return result;
1968 	}
1969 
1970 	for (i = 0; i < new_ctx->stream_count; i++) {
1971 		struct dc_stream_state *stream = new_ctx->streams[i];
1972 
1973 		for (j = 0; j < dc->res_pool->pipe_count; j++) {
1974 			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
1975 
1976 			if (pipe_ctx->stream != stream)
1977 				continue;
1978 
1979 			/* Switch to dp clock source only if there is
1980 			 * no non dp stream that shares the same timing
1981 			 * with the dp stream.
1982 			 */
1983 			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1984 				!find_pll_sharable_stream(stream, new_ctx)) {
1985 
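				/* Swap the reference: drop the clock source
				 * this pipe was holding and pin it to the
				 * dedicated DP clock source instead.
				 */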
1986 				resource_unreference_clock_source(
1987 						&new_ctx->res_ctx,
1988 						dc->res_pool,
1989 						pipe_ctx->clock_source);
1990 
1991 				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
1992 				resource_reference_clock_source(
1993 						&new_ctx->res_ctx,
1994 						dc->res_pool,
						pipe_ctx->clock_source);
1996 			}
1997 		}
1998 	}
1999 
2000 	result = resource_build_scaling_params_for_context(dc, new_ctx);
2001 
2002 	if (result == DC_OK)
2003 		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2004 			result = DC_FAIL_BANDWIDTH_VALIDATE;
2005 
2006 	return result;
2007 }
2008 
2009 static void patch_gamut_packet_checksum(
2010 		struct dc_info_packet *gamut_packet)
2011 {
	/* For gamut packets we recalculate the checksum */
2013 	if (gamut_packet->valid) {
2014 		uint8_t chk_sum = 0;
2015 		uint8_t *ptr;
2016 		uint8_t i;
2017 
2018 		/*start of the Gamut data. */
2019 		ptr = &gamut_packet->sb[3];
2020 
2021 		for (i = 0; i <= gamut_packet->sb[1]; i++)
2022 			chk_sum += ptr[i];
2023 
2024 		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2025 	}
2026 }
2027 
2028 static void set_avi_info_frame(
2029 		struct dc_info_packet *info_packet,
2030 		struct pipe_ctx *pipe_ctx)
2031 {
2032 	struct dc_stream_state *stream = pipe_ctx->stream;
2033 	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2034 	uint32_t pixel_encoding = 0;
2035 	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2036 	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2037 	bool itc = false;
2038 	uint8_t itc_value = 0;
2039 	uint8_t cn0_cn1 = 0;
2040 	unsigned int cn0_cn1_value = 0;
2041 	uint8_t *check_sum = NULL;
2042 	uint8_t byte_index = 0;
2043 	union hdmi_info_packet hdmi_info;
2044 	union display_content_support support = {0};
2045 	unsigned int vic = pipe_ctx->stream->timing.vic;
2046 	enum dc_timing_3d_format format;
2047 
2048 	memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2049 
2050 	color_space = pipe_ctx->stream->output_color_space;
2051 	if (color_space == COLOR_SPACE_UNKNOWN)
		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
				COLOR_SPACE_SRGB : COLOR_SPACE_YCBCR709;
2054 
2055 	/* Initialize header */
2056 	hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1).
	 */
2059 	hdmi_info.bits.header.version = 2;
2060 	hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2061 
2062 	/*
2063 	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2064 	 * according to HDMI 2.0 spec (Section 10.1)
2065 	 */
2066 
2067 	switch (stream->timing.pixel_encoding) {
2068 	case PIXEL_ENCODING_YCBCR422:
2069 		pixel_encoding = 1;
2070 		break;
2071 
2072 	case PIXEL_ENCODING_YCBCR444:
2073 		pixel_encoding = 2;
2074 		break;
2075 	case PIXEL_ENCODING_YCBCR420:
2076 		pixel_encoding = 3;
2077 		break;
2078 
2079 	case PIXEL_ENCODING_RGB:
2080 	default:
2081 		pixel_encoding = 0;
2082 	}
2083 
2084 	/* Y0_Y1_Y2 : The pixel encoding */
2085 	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2086 	hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2087 
2088 	/* A0 = 1 Active Format Information valid */
2089 	hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2090 
2091 	/* B0, B1 = 3; Bar info data is valid */
2092 	hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2093 
2094 	hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2095 
2096 	/* S0, S1 : Underscan / Overscan */
2097 	/* TODO: un-hardcode scan type */
2098 	scan_type = SCANNING_TYPE_UNDERSCAN;
2099 	hdmi_info.bits.S0_S1 = scan_type;
2100 
2101 	/* C0, C1 : Colorimetry */
2102 	if (color_space == COLOR_SPACE_YCBCR709 ||
2103 			color_space == COLOR_SPACE_YCBCR709_LIMITED)
2104 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2105 	else if (color_space == COLOR_SPACE_YCBCR601 ||
2106 			color_space == COLOR_SPACE_YCBCR601_LIMITED)
2107 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
	else
		hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2111 	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2112 			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2113 			color_space == COLOR_SPACE_2020_YCBCR) {
2114 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2115 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2116 	} else if (color_space == COLOR_SPACE_ADOBERGB) {
2117 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2118 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2119 	}
2120 
2121 	/* TODO: un-hardcode aspect ratio */
2122 	aspect = stream->timing.aspect_ratio;
2123 
2124 	switch (aspect) {
2125 	case ASPECT_RATIO_4_3:
2126 	case ASPECT_RATIO_16_9:
2127 		hdmi_info.bits.M0_M1 = aspect;
2128 		break;
2129 
2130 	case ASPECT_RATIO_NO_DATA:
2131 	case ASPECT_RATIO_64_27:
2132 	case ASPECT_RATIO_256_135:
2133 	default:
2134 		hdmi_info.bits.M0_M1 = 0;
2135 	}
2136 
2137 	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2138 	hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2139 
2140 	/* TODO: un-hardcode cn0_cn1 and itc */
2141 
2142 	cn0_cn1 = 0;
2143 	cn0_cn1_value = 0;
2144 
2145 	itc = true;
2146 	itc_value = 1;
2147 
2148 	support = stream->sink->edid_caps.content_support;
2149 
2150 	if (itc) {
2151 		if (!support.bits.valid_content_type) {
2152 			cn0_cn1_value = 0;
2153 		} else {
2154 			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2155 				if (support.bits.graphics_content == 1) {
2156 					cn0_cn1_value = 0;
2157 				}
2158 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2159 				if (support.bits.photo_content == 1) {
2160 					cn0_cn1_value = 1;
2161 				} else {
2162 					cn0_cn1_value = 0;
2163 					itc_value = 0;
2164 				}
2165 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2166 				if (support.bits.cinema_content == 1) {
2167 					cn0_cn1_value = 2;
2168 				} else {
2169 					cn0_cn1_value = 0;
2170 					itc_value = 0;
2171 				}
2172 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2173 				if (support.bits.game_content == 1) {
2174 					cn0_cn1_value = 3;
2175 				} else {
2176 					cn0_cn1_value = 0;
2177 					itc_value = 0;
2178 				}
2179 			}
2180 		}
2181 		hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2182 		hdmi_info.bits.ITC = itc_value;
2183 	}
2184 
2185 	/* TODO : We should handle YCC quantization */
2186 	/* but we do not have matrix calculation */
2187 	if (stream->sink->edid_caps.qs_bit == 1 &&
2188 			stream->sink->edid_caps.qy_bit == 1) {
2189 		if (color_space == COLOR_SPACE_SRGB ||
2190 			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2191 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
2192 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2193 		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2194 					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2195 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
2196 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2197 		} else {
2198 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2199 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2200 		}
2201 	} else {
2202 		hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2203 		hdmi_info.bits.YQ0_YQ1   = YYC_QUANTIZATION_LIMITED_RANGE;
2204 	}
2205 
	/* VIC */
	format = stream->timing.timing_3d_format;
	/* TODO: add 3D stereo support */
	if (format != TIMING_3D_FORMAT_NONE) {
		/* Per the HDMI spec, the HDMI VIC needs to be converted to a
		 * CEA VIC when 3D is enabled.
		 */
2211 		switch (pipe_ctx->stream->timing.hdmi_vic) {
2212 		case 1:
2213 			vic = 95;
2214 			break;
2215 		case 2:
2216 			vic = 94;
2217 			break;
2218 		case 3:
2219 			vic = 93;
2220 			break;
2221 		case 4:
2222 			vic = 98;
2223 			break;
2224 		default:
2225 			break;
2226 		}
2227 	}
2228 	hdmi_info.bits.VIC0_VIC7 = vic;
2229 
	/* pixel repetition
	 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition starts from 1.
	 */
2233 	hdmi_info.bits.PR0_PR3 = 0;
2234 
	/* Bar Info
	 * barTop:    Line Number of End of Top Bar.
	 * barBottom: Line Number of Start of Bottom Bar.
	 * barLeft:   Pixel Number of End of Left Bar.
	 * barRight:  Pixel Number of Start of Right Bar.
	 */
2240 	hdmi_info.bits.bar_top = stream->timing.v_border_top;
2241 	hdmi_info.bits.bar_bottom = (stream->timing.v_total
2242 			- stream->timing.v_border_bottom + 1);
2243 	hdmi_info.bits.bar_left  = stream->timing.h_border_left;
2244 	hdmi_info.bits.bar_right = (stream->timing.h_total
2245 			- stream->timing.h_border_right + 1);
2246 
2247 	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2248 	check_sum = &hdmi_info.packet_raw_data.sb[0];
2249 
2250 	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2251 
2252 	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2253 		*check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2254 
2255 	/* one byte complement */
2256 	*check_sum = (uint8_t) (0x100 - *check_sum);
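	/* With the complement stored in SB0, the byte-wise sum of header and
	 * payload is 0 modulo 256, which is what the sink verifies.
	 */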
2257 
2258 	/* Store in hw_path_mode */
2259 	info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2260 	info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2261 	info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2262 
2263 	for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2264 		info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2265 
2266 	info_packet->valid = true;
2267 }
2268 
2269 static void set_vendor_info_packet(
2270 		struct dc_info_packet *info_packet,
2271 		struct dc_stream_state *stream)
2272 {
2273 	uint32_t length = 0;
2274 	bool hdmi_vic_mode = false;
2275 	uint8_t checksum = 0;
2276 	uint32_t i = 0;
2277 	enum dc_timing_3d_format format;
	/* TODO: length can be different depending on packet content, e.g.
	 * unsigned int length = pPathMode->dolbyVision ? 24 : 5;
	 */
2280 
2281 	info_packet->valid = false;
2282 
2283 	format = stream->timing.timing_3d_format;
2284 	if (stream->view_format == VIEW_3D_FORMAT_NONE)
2285 		format = TIMING_3D_FORMAT_NONE;
2286 
2287 	/* Can be different depending on packet content */
2288 	length = 5;
2289 
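	/* A non-zero HDMI VIC on a 4k x 2k timing indicates an HDMI 1.4
	 * extended resolution mode, which is signalled through the VSIF
	 * rather than the AVI VIC field.
	 */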
2290 	if (stream->timing.hdmi_vic != 0
2291 			&& stream->timing.h_total >= 3840
2292 			&& stream->timing.v_total >= 2160)
2293 		hdmi_vic_mode = true;
2294 
2295 	/* According to HDMI 1.4a CTS, VSIF should be sent
2296 	 * for both 3D stereo and HDMI VIC modes.
2297 	 * For all other modes, there is no VSIF sent.  */
2298 
2299 	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2300 		return;
2301 
2302 	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2303 	info_packet->sb[1] = 0x03;
2304 	info_packet->sb[2] = 0x0C;
2305 	info_packet->sb[3] = 0x00;
2306 
	/* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
	 * The values for HDMI_Video_Format are:
	 * 0x0 (0b000) - No additional HDMI video format is presented in this
	 * packet
	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
	 * parameter follows
	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
	 * potentially 3D_Ext_Data follows
	 * 0x3..0x7 (0b011..0b111) - reserved for future use
	 */
2316 	if (format != TIMING_3D_FORMAT_NONE)
2317 		info_packet->sb[4] = (2 << 5);
2318 	else if (hdmi_vic_mode)
2319 		info_packet->sb[4] = (1 << 5);
2320 
	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
	 * The values for 3D_Structure are:
	 * 0x0 - Frame Packing
	 * 0x1 - Field Alternative
	 * 0x2 - Line Alternative
	 * 0x3 - Side-by-Side (full)
	 * 0x4 - L + depth
	 * 0x5 - L + depth + graphics + graphics-depth
	 * 0x6 - Top-and-Bottom
	 * 0x7 - Reserved for future use
	 * 0x8 - Side-by-Side (Half)
	 * 0x9..0xE - Reserved for future use
	 * 0xF - Not used
	 */
2335 	switch (format) {
2336 	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2337 	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2338 		info_packet->sb[5] = (0x0 << 4);
2339 		break;
2340 
2341 	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2342 	case TIMING_3D_FORMAT_SBS_SW_PACKED:
2343 		info_packet->sb[5] = (0x8 << 4);
2344 		length = 6;
2345 		break;
2346 
2347 	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2348 	case TIMING_3D_FORMAT_TB_SW_PACKED:
2349 		info_packet->sb[5] = (0x6 << 4);
2350 		break;
2351 
2352 	default:
2353 		break;
2354 	}
2355 
	/* PB5: If PB4 is set to 0x1 (extended resolution format),
	 * fill PB5 with the correct HDMI VIC code.
	 */
2358 	if (hdmi_vic_mode)
2359 		info_packet->sb[5] = stream->timing.hdmi_vic;
2360 
2361 	/* Header */
2362 	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2363 	info_packet->hb1 = 0x01; /* Version */
2364 
2365 	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2366 	info_packet->hb2 = (uint8_t) (length);
2367 
2368 	/* Calculate checksum */
2369 	checksum = 0;
2370 	checksum += info_packet->hb0;
2371 	checksum += info_packet->hb1;
2372 	checksum += info_packet->hb2;
2373 
2374 	for (i = 1; i <= length; i++)
2375 		checksum += info_packet->sb[i];
2376 
2377 	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2378 
2379 	info_packet->valid = true;
2380 }
2381 
2382 static void set_spd_info_packet(
2383 		struct dc_info_packet *info_packet,
2384 		struct dc_stream_state *stream)
2385 {
2386 	/* SPD info packet for FreeSync */
2387 
2388 	unsigned char checksum = 0;
2389 	unsigned int idx, payload_size = 0;
2390 
2391 	/* Check if Freesync is supported. Return if false. If true,
2392 	 * set the corresponding bit in the info packet
2393 	 */
	if (!stream->freesync_ctx.supported)
2395 		return;
2396 
2397 	if (dc_is_hdmi_signal(stream->signal)) {
2398 
2399 		/* HEADER */
2400 
2401 		/* HB0  = Packet Type = 0x83 (Source Product
2402 		 *	  Descriptor InfoFrame)
2403 		 */
2404 		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2405 
2406 		/* HB1  = Version = 0x01 */
2407 		info_packet->hb1 = 0x01;
2408 
2409 		/* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2410 		info_packet->hb2 = 0x08;
2411 
2412 		payload_size = 0x08;
2413 
2414 	} else if (dc_is_dp_signal(stream->signal)) {
2415 
2416 		/* HEADER */
2417 
2418 		/* HB0  = Secondary-data Packet ID = 0 - Only non-zero
2419 		 *	  when used to associate audio related info packets
2420 		 */
2421 		info_packet->hb0 = 0x00;
2422 
2423 		/* HB1  = Packet Type = 0x83 (Source Product
2424 		 *	  Descriptor InfoFrame)
2425 		 */
2426 		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2427 
2428 		/* HB2  = [Bits 7:0 = Least significant eight bits -
2429 		 *	  For INFOFRAME, the value must be 1Bh]
2430 		 */
2431 		info_packet->hb2 = 0x1B;
2432 
2433 		/* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2434 		 *	  [Bits 1:0 = Most significant two bits = 0x00]
2435 		 */
2436 		info_packet->hb3 = 0x04;
2437 
2438 		payload_size = 0x1B;
2439 	}
2440 
2441 	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2442 	info_packet->sb[1] = 0x1A;
2443 
2444 	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2445 	info_packet->sb[2] = 0x00;
2446 
2447 	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2448 	info_packet->sb[3] = 0x00;
2449 
2450 	/* PB4 = Reserved */
2451 	info_packet->sb[4] = 0x00;
2452 
2453 	/* PB5 = Reserved */
2454 	info_packet->sb[5] = 0x00;
2455 
2456 	/* PB6 = [Bits 7:3 = Reserved] */
2457 	info_packet->sb[6] = 0x00;
2458 
	/* PB6 = [Bit 0 = FreeSync Supported] */
	if (stream->freesync_ctx.supported)
		info_packet->sb[6] |= 0x01;

	/* PB6 = [Bit 1 = FreeSync Enabled] */
	if (stream->freesync_ctx.enabled)
		info_packet->sb[6] |= 0x02;

	/* PB6 = [Bit 2 = FreeSync Active] */
	if (stream->freesync_ctx.active)
		info_packet->sb[6] |= 0x04;
2470 
2471 	/* PB7 = FreeSync Minimum refresh rate (Hz) */
2472 	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2473 			min_refresh_in_micro_hz / 1000000);
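	/* The refresh rates are stored in micro-Hz; integer division
	 * truncates to whole Hz, e.g. 48000000 / 1000000 = 48 Hz.
	 */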
2474 
2475 	/* PB8 = FreeSync Maximum refresh rate (Hz)
2476 	 *
2477 	 * Note: We do not use the maximum capable refresh rate
2478 	 * of the panel, because we should never go above the field
2479 	 * rate of the mode timing set.
2480 	 */
2481 	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2482 			nominal_refresh_in_micro_hz / 1000000);
2483 
2484 	/* PB9 - PB27  = Reserved */
2485 	for (idx = 9; idx <= 27; idx++)
2486 		info_packet->sb[idx] = 0x00;
2487 
2488 	/* Calculate checksum */
2489 	checksum += info_packet->hb0;
2490 	checksum += info_packet->hb1;
2491 	checksum += info_packet->hb2;
2492 	checksum += info_packet->hb3;
2493 
2494 	for (idx = 1; idx <= payload_size; idx++)
2495 		checksum += info_packet->sb[idx];
2496 
2497 	/* PB0 = Checksum (one byte complement) */
2498 	info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2499 
2500 	info_packet->valid = true;
2501 }
2502 
2503 static void set_hdr_static_info_packet(
2504 		struct dc_info_packet *info_packet,
2505 		struct dc_stream_state *stream)
2506 {
2507 	/* HDR Static Metadata info packet for HDR10 */
2508 
2509 	if (!stream->hdr_static_metadata.valid ||
2510 			stream->use_dynamic_meta)
2511 		return;
2512 
2513 	*info_packet = stream->hdr_static_metadata;
2514 }
2515 
2516 static void set_vsc_info_packet(
2517 		struct dc_info_packet *info_packet,
2518 		struct dc_stream_state *stream)
2519 {
2520 	unsigned int vscPacketRevision = 0;
2521 	unsigned int i;
2522 
	/* VSC packet revision 2 is used when PSR is supported (DP 1.2+) */
2524 	if (stream->psr_version != 0) {
2525 		vscPacketRevision = 2;
2526 	}
2527 
2528 	/* VSC packet not needed based on the features
2529 	 * supported by this DP display
2530 	 */
2531 	if (vscPacketRevision == 0)
2532 		return;
2533 
2534 	if (vscPacketRevision == 0x2) {
2535 		/* Secondary-data Packet ID = 0*/
2536 		info_packet->hb0 = 0x00;
2537 		/* 07h - Packet Type Value indicating Video
2538 		 * Stream Configuration packet
2539 		 */
2540 		info_packet->hb1 = 0x07;
2541 		/* 02h = VSC SDP supporting 3D stereo and PSR
2542 		 * (applies to eDP v1.3 or higher).
2543 		 */
2544 		info_packet->hb2 = 0x02;
2545 		/* 08h = VSC packet supporting 3D stereo + PSR
2546 		 * (HB2 = 02h).
2547 		 */
2548 		info_packet->hb3 = 0x08;
2549 
2550 		for (i = 0; i < 28; i++)
2551 			info_packet->sb[i] = 0;
2552 
2553 		info_packet->valid = true;
2554 	}
2555 
2556 	/*TODO: stereo 3D support and extend pixel encoding colorimetry*/
2557 }
2558 
2559 void dc_resource_state_destruct(struct dc_state *context)
2560 {
2561 	int i, j;
2562 
2563 	for (i = 0; i < context->stream_count; i++) {
2564 		for (j = 0; j < context->stream_status[i].plane_count; j++)
2565 			dc_plane_state_release(
2566 				context->stream_status[i].plane_states[j]);
2567 
2568 		context->stream_status[i].plane_count = 0;
2569 		dc_stream_release(context->streams[i]);
2570 		context->streams[i] = NULL;
2571 	}
2572 }
2573 
2574 /*
2575  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2576  * by the src_ctx
2577  */
2578 void dc_resource_state_copy_construct(
2579 		const struct dc_state *src_ctx,
2580 		struct dc_state *dst_ctx)
2581 {
2582 	int i, j;
2583 	struct kref refcount = dst_ctx->refcount;
2584 
2585 	*dst_ctx = *src_ctx;
2586 
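	/* The struct copy above leaves top_pipe/bottom_pipe pointing into
	 * src_ctx; re-target them at the destination context's own pipe
	 * array using the stored pipe_idx.
	 */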
2587 	for (i = 0; i < MAX_PIPES; i++) {
2588 		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2589 
2590 		if (cur_pipe->top_pipe)
2591 			cur_pipe->top_pipe =  &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2592 
2593 		if (cur_pipe->bottom_pipe)
2594 			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2595 
2596 	}
2597 
2598 	for (i = 0; i < dst_ctx->stream_count; i++) {
2599 		dc_stream_retain(dst_ctx->streams[i]);
2600 		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2601 			dc_plane_state_retain(
2602 				dst_ctx->stream_status[i].plane_states[j]);
2603 	}
2604 
2605 	/* context refcount should not be overridden */
2606 	dst_ctx->refcount = refcount;
2607 
2608 }
2609 
2610 struct clock_source *dc_resource_find_first_free_pll(
2611 		struct resource_context *res_ctx,
2612 		const struct resource_pool *pool)
2613 {
2614 	int i;
2615 
2616 	for (i = 0; i < pool->clk_src_count; ++i) {
2617 		if (res_ctx->clock_source_ref_count[i] == 0)
2618 			return pool->clock_sources[i];
2619 	}
2620 
2621 	return NULL;
2622 }
2623 
2624 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2625 {
2626 	enum signal_type signal = SIGNAL_TYPE_NONE;
2627 	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2628 
2629 	/* default all packets to invalid */
2630 	info->avi.valid = false;
2631 	info->gamut.valid = false;
2632 	info->vendor.valid = false;
2633 	info->spd.valid = false;
2634 	info->hdrsmd.valid = false;
2635 	info->vsc.valid = false;
2636 
2637 	signal = pipe_ctx->stream->signal;
2638 
	/* HDMI and DP have different info packets */
2640 	if (dc_is_hdmi_signal(signal)) {
2641 		set_avi_info_frame(&info->avi, pipe_ctx);
2642 
2643 		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2644 
2645 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2646 
2647 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2648 
2649 	} else if (dc_is_dp_signal(signal)) {
2650 		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2651 
2652 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2653 
2654 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2655 	}
2656 
2657 	patch_gamut_packet_checksum(&info->gamut);
2658 }
2659 
2660 enum dc_status resource_map_clock_resources(
2661 		const struct dc  *dc,
2662 		struct dc_state *context,
2663 		struct dc_stream_state *stream)
2664 {
2665 	/* acquire new resources */
2666 	const struct resource_pool *pool = dc->res_pool;
2667 	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2668 				&context->res_ctx, stream);
2669 
2670 	if (!pipe_ctx)
2671 		return DC_ERROR_UNEXPECTED;
2672 
2673 	if (dc_is_dp_signal(pipe_ctx->stream->signal)
2674 		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2675 		pipe_ctx->clock_source = pool->dp_clock_source;
2676 	else {
2677 		pipe_ctx->clock_source = NULL;
2678 
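		/* Prefer sharing an already-used PLL when possible; only grab
		 * a free PLL if sharing is disabled or no shareable one is
		 * found.
		 */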
2679 		if (!dc->config.disable_disp_pll_sharing)
2680 			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2681 				&context->res_ctx,
2682 				pipe_ctx);
2683 
2684 		if (pipe_ctx->clock_source == NULL)
2685 			pipe_ctx->clock_source =
2686 				dc_resource_find_first_free_pll(
2687 					&context->res_ctx,
2688 					pool);
2689 	}
2690 
2691 	if (pipe_ctx->clock_source == NULL)
2692 		return DC_NO_CLOCK_SOURCE_RESOURCE;
2693 
2694 	resource_reference_clock_source(
2695 		&context->res_ctx, pool,
2696 		pipe_ctx->clock_source);
2697 
2698 	return DC_OK;
2699 }
2700 
2701 /*
2702  * Note: We need to disable output if clock sources change,
2703  * since bios does optimization and doesn't apply if changing
2704  * PHY when not already disabled.
2705  */
2706 bool pipe_need_reprogram(
2707 		struct pipe_ctx *pipe_ctx_old,
2708 		struct pipe_ctx *pipe_ctx)
2709 {
2710 	if (!pipe_ctx_old->stream)
2711 		return false;
2712 
2713 	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2714 		return true;
2715 
2716 	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2717 		return true;
2718 
2719 	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2720 		return true;
2721 
2722 	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2723 			&& pipe_ctx_old->stream != pipe_ctx->stream)
2724 		return true;
2725 
2726 	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2727 		return true;
2728 
2729 	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2730 		return true;
2731 
2732 	if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2733 		return true;
2734 
2735 	return false;
2736 }
2737 
2738 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2739 		struct bit_depth_reduction_params *fmt_bit_depth)
2740 {
2741 	enum dc_dither_option option = stream->dither_option;
2742 	enum dc_pixel_encoding pixel_encoding =
2743 			stream->timing.pixel_encoding;
2744 
2745 	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2746 
2747 	if (option == DITHER_OPTION_DEFAULT) {
2748 		switch (stream->timing.display_color_depth) {
2749 		case COLOR_DEPTH_666:
2750 			option = DITHER_OPTION_SPATIAL6;
2751 			break;
2752 		case COLOR_DEPTH_888:
2753 			option = DITHER_OPTION_SPATIAL8;
2754 			break;
2755 		case COLOR_DEPTH_101010:
2756 			option = DITHER_OPTION_SPATIAL10;
2757 			break;
2758 		default:
2759 			option = DITHER_OPTION_DISABLE;
2760 		}
2761 	}
2762 
2763 	if (option == DITHER_OPTION_DISABLE)
2764 		return;
2765 
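	/* The *_DEPTH fields encode the target depth: 0 = 6 bpc, 1 = 8 bpc,
	 * 2 = 10 bpc, for truncation, spatial and temporal dither alike.
	 */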
2766 	if (option == DITHER_OPTION_TRUN6) {
2767 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2768 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2769 	} else if (option == DITHER_OPTION_TRUN8 ||
2770 			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2771 			option == DITHER_OPTION_TRUN8_FM6) {
2772 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2773 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2774 	} else if (option == DITHER_OPTION_TRUN10        ||
2775 			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
2776 			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
2777 			option == DITHER_OPTION_TRUN10_FM8     ||
2778 			option == DITHER_OPTION_TRUN10_FM6     ||
2779 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2780 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2781 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2782 	}
2783 
2784 	/* special case - Formatter can only reduce by 4 bits at most.
2785 	 * When reducing from 12 to 6 bits,
2786 	 * HW recommends we use trunc with round mode
2787 	 * (if we did nothing, trunc to 10 bits would be used)
2788 	 * note that any 12->10 bit reduction is ignored prior to DCE8,
2789 	 * as the input was 10 bits.
2790 	 */
2791 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2792 			option == DITHER_OPTION_SPATIAL6 ||
2793 			option == DITHER_OPTION_FM6) {
2794 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2795 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2796 		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2797 	}
2798 
2799 	/* spatial dither
2800 	 * note that spatial modes 1-3 are never used
2801 	 */
2802 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
2803 			option == DITHER_OPTION_SPATIAL6 ||
2804 			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
2805 			option == DITHER_OPTION_TRUN8_SPATIAL6) {
2806 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2807 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2808 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2809 		fmt_bit_depth->flags.RGB_RANDOM =
2810 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2811 	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
2812 			option == DITHER_OPTION_SPATIAL8 ||
2813 			option == DITHER_OPTION_SPATIAL8_FM6        ||
2814 			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
2815 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2816 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2817 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2818 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2819 		fmt_bit_depth->flags.RGB_RANDOM =
2820 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2821 	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2822 			option == DITHER_OPTION_SPATIAL10 ||
2823 			option == DITHER_OPTION_SPATIAL10_FM8 ||
2824 			option == DITHER_OPTION_SPATIAL10_FM6) {
2825 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2826 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2827 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2828 		fmt_bit_depth->flags.RGB_RANDOM =
2829 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2830 	}
2831 
2832 	if (option == DITHER_OPTION_SPATIAL6 ||
2833 			option == DITHER_OPTION_SPATIAL8 ||
2834 			option == DITHER_OPTION_SPATIAL10) {
2835 		fmt_bit_depth->flags.FRAME_RANDOM = 0;
2836 	} else {
2837 		fmt_bit_depth->flags.FRAME_RANDOM = 1;
2838 	}
2839 
2840 	//////////////////////
2841 	//// temporal dither
2842 	//////////////////////
2843 	if (option == DITHER_OPTION_FM6           ||
2844 			option == DITHER_OPTION_SPATIAL8_FM6     ||
2845 			option == DITHER_OPTION_SPATIAL10_FM6     ||
2846 			option == DITHER_OPTION_TRUN10_FM6     ||
2847 			option == DITHER_OPTION_TRUN8_FM6      ||
2848 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2849 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2850 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2851 	} else if (option == DITHER_OPTION_FM8        ||
2852 			option == DITHER_OPTION_SPATIAL10_FM8  ||
2853 			option == DITHER_OPTION_TRUN10_FM8) {
2854 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2855 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2856 	} else if (option == DITHER_OPTION_FM10) {
2857 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2858 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2859 	}
2860 
2861 	fmt_bit_depth->pixel_encoding = pixel_encoding;
2862 }
2863 
2864 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2865 {
2866 	struct dc  *core_dc = dc;
2867 	struct dc_link *link = stream->sink->link;
2868 	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2869 	enum dc_status res = DC_OK;
2870 
2871 	calculate_phy_pix_clks(stream);
2872 
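	/* Timing caps are assumed identical across timing generators, so
	 * instance 0 stands in for all of them here.
	 */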
2873 	if (!tg->funcs->validate_timing(tg, &stream->timing))
2874 		res = DC_FAIL_CONTROLLER_VALIDATE;
2875 
2876 	if (res == DC_OK)
2877 		if (!link->link_enc->funcs->validate_output_with_stream(
2878 						link->link_enc, stream))
2879 			res = DC_FAIL_ENC_VALIDATE;
2880 
2881 	/* TODO: validate audio ASIC caps, encoder */
2882 
2883 	if (res == DC_OK)
2884 		res = dc_link_validate_mode_timing(stream,
2885 		      link,
2886 		      &stream->timing);
2887 
2888 	return res;
2889 }
2890 
2891 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2892 {
2893 	enum dc_status res = DC_OK;
2894 
2895 	/* TODO For now validates pixel format only */
2896 	if (dc->res_pool->funcs->validate_plane)
2897 		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2898 
2899 	return res;
2900 }
2901