1 /*
2  * Copyright 2012-15 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 #include "dm_services.h"
26 
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
39 
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
46 #endif
47 #include "dce120/dce120_resource.h"
48 
49 #define DC_LOGGER_INIT(logger)
50 
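/*
 * Map a hardware ASIC id (chip family plus internal revision) to the DCE/DCN
 * display engine version it implements. Returns DCE_VERSION_UNKNOWN for
 * unrecognized families.
 */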
51 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
52 {
53 	enum dce_version dc_version = DCE_VERSION_UNKNOWN;
54 	switch (asic_id.chip_family) {
55 
56 	case FAMILY_CI:
57 		dc_version = DCE_VERSION_8_0;
58 		break;
59 	case FAMILY_KV:
60 		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
61 		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
62 		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
63 			dc_version = DCE_VERSION_8_3;
64 		else
65 			dc_version = DCE_VERSION_8_1;
66 		break;
67 	case FAMILY_CZ:
68 		dc_version = DCE_VERSION_11_0;
69 		break;
70 
71 	case FAMILY_VI:
72 		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
73 				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
74 			dc_version = DCE_VERSION_10_0;
75 			break;
76 		}
77 		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
78 				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
79 				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
80 			dc_version = DCE_VERSION_11_2;
81 		}
82 		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
83 			dc_version = DCE_VERSION_11_22;
84 		break;
85 	case FAMILY_AI:
86 		dc_version = DCE_VERSION_12_0;
87 		break;
88 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
89 	case FAMILY_RV:
90 		dc_version = DCN_VERSION_1_0;
91 #if defined(CONFIG_DRM_AMD_DC_DCN1_01)
92 		if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev))
93 			dc_version = DCN_VERSION_1_01;
94 #endif
95 		break;
96 #endif
97 	default:
98 		dc_version = DCE_VERSION_UNKNOWN;
99 		break;
100 	}
101 	return dc_version;
102 }
103 
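/*
 * Create the resource pool that matches the given DCE/DCN version and
 * initialize its reference clock from the VBIOS firmware info
 * (crystal frequency).
 */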
104 struct resource_pool *dc_create_resource_pool(
105 				struct dc  *dc,
106 				int num_virtual_links,
107 				enum dce_version dc_version,
108 				struct hw_asic_id asic_id)
109 {
110 	struct resource_pool *res_pool = NULL;
111 
112 	switch (dc_version) {
113 	case DCE_VERSION_8_0:
114 		res_pool = dce80_create_resource_pool(
115 			num_virtual_links, dc);
116 		break;
117 	case DCE_VERSION_8_1:
118 		res_pool = dce81_create_resource_pool(
119 			num_virtual_links, dc);
120 		break;
121 	case DCE_VERSION_8_3:
122 		res_pool = dce83_create_resource_pool(
123 			num_virtual_links, dc);
124 		break;
125 	case DCE_VERSION_10_0:
126 		res_pool = dce100_create_resource_pool(
127 				num_virtual_links, dc);
128 		break;
129 	case DCE_VERSION_11_0:
130 		res_pool = dce110_create_resource_pool(
131 			num_virtual_links, dc, asic_id);
132 		break;
133 	case DCE_VERSION_11_2:
134 	case DCE_VERSION_11_22:
135 		res_pool = dce112_create_resource_pool(
136 			num_virtual_links, dc);
137 		break;
138 	case DCE_VERSION_12_0:
139 		res_pool = dce120_create_resource_pool(
140 			num_virtual_links, dc);
141 		break;
142 
143 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
144 	case DCN_VERSION_1_0:
145 #if defined(CONFIG_DRM_AMD_DC_DCN1_01)
146 	case DCN_VERSION_1_01:
147 #endif
148 		res_pool = dcn10_create_resource_pool(
149 				num_virtual_links, dc);
150 		break;
151 #endif
152 
153 
154 	default:
155 		break;
156 	}
157 	if (res_pool != NULL) {
158 		struct dc_firmware_info fw_info = { { 0 } };
159 
		if (dc->ctx->dc_bios->funcs->get_firmware_info(
				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK)
			res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
		else
			ASSERT_CRITICAL(false);
165 	}
166 
167 	return res_pool;
168 }
169 
170 void dc_destroy_resource_pool(struct dc  *dc)
171 {
172 	if (dc) {
173 		if (dc->res_pool)
174 			dc->res_pool->funcs->destroy(&dc->res_pool);
175 
176 		kfree(dc->hwseq);
177 	}
178 }
179 
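/*
 * Derive DP/HDMI audio support flags and the usable audio endpoint count
 * from the DCE pin straps.
 */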
180 static void update_num_audio(
181 	const struct resource_straps *straps,
182 	unsigned int *num_audio,
183 	struct audio_support *aud_support)
184 {
185 	aud_support->dp_audio = true;
186 	aud_support->hdmi_audio_native = false;
187 	aud_support->hdmi_audio_on_dongle = false;
188 
189 	if (straps->hdmi_disable == 0) {
190 		if (straps->dc_pinstraps_audio & 0x2) {
191 			aud_support->hdmi_audio_on_dongle = true;
192 			aud_support->hdmi_audio_native = true;
193 		}
194 	}
195 
196 	switch (straps->audio_stream_number) {
197 	case 0: /* multi streams supported */
198 		break;
199 	case 1: /* multi streams not supported */
200 		*num_audio = 1;
201 		break;
202 	default:
203 		DC_ERR("DC: unexpected audio fuse!\n");
204 	}
205 }
206 
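/*
 * Common pool construction: create audio endpoints, stream encoders (plus
 * one virtual stream encoder per virtual link) and the hardware sequencer
 * via the ASIC-specific create_funcs callbacks.
 */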
207 bool resource_construct(
208 	unsigned int num_virtual_links,
209 	struct dc  *dc,
210 	struct resource_pool *pool,
211 	const struct resource_create_funcs *create_funcs)
212 {
213 	struct dc_context *ctx = dc->ctx;
214 	const struct resource_caps *caps = pool->res_cap;
215 	int i;
216 	unsigned int num_audio = caps->num_audio;
217 	struct resource_straps straps = {0};
218 
219 	if (create_funcs->read_dce_straps)
220 		create_funcs->read_dce_straps(dc->ctx, &straps);
221 
222 	pool->audio_count = 0;
223 	if (create_funcs->create_audio) {
224 		/* find the total number of streams available via the
225 		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
226 		 * registers (one for each pin) starting from pin 1
227 		 * up to the max number of audio pins.
228 		 * We stop on the first pin where
229 		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
230 		 */
231 		update_num_audio(&straps, &num_audio, &pool->audio_support);
232 		for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
233 			struct audio *aud = create_funcs->create_audio(ctx, i);
234 
235 			if (aud == NULL) {
236 				DC_ERR("DC: failed to create audio!\n");
237 				return false;
238 			}
239 
240 			if (!aud->funcs->endpoint_valid(aud)) {
241 				aud->funcs->destroy(&aud);
242 				break;
243 			}
244 
245 			pool->audios[i] = aud;
246 			pool->audio_count++;
247 		}
248 	}
249 
250 	pool->stream_enc_count = 0;
251 	if (create_funcs->create_stream_encoder) {
252 		for (i = 0; i < caps->num_stream_encoder; i++) {
253 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
254 			if (pool->stream_enc[i] == NULL)
255 				DC_ERR("DC: failed to create stream_encoder!\n");
256 			pool->stream_enc_count++;
257 		}
258 	}
259 	dc->caps.dynamic_audio = false;
	if (pool->audio_count < pool->stream_enc_count)
		dc->caps.dynamic_audio = true;
263 	for (i = 0; i < num_virtual_links; i++) {
264 		pool->stream_enc[pool->stream_enc_count] =
265 			virtual_stream_encoder_create(
266 					ctx, ctx->dc_bios);
267 		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
268 			DC_ERR("DC: failed to create stream_encoder!\n");
269 			return false;
270 		}
271 		pool->stream_enc_count++;
272 	}
273 
274 	dc->hwseq = create_funcs->create_hwseq(ctx);
275 
276 	return true;
277 }
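
/* Return the pool index of the given clock source, or -1 if it is not in the pool. */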
278 static int find_matching_clock_source(
279 		const struct resource_pool *pool,
280 		struct clock_source *clock_source)
281 {
283 	int i;
284 
285 	for (i = 0; i < pool->clk_src_count; i++) {
286 		if (pool->clock_sources[i] == clock_source)
287 			return i;
288 	}
289 	return -1;
290 }
291 
292 void resource_unreference_clock_source(
293 		struct resource_context *res_ctx,
294 		const struct resource_pool *pool,
295 		struct clock_source *clock_source)
296 {
297 	int i = find_matching_clock_source(pool, clock_source);
298 
299 	if (i > -1)
300 		res_ctx->clock_source_ref_count[i]--;
301 
302 	if (pool->dp_clock_source == clock_source)
303 		res_ctx->dp_clock_source_ref_count--;
304 }
305 
306 void resource_reference_clock_source(
307 		struct resource_context *res_ctx,
308 		const struct resource_pool *pool,
309 		struct clock_source *clock_source)
310 {
311 	int i = find_matching_clock_source(pool, clock_source);
312 
313 	if (i > -1)
314 		res_ctx->clock_source_ref_count[i]++;
315 
316 	if (pool->dp_clock_source == clock_source)
317 		res_ctx->dp_clock_source_ref_count++;
318 }
319 
320 int resource_get_clock_source_reference(
321 		struct resource_context *res_ctx,
322 		const struct resource_pool *pool,
323 		struct clock_source *clock_source)
324 {
325 	int i = find_matching_clock_source(pool, clock_source);
326 
327 	if (i > -1)
328 		return res_ctx->clock_source_ref_count[i];
329 
330 	if (pool->dp_clock_source == clock_source)
331 		return res_ctx->dp_clock_source_ref_count;
332 
333 	return -1;
334 }
335 
336 bool resource_are_streams_timing_synchronizable(
337 	struct dc_stream_state *stream1,
338 	struct dc_stream_state *stream2)
339 {
340 	if (stream1->timing.h_total != stream2->timing.h_total)
341 		return false;
342 
343 	if (stream1->timing.v_total != stream2->timing.v_total)
344 		return false;
345 
346 	if (stream1->timing.h_addressable
347 				!= stream2->timing.h_addressable)
348 		return false;
349 
350 	if (stream1->timing.v_addressable
351 				!= stream2->timing.v_addressable)
352 		return false;
353 
354 	if (stream1->timing.pix_clk_khz
355 				!= stream2->timing.pix_clk_khz)
356 		return false;
357 
358 	if (stream1->clamping.c_depth != stream2->clamping.c_depth)
359 		return false;
360 
361 	if (stream1->phy_pix_clk != stream2->phy_pix_clk
362 			&& (!dc_is_dp_signal(stream1->signal)
363 			|| !dc_is_dp_signal(stream2->signal)))
364 		return false;
365 
366 	if (stream1->view_format != stream2->view_format)
367 		return false;
368 
369 	return true;
370 }
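
/*
 * DP and HDMI streams may share a clock source only when DP clock sharing
 * is not disabled and both streams use COLOR_DEPTH_888 clamping.
 */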
371 static bool is_dp_and_hdmi_sharable(
372 		struct dc_stream_state *stream1,
373 		struct dc_stream_state *stream2)
374 {
375 	if (stream1->ctx->dc->caps.disable_dp_clk_share)
376 		return false;
377 
378 	if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
379 		stream2->clamping.c_depth != COLOR_DEPTH_888)
380 		return false;
381 
382 	return true;
384 }
385 
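/*
 * Check whether the pipe that currently owns a clock source can share it
 * with another pipe: the signal types must be compatible and the two
 * stream timings must be synchronizable.
 */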
386 static bool is_sharable_clk_src(
387 	const struct pipe_ctx *pipe_with_clk_src,
388 	const struct pipe_ctx *pipe)
389 {
390 	if (pipe_with_clk_src->clock_source == NULL)
391 		return false;
392 
393 	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
394 		return false;
395 
396 	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
397 		(dc_is_dp_signal(pipe->stream->signal) &&
398 		!is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
399 				     pipe->stream)))
400 		return false;
401 
402 	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
403 			&& dc_is_dual_link_signal(pipe->stream->signal))
404 		return false;
405 
406 	if (dc_is_hdmi_signal(pipe->stream->signal)
407 			&& dc_is_dual_link_signal(pipe_with_clk_src->stream->signal))
408 		return false;
409 
410 	if (!resource_are_streams_timing_synchronizable(
411 			pipe_with_clk_src->stream, pipe->stream))
412 		return false;
413 
414 	return true;
415 }
416 
417 struct clock_source *resource_find_used_clk_src_for_sharing(
418 					struct resource_context *res_ctx,
419 					struct pipe_ctx *pipe_ctx)
420 {
421 	int i;
422 
423 	for (i = 0; i < MAX_PIPES; i++) {
424 		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
425 			return res_ctx->pipe_ctx[i].clock_source;
426 	}
427 
428 	return NULL;
429 }
430 
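/* Map a surface pixel format to the corresponding DAL/scaler pixel format. */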
431 static enum pixel_format convert_pixel_format_to_dalsurface(
432 		enum surface_pixel_format surface_pixel_format)
433 {
434 	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
435 
436 	switch (surface_pixel_format) {
437 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
438 		dal_pixel_format = PIXEL_FORMAT_INDEX8;
439 		break;
440 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
441 		dal_pixel_format = PIXEL_FORMAT_RGB565;
442 		break;
443 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
444 		dal_pixel_format = PIXEL_FORMAT_RGB565;
445 		break;
446 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
447 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
448 		break;
449 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
450 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
451 		break;
452 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
453 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
454 		break;
455 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
456 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
457 		break;
458 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
459 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
460 		break;
461 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
462 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
463 		dal_pixel_format = PIXEL_FORMAT_FP16;
464 		break;
465 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
466 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
467 		dal_pixel_format = PIXEL_FORMAT_420BPP8;
468 		break;
469 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
470 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
471 		dal_pixel_format = PIXEL_FORMAT_420BPP10;
472 		break;
473 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
474 	default:
475 		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
476 		break;
477 	}
478 	return dal_pixel_format;
479 }
480 
481 static void rect_swap_helper(struct rect *rect)
482 {
483 	swap(rect->height, rect->width);
484 	swap(rect->x, rect->y);
485 }
486 
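/*
 * Compute the plane viewport (and the chroma viewport for 4:2:0 formats)
 * in source surface space: intersect the stream source with the surface
 * clip rect, scale it back through the surface dst_rect, then adjust for
 * rotation, mirroring and horizontal/vertical pipe splits.
 */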
487 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
488 {
489 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
490 	const struct dc_stream_state *stream = pipe_ctx->stream;
491 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
492 	struct rect surf_src = plane_state->src_rect;
493 	struct rect clip = { 0 };
494 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
495 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
496 	bool pri_split = pipe_ctx->bottom_pipe &&
497 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
498 	bool sec_split = pipe_ctx->top_pipe &&
499 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
500 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
501 
502 
503 	/*
	 * We need to take horizontal mirroring into account. On an unrotated surface this
	 * means the viewport offset is actually the offset from the other side of the source
	 * image, so we have to subtract the right edge of the viewport from the right edge
	 * of the source window. Similarly to mirroring, we need to take into account how the
	 * offset is affected by 180/270 degree rotations.
509 	 */
510 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
511 		flip_vert_scan_dir = true;
512 		flip_horz_scan_dir = true;
513 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
514 		flip_vert_scan_dir = true;
515 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
516 		flip_horz_scan_dir = true;
517 
518 	if (pipe_ctx->plane_state->horizontal_mirror)
519 		flip_horz_scan_dir = !flip_horz_scan_dir;
520 
521 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
522 		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
523 		pri_split = false;
524 		sec_split = false;
525 	}
526 
527 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
528 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
529 		rect_swap_helper(&surf_src);
530 
	/* The actual clip is the intersection of the stream
	 * source rect and the surface clip rect
533 	 */
534 	clip.x = stream->src.x > plane_state->clip_rect.x ?
535 			stream->src.x : plane_state->clip_rect.x;
536 
537 	clip.width = stream->src.x + stream->src.width <
538 			plane_state->clip_rect.x + plane_state->clip_rect.width ?
539 			stream->src.x + stream->src.width - clip.x :
			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x;
541 
542 	clip.y = stream->src.y > plane_state->clip_rect.y ?
543 			stream->src.y : plane_state->clip_rect.y;
544 
545 	clip.height = stream->src.y + stream->src.height <
546 			plane_state->clip_rect.y + plane_state->clip_rect.height ?
547 			stream->src.y + stream->src.height - clip.y :
			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y;
549 
550 	/* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
551 	 * note: surf_src.ofs should be added after rotation/mirror offset direction
552 	 *       adjustment since it is already in viewport space
553 	 * num_pixels = clip.num_pix * scl_ratio
554 	 */
555 	data->viewport.x = (clip.x - plane_state->dst_rect.x) *
556 			surf_src.width / plane_state->dst_rect.width;
557 	data->viewport.width = clip.width *
558 			surf_src.width / plane_state->dst_rect.width;
559 
560 	data->viewport.y = (clip.y - plane_state->dst_rect.y) *
561 			surf_src.height / plane_state->dst_rect.height;
562 	data->viewport.height = clip.height *
563 			surf_src.height / plane_state->dst_rect.height;
564 
565 	if (flip_vert_scan_dir)
566 		data->viewport.y = surf_src.height - data->viewport.y - data->viewport.height;
567 	if (flip_horz_scan_dir)
568 		data->viewport.x = surf_src.width - data->viewport.x - data->viewport.width;
569 
570 	data->viewport.x += surf_src.x;
571 	data->viewport.y += surf_src.y;
572 
573 	/* Round down, compensate in init */
574 	data->viewport_c.x = data->viewport.x / vpc_div;
575 	data->viewport_c.y = data->viewport.y / vpc_div;
576 	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
577 			dc_fixpt_half : dc_fixpt_zero;
578 	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
579 			dc_fixpt_half : dc_fixpt_zero;
580 	/* Round up, assume original video size always even dimensions */
581 	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
582 	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
583 
584 	/* Handle hsplit */
585 	if (sec_split) {
586 		data->viewport.x +=  data->viewport.width / 2;
587 		data->viewport_c.x +=  data->viewport_c.width / 2;
588 		/* Ceil offset pipe */
589 		data->viewport.width = (data->viewport.width + 1) / 2;
590 		data->viewport_c.width = (data->viewport_c.width + 1) / 2;
591 	} else if (pri_split) {
592 		if (data->viewport.width > 1)
593 			data->viewport.width /= 2;
594 		if (data->viewport_c.width > 1)
595 			data->viewport_c.width /= 2;
596 	}
597 
598 	if (plane_state->rotation == ROTATION_ANGLE_90 ||
599 			plane_state->rotation == ROTATION_ANGLE_270) {
600 		rect_swap_helper(&data->viewport_c);
601 		rect_swap_helper(&data->viewport);
602 	}
603 }
604 
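/*
 * Compute the rectangle the plane occupies on the stream destination
 * (recout), clipped against the stream dst rect and adjusted for pipe
 * splits, plus the unclipped recout_full used later for filter init
 * calculations.
 */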
605 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
606 {
607 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
608 	const struct dc_stream_state *stream = pipe_ctx->stream;
609 	struct rect surf_src = plane_state->src_rect;
610 	struct rect surf_clip = plane_state->clip_rect;
611 	bool pri_split = pipe_ctx->bottom_pipe &&
612 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
613 	bool sec_split = pipe_ctx->top_pipe &&
614 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
615 	bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
616 
617 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
618 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
619 		rect_swap_helper(&surf_src);
620 
621 	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
622 	if (stream->src.x < surf_clip.x)
623 		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
624 			- stream->src.x) * stream->dst.width
625 						/ stream->src.width;
626 
627 	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
628 			stream->dst.width / stream->src.width;
629 	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
630 			stream->dst.x + stream->dst.width)
631 		pipe_ctx->plane_res.scl_data.recout.width =
632 			stream->dst.x + stream->dst.width
633 						- pipe_ctx->plane_res.scl_data.recout.x;
634 
635 	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
636 	if (stream->src.y < surf_clip.y)
637 		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
638 			- stream->src.y) * stream->dst.height
639 						/ stream->src.height;
640 
641 	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
642 			stream->dst.height / stream->src.height;
643 	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
644 			stream->dst.y + stream->dst.height)
645 		pipe_ctx->plane_res.scl_data.recout.height =
646 			stream->dst.y + stream->dst.height
647 						- pipe_ctx->plane_res.scl_data.recout.y;
648 
649 	/* Handle h & vsplit */
650 	if (sec_split && top_bottom_split) {
651 		pipe_ctx->plane_res.scl_data.recout.y +=
652 				pipe_ctx->plane_res.scl_data.recout.height / 2;
		/* Floor the primary pipe, ceil the secondary pipe */
654 		pipe_ctx->plane_res.scl_data.recout.height =
655 				(pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
656 	} else if (pri_split && top_bottom_split)
657 		pipe_ctx->plane_res.scl_data.recout.height /= 2;
658 	else if (pri_split || sec_split) {
659 		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
660 		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
661 					(plane_state->rotation == ROTATION_ANGLE_180);
662 
663 		if (plane_state->rotation == ROTATION_ANGLE_90
664 				|| plane_state->rotation == ROTATION_ANGLE_270)
665 			/* Secondary_pipe XOR Rotation_270 */
666 			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
667 
668 		if (right_view) {
669 			pipe_ctx->plane_res.scl_data.recout.x +=
670 					pipe_ctx->plane_res.scl_data.recout.width / 2;
671 			/* Ceil offset pipe */
672 			pipe_ctx->plane_res.scl_data.recout.width =
673 					(pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
674 		} else {
675 			if (pipe_ctx->plane_res.scl_data.recout.width > 1)
676 				pipe_ctx->plane_res.scl_data.recout.width /= 2;
677 		}
678 	}
	/* Unclipped recout offset = stream dst offset
	 *			+ (surf dst offset - stream src offset) / stream scaling ratio
	 *			- surf src offset / full scaling ratio
682 	 */
683 	recout_full->x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
684 					* stream->dst.width / stream->src.width -
685 			surf_src.x * plane_state->dst_rect.width / surf_src.width
686 					* stream->dst.width / stream->src.width;
687 	recout_full->y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
688 					* stream->dst.height / stream->src.height -
689 			surf_src.y * plane_state->dst_rect.height / surf_src.height
690 					* stream->dst.height / stream->src.height;
691 
692 	recout_full->width = plane_state->dst_rect.width
693 					* stream->dst.width / stream->src.width;
694 	recout_full->height = plane_state->dst_rect.height
695 					* stream->dst.height / stream->src.height;
696 }
697 
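/*
 * Compute horizontal and vertical scaling ratios (and the chroma ratios)
 * as fixed-point values from the surface src/dst rects combined with the
 * stream src/dst scaling, doubled in one direction for side-by-side /
 * top-and-bottom 3D formats.
 */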
698 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
699 {
700 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
701 	const struct dc_stream_state *stream = pipe_ctx->stream;
702 	struct rect surf_src = plane_state->src_rect;
703 	const int in_w = stream->src.width;
704 	const int in_h = stream->src.height;
705 	const int out_w = stream->dst.width;
706 	const int out_h = stream->dst.height;
707 
708 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
709 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
710 		rect_swap_helper(&surf_src);
711 
712 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
713 					surf_src.width,
714 					plane_state->dst_rect.width);
715 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
716 					surf_src.height,
717 					plane_state->dst_rect.height);
718 
719 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
720 		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
721 	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
722 		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
723 
724 	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
725 		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
726 	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
727 		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
728 
729 	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
730 	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
731 
732 	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
733 			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
734 		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
735 		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
736 	}
737 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
738 			pipe_ctx->plane_res.scl_data.ratios.horz, 19);
739 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
740 			pipe_ctx->plane_res.scl_data.ratios.vert, 19);
741 	pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
742 			pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
743 	pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
744 			pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
745 }
746 
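/*
 * Compute the scaler filter init phases (luma and chroma) and grow/shift
 * the viewports so enough source pixels are available for the number of
 * filter taps, taking rotation/mirror scan direction into account.
 */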
747 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
748 {
749 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
750 	struct rect src = pipe_ctx->plane_state->src_rect;
751 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
752 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
753 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
754 
755 	/*
756 	 * Need to calculate the scan direction for viewport to make adjustments
757 	 */
758 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
759 		flip_vert_scan_dir = true;
760 		flip_horz_scan_dir = true;
761 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
762 		flip_vert_scan_dir = true;
763 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
764 		flip_horz_scan_dir = true;
765 
766 	if (pipe_ctx->plane_state->horizontal_mirror)
767 			flip_horz_scan_dir = !flip_horz_scan_dir;
768 
769 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
770 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
771 		rect_swap_helper(&src);
772 		rect_swap_helper(&data->viewport_c);
773 		rect_swap_helper(&data->viewport);
774 	}
775 
776 	/*
777 	 * Init calculated according to formula:
778 	 * 	init = (scaling_ratio + number_of_taps + 1) / 2
779 	 * 	init_bot = init + scaling_ratio
780 	 * 	init_c = init + truncated_vp_c_offset(from calculate viewport)
781 	 */
782 	data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
783 			dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
784 
785 	data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
786 			dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
787 
788 	data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
789 			dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
790 
791 	data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
792 			dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
793 
794 	if (!flip_horz_scan_dir) {
795 		/* Adjust for viewport end clip-off */
796 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
797 			int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
798 			int int_part = dc_fixpt_floor(
799 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
800 
801 			int_part = int_part > 0 ? int_part : 0;
802 			data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
803 		}
804 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
805 			int vp_clip = (src.x + src.width) / vpc_div -
806 					data->viewport_c.width - data->viewport_c.x;
807 			int int_part = dc_fixpt_floor(
808 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
809 
810 			int_part = int_part > 0 ? int_part : 0;
811 			data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
812 		}
813 
814 		/* Adjust for non-0 viewport offset */
815 		if (data->viewport.x) {
816 			int int_part;
817 
818 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
819 					data->ratios.horz, data->recout.x - recout_full->x));
820 			int_part = dc_fixpt_floor(data->inits.h) - data->viewport.x;
821 			if (int_part < data->taps.h_taps) {
822 				int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
823 							(data->taps.h_taps - int_part) : data->viewport.x;
824 				data->viewport.x -= int_adj;
825 				data->viewport.width += int_adj;
826 				int_part += int_adj;
827 			} else if (int_part > data->taps.h_taps) {
828 				data->viewport.x += int_part - data->taps.h_taps;
829 				data->viewport.width -= int_part - data->taps.h_taps;
830 				int_part = data->taps.h_taps;
831 			}
832 			data->inits.h.value &= 0xffffffff;
833 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
834 		}
835 
836 		if (data->viewport_c.x) {
837 			int int_part;
838 
839 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
840 					data->ratios.horz_c, data->recout.x - recout_full->x));
841 			int_part = dc_fixpt_floor(data->inits.h_c) - data->viewport_c.x;
842 			if (int_part < data->taps.h_taps_c) {
843 				int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
844 						(data->taps.h_taps_c - int_part) : data->viewport_c.x;
845 				data->viewport_c.x -= int_adj;
846 				data->viewport_c.width += int_adj;
847 				int_part += int_adj;
848 			} else if (int_part > data->taps.h_taps_c) {
849 				data->viewport_c.x += int_part - data->taps.h_taps_c;
850 				data->viewport_c.width -= int_part - data->taps.h_taps_c;
851 				int_part = data->taps.h_taps_c;
852 			}
853 			data->inits.h_c.value &= 0xffffffff;
854 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
855 		}
856 	} else {
857 		/* Adjust for non-0 viewport offset */
858 		if (data->viewport.x) {
859 			int int_part = dc_fixpt_floor(
860 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
861 
862 			int_part = int_part > 0 ? int_part : 0;
863 			data->viewport.width += int_part < data->viewport.x ? int_part : data->viewport.x;
864 			data->viewport.x -= int_part < data->viewport.x ? int_part : data->viewport.x;
865 		}
866 		if (data->viewport_c.x) {
867 			int int_part = dc_fixpt_floor(
868 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
869 
870 			int_part = int_part > 0 ? int_part : 0;
871 			data->viewport_c.width += int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
872 			data->viewport_c.x -= int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
873 		}
874 
875 		/* Adjust for viewport end clip-off */
876 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
877 			int int_part;
878 			int end_offset = src.x + src.width
879 					- data->viewport.x - data->viewport.width;
880 
881 			/*
882 			 * this is init if vp had no offset, keep in mind this is from the
883 			 * right side of vp due to scan direction
884 			 */
885 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
886 					data->ratios.horz, data->recout.x - recout_full->x));
887 			/*
888 			 * this is the difference between first pixel of viewport available to read
			 * and init position, taking into account scan direction
890 			 */
891 			int_part = dc_fixpt_floor(data->inits.h) - end_offset;
892 			if (int_part < data->taps.h_taps) {
893 				int int_adj = end_offset >= (data->taps.h_taps - int_part) ?
894 							(data->taps.h_taps - int_part) : end_offset;
895 				data->viewport.width += int_adj;
896 				int_part += int_adj;
897 			} else if (int_part > data->taps.h_taps) {
898 				data->viewport.width += int_part - data->taps.h_taps;
899 				int_part = data->taps.h_taps;
900 			}
901 			data->inits.h.value &= 0xffffffff;
902 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
903 		}
904 
905 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
906 			int int_part;
907 			int end_offset = (src.x + src.width) / vpc_div
908 					- data->viewport_c.x - data->viewport_c.width;
909 
910 			/*
911 			 * this is init if vp had no offset, keep in mind this is from the
912 			 * right side of vp due to scan direction
913 			 */
914 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
915 					data->ratios.horz_c, data->recout.x - recout_full->x));
916 			/*
917 			 * this is the difference between first pixel of viewport available to read
			 * and init position, taking into account scan direction
919 			 */
920 			int_part = dc_fixpt_floor(data->inits.h_c) - end_offset;
921 			if (int_part < data->taps.h_taps_c) {
922 				int int_adj = end_offset >= (data->taps.h_taps_c - int_part) ?
923 							(data->taps.h_taps_c - int_part) : end_offset;
924 				data->viewport_c.width += int_adj;
925 				int_part += int_adj;
926 			} else if (int_part > data->taps.h_taps_c) {
927 				data->viewport_c.width += int_part - data->taps.h_taps_c;
928 				int_part = data->taps.h_taps_c;
929 			}
930 			data->inits.h_c.value &= 0xffffffff;
931 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
932 		}
933 
934 	}
935 	if (!flip_vert_scan_dir) {
936 		/* Adjust for viewport end clip-off */
937 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
938 			int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
939 			int int_part = dc_fixpt_floor(
940 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
941 
942 			int_part = int_part > 0 ? int_part : 0;
943 			data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
944 		}
945 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
946 			int vp_clip = (src.y + src.height) / vpc_div -
947 					data->viewport_c.height - data->viewport_c.y;
948 			int int_part = dc_fixpt_floor(
949 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
950 
951 			int_part = int_part > 0 ? int_part : 0;
952 			data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
953 		}
954 
955 		/* Adjust for non-0 viewport offset */
956 		if (data->viewport.y) {
957 			int int_part;
958 
959 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
960 					data->ratios.vert, data->recout.y - recout_full->y));
961 			int_part = dc_fixpt_floor(data->inits.v) - data->viewport.y;
962 			if (int_part < data->taps.v_taps) {
963 				int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
964 							(data->taps.v_taps - int_part) : data->viewport.y;
965 				data->viewport.y -= int_adj;
966 				data->viewport.height += int_adj;
967 				int_part += int_adj;
968 			} else if (int_part > data->taps.v_taps) {
969 				data->viewport.y += int_part - data->taps.v_taps;
970 				data->viewport.height -= int_part - data->taps.v_taps;
971 				int_part = data->taps.v_taps;
972 			}
973 			data->inits.v.value &= 0xffffffff;
974 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
975 		}
976 
977 		if (data->viewport_c.y) {
978 			int int_part;
979 
980 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
981 					data->ratios.vert_c, data->recout.y - recout_full->y));
982 			int_part = dc_fixpt_floor(data->inits.v_c) - data->viewport_c.y;
983 			if (int_part < data->taps.v_taps_c) {
984 				int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
985 						(data->taps.v_taps_c - int_part) : data->viewport_c.y;
986 				data->viewport_c.y -= int_adj;
987 				data->viewport_c.height += int_adj;
988 				int_part += int_adj;
989 			} else if (int_part > data->taps.v_taps_c) {
990 				data->viewport_c.y += int_part - data->taps.v_taps_c;
991 				data->viewport_c.height -= int_part - data->taps.v_taps_c;
992 				int_part = data->taps.v_taps_c;
993 			}
994 			data->inits.v_c.value &= 0xffffffff;
995 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
996 		}
997 	} else {
998 		/* Adjust for non-0 viewport offset */
999 		if (data->viewport.y) {
1000 			int int_part = dc_fixpt_floor(
1001 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
1002 
1003 			int_part = int_part > 0 ? int_part : 0;
1004 			data->viewport.height += int_part < data->viewport.y ? int_part : data->viewport.y;
1005 			data->viewport.y -= int_part < data->viewport.y ? int_part : data->viewport.y;
1006 		}
1007 		if (data->viewport_c.y) {
1008 			int int_part = dc_fixpt_floor(
1009 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
1010 
1011 			int_part = int_part > 0 ? int_part : 0;
1012 			data->viewport_c.height += int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
1013 			data->viewport_c.y -= int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
1014 		}
1015 
1016 		/* Adjust for viewport end clip-off */
1017 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
1018 			int int_part;
1019 			int end_offset = src.y + src.height
1020 					- data->viewport.y - data->viewport.height;
1021 
1022 			/*
1023 			 * this is init if vp had no offset, keep in mind this is from the
			 * bottom of vp due to scan direction
1025 			 */
1026 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
1027 					data->ratios.vert, data->recout.y - recout_full->y));
1028 			/*
1029 			 * this is the difference between first pixel of viewport available to read
1030 			 * and init position, taking into account scan direction
1031 			 */
1032 			int_part = dc_fixpt_floor(data->inits.v) - end_offset;
1033 			if (int_part < data->taps.v_taps) {
1034 				int int_adj = end_offset >= (data->taps.v_taps - int_part) ?
1035 							(data->taps.v_taps - int_part) : end_offset;
1036 				data->viewport.height += int_adj;
1037 				int_part += int_adj;
1038 			} else if (int_part > data->taps.v_taps) {
1039 				data->viewport.height += int_part - data->taps.v_taps;
1040 				int_part = data->taps.v_taps;
1041 			}
1042 			data->inits.v.value &= 0xffffffff;
1043 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
1044 		}
1045 
1046 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
1047 			int int_part;
1048 			int end_offset = (src.y + src.height) / vpc_div
1049 					- data->viewport_c.y - data->viewport_c.height;
1050 
1051 			/*
1052 			 * this is init if vp had no offset, keep in mind this is from the
			 * bottom of vp due to scan direction
1054 			 */
1055 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
1056 					data->ratios.vert_c, data->recout.y - recout_full->y));
1057 			/*
1058 			 * this is the difference between first pixel of viewport available to read
1059 			 * and init position, taking into account scan direction
1060 			 */
1061 			int_part = dc_fixpt_floor(data->inits.v_c) - end_offset;
1062 			if (int_part < data->taps.v_taps_c) {
1063 				int int_adj = end_offset >= (data->taps.v_taps_c - int_part) ?
1064 							(data->taps.v_taps_c - int_part) : end_offset;
1065 				data->viewport_c.height += int_adj;
1066 				int_part += int_adj;
1067 			} else if (int_part > data->taps.v_taps_c) {
1068 				data->viewport_c.height += int_part - data->taps.v_taps_c;
1069 				int_part = data->taps.v_taps_c;
1070 			}
1071 			data->inits.v_c.value &= 0xffffffff;
1072 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
1073 		}
1074 	}
1075 
1076 	/* Interlaced inits based on final vert inits */
1077 	data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
1078 	data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
1079 
1080 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
1081 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
1082 		rect_swap_helper(&data->viewport_c);
1083 		rect_swap_helper(&data->viewport);
1084 	}
1085 }
1086 
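/*
 * Build all scaler programming data for a pipe: pixel format, scaling
 * ratios, viewport, recout, taps and filter inits. Returns false if the
 * viewport is too small or no valid tap configuration is found.
 */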
1087 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
1088 {
1089 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1090 	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
1091 	struct rect recout_full = { 0 };
1092 	bool res = false;
1093 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
	/* Important: scaling ratio calculation requires the pixel format,
	 * lb depth calculation requires the recout, and taps require the scaling ratios.
	 * Inits require the viewport, taps, ratios and the recout of the split pipe.
1097 	 */
1098 	pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
1099 			pipe_ctx->plane_state->format);
1100 
1101 	calculate_scaling_ratios(pipe_ctx);
1102 
1103 	calculate_viewport(pipe_ctx);
1104 
1105 	if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
1106 		return false;
1107 
1108 	calculate_recout(pipe_ctx, &recout_full);
1109 
	/*
	 * Setting the line buffer pixel depth to 24bpp yields banding
	 * on certain displays, such as the Sharp 4K
1113 	 */
1114 	pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
1115 
1116 	pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
1117 	pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
1118 
1119 	pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
1120 	pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
1121 
1122 	/* Taps calculations */
1123 	if (pipe_ctx->plane_res.xfm != NULL)
1124 		res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1125 				pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1126 
1127 	if (pipe_ctx->plane_res.dpp != NULL)
1128 		res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1129 				pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1130 	if (!res) {
1131 		/* Try 24 bpp linebuffer */
1132 		pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
1133 
1134 		if (pipe_ctx->plane_res.xfm != NULL)
1135 			res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1136 					pipe_ctx->plane_res.xfm,
1137 					&pipe_ctx->plane_res.scl_data,
1138 					&plane_state->scaling_quality);
1139 
1140 		if (pipe_ctx->plane_res.dpp != NULL)
1141 			res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1142 					pipe_ctx->plane_res.dpp,
1143 					&pipe_ctx->plane_res.scl_data,
1144 					&plane_state->scaling_quality);
1145 	}
1146 
1147 	if (res)
1148 		/* May need to re-check lb size after this in some obscure scenario */
1149 		calculate_inits_and_adj_vp(pipe_ctx, &recout_full);
1150 
1151 	DC_LOG_SCALER(
1152 				"%s: Viewport:\nheight:%d width:%d x:%d "
1153 				"y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1154 				"y:%d\n",
1155 				__func__,
1156 				pipe_ctx->plane_res.scl_data.viewport.height,
1157 				pipe_ctx->plane_res.scl_data.viewport.width,
1158 				pipe_ctx->plane_res.scl_data.viewport.x,
1159 				pipe_ctx->plane_res.scl_data.viewport.y,
1160 				plane_state->dst_rect.height,
1161 				plane_state->dst_rect.width,
1162 				plane_state->dst_rect.x,
1163 				plane_state->dst_rect.y);
1164 
1165 	return res;
1166 }
1167 
1168 
1169 enum dc_status resource_build_scaling_params_for_context(
1170 	const struct dc  *dc,
1171 	struct dc_state *context)
1172 {
1173 	int i;
1174 
1175 	for (i = 0; i < MAX_PIPES; i++) {
1176 		if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1177 				context->res_ctx.pipe_ctx[i].stream != NULL)
1178 			if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1179 				return DC_FAIL_SCALING;
1180 	}
1181 
1182 	return DC_OK;
1183 }
1184 
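/* Find a pipe with no stream assigned, for use as a secondary (bottom) pipe. */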
1185 struct pipe_ctx *find_idle_secondary_pipe(
1186 		struct resource_context *res_ctx,
1187 		const struct resource_pool *pool)
1188 {
1189 	int i;
1190 	struct pipe_ctx *secondary_pipe = NULL;
1191 
1192 	/*
1193 	 * search backwards for the second pipe to keep pipe
1194 	 * assignment more consistent
1195 	 */
1196 
1197 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1198 		if (res_ctx->pipe_ctx[i].stream == NULL) {
1199 			secondary_pipe = &res_ctx->pipe_ctx[i];
1200 			secondary_pipe->pipe_idx = i;
1201 			break;
1202 		}
1203 	}
1204 
1205 
1206 	return secondary_pipe;
1207 }
1208 
1209 struct pipe_ctx *resource_get_head_pipe_for_stream(
1210 		struct resource_context *res_ctx,
1211 		struct dc_stream_state *stream)
1212 {
1213 	int i;
1214 	for (i = 0; i < MAX_PIPES; i++) {
1215 		if (res_ctx->pipe_ctx[i].stream == stream &&
1216 				!res_ctx->pipe_ctx[i].top_pipe) {
			return &res_ctx->pipe_ctx[i];
1219 		}
1220 	}
1221 	return NULL;
1222 }
1223 
1224 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
1225 		struct resource_context *res_ctx,
1226 		struct dc_stream_state *stream)
1227 {
	struct pipe_ctx *head_pipe, *tail_pipe;

	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
	if (!head_pipe)
		return NULL;

	tail_pipe = head_pipe;
	while (tail_pipe->bottom_pipe)
		tail_pipe = tail_pipe->bottom_pipe;

	return tail_pipe;
1242 }
1243 
1244 /*
1245  * A free_pipe for a stream is defined here as a pipe
1246  * that has no surface attached yet
1247  */
1248 static struct pipe_ctx *acquire_free_pipe_for_stream(
1249 		struct dc_state *context,
1250 		const struct resource_pool *pool,
1251 		struct dc_stream_state *stream)
1252 {
1253 	int i;
1254 	struct resource_context *res_ctx = &context->res_ctx;
1255 
1256 	struct pipe_ctx *head_pipe = NULL;
1257 
	/* Find the head pipe, which has the back end set up */
1259 
1260 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1261 
1262 	if (!head_pipe) {
1263 		ASSERT(0);
1264 		return NULL;
1265 	}
1266 
1267 	if (!head_pipe->plane_state)
1268 		return head_pipe;
1269 
	/* Re-use a pipe already acquired for this stream, if available */
1271 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1272 		if (res_ctx->pipe_ctx[i].stream == stream &&
1273 				!res_ctx->pipe_ctx[i].plane_state) {
1274 			return &res_ctx->pipe_ctx[i];
1275 		}
1276 	}
1277 
1278 	/*
	 * At this point we have no reusable pipe for this stream and we need
1280 	 * to acquire an idle one to satisfy the request
1281 	 */
1282 
1283 	if (!pool->funcs->acquire_idle_pipe_for_layer)
1284 		return NULL;
1285 
1286 	return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1287 
1288 }
1289 
1290 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
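/*
 * Steal the first pipe currently used as a bottom (split) pipe: detach it
 * from its split chain, reset it and reassign its resources to the stream.
 */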
1291 static int acquire_first_split_pipe(
1292 		struct resource_context *res_ctx,
1293 		const struct resource_pool *pool,
1294 		struct dc_stream_state *stream)
1295 {
1296 	int i;
1297 
1298 	for (i = 0; i < pool->pipe_count; i++) {
1299 		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1300 
1301 		if (pipe_ctx->top_pipe &&
1302 				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1303 			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1304 			if (pipe_ctx->bottom_pipe)
1305 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1306 
1307 			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1308 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1309 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1310 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1311 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1312 			pipe_ctx->stream_res.opp = pool->opps[i];
1313 			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1314 			pipe_ctx->pipe_idx = i;
1315 
1316 			pipe_ctx->stream = stream;
1317 			return i;
1318 		}
1319 	}
1320 	return -1;
1321 }
1322 #endif
1323 
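/*
 * Attach a plane state to a stream within the given context: acquire a
 * free (or split) pipe, link it below the stream's tail pipe and record
 * the plane in the stream status.
 */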
1324 bool dc_add_plane_to_context(
1325 		const struct dc *dc,
1326 		struct dc_stream_state *stream,
1327 		struct dc_plane_state *plane_state,
1328 		struct dc_state *context)
1329 {
1330 	int i;
1331 	struct resource_pool *pool = dc->res_pool;
1332 	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1333 	struct dc_stream_status *stream_status = NULL;
1334 
1335 	for (i = 0; i < context->stream_count; i++)
1336 		if (context->streams[i] == stream) {
1337 			stream_status = &context->stream_status[i];
1338 			break;
1339 		}
1340 	if (stream_status == NULL) {
1341 		dm_error("Existing stream not found; failed to attach surface!\n");
1342 		return false;
1343 	}
1344 
1345 
1346 	if (stream_status->plane_count == MAX_SURFACE_NUM) {
1347 		dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1348 				plane_state, MAX_SURFACE_NUM);
1349 		return false;
1350 	}
1351 
1352 	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1353 
1354 	if (!head_pipe) {
1355 		dm_error("Head pipe not found for stream_state %p !\n", stream);
1356 		return false;
1357 	}
1358 
1359 	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1360 
1361 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1362 	if (!free_pipe) {
1363 		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1364 		if (pipe_idx >= 0)
1365 			free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1366 	}
1367 #endif
1368 	if (!free_pipe)
1369 		return false;
1370 
1371 	/* retain new surfaces */
1372 	dc_plane_state_retain(plane_state);
1373 	free_pipe->plane_state = plane_state;
1374 
1375 	if (head_pipe != free_pipe) {
1376 
1377 		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1378 		ASSERT(tail_pipe);
1379 
1380 		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1381 		free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1382 		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1383 		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1384 		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1385 		free_pipe->clock_source = tail_pipe->clock_source;
1386 		free_pipe->top_pipe = tail_pipe;
1387 		tail_pipe->bottom_pipe = free_pipe;
1388 	}
1389 
1390 	/* assign new surfaces*/
1391 	stream_status->plane_states[stream_status->plane_count] = plane_state;
1392 
1393 	stream_status->plane_count++;
1394 
1395 	return true;
1396 }
1397 
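/*
 * Detach a plane state from a stream within the given context, releasing
 * any pipes that were carrying it and compacting the stream's plane list.
 */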
1398 bool dc_remove_plane_from_context(
1399 		const struct dc *dc,
1400 		struct dc_stream_state *stream,
1401 		struct dc_plane_state *plane_state,
1402 		struct dc_state *context)
1403 {
1404 	int i;
1405 	struct dc_stream_status *stream_status = NULL;
1406 	struct resource_pool *pool = dc->res_pool;
1407 
1408 	for (i = 0; i < context->stream_count; i++)
1409 		if (context->streams[i] == stream) {
1410 			stream_status = &context->stream_status[i];
1411 			break;
1412 		}
1413 
1414 	if (stream_status == NULL) {
1415 		dm_error("Existing stream not found; failed to remove plane.\n");
1416 		return false;
1417 	}
1418 
1419 	/* release pipe for plane*/
1420 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1421 		struct pipe_ctx *pipe_ctx;
1422 
1423 		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1424 			pipe_ctx = &context->res_ctx.pipe_ctx[i];
1425 
1426 			if (pipe_ctx->top_pipe)
1427 				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1428 
			/* The second condition avoids setting the tail pipe's
			 * top_pipe to NULL, which would make it look like a head
			 * pipe in subsequent deletes
1432 			 */
1433 			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1434 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1435 
1436 			/*
			 * For a head pipe, detach the surface from the pipe;
			 * for a tail pipe, just zero it out
1439 			 */
1440 			if (!pipe_ctx->top_pipe) {
1441 				pipe_ctx->plane_state = NULL;
1442 				pipe_ctx->bottom_pipe = NULL;
1443 			} else  {
1444 				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1445 			}
1446 		}
1447 	}
1448 
1449 
1450 	for (i = 0; i < stream_status->plane_count; i++) {
1451 		if (stream_status->plane_states[i] == plane_state) {
1452 
1453 			dc_plane_state_release(stream_status->plane_states[i]);
1454 			break;
1455 		}
1456 	}
1457 
1458 	if (i == stream_status->plane_count) {
1459 		dm_error("Existing plane_state not found; failed to detach it!\n");
1460 		return false;
1461 	}
1462 
1463 	stream_status->plane_count--;
1464 
1465 	/* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1466 	for (; i < stream_status->plane_count; i++)
1467 		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1468 
1469 	stream_status->plane_states[stream_status->plane_count] = NULL;
1470 
1471 	return true;
1472 }
1473 
1474 bool dc_rem_all_planes_for_stream(
1475 		const struct dc *dc,
1476 		struct dc_stream_state *stream,
1477 		struct dc_state *context)
1478 {
1479 	int i, old_plane_count;
1480 	struct dc_stream_status *stream_status = NULL;
1481 	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1482 
1483 	for (i = 0; i < context->stream_count; i++)
1484 			if (context->streams[i] == stream) {
1485 				stream_status = &context->stream_status[i];
1486 				break;
1487 			}
1488 
1489 	if (stream_status == NULL) {
1490 		dm_error("Existing stream %p not found!\n", stream);
1491 		return false;
1492 	}
1493 
1494 	old_plane_count = stream_status->plane_count;
1495 
1496 	for (i = 0; i < old_plane_count; i++)
1497 		del_planes[i] = stream_status->plane_states[i];
1498 
1499 	for (i = 0; i < old_plane_count; i++)
1500 		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1501 			return false;
1502 
1503 	return true;
1504 }
1505 
1506 static bool add_all_planes_for_stream(
1507 		const struct dc *dc,
1508 		struct dc_stream_state *stream,
1509 		const struct dc_validation_set set[],
1510 		int set_count,
1511 		struct dc_state *context)
1512 {
1513 	int i, j;
1514 
1515 	for (i = 0; i < set_count; i++)
1516 		if (set[i].stream == stream)
1517 			break;
1518 
1519 	if (i == set_count) {
1520 		dm_error("Stream %p not found in set!\n", stream);
1521 		return false;
1522 	}
1523 
1524 	for (j = 0; j < set[i].plane_count; j++)
1525 		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1526 			return false;
1527 
1528 	return true;
1529 }
1530 
1531 bool dc_add_all_planes_for_stream(
1532 		const struct dc *dc,
1533 		struct dc_stream_state *stream,
1534 		struct dc_plane_state * const *plane_states,
1535 		int plane_count,
1536 		struct dc_state *context)
1537 {
1538 	struct dc_validation_set set;
1539 	int i;
1540 
1541 	set.stream = stream;
1542 	set.plane_count = plane_count;
1543 
1544 	for (i = 0; i < plane_count; i++)
1545 		set.plane_states[i] = plane_states[i];
1546 
1547 	return add_all_planes_for_stream(dc, stream, &set, 1, context);
1548 }
1549 
1550 
1551 static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1552 	struct dc_stream_state *new_stream)
1553 {
1554 	if (cur_stream == NULL)
1555 		return true;
1556 
1557 	if (memcmp(&cur_stream->hdr_static_metadata,
1558 			&new_stream->hdr_static_metadata,
1559 			sizeof(struct dc_info_packet)) != 0)
1560 		return true;
1561 
1562 	return false;
1563 }
1564 
1565 static bool is_vsc_info_packet_changed(struct dc_stream_state *cur_stream,
1566 		struct dc_stream_state *new_stream)
1567 {
1568 	if (cur_stream == NULL)
1569 		return true;
1570 
1571 	if (memcmp(&cur_stream->vsc_infopacket,
1572 			&new_stream->vsc_infopacket,
1573 			sizeof(struct dc_info_packet)) != 0)
1574 		return true;
1575 
1576 	return false;
1577 }
1578 
1579 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1580 		struct dc_stream_state *new_stream)
1581 {
1582 	if (cur_stream == NULL)
1583 		return true;
1584 
	/* If the sink pointer changed, this is a hotplug and a full HW
	 * setup is required.
	 */
1588 	if (cur_stream->sink != new_stream->sink)
1589 		return true;
1590 
	/* If the output color space changed, the info frames need to be reprogrammed */
1592 	if (cur_stream->output_color_space != new_stream->output_color_space)
1593 		return true;
1594 
1595 	return memcmp(
1596 		&cur_stream->timing,
1597 		&new_stream->timing,
1598 		sizeof(struct dc_crtc_timing)) != 0;
1599 }
1600 
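/*
 * Two streams share the same backend programming if their timing, HDR static
 * metadata, VSC infopacket and DPMS state all match.
 */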
1601 static bool are_stream_backends_same(
1602 	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1603 {
1604 	if (stream_a == stream_b)
1605 		return true;
1606 
1607 	if (stream_a == NULL || stream_b == NULL)
1608 		return false;
1609 
1610 	if (is_timing_changed(stream_a, stream_b))
1611 		return false;
1612 
1613 	if (is_hdr_static_meta_changed(stream_a, stream_b))
1614 		return false;
1615 
1616 	if (stream_a->dpms_off != stream_b->dpms_off)
1617 		return false;
1618 
1619 	if (is_vsc_info_packet_changed(stream_a, stream_b))
1620 		return false;
1621 
1622 	return true;
1623 }
1624 
1625 bool dc_is_stream_unchanged(
1626 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1627 {
1628 
1629 	if (!are_stream_backends_same(old_stream, stream))
1630 		return false;
1631 
1632 	return true;
1633 }
1634 
1635 bool dc_is_stream_scaling_unchanged(
1636 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1637 {
1638 	if (old_stream == stream)
1639 		return true;
1640 
1641 	if (old_stream == NULL || stream == NULL)
1642 		return false;
1643 
1644 	if (memcmp(&old_stream->src,
1645 			&stream->src,
1646 			sizeof(struct rect)) != 0)
1647 		return false;
1648 
1649 	if (memcmp(&old_stream->dst,
1650 			&stream->dst,
1651 			sizeof(struct rect)) != 0)
1652 		return false;
1653 
1654 	return true;
1655 }
1656 
1657 static void update_stream_engine_usage(
1658 		struct resource_context *res_ctx,
1659 		const struct resource_pool *pool,
1660 		struct stream_encoder *stream_enc,
1661 		bool acquired)
1662 {
1663 	int i;
1664 
1665 	for (i = 0; i < pool->stream_enc_count; i++) {
1666 		if (pool->stream_enc[i] == stream_enc)
1667 			res_ctx->is_stream_enc_acquired[i] = acquired;
1668 	}
1669 }
1670 
1671 /* TODO: release audio object */
1672 void update_audio_usage(
1673 		struct resource_context *res_ctx,
1674 		const struct resource_pool *pool,
1675 		struct audio *audio,
1676 		bool acquired)
1677 {
1678 	int i;
1679 	for (i = 0; i < pool->audio_count; i++) {
1680 		if (pool->audios[i] == audio)
1681 			res_ctx->is_audio_acquired[i] = acquired;
1682 	}
1683 }
1684 
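/*
 * Claim the first pipe that has no stream assigned and wire up its per-pipe
 * resources (timing generator, plane and output resources) from the pool.
 * Returns the pipe index, or -1 if no free pipe exists.
 */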
1685 static int acquire_first_free_pipe(
1686 		struct resource_context *res_ctx,
1687 		const struct resource_pool *pool,
1688 		struct dc_stream_state *stream)
1689 {
1690 	int i;
1691 
1692 	for (i = 0; i < pool->pipe_count; i++) {
1693 		if (!res_ctx->pipe_ctx[i].stream) {
1694 			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1695 
1696 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1697 			pipe_ctx->plane_res.mi = pool->mis[i];
1698 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1699 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1700 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1701 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1702 			pipe_ctx->stream_res.opp = pool->opps[i];
1703 			if (pool->dpps[i])
1704 				pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1705 			pipe_ctx->pipe_idx = i;
1706 
1707 
1708 			pipe_ctx->stream = stream;
1709 			return i;
1710 		}
1711 	}
1712 	return -1;
1713 }
1714 
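/*
 * Pick a free stream encoder for the stream's link, preferring the engine
 * the link encoder reports as its preferred one.
 */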
1715 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1716 		struct resource_context *res_ctx,
1717 		const struct resource_pool *pool,
1718 		struct dc_stream_state *stream)
1719 {
1720 	int i;
1721 	int j = -1;
1722 	struct dc_link *link = stream->sink->link;
1723 
1724 	for (i = 0; i < pool->stream_enc_count; i++) {
1725 		if (!res_ctx->is_stream_enc_acquired[i] &&
1726 				pool->stream_enc[i]) {
			/* Store the first available encoder in case this is
			 * the second MST display in a daisy-chain use case.
			 */
1729 			j = i;
1730 			if (pool->stream_enc[i]->id ==
1731 					link->link_enc->preferred_engine)
1732 				return pool->stream_enc[i];
1733 		}
1734 	}
1735 
	/*
	 * The preferred engine can already be acquired in two cases:
	 * 1) by the second MST display in a daisy chain;
	 * 2) by another link whose MST configuration already claimed it.
	 *
	 * If the signal is DP and the preferred engine was not found,
	 * return the last available encoder.
	 *
	 * TODO - This is just a patch-up; a generic solution is
	 * required for non-DP connectors.
	 */
1748 
1749 	if (j >= 0 && link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT)
1750 		return pool->stream_enc[j];
1751 
1752 	return NULL;
1753 }
1754 
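/*
 * Find a free audio endpoint, preferring the one whose instance matches the
 * stream encoder engine @id; otherwise fall back to the first free endpoint.
 */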
1755 static struct audio *find_first_free_audio(
1756 		struct resource_context *res_ctx,
1757 		const struct resource_pool *pool,
1758 		enum engine_id id)
1759 {
1760 	int i;
1761 	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i] &&
				res_ctx->is_stream_enc_acquired[i]) {
			/* There are enough audio endpoints; find the one
			 * matching the stream encoder instance.
			 */
1764 			if (id != i)
1765 				continue;
1766 
1767 			return pool->audios[i];
1768 		}
1769 	}
	/* No matching endpoint found; fall back to first come, first served */
1771 	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i]) {
1773 			return pool->audios[i];
1774 		}
1775 	}
	return NULL;
1777 }
1778 
1779 bool resource_is_stream_unchanged(
1780 	struct dc_state *old_context, struct dc_stream_state *stream)
1781 {
1782 	int i;
1783 
1784 	for (i = 0; i < old_context->stream_count; i++) {
1785 		struct dc_stream_state *old_stream = old_context->streams[i];
1786 
1787 		if (are_stream_backends_same(old_stream, stream))
1788 				return true;
1789 	}
1790 
1791 	return false;
1792 }
1793 
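/*
 * Append @stream to @new_ctx, taking a reference on it, then let the resource
 * pool perform its ASIC-specific bookkeeping for the new stream.
 */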
1794 enum dc_status dc_add_stream_to_ctx(
1795 		struct dc *dc,
1796 		struct dc_state *new_ctx,
1797 		struct dc_stream_state *stream)
1798 {
1799 	struct dc_context *dc_ctx = dc->ctx;
1800 	enum dc_status res;
1801 
1802 	if (new_ctx->stream_count >= dc->res_pool->timing_generator_count) {
1803 		DC_ERROR("Max streams reached, can't add stream %p !\n", stream);
1804 		return DC_ERROR_UNEXPECTED;
1805 	}
1806 
1807 	new_ctx->streams[new_ctx->stream_count] = stream;
1808 	dc_stream_retain(stream);
1809 	new_ctx->stream_count++;
1810 
1811 	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1812 	if (res != DC_OK)
1813 		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1814 
1815 	return res;
1816 }
1817 
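/*
 * Remove @stream from @new_ctx: free its primary pipe and release the stream
 * encoder, audio endpoint and clock source it was using, then drop the stream
 * reference and trim the stream arrays.
 */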
1818 enum dc_status dc_remove_stream_from_ctx(
1819 			struct dc *dc,
1820 			struct dc_state *new_ctx,
1821 			struct dc_stream_state *stream)
1822 {
1823 	int i;
1824 	struct dc_context *dc_ctx = dc->ctx;
1825 	struct pipe_ctx *del_pipe = NULL;
1826 
1827 	/* Release primary pipe */
1828 	for (i = 0; i < MAX_PIPES; i++) {
1829 		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1830 				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1831 			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1832 
1833 			ASSERT(del_pipe->stream_res.stream_enc);
1834 			update_stream_engine_usage(
1835 					&new_ctx->res_ctx,
1836 						dc->res_pool,
1837 					del_pipe->stream_res.stream_enc,
1838 					false);
1839 
1840 			if (del_pipe->stream_res.audio)
1841 				update_audio_usage(
1842 					&new_ctx->res_ctx,
1843 					dc->res_pool,
1844 					del_pipe->stream_res.audio,
1845 					false);
1846 
1847 			resource_unreference_clock_source(&new_ctx->res_ctx,
1848 							  dc->res_pool,
1849 							  del_pipe->clock_source);
1850 
1851 			if (dc->res_pool->funcs->remove_stream_from_ctx)
1852 				dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1853 
1854 			memset(del_pipe, 0, sizeof(*del_pipe));
1855 
1856 			break;
1857 		}
1858 	}
1859 
1860 	if (!del_pipe) {
1861 		DC_ERROR("Pipe not found for stream %p !\n", stream);
1862 		return DC_ERROR_UNEXPECTED;
1863 	}
1864 
1865 	for (i = 0; i < new_ctx->stream_count; i++)
1866 		if (new_ctx->streams[i] == stream)
1867 			break;
1868 
	if (i == new_ctx->stream_count) {
1870 		DC_ERROR("Context doesn't have stream %p !\n", stream);
1871 		return DC_ERROR_UNEXPECTED;
1872 	}
1873 
1874 	dc_stream_release(new_ctx->streams[i]);
1875 	new_ctx->stream_count--;
1876 
1877 	/* Trim back arrays */
1878 	for (; i < new_ctx->stream_count; i++) {
1879 		new_ctx->streams[i] = new_ctx->streams[i + 1];
1880 		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1881 	}
1882 
1883 	new_ctx->streams[new_ctx->stream_count] = NULL;
1884 	memset(
1885 			&new_ctx->stream_status[new_ctx->stream_count],
1886 			0,
1887 			sizeof(new_ctx->stream_status[0]));
1888 
1889 	return DC_OK;
1890 }
1891 
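/*
 * Look for an already-mapped non-DP, non-virtual stream whose timing can be
 * synchronized with @stream_needs_pll, so the two streams can share a PLL.
 */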
1892 static struct dc_stream_state *find_pll_sharable_stream(
1893 		struct dc_stream_state *stream_needs_pll,
1894 		struct dc_state *context)
1895 {
1896 	int i;
1897 
1898 	for (i = 0; i < context->stream_count; i++) {
1899 		struct dc_stream_state *stream_has_pll = context->streams[i];
1900 
		/* We are looking for a non-DP, non-virtual stream */
1902 		if (resource_are_streams_timing_synchronizable(
1903 			stream_needs_pll, stream_has_pll)
1904 			&& !dc_is_dp_signal(stream_has_pll->signal)
1905 			&& stream_has_pll->sink->link->connector_signal
1906 			!= SIGNAL_TYPE_VIRTUAL)
1907 			return stream_has_pll;
1908 
1909 	}
1910 
1911 	return NULL;
1912 }
1913 
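/*
 * Normalize the pixel clock for deep color: halve it first for YCbCr 4:2:0,
 * then scale by color depth unless the encoding is YCbCr 4:2:2 (which stays
 * at 24 bits per pixel on the link). For example, a 300000 kHz clock at
 * 10 bpc becomes 300000 * 30 / 24 = 375000 kHz.
 */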
1914 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1915 {
1916 	uint32_t pix_clk = timing->pix_clk_khz;
1917 	uint32_t normalized_pix_clk = pix_clk;
1918 
1919 	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1920 		pix_clk /= 2;
1921 	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1922 		switch (timing->display_color_depth) {
1923 		case COLOR_DEPTH_888:
1924 			normalized_pix_clk = pix_clk;
1925 			break;
1926 		case COLOR_DEPTH_101010:
1927 			normalized_pix_clk = (pix_clk * 30) / 24;
1928 			break;
1929 		case COLOR_DEPTH_121212:
			normalized_pix_clk = (pix_clk * 36) / 24;
			break;
		case COLOR_DEPTH_161616:
			normalized_pix_clk = (pix_clk * 48) / 24;
			break;
		default:
			ASSERT(0);
			break;
1938 		}
1939 	}
1940 	return normalized_pix_clk;
1941 }
1942 
1943 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1944 {
1945 	/* update actual pixel clock on all streams */
1946 	if (dc_is_hdmi_signal(stream->signal))
1947 		stream->phy_pix_clk = get_norm_pix_clk(
1948 			&stream->timing);
1949 	else
1950 		stream->phy_pix_clk =
1951 			stream->timing.pix_clk_khz;
1952 
1953 	if (stream->timing.timing_3d_format == TIMING_3D_FORMAT_HW_FRAME_PACKING)
1954 		stream->phy_pix_clk *= 2;
1955 }
1956 
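/*
 * Acquire the pool resources a new stream needs: a free pipe, a stream
 * encoder matching the link, optionally an audio endpoint and ABM, and
 * record the chosen instances in the stream status.
 */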
1957 enum dc_status resource_map_pool_resources(
1958 		const struct dc  *dc,
1959 		struct dc_state *context,
1960 		struct dc_stream_state *stream)
1961 {
1962 	const struct resource_pool *pool = dc->res_pool;
1963 	int i;
1964 	struct dc_context *dc_ctx = dc->ctx;
1965 	struct pipe_ctx *pipe_ctx = NULL;
1966 	int pipe_idx = -1;
1967 
1968 	/* TODO Check if this is needed */
1969 	/*if (!resource_is_stream_unchanged(old_context, stream)) {
1970 			if (stream != NULL && old_context->streams[i] != NULL) {
1971 				stream->bit_depth_params =
1972 						old_context->streams[i]->bit_depth_params;
1973 				stream->clamping = old_context->streams[i]->clamping;
1974 				continue;
1975 			}
1976 		}
1977 	*/
1978 
1979 	/* acquire new resources */
1980 	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1981 
1982 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1983 	if (pipe_idx < 0)
1984 		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1985 #endif
1986 
1987 	if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
1988 		return DC_NO_CONTROLLER_RESOURCE;
1989 
1990 	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1991 
1992 	pipe_ctx->stream_res.stream_enc =
1993 		find_first_free_match_stream_enc_for_link(
1994 			&context->res_ctx, pool, stream);
1995 
1996 	if (!pipe_ctx->stream_res.stream_enc)
1997 		return DC_NO_STREAM_ENG_RESOURCE;
1998 
1999 	update_stream_engine_usage(
2000 		&context->res_ctx, pool,
2001 		pipe_ctx->stream_res.stream_enc,
2002 		true);
2003 
2004 	/* TODO: Add check if ASIC support and EDID audio */
2005 	if (!stream->sink->converter_disable_audio &&
2006 	    dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
2007 	    stream->audio_info.mode_count) {
2008 		pipe_ctx->stream_res.audio = find_first_free_audio(
2009 		&context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
2010 
		/*
		 * Audio is assigned on a first come, first served basis.
		 * Some ASICs have fewer audio resources than pipes.
		 */
2016 		if (pipe_ctx->stream_res.audio)
2017 			update_audio_usage(&context->res_ctx, pool,
2018 					   pipe_ctx->stream_res.audio, true);
2019 	}
2020 
2021 	/* Add ABM to the resource if on EDP */
2022 	if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
2023 		pipe_ctx->stream_res.abm = pool->abm;
2024 
2025 	for (i = 0; i < context->stream_count; i++)
2026 		if (context->streams[i] == stream) {
2027 			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
2028 			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
2029 			return DC_OK;
2030 		}
2031 
2032 	DC_ERROR("Stream %p not found in new ctx!\n", stream);
2033 	return DC_ERROR_UNEXPECTED;
2034 }
2035 
2036 void dc_resource_state_copy_construct_current(
2037 		const struct dc *dc,
2038 		struct dc_state *dst_ctx)
2039 {
2040 	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
2041 }
2042 
2043 
2044 void dc_resource_state_construct(
2045 		const struct dc *dc,
2046 		struct dc_state *dst_ctx)
2047 {
2048 	dst_ctx->dccg = dc->res_pool->clk_mgr;
2049 }
2050 
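/*
 * Validate a complete new state: run the per-ASIC global checks, resolve
 * default swizzle modes and DP clock source sharing, rebuild the scaling
 * parameters and finally validate bandwidth.
 */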
2051 enum dc_status dc_validate_global_state(
2052 		struct dc *dc,
2053 		struct dc_state *new_ctx)
2054 {
2055 	enum dc_status result = DC_ERROR_UNEXPECTED;
2056 	int i, j;
2057 
2058 	if (!new_ctx)
2059 		return DC_ERROR_UNEXPECTED;
2060 
2061 	if (dc->res_pool->funcs->validate_global) {
2062 		result = dc->res_pool->funcs->validate_global(dc, new_ctx);
2063 		if (result != DC_OK)
2064 			return result;
2065 	}
2066 
2067 	for (i = 0; i < new_ctx->stream_count; i++) {
2068 		struct dc_stream_state *stream = new_ctx->streams[i];
2069 
2070 		for (j = 0; j < dc->res_pool->pipe_count; j++) {
2071 			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
2072 
2073 			if (pipe_ctx->stream != stream)
2074 				continue;
2075 
2076 			if (dc->res_pool->funcs->get_default_swizzle_mode &&
2077 					pipe_ctx->plane_state &&
2078 					pipe_ctx->plane_state->tiling_info.gfx9.swizzle == DC_SW_UNKNOWN) {
2079 				result = dc->res_pool->funcs->get_default_swizzle_mode(pipe_ctx->plane_state);
2080 				if (result != DC_OK)
2081 					return result;
2082 			}
2083 
2084 			/* Switch to dp clock source only if there is
2085 			 * no non dp stream that shares the same timing
2086 			 * with the dp stream.
2087 			 */
2088 			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
2089 				!find_pll_sharable_stream(stream, new_ctx)) {
2090 
2091 				resource_unreference_clock_source(
2092 						&new_ctx->res_ctx,
2093 						dc->res_pool,
2094 						pipe_ctx->clock_source);
2095 
2096 				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
2097 				resource_reference_clock_source(
2098 						&new_ctx->res_ctx,
2099 						dc->res_pool,
2100 						 pipe_ctx->clock_source);
2101 			}
2102 		}
2103 	}
2104 
2105 	result = resource_build_scaling_params_for_context(dc, new_ctx);
2106 
2107 	if (result == DC_OK)
2108 		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2109 			result = DC_FAIL_BANDWIDTH_VALIDATE;
2110 
2111 	return result;
2112 }
2113 
2114 static void patch_gamut_packet_checksum(
2115 		struct dc_info_packet *gamut_packet)
2116 {
2117 	/* For gamut we recalc checksum */
2118 	if (gamut_packet->valid) {
2119 		uint8_t chk_sum = 0;
2120 		uint8_t *ptr;
2121 		uint8_t i;
2122 
		/* Start of the gamut data */
2124 		ptr = &gamut_packet->sb[3];
2125 
2126 		for (i = 0; i <= gamut_packet->sb[1]; i++)
2127 			chk_sum += ptr[i];
2128 
2129 		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2130 	}
2131 }
2132 
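/*
 * Build the HDMI AVI infoframe for the pipe's stream: pixel encoding,
 * colorimetry, aspect ratio, content type, quantization range, VIC and
 * bar info, followed by the packet checksum.
 */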
2133 static void set_avi_info_frame(
2134 		struct dc_info_packet *info_packet,
2135 		struct pipe_ctx *pipe_ctx)
2136 {
2137 	struct dc_stream_state *stream = pipe_ctx->stream;
2138 	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2139 	uint32_t pixel_encoding = 0;
2140 	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2141 	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2142 	bool itc = false;
2143 	uint8_t itc_value = 0;
2144 	uint8_t cn0_cn1 = 0;
2145 	unsigned int cn0_cn1_value = 0;
2146 	uint8_t *check_sum = NULL;
2147 	uint8_t byte_index = 0;
2148 	union hdmi_info_packet hdmi_info;
2149 	union display_content_support support = {0};
2150 	unsigned int vic = pipe_ctx->stream->timing.vic;
2151 	enum dc_timing_3d_format format;
2152 
2153 	memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2154 
2155 	color_space = pipe_ctx->stream->output_color_space;
2156 	if (color_space == COLOR_SPACE_UNKNOWN)
		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
			COLOR_SPACE_SRGB : COLOR_SPACE_YCBCR709;
2159 
2160 	/* Initialize header */
2161 	hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1).
	 */
2164 	hdmi_info.bits.header.version = 2;
2165 	hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2166 
2167 	/*
2168 	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2169 	 * according to HDMI 2.0 spec (Section 10.1)
2170 	 */
2171 
2172 	switch (stream->timing.pixel_encoding) {
2173 	case PIXEL_ENCODING_YCBCR422:
2174 		pixel_encoding = 1;
2175 		break;
2176 
2177 	case PIXEL_ENCODING_YCBCR444:
2178 		pixel_encoding = 2;
2179 		break;
2180 	case PIXEL_ENCODING_YCBCR420:
2181 		pixel_encoding = 3;
2182 		break;
2183 
2184 	case PIXEL_ENCODING_RGB:
2185 	default:
2186 		pixel_encoding = 0;
2187 	}
2188 
2189 	/* Y0_Y1_Y2 : The pixel encoding */
2190 	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2191 	hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2192 
2193 	/* A0 = 1 Active Format Information valid */
2194 	hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2195 
2196 	/* B0, B1 = 3; Bar info data is valid */
2197 	hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2198 
2199 	hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2200 
2201 	/* S0, S1 : Underscan / Overscan */
2202 	/* TODO: un-hardcode scan type */
2203 	scan_type = SCANNING_TYPE_UNDERSCAN;
2204 	hdmi_info.bits.S0_S1 = scan_type;
2205 
2206 	/* C0, C1 : Colorimetry */
2207 	if (color_space == COLOR_SPACE_YCBCR709 ||
2208 			color_space == COLOR_SPACE_YCBCR709_LIMITED)
2209 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2210 	else if (color_space == COLOR_SPACE_YCBCR601 ||
2211 			color_space == COLOR_SPACE_YCBCR601_LIMITED)
2212 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
	else
		hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2216 	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2217 			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2218 			color_space == COLOR_SPACE_2020_YCBCR) {
2219 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2220 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2221 	} else if (color_space == COLOR_SPACE_ADOBERGB) {
2222 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2223 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2224 	}
2225 
2226 	/* TODO: un-hardcode aspect ratio */
2227 	aspect = stream->timing.aspect_ratio;
2228 
2229 	switch (aspect) {
2230 	case ASPECT_RATIO_4_3:
2231 	case ASPECT_RATIO_16_9:
2232 		hdmi_info.bits.M0_M1 = aspect;
2233 		break;
2234 
2235 	case ASPECT_RATIO_NO_DATA:
2236 	case ASPECT_RATIO_64_27:
2237 	case ASPECT_RATIO_256_135:
2238 	default:
2239 		hdmi_info.bits.M0_M1 = 0;
2240 	}
2241 
2242 	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2243 	hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2244 
2245 	/* TODO: un-hardcode cn0_cn1 and itc */
2246 
2247 	cn0_cn1 = 0;
2248 	cn0_cn1_value = 0;
2249 
2250 	itc = true;
2251 	itc_value = 1;
2252 
2253 	support = stream->sink->edid_caps.content_support;
2254 
2255 	if (itc) {
2256 		if (!support.bits.valid_content_type) {
2257 			cn0_cn1_value = 0;
2258 		} else {
2259 			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2260 				if (support.bits.graphics_content == 1) {
2261 					cn0_cn1_value = 0;
2262 				}
2263 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2264 				if (support.bits.photo_content == 1) {
2265 					cn0_cn1_value = 1;
2266 				} else {
2267 					cn0_cn1_value = 0;
2268 					itc_value = 0;
2269 				}
2270 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2271 				if (support.bits.cinema_content == 1) {
2272 					cn0_cn1_value = 2;
2273 				} else {
2274 					cn0_cn1_value = 0;
2275 					itc_value = 0;
2276 				}
2277 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2278 				if (support.bits.game_content == 1) {
2279 					cn0_cn1_value = 3;
2280 				} else {
2281 					cn0_cn1_value = 0;
2282 					itc_value = 0;
2283 				}
2284 			}
2285 		}
2286 		hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2287 		hdmi_info.bits.ITC = itc_value;
2288 	}
2289 
	/* TODO: We should handle YCC quantization,
	 * but we do not have the matrix calculation yet.
	 */
2292 	if (stream->sink->edid_caps.qs_bit == 1 &&
2293 			stream->sink->edid_caps.qy_bit == 1) {
2294 		if (color_space == COLOR_SPACE_SRGB ||
2295 			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2296 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
2297 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2298 		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2299 					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2300 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
2301 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2302 		} else {
2303 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2304 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2305 		}
2306 	} else {
2307 		hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2308 		hdmi_info.bits.YQ0_YQ1   = YYC_QUANTIZATION_LIMITED_RANGE;
2309 	}
2310 
	/* VIC */
	format = stream->timing.timing_3d_format;
	/* TODO: add 3D stereo support */
2314 	if (format != TIMING_3D_FORMAT_NONE) {
		/* Based on the HDMI spec, the HDMI VIC needs to be converted
		 * to a CEA VIC when 3D is enabled.
		 */
2316 		switch (pipe_ctx->stream->timing.hdmi_vic) {
2317 		case 1:
2318 			vic = 95;
2319 			break;
2320 		case 2:
2321 			vic = 94;
2322 			break;
2323 		case 3:
2324 			vic = 93;
2325 			break;
2326 		case 4:
2327 			vic = 98;
2328 			break;
2329 		default:
2330 			break;
2331 		}
2332 	}
2333 	hdmi_info.bits.VIC0_VIC7 = vic;
2334 
	/* Pixel repetition:
	 * PR0 - PR3 start from 0, whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition starts from 1.
	 */
2338 	hdmi_info.bits.PR0_PR3 = 0;
2339 
2340 	/* Bar Info
2341 	 * barTop:    Line Number of End of Top Bar.
2342 	 * barBottom: Line Number of Start of Bottom Bar.
2343 	 * barLeft:   Pixel Number of End of Left Bar.
2344 	 * barRight:  Pixel Number of Start of Right Bar. */
2345 	hdmi_info.bits.bar_top = stream->timing.v_border_top;
2346 	hdmi_info.bits.bar_bottom = (stream->timing.v_total
2347 			- stream->timing.v_border_bottom + 1);
2348 	hdmi_info.bits.bar_left  = stream->timing.h_border_left;
2349 	hdmi_info.bits.bar_right = (stream->timing.h_total
2350 			- stream->timing.h_border_right + 1);
2351 
2352 	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2353 	check_sum = &hdmi_info.packet_raw_data.sb[0];
2354 
2355 	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2356 
2357 	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2358 		*check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2359 
2360 	/* one byte complement */
2361 	*check_sum = (uint8_t) (0x100 - *check_sum);
2362 
2363 	/* Store in hw_path_mode */
2364 	info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2365 	info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2366 	info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2367 
2368 	for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2369 		info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2370 
2371 	info_packet->valid = true;
2372 }
2373 
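/*
 * Build the HDMI vendor-specific infoframe (VSIF), which is only sent for
 * 3D stereo timings or HDMI VIC (extended resolution) modes.
 */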
2374 static void set_vendor_info_packet(
2375 		struct dc_info_packet *info_packet,
2376 		struct dc_stream_state *stream)
2377 {
2378 	uint32_t length = 0;
2379 	bool hdmi_vic_mode = false;
2380 	uint8_t checksum = 0;
2381 	uint32_t i = 0;
2382 	enum dc_timing_3d_format format;
	/* TODO: length can differ depending on packet content, e.g.
	 * unsigned int length = pPathMode->dolbyVision ? 24 : 5;
	 */
2385 
2386 	info_packet->valid = false;
2387 
2388 	format = stream->timing.timing_3d_format;
2389 	if (stream->view_format == VIEW_3D_FORMAT_NONE)
2390 		format = TIMING_3D_FORMAT_NONE;
2391 
2392 	/* Can be different depending on packet content */
2393 	length = 5;
2394 
2395 	if (stream->timing.hdmi_vic != 0
2396 			&& stream->timing.h_total >= 3840
2397 			&& stream->timing.v_total >= 2160)
2398 		hdmi_vic_mode = true;
2399 
	/* According to HDMI 1.4a CTS, VSIF should be sent
	 * for both 3D stereo and HDMI VIC modes.
	 * For all other modes, no VSIF is sent.
	 */
2403 
2404 	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2405 		return;
2406 
2407 	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2408 	info_packet->sb[1] = 0x03;
2409 	info_packet->sb[2] = 0x0C;
2410 	info_packet->sb[3] = 0x00;
2411 
	/* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
	 * The values for HDMI_Video_Format are:
2414 	 * 0x0 (0b000) - No additional HDMI video format is presented in this
2415 	 * packet
2416 	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2417 	 * parameter follows
2418 	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2419 	 * potentially 3D_Ext_Data follows
2420 	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
2421 	if (format != TIMING_3D_FORMAT_NONE)
2422 		info_packet->sb[4] = (2 << 5);
2423 	else if (hdmi_vic_mode)
2424 		info_packet->sb[4] = (1 << 5);
2425 
	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
2428 	 * The value for 3D_Structure are:
2429 	 * 0x0 - Frame Packing
2430 	 * 0x1 - Field Alternative
2431 	 * 0x2 - Line Alternative
2432 	 * 0x3 - Side-by-Side (full)
2433 	 * 0x4 - L + depth
2434 	 * 0x5 - L + depth + graphics + graphics-depth
2435 	 * 0x6 - Top-and-Bottom
2436 	 * 0x7 - Reserved for future use
2437 	 * 0x8 - Side-by-Side (Half)
2438 	 * 0x9..0xE - Reserved for future use
2439 	 * 0xF - Not used */
2440 	switch (format) {
2441 	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2442 	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2443 		info_packet->sb[5] = (0x0 << 4);
2444 		break;
2445 
2446 	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2447 	case TIMING_3D_FORMAT_SBS_SW_PACKED:
2448 		info_packet->sb[5] = (0x8 << 4);
2449 		length = 6;
2450 		break;
2451 
2452 	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2453 	case TIMING_3D_FORMAT_TB_SW_PACKED:
2454 		info_packet->sb[5] = (0x6 << 4);
2455 		break;
2456 
2457 	default:
2458 		break;
2459 	}
2460 
2461 	/*PB5: If PB4 is set to 0x1 (extended resolution format)
2462 	 * fill PB5 with the correct HDMI VIC code */
2463 	if (hdmi_vic_mode)
2464 		info_packet->sb[5] = stream->timing.hdmi_vic;
2465 
2466 	/* Header */
2467 	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2468 	info_packet->hb1 = 0x01; /* Version */
2469 
2470 	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2471 	info_packet->hb2 = (uint8_t) (length);
2472 
2473 	/* Calculate checksum */
2474 	checksum = 0;
2475 	checksum += info_packet->hb0;
2476 	checksum += info_packet->hb1;
2477 	checksum += info_packet->hb2;
2478 
2479 	for (i = 1; i <= length; i++)
2480 		checksum += info_packet->sb[i];
2481 
2482 	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2483 
2484 	info_packet->valid = true;
2485 }
2486 
2487 static void set_spd_info_packet(
2488 		struct dc_info_packet *info_packet,
2489 		struct dc_stream_state *stream)
2490 {
2491 	/* SPD info packet for FreeSync */
2492 
2493 	/* Check if Freesync is supported. Return if false. If true,
2494 	 * set the corresponding bit in the info packet
2495 	 */
2496 	if (!stream->vrr_infopacket.valid)
2497 		return;
2498 
2499 	*info_packet = stream->vrr_infopacket;
2500 }
2501 
2502 static void set_hdr_static_info_packet(
2503 		struct dc_info_packet *info_packet,
2504 		struct dc_stream_state *stream)
2505 {
2506 	/* HDR Static Metadata info packet for HDR10 */
2507 
2508 	if (!stream->hdr_static_metadata.valid ||
2509 			stream->use_dynamic_meta)
2510 		return;
2511 
2512 	*info_packet = stream->hdr_static_metadata;
2513 }
2514 
2515 static void set_vsc_info_packet(
2516 		struct dc_info_packet *info_packet,
2517 		struct dc_stream_state *stream)
2518 {
2519 	if (!stream->vsc_infopacket.valid)
2520 		return;
2521 
2522 	*info_packet = stream->vsc_infopacket;
2523 }
2524 
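/* Drop every plane and stream reference held by @context. */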
2525 void dc_resource_state_destruct(struct dc_state *context)
2526 {
2527 	int i, j;
2528 
2529 	for (i = 0; i < context->stream_count; i++) {
2530 		for (j = 0; j < context->stream_status[i].plane_count; j++)
2531 			dc_plane_state_release(
2532 				context->stream_status[i].plane_states[j]);
2533 
2534 		context->stream_status[i].plane_count = 0;
2535 		dc_stream_release(context->streams[i]);
2536 		context->streams[i] = NULL;
2537 	}
2538 }
2539 
2540 /*
2541  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2542  * by the src_ctx
2543  */
2544 void dc_resource_state_copy_construct(
2545 		const struct dc_state *src_ctx,
2546 		struct dc_state *dst_ctx)
2547 {
2548 	int i, j;
2549 	struct kref refcount = dst_ctx->refcount;
2550 
2551 	*dst_ctx = *src_ctx;
2552 
2553 	for (i = 0; i < MAX_PIPES; i++) {
2554 		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2555 
2556 		if (cur_pipe->top_pipe)
2557 			cur_pipe->top_pipe =  &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2558 
2559 		if (cur_pipe->bottom_pipe)
2560 			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2561 
2562 	}
2563 
2564 	for (i = 0; i < dst_ctx->stream_count; i++) {
2565 		dc_stream_retain(dst_ctx->streams[i]);
2566 		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2567 			dc_plane_state_retain(
2568 				dst_ctx->stream_status[i].plane_states[j]);
2569 	}
2570 
2571 	/* context refcount should not be overridden */
2572 	dst_ctx->refcount = refcount;
2573 
2574 }
2575 
2576 struct clock_source *dc_resource_find_first_free_pll(
2577 		struct resource_context *res_ctx,
2578 		const struct resource_pool *pool)
2579 {
2580 	int i;
2581 
2582 	for (i = 0; i < pool->clk_src_count; ++i) {
2583 		if (res_ctx->clock_source_ref_count[i] == 0)
2584 			return pool->clock_sources[i];
2585 	}
2586 
2587 	return NULL;
2588 }
2589 
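/*
 * Rebuild the info packets for the pipe's signal type: AVI, vendor, SPD and
 * HDR static metadata for HDMI; VSC, SPD and HDR static metadata for DP.
 */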
2590 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2591 {
2592 	enum signal_type signal = SIGNAL_TYPE_NONE;
2593 	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2594 
2595 	/* default all packets to invalid */
2596 	info->avi.valid = false;
2597 	info->gamut.valid = false;
2598 	info->vendor.valid = false;
2599 	info->spd.valid = false;
2600 	info->hdrsmd.valid = false;
2601 	info->vsc.valid = false;
2602 
2603 	signal = pipe_ctx->stream->signal;
2604 
	/* HDMI and DP have different info packets */
2606 	if (dc_is_hdmi_signal(signal)) {
2607 		set_avi_info_frame(&info->avi, pipe_ctx);
2608 
2609 		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2610 
2611 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2612 
2613 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2614 
2615 	} else if (dc_is_dp_signal(signal)) {
2616 		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2617 
2618 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2619 
2620 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2621 	}
2622 
2623 	patch_gamut_packet_checksum(&info->gamut);
2624 }
2625 
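/*
 * Assign a clock source to the stream's head pipe: the dedicated DP clock
 * source for DP/virtual signals, otherwise a shared or first free PLL.
 */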
2626 enum dc_status resource_map_clock_resources(
2627 		const struct dc  *dc,
2628 		struct dc_state *context,
2629 		struct dc_stream_state *stream)
2630 {
2631 	/* acquire new resources */
2632 	const struct resource_pool *pool = dc->res_pool;
2633 	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2634 				&context->res_ctx, stream);
2635 
2636 	if (!pipe_ctx)
2637 		return DC_ERROR_UNEXPECTED;
2638 
2639 	if (dc_is_dp_signal(pipe_ctx->stream->signal)
2640 		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2641 		pipe_ctx->clock_source = pool->dp_clock_source;
2642 	else {
2643 		pipe_ctx->clock_source = NULL;
2644 
2645 		if (!dc->config.disable_disp_pll_sharing)
2646 			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2647 				&context->res_ctx,
2648 				pipe_ctx);
2649 
2650 		if (pipe_ctx->clock_source == NULL)
2651 			pipe_ctx->clock_source =
2652 				dc_resource_find_first_free_pll(
2653 					&context->res_ctx,
2654 					pool);
2655 	}
2656 
2657 	if (pipe_ctx->clock_source == NULL)
2658 		return DC_NO_CLOCK_SOURCE_RESOURCE;
2659 
2660 	resource_reference_clock_source(
2661 		&context->res_ctx, pool,
2662 		pipe_ctx->clock_source);
2663 
2664 	return DC_OK;
2665 }
2666 
2667 /*
2668  * Note: We need to disable output if clock sources change,
2669  * since bios does optimization and doesn't apply if changing
2670  * PHY when not already disabled.
2671  */
2672 bool pipe_need_reprogram(
2673 		struct pipe_ctx *pipe_ctx_old,
2674 		struct pipe_ctx *pipe_ctx)
2675 {
2676 	if (!pipe_ctx_old->stream)
2677 		return false;
2678 
2679 	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2680 		return true;
2681 
2682 	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2683 		return true;
2684 
2685 	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2686 		return true;
2687 
2688 	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2689 			&& pipe_ctx_old->stream != pipe_ctx->stream)
2690 		return true;
2691 
2692 	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2693 		return true;
2694 
2695 	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2696 		return true;
2697 
2698 	if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2699 		return true;
2700 
2701 	if (pipe_ctx_old->stream->dpms_off != pipe_ctx->stream->dpms_off)
2702 		return true;
2703 
2704 	if (is_vsc_info_packet_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2705 		return true;
2706 
2707 	return false;
2708 }
2709 
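/*
 * Translate the stream's dither option into formatter bit depth reduction
 * settings: truncation, spatial dither and temporal (frame modulation)
 * dither.
 */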
2710 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2711 		struct bit_depth_reduction_params *fmt_bit_depth)
2712 {
2713 	enum dc_dither_option option = stream->dither_option;
2714 	enum dc_pixel_encoding pixel_encoding =
2715 			stream->timing.pixel_encoding;
2716 
2717 	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2718 
2719 	if (option == DITHER_OPTION_DEFAULT) {
2720 		switch (stream->timing.display_color_depth) {
2721 		case COLOR_DEPTH_666:
2722 			option = DITHER_OPTION_SPATIAL6;
2723 			break;
2724 		case COLOR_DEPTH_888:
2725 			option = DITHER_OPTION_SPATIAL8;
2726 			break;
2727 		case COLOR_DEPTH_101010:
2728 			option = DITHER_OPTION_SPATIAL10;
2729 			break;
2730 		default:
2731 			option = DITHER_OPTION_DISABLE;
2732 		}
2733 	}
2734 
2735 	if (option == DITHER_OPTION_DISABLE)
2736 		return;
2737 
2738 	if (option == DITHER_OPTION_TRUN6) {
2739 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2740 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2741 	} else if (option == DITHER_OPTION_TRUN8 ||
2742 			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2743 			option == DITHER_OPTION_TRUN8_FM6) {
2744 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2745 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2746 	} else if (option == DITHER_OPTION_TRUN10        ||
2747 			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
2748 			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
2749 			option == DITHER_OPTION_TRUN10_FM8     ||
2750 			option == DITHER_OPTION_TRUN10_FM6     ||
2751 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2752 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2753 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2754 	}
2755 
2756 	/* special case - Formatter can only reduce by 4 bits at most.
2757 	 * When reducing from 12 to 6 bits,
2758 	 * HW recommends we use trunc with round mode
2759 	 * (if we did nothing, trunc to 10 bits would be used)
2760 	 * note that any 12->10 bit reduction is ignored prior to DCE8,
2761 	 * as the input was 10 bits.
2762 	 */
2763 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2764 			option == DITHER_OPTION_SPATIAL6 ||
2765 			option == DITHER_OPTION_FM6) {
2766 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2767 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2768 		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2769 	}
2770 
2771 	/* spatial dither
2772 	 * note that spatial modes 1-3 are never used
2773 	 */
2774 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
2775 			option == DITHER_OPTION_SPATIAL6 ||
2776 			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
2777 			option == DITHER_OPTION_TRUN8_SPATIAL6) {
2778 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2779 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2780 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2781 		fmt_bit_depth->flags.RGB_RANDOM =
2782 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2783 	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
2784 			option == DITHER_OPTION_SPATIAL8 ||
2785 			option == DITHER_OPTION_SPATIAL8_FM6        ||
2786 			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
2787 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2788 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2789 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2790 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2791 		fmt_bit_depth->flags.RGB_RANDOM =
2792 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2793 	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2794 			option == DITHER_OPTION_SPATIAL10 ||
2795 			option == DITHER_OPTION_SPATIAL10_FM8 ||
2796 			option == DITHER_OPTION_SPATIAL10_FM6) {
2797 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2798 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2799 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2800 		fmt_bit_depth->flags.RGB_RANDOM =
2801 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2802 	}
2803 
2804 	if (option == DITHER_OPTION_SPATIAL6 ||
2805 			option == DITHER_OPTION_SPATIAL8 ||
2806 			option == DITHER_OPTION_SPATIAL10) {
2807 		fmt_bit_depth->flags.FRAME_RANDOM = 0;
2808 	} else {
2809 		fmt_bit_depth->flags.FRAME_RANDOM = 1;
2810 	}
2811 
	/* Temporal dither */
2815 	if (option == DITHER_OPTION_FM6           ||
2816 			option == DITHER_OPTION_SPATIAL8_FM6     ||
2817 			option == DITHER_OPTION_SPATIAL10_FM6     ||
2818 			option == DITHER_OPTION_TRUN10_FM6     ||
2819 			option == DITHER_OPTION_TRUN8_FM6      ||
2820 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2821 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2822 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2823 	} else if (option == DITHER_OPTION_FM8        ||
2824 			option == DITHER_OPTION_SPATIAL10_FM8  ||
2825 			option == DITHER_OPTION_TRUN10_FM8) {
2826 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2827 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2828 	} else if (option == DITHER_OPTION_FM10) {
2829 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2830 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2831 	}
2832 
2833 	fmt_bit_depth->pixel_encoding = pixel_encoding;
2834 }
2835 
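/*
 * Validate a single stream against the timing generator, the link encoder
 * output capabilities and the link's mode timing limits.
 */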
2836 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2837 {
2838 	struct dc  *core_dc = dc;
2839 	struct dc_link *link = stream->sink->link;
2840 	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2841 	enum dc_status res = DC_OK;
2842 
2843 	calculate_phy_pix_clks(stream);
2844 
2845 	if (!tg->funcs->validate_timing(tg, &stream->timing))
2846 		res = DC_FAIL_CONTROLLER_VALIDATE;
2847 
2848 	if (res == DC_OK)
2849 		if (!link->link_enc->funcs->validate_output_with_stream(
2850 						link->link_enc, stream))
2851 			res = DC_FAIL_ENC_VALIDATE;
2852 
2853 	/* TODO: validate audio ASIC caps, encoder */
2854 
2855 	if (res == DC_OK)
2856 		res = dc_link_validate_mode_timing(stream,
2857 		      link,
2858 		      &stream->timing);
2859 
2860 	return res;
2861 }
2862 
2863 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2864 {
2865 	enum dc_status res = DC_OK;
2866 
2867 	/* TODO For now validates pixel format only */
2868 	if (dc->res_pool->funcs->validate_plane)
2869 		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2870 
2871 	return res;
2872 }
2873 
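/* Number of bits per pixel for the given surface pixel format. */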
2874 unsigned int resource_pixel_format_to_bpp(enum surface_pixel_format format)
2875 {
2876 	switch (format) {
2877 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
2878 		return 8;
2879 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
2880 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
2881 		return 12;
2882 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
2883 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
2884 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
2885 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
2886 		return 16;
2887 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
2888 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
2889 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
2890 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
2891 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
2892 		return 32;
2893 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
2894 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
2895 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
2896 		return 64;
2897 	default:
2898 		ASSERT_CRITICAL(false);
2899 		return -1;
2900 	}
2901 }
2902