1 /*
2  * Copyright 2012-15 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 #include "dm_services.h"
26 
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
39 
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
46 #endif
47 #include "dce120/dce120_resource.h"
48 
49 #define DC_LOGGER_INIT(logger)
50 
51 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
52 {
	enum dce_version dc_version = DCE_VERSION_UNKNOWN;

	switch (asic_id.chip_family) {
	case FAMILY_CI:
57 		dc_version = DCE_VERSION_8_0;
58 		break;
59 	case FAMILY_KV:
60 		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
61 		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
62 		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
63 			dc_version = DCE_VERSION_8_3;
64 		else
65 			dc_version = DCE_VERSION_8_1;
66 		break;
67 	case FAMILY_CZ:
68 		dc_version = DCE_VERSION_11_0;
69 		break;
70 
71 	case FAMILY_VI:
72 		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
73 				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
74 			dc_version = DCE_VERSION_10_0;
75 			break;
76 		}
77 		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
78 				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
79 				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
80 			dc_version = DCE_VERSION_11_2;
81 		}
82 		if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
83 			dc_version = DCE_VERSION_11_22;
84 		break;
85 	case FAMILY_AI:
86 		dc_version = DCE_VERSION_12_0;
87 		break;
88 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
89 	case FAMILY_RV:
90 		dc_version = DCN_VERSION_1_0;
91 		break;
92 #endif
93 	default:
94 		dc_version = DCE_VERSION_UNKNOWN;
95 		break;
96 	}
97 	return dc_version;
98 }
99 
100 struct resource_pool *dc_create_resource_pool(
101 				struct dc  *dc,
102 				int num_virtual_links,
103 				enum dce_version dc_version,
104 				struct hw_asic_id asic_id)
105 {
106 	struct resource_pool *res_pool = NULL;
107 
108 	switch (dc_version) {
109 	case DCE_VERSION_8_0:
110 		res_pool = dce80_create_resource_pool(
111 			num_virtual_links, dc);
112 		break;
113 	case DCE_VERSION_8_1:
114 		res_pool = dce81_create_resource_pool(
115 			num_virtual_links, dc);
116 		break;
117 	case DCE_VERSION_8_3:
118 		res_pool = dce83_create_resource_pool(
119 			num_virtual_links, dc);
120 		break;
121 	case DCE_VERSION_10_0:
122 		res_pool = dce100_create_resource_pool(
123 				num_virtual_links, dc);
124 		break;
125 	case DCE_VERSION_11_0:
126 		res_pool = dce110_create_resource_pool(
127 			num_virtual_links, dc, asic_id);
128 		break;
129 	case DCE_VERSION_11_2:
130 	case DCE_VERSION_11_22:
131 		res_pool = dce112_create_resource_pool(
132 			num_virtual_links, dc);
133 		break;
134 	case DCE_VERSION_12_0:
135 		res_pool = dce120_create_resource_pool(
136 			num_virtual_links, dc);
137 		break;
138 
139 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
140 	case DCN_VERSION_1_0:
141 		res_pool = dcn10_create_resource_pool(
142 				num_virtual_links, dc);
143 		break;
144 #endif

	default:
148 		break;
149 	}
	if (res_pool != NULL) {
		struct dc_firmware_info fw_info = { { 0 } };

		if (dc->ctx->dc_bios->funcs->get_firmware_info(
				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
			res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
		} else {
			ASSERT_CRITICAL(false);
		}
	}
159 
160 	return res_pool;
161 }
162 
163 void dc_destroy_resource_pool(struct dc  *dc)
164 {
165 	if (dc) {
166 		if (dc->res_pool)
167 			dc->res_pool->funcs->destroy(&dc->res_pool);
168 
169 		kfree(dc->hwseq);
170 	}
171 }
172 
173 static void update_num_audio(
174 	const struct resource_straps *straps,
175 	unsigned int *num_audio,
176 	struct audio_support *aud_support)
177 {
178 	aud_support->dp_audio = true;
179 	aud_support->hdmi_audio_native = false;
180 	aud_support->hdmi_audio_on_dongle = false;
181 
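	/*
	 * Strap decoding, as implied by the checks below: hdmi_disable == 0
	 * means HDMI is not fused off, and bit 1 (0x2) of dc_pinstraps_audio
	 * appears to indicate HDMI audio capability.
	 */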
182 	if (straps->hdmi_disable == 0) {
183 		if (straps->dc_pinstraps_audio & 0x2) {
184 			aud_support->hdmi_audio_on_dongle = true;
185 			aud_support->hdmi_audio_native = true;
186 		}
187 	}
188 
189 	switch (straps->audio_stream_number) {
190 	case 0: /* multi streams supported */
191 		break;
192 	case 1: /* multi streams not supported */
193 		*num_audio = 1;
194 		break;
195 	default:
196 		DC_ERR("DC: unexpected audio fuse!\n");
197 	}
198 }
199 
200 bool resource_construct(
201 	unsigned int num_virtual_links,
202 	struct dc  *dc,
203 	struct resource_pool *pool,
204 	const struct resource_create_funcs *create_funcs)
205 {
206 	struct dc_context *ctx = dc->ctx;
207 	const struct resource_caps *caps = pool->res_cap;
208 	int i;
209 	unsigned int num_audio = caps->num_audio;
210 	struct resource_straps straps = {0};
211 
212 	if (create_funcs->read_dce_straps)
213 		create_funcs->read_dce_straps(dc->ctx, &straps);
214 
215 	pool->audio_count = 0;
216 	if (create_funcs->create_audio) {
217 		/* find the total number of streams available via the
218 		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
219 		 * registers (one for each pin) starting from pin 1
220 		 * up to the max number of audio pins.
221 		 * We stop on the first pin where
222 		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
223 		 */
224 		update_num_audio(&straps, &num_audio, &pool->audio_support);
225 		for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
226 			struct audio *aud = create_funcs->create_audio(ctx, i);
227 
228 			if (aud == NULL) {
229 				DC_ERR("DC: failed to create audio!\n");
230 				return false;
231 			}
232 
233 			if (!aud->funcs->endpoint_valid(aud)) {
234 				aud->funcs->destroy(&aud);
235 				break;
236 			}
237 
238 			pool->audios[i] = aud;
239 			pool->audio_count++;
240 		}
241 	}
242 
243 	pool->stream_enc_count = 0;
244 	if (create_funcs->create_stream_encoder) {
245 		for (i = 0; i < caps->num_stream_encoder; i++) {
246 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
247 			if (pool->stream_enc[i] == NULL)
248 				DC_ERR("DC: failed to create stream_encoder!\n");
249 			pool->stream_enc_count++;
250 		}
251 	}
	dc->caps.dynamic_audio = false;
	if (pool->audio_count < pool->stream_enc_count)
		dc->caps.dynamic_audio = true;
256 	for (i = 0; i < num_virtual_links; i++) {
257 		pool->stream_enc[pool->stream_enc_count] =
258 			virtual_stream_encoder_create(
259 					ctx, ctx->dc_bios);
260 		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
261 			DC_ERR("DC: failed to create stream_encoder!\n");
262 			return false;
263 		}
264 		pool->stream_enc_count++;
265 	}
266 
267 	dc->hwseq = create_funcs->create_hwseq(ctx);
268 
269 	return true;
}

static int find_matching_clock_source(
		const struct resource_pool *pool,
		struct clock_source *clock_source)
{
	int i;
277 
278 	for (i = 0; i < pool->clk_src_count; i++) {
279 		if (pool->clock_sources[i] == clock_source)
280 			return i;
281 	}
282 	return -1;
283 }
284 
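/*
 * Clock source reference counting: each clock source in the pool has a
 * per-index counter in the resource context, while the dedicated DP clock
 * source (pool->dp_clock_source) is tracked separately in
 * dp_clock_source_ref_count.  The helpers below keep both in sync.
 */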
285 void resource_unreference_clock_source(
286 		struct resource_context *res_ctx,
287 		const struct resource_pool *pool,
288 		struct clock_source *clock_source)
289 {
290 	int i = find_matching_clock_source(pool, clock_source);
291 
292 	if (i > -1)
293 		res_ctx->clock_source_ref_count[i]--;
294 
295 	if (pool->dp_clock_source == clock_source)
296 		res_ctx->dp_clock_source_ref_count--;
297 }
298 
299 void resource_reference_clock_source(
300 		struct resource_context *res_ctx,
301 		const struct resource_pool *pool,
302 		struct clock_source *clock_source)
303 {
304 	int i = find_matching_clock_source(pool, clock_source);
305 
306 	if (i > -1)
307 		res_ctx->clock_source_ref_count[i]++;
308 
309 	if (pool->dp_clock_source == clock_source)
310 		res_ctx->dp_clock_source_ref_count++;
311 }
312 
313 int resource_get_clock_source_reference(
314 		struct resource_context *res_ctx,
315 		const struct resource_pool *pool,
316 		struct clock_source *clock_source)
317 {
318 	int i = find_matching_clock_source(pool, clock_source);
319 
320 	if (i > -1)
321 		return res_ctx->clock_source_ref_count[i];
322 
323 	if (pool->dp_clock_source == clock_source)
324 		return res_ctx->dp_clock_source_ref_count;
325 
326 	return -1;
327 }
328 
329 bool resource_are_streams_timing_synchronizable(
330 	struct dc_stream_state *stream1,
331 	struct dc_stream_state *stream2)
332 {
333 	if (stream1->timing.h_total != stream2->timing.h_total)
334 		return false;
335 
336 	if (stream1->timing.v_total != stream2->timing.v_total)
337 		return false;
338 
339 	if (stream1->timing.h_addressable
340 				!= stream2->timing.h_addressable)
341 		return false;
342 
343 	if (stream1->timing.v_addressable
344 				!= stream2->timing.v_addressable)
345 		return false;
346 
347 	if (stream1->timing.pix_clk_khz
348 				!= stream2->timing.pix_clk_khz)
349 		return false;
350 
351 	if (stream1->clamping.c_depth != stream2->clamping.c_depth)
352 		return false;
353 
354 	if (stream1->phy_pix_clk != stream2->phy_pix_clk
355 			&& (!dc_is_dp_signal(stream1->signal)
356 			|| !dc_is_dp_signal(stream2->signal)))
357 		return false;
358 
359 	return true;
}

static bool is_dp_and_hdmi_sharable(
362 		struct dc_stream_state *stream1,
363 		struct dc_stream_state *stream2)
364 {
365 	if (stream1->ctx->dc->caps.disable_dp_clk_share)
366 		return false;
367 
	if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
	    stream2->clamping.c_depth != COLOR_DEPTH_888)
		return false;

	return true;
}
375 
376 static bool is_sharable_clk_src(
377 	const struct pipe_ctx *pipe_with_clk_src,
378 	const struct pipe_ctx *pipe)
379 {
380 	if (pipe_with_clk_src->clock_source == NULL)
381 		return false;
382 
383 	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
384 		return false;
385 
386 	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
387 		(dc_is_dp_signal(pipe->stream->signal) &&
388 		!is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
389 				     pipe->stream)))
390 		return false;
391 
392 	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
393 			&& dc_is_dual_link_signal(pipe->stream->signal))
394 		return false;
395 
396 	if (dc_is_hdmi_signal(pipe->stream->signal)
397 			&& dc_is_dual_link_signal(pipe_with_clk_src->stream->signal))
398 		return false;
399 
400 	if (!resource_are_streams_timing_synchronizable(
401 			pipe_with_clk_src->stream, pipe->stream))
402 		return false;
403 
404 	return true;
405 }
406 
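/*
 * Scan every pipe in the resource context and return the first already
 * assigned clock source that is_sharable_clk_src() says can also drive
 * pipe_ctx, or NULL if no such clock source exists.
 */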
407 struct clock_source *resource_find_used_clk_src_for_sharing(
408 					struct resource_context *res_ctx,
409 					struct pipe_ctx *pipe_ctx)
410 {
411 	int i;
412 
413 	for (i = 0; i < MAX_PIPES; i++) {
414 		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
415 			return res_ctx->pipe_ctx[i].clock_source;
416 	}
417 
418 	return NULL;
419 }
420 
421 static enum pixel_format convert_pixel_format_to_dalsurface(
422 		enum surface_pixel_format surface_pixel_format)
423 {
424 	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
425 
426 	switch (surface_pixel_format) {
427 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
428 		dal_pixel_format = PIXEL_FORMAT_INDEX8;
429 		break;
430 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
431 		dal_pixel_format = PIXEL_FORMAT_RGB565;
432 		break;
433 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
434 		dal_pixel_format = PIXEL_FORMAT_RGB565;
435 		break;
436 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
437 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
438 		break;
439 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
440 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
441 		break;
442 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
443 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
444 		break;
445 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
446 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
447 		break;
448 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
449 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
450 		break;
451 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
452 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
453 		dal_pixel_format = PIXEL_FORMAT_FP16;
454 		break;
455 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
456 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
457 		dal_pixel_format = PIXEL_FORMAT_420BPP8;
458 		break;
459 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
460 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
461 		dal_pixel_format = PIXEL_FORMAT_420BPP10;
462 		break;
463 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
464 	default:
465 		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
466 		break;
467 	}
468 	return dal_pixel_format;
469 }
470 
471 static void rect_swap_helper(struct rect *rect)
472 {
473 	swap(rect->height, rect->width);
474 	swap(rect->x, rect->y);
475 }
476 
477 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
478 {
479 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
480 	const struct dc_stream_state *stream = pipe_ctx->stream;
481 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
482 	struct rect surf_src = plane_state->src_rect;
483 	struct rect clip = { 0 };
484 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
485 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
486 	bool pri_split = pipe_ctx->bottom_pipe &&
487 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
488 	bool sec_split = pipe_ctx->top_pipe &&
489 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
490 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
491 
492 	/*
493 	 * Need to calculate the scan direction for viewport to properly determine offset
494 	 */
495 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
496 		flip_vert_scan_dir = true;
497 		flip_horz_scan_dir = true;
498 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
499 		flip_vert_scan_dir = true;
500 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
501 		flip_horz_scan_dir = true;
502 
503 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
504 		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
505 		pri_split = false;
506 		sec_split = false;
507 	}
508 
509 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
510 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
511 		rect_swap_helper(&surf_src);
512 
513 	/* The actual clip is an intersection between stream
514 	 * source and surface clip
515 	 */
516 	clip.x = stream->src.x > plane_state->clip_rect.x ?
517 			stream->src.x : plane_state->clip_rect.x;
518 
519 	clip.width = stream->src.x + stream->src.width <
520 			plane_state->clip_rect.x + plane_state->clip_rect.width ?
521 			stream->src.x + stream->src.width - clip.x :
522 			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
523 
524 	clip.y = stream->src.y > plane_state->clip_rect.y ?
525 			stream->src.y : plane_state->clip_rect.y;
526 
527 	clip.height = stream->src.y + stream->src.height <
528 			plane_state->clip_rect.y + plane_state->clip_rect.height ?
529 			stream->src.y + stream->src.height - clip.y :
530 			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
531 
532 	/* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
533 	 * num_pixels = clip.num_pix * scl_ratio
534 	 */
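	/*
	 * Worked example (illustrative numbers only): a 3840-wide surface with
	 * surf_src at offset 0, scaled into a 1920-wide dst_rect at x = 0,
	 * with clip.x = 480 gives viewport.x = 0 + (480 - 0) * 3840 / 1920 = 960,
	 * and a 960-wide clip gives viewport.width = 960 * 3840 / 1920 = 1920.
	 */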
535 	data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
536 			surf_src.width / plane_state->dst_rect.width;
537 	data->viewport.width = clip.width *
538 			surf_src.width / plane_state->dst_rect.width;
539 
540 	data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
541 			surf_src.height / plane_state->dst_rect.height;
542 	data->viewport.height = clip.height *
543 			surf_src.height / plane_state->dst_rect.height;
544 
545 	/* To transfer the x, y to correct coordinate on mirror image (camera).
546 	 * deg  0 : transfer x,
547 	 * deg 90 : don't need to transfer,
548 	 * deg180 : transfer y,
549 	 * deg270 : transfer x and y.
550 	 * To transfer the x, y to correct coordinate on non-mirror image (video).
551 	 * deg  0 : don't need to transfer,
552 	 * deg 90 : transfer y,
553 	 * deg180 : transfer x and y,
554 	 * deg270 : transfer x.
555 	 */
556 	if (pipe_ctx->plane_state->horizontal_mirror) {
557 		if (flip_horz_scan_dir && !flip_vert_scan_dir) {
558 			data->viewport.y = surf_src.height - data->viewport.y - data->viewport.height;
559 			data->viewport.x = surf_src.width - data->viewport.x - data->viewport.width;
560 		} else if (flip_horz_scan_dir && flip_vert_scan_dir)
561 			data->viewport.y = surf_src.height - data->viewport.y - data->viewport.height;
562 		else {
563 			if (!flip_horz_scan_dir && !flip_vert_scan_dir)
564 				data->viewport.x = surf_src.width - data->viewport.x - data->viewport.width;
565 		}
566 	} else {
567 		if (flip_horz_scan_dir)
568 			data->viewport.x = surf_src.width - data->viewport.x - data->viewport.width;
569 		if (flip_vert_scan_dir)
570 			data->viewport.y = surf_src.height - data->viewport.y - data->viewport.height;
571 	}
572 
573 	/* Round down, compensate in init */
574 	data->viewport_c.x = data->viewport.x / vpc_div;
575 	data->viewport_c.y = data->viewport.y / vpc_div;
576 	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
577 			dc_fixpt_half : dc_fixpt_zero;
578 	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
579 			dc_fixpt_half : dc_fixpt_zero;
580 	/* Round up, assume original video size always even dimensions */
581 	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
582 	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
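	/*
	 * For example, with 4:2:0 (vpc_div = 2) a 1920-wide luma viewport maps
	 * to a 960-wide chroma viewport, and an odd viewport.x leaves the
	 * half-pixel remainder to be folded into inits.h_c above.
	 */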
583 
584 	/* Handle hsplit */
585 	if (sec_split) {
586 		data->viewport.x +=  data->viewport.width / 2;
587 		data->viewport_c.x +=  data->viewport_c.width / 2;
588 		/* Ceil offset pipe */
589 		data->viewport.width = (data->viewport.width + 1) / 2;
590 		data->viewport_c.width = (data->viewport_c.width + 1) / 2;
591 	} else if (pri_split) {
592 		if (data->viewport.width > 1)
593 			data->viewport.width /= 2;
594 		if (data->viewport_c.width > 1)
595 			data->viewport_c.width /= 2;
596 	}
597 
598 	if (plane_state->rotation == ROTATION_ANGLE_90 ||
599 			plane_state->rotation == ROTATION_ANGLE_270) {
600 		rect_swap_helper(&data->viewport_c);
601 		rect_swap_helper(&data->viewport);
602 	}
603 }
604 
605 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
606 {
607 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
608 	const struct dc_stream_state *stream = pipe_ctx->stream;
609 	struct rect surf_src = plane_state->src_rect;
610 	struct rect surf_clip = plane_state->clip_rect;
611 	bool pri_split = pipe_ctx->bottom_pipe &&
612 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
613 	bool sec_split = pipe_ctx->top_pipe &&
614 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
615 	bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
616 
617 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
618 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
619 		rect_swap_helper(&surf_src);
620 
621 	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
622 	if (stream->src.x < surf_clip.x)
623 		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
624 			- stream->src.x) * stream->dst.width
625 						/ stream->src.width;
626 
627 	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
628 			stream->dst.width / stream->src.width;
629 	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
630 			stream->dst.x + stream->dst.width)
631 		pipe_ctx->plane_res.scl_data.recout.width =
632 			stream->dst.x + stream->dst.width
633 						- pipe_ctx->plane_res.scl_data.recout.x;
634 
635 	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
636 	if (stream->src.y < surf_clip.y)
637 		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
638 			- stream->src.y) * stream->dst.height
639 						/ stream->src.height;
640 
641 	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
642 			stream->dst.height / stream->src.height;
643 	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
644 			stream->dst.y + stream->dst.height)
645 		pipe_ctx->plane_res.scl_data.recout.height =
646 			stream->dst.y + stream->dst.height
647 						- pipe_ctx->plane_res.scl_data.recout.y;
648 
649 	/* Handle h & vsplit */
650 	if (sec_split && top_bottom_split) {
651 		pipe_ctx->plane_res.scl_data.recout.y +=
652 				pipe_ctx->plane_res.scl_data.recout.height / 2;
		/* Floor primary pipe, ceil secondary pipe */
654 		pipe_ctx->plane_res.scl_data.recout.height =
655 				(pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
656 	} else if (pri_split && top_bottom_split)
657 		pipe_ctx->plane_res.scl_data.recout.height /= 2;
658 	else if (pri_split || sec_split) {
659 		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
660 		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
661 					(plane_state->rotation == ROTATION_ANGLE_180);
662 
663 		if (plane_state->rotation == ROTATION_ANGLE_90
664 				|| plane_state->rotation == ROTATION_ANGLE_270)
665 			/* Secondary_pipe XOR Rotation_270 */
666 			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
667 
668 		if (right_view) {
669 			pipe_ctx->plane_res.scl_data.recout.x +=
670 					pipe_ctx->plane_res.scl_data.recout.width / 2;
671 			/* Ceil offset pipe */
672 			pipe_ctx->plane_res.scl_data.recout.width =
673 					(pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
674 		} else {
675 			if (pipe_ctx->plane_res.scl_data.recout.width > 1)
676 				pipe_ctx->plane_res.scl_data.recout.width /= 2;
677 		}
678 	}
	/* Unclipped recout offset = stream dst offset + ((surf dst offset - stream src offset)
	 *			* 1 / stream scaling ratio) - (surf src offset * 1 / full scl
	 *			ratio)
	 */
683 	recout_full->x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
684 					* stream->dst.width / stream->src.width -
685 			surf_src.x * plane_state->dst_rect.width / surf_src.width
686 					* stream->dst.width / stream->src.width;
687 	recout_full->y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
688 					* stream->dst.height / stream->src.height -
689 			surf_src.y * plane_state->dst_rect.height / surf_src.height
690 					* stream->dst.height / stream->src.height;
691 
692 	recout_full->width = plane_state->dst_rect.width
693 					* stream->dst.width / stream->src.width;
694 	recout_full->height = plane_state->dst_rect.height
695 					* stream->dst.height / stream->src.height;
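	/*
	 * Illustrative example: if the stream does no scaling
	 * (stream->src == stream->dst) and the plane has no source offset,
	 * recout_full reduces to the plane dst_rect, while the clipped recout
	 * computed above covers just the portion this pipe actually drives.
	 */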
696 }
697 
698 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
699 {
700 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
701 	const struct dc_stream_state *stream = pipe_ctx->stream;
702 	struct rect surf_src = plane_state->src_rect;
703 	const int in_w = stream->src.width;
704 	const int in_h = stream->src.height;
705 	const int out_w = stream->dst.width;
706 	const int out_h = stream->dst.height;
707 
708 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
709 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
710 		rect_swap_helper(&surf_src);
711 
712 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
713 					surf_src.width,
714 					plane_state->dst_rect.width);
715 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
716 					surf_src.height,
717 					plane_state->dst_rect.height);
718 
719 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
720 		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
721 	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
722 		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
723 
724 	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
725 		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
726 	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
727 		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
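	/*
	 * At this point ratios.horz/vert hold the combined ratio, ignoring the
	 * 3D adjustments above: (surface source size / plane dst_rect size) *
	 * (stream source size / stream dst size).  For example a 3840-wide
	 * source in a 1920-wide dst_rect with 1:1 stream scaling gives 2.0.
	 */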
728 
729 	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
730 	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
731 
732 	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
733 			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
734 		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
735 		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
736 	}
737 	pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
738 			pipe_ctx->plane_res.scl_data.ratios.horz, 19);
739 	pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
740 			pipe_ctx->plane_res.scl_data.ratios.vert, 19);
741 	pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
742 			pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
743 	pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
744 			pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
745 }
746 
747 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct rect *recout_full)
748 {
749 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
750 	struct rect src = pipe_ctx->plane_state->src_rect;
751 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
752 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
753 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
754 
755 	/*
756 	 * Need to calculate the scan direction for viewport to make adjustments
757 	 */
758 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
759 		flip_vert_scan_dir = true;
760 		flip_horz_scan_dir = true;
761 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
762 		flip_vert_scan_dir = true;
763 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
764 		flip_horz_scan_dir = true;
765 
766 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
767 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
768 		rect_swap_helper(&src);
769 		rect_swap_helper(&data->viewport_c);
770 		rect_swap_helper(&data->viewport);
771 
772 		if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270 &&
773 			pipe_ctx->plane_state->horizontal_mirror) {
774 			flip_vert_scan_dir = true;
775 		}
776 		if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 &&
777 			pipe_ctx->plane_state->horizontal_mirror) {
778 			flip_vert_scan_dir = false;
779 		}
780 	} else if (pipe_ctx->plane_state->horizontal_mirror)
781 			flip_horz_scan_dir = !flip_horz_scan_dir;
782 
783 	/*
784 	 * Init calculated according to formula:
785 	 * 	init = (scaling_ratio + number_of_taps + 1) / 2
786 	 * 	init_bot = init + scaling_ratio
787 	 * 	init_c = init + truncated_vp_c_offset(from calculate viewport)
788 	 */
789 	data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
790 			dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
791 
792 	data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
793 			dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
794 
795 	data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
796 			dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
797 
798 	data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
799 			dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
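	/*
	 * Example with the formula above: a 2.0 scaling ratio and 4 taps give
	 * init = (2.0 + 4 + 1) / 2 = 3.5, stored here truncated to 19
	 * fractional bits.
	 */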
800 
801 	if (!flip_horz_scan_dir) {
802 		/* Adjust for viewport end clip-off */
803 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
804 			int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
805 			int int_part = dc_fixpt_floor(
806 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
807 
808 			int_part = int_part > 0 ? int_part : 0;
809 			data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
810 		}
811 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
812 			int vp_clip = (src.x + src.width) / vpc_div -
813 					data->viewport_c.width - data->viewport_c.x;
814 			int int_part = dc_fixpt_floor(
815 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
816 
817 			int_part = int_part > 0 ? int_part : 0;
818 			data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
819 		}
820 
821 		/* Adjust for non-0 viewport offset */
822 		if (data->viewport.x) {
823 			int int_part;
824 
825 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
826 					data->ratios.horz, data->recout.x - recout_full->x));
827 			int_part = dc_fixpt_floor(data->inits.h) - data->viewport.x;
828 			if (int_part < data->taps.h_taps) {
829 				int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
830 							(data->taps.h_taps - int_part) : data->viewport.x;
831 				data->viewport.x -= int_adj;
832 				data->viewport.width += int_adj;
833 				int_part += int_adj;
834 			} else if (int_part > data->taps.h_taps) {
835 				data->viewport.x += int_part - data->taps.h_taps;
836 				data->viewport.width -= int_part - data->taps.h_taps;
837 				int_part = data->taps.h_taps;
838 			}
839 			data->inits.h.value &= 0xffffffff;
840 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
841 		}
842 
843 		if (data->viewport_c.x) {
844 			int int_part;
845 
846 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
847 					data->ratios.horz_c, data->recout.x - recout_full->x));
848 			int_part = dc_fixpt_floor(data->inits.h_c) - data->viewport_c.x;
849 			if (int_part < data->taps.h_taps_c) {
850 				int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
851 						(data->taps.h_taps_c - int_part) : data->viewport_c.x;
852 				data->viewport_c.x -= int_adj;
853 				data->viewport_c.width += int_adj;
854 				int_part += int_adj;
855 			} else if (int_part > data->taps.h_taps_c) {
856 				data->viewport_c.x += int_part - data->taps.h_taps_c;
857 				data->viewport_c.width -= int_part - data->taps.h_taps_c;
858 				int_part = data->taps.h_taps_c;
859 			}
860 			data->inits.h_c.value &= 0xffffffff;
861 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
862 		}
863 	} else {
864 		/* Adjust for non-0 viewport offset */
865 		if (data->viewport.x) {
866 			int int_part = dc_fixpt_floor(
867 					dc_fixpt_sub(data->inits.h, data->ratios.horz));
868 
869 			int_part = int_part > 0 ? int_part : 0;
870 			data->viewport.width += int_part < data->viewport.x ? int_part : data->viewport.x;
871 			data->viewport.x -= int_part < data->viewport.x ? int_part : data->viewport.x;
872 		}
873 		if (data->viewport_c.x) {
874 			int int_part = dc_fixpt_floor(
875 					dc_fixpt_sub(data->inits.h_c, data->ratios.horz_c));
876 
877 			int_part = int_part > 0 ? int_part : 0;
878 			data->viewport_c.width += int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
879 			data->viewport_c.x -= int_part < data->viewport_c.x ? int_part : data->viewport_c.x;
880 		}
881 
882 		/* Adjust for viewport end clip-off */
883 		if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
884 			int int_part;
885 			int end_offset = src.x + src.width
886 					- data->viewport.x - data->viewport.width;
887 
888 			/*
889 			 * this is init if vp had no offset, keep in mind this is from the
890 			 * right side of vp due to scan direction
891 			 */
892 			data->inits.h = dc_fixpt_add(data->inits.h, dc_fixpt_mul_int(
893 					data->ratios.horz, data->recout.x - recout_full->x));
894 			/*
895 			 * this is the difference between first pixel of viewport available to read
			 * and init position, taking into account scan direction
897 			 */
898 			int_part = dc_fixpt_floor(data->inits.h) - end_offset;
899 			if (int_part < data->taps.h_taps) {
900 				int int_adj = end_offset >= (data->taps.h_taps - int_part) ?
901 							(data->taps.h_taps - int_part) : end_offset;
902 				data->viewport.width += int_adj;
903 				int_part += int_adj;
904 			} else if (int_part > data->taps.h_taps) {
905 				data->viewport.width += int_part - data->taps.h_taps;
906 				int_part = data->taps.h_taps;
907 			}
908 			data->inits.h.value &= 0xffffffff;
909 			data->inits.h = dc_fixpt_add_int(data->inits.h, int_part);
910 		}
911 
912 		if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
913 			int int_part;
914 			int end_offset = (src.x + src.width) / vpc_div
915 					- data->viewport_c.x - data->viewport_c.width;
916 
917 			/*
918 			 * this is init if vp had no offset, keep in mind this is from the
919 			 * right side of vp due to scan direction
920 			 */
921 			data->inits.h_c = dc_fixpt_add(data->inits.h_c, dc_fixpt_mul_int(
922 					data->ratios.horz_c, data->recout.x - recout_full->x));
923 			/*
924 			 * this is the difference between first pixel of viewport available to read
			 * and init position, taking into account scan direction
926 			 */
927 			int_part = dc_fixpt_floor(data->inits.h_c) - end_offset;
928 			if (int_part < data->taps.h_taps_c) {
929 				int int_adj = end_offset >= (data->taps.h_taps_c - int_part) ?
930 							(data->taps.h_taps_c - int_part) : end_offset;
931 				data->viewport_c.width += int_adj;
932 				int_part += int_adj;
933 			} else if (int_part > data->taps.h_taps_c) {
934 				data->viewport_c.width += int_part - data->taps.h_taps_c;
935 				int_part = data->taps.h_taps_c;
936 			}
937 			data->inits.h_c.value &= 0xffffffff;
938 			data->inits.h_c = dc_fixpt_add_int(data->inits.h_c, int_part);
939 		}
940 
941 	}
942 	if (!flip_vert_scan_dir) {
943 		/* Adjust for viewport end clip-off */
944 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
945 			int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
946 			int int_part = dc_fixpt_floor(
947 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
948 
949 			int_part = int_part > 0 ? int_part : 0;
950 			data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
951 		}
952 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
953 			int vp_clip = (src.y + src.height) / vpc_div -
954 					data->viewport_c.height - data->viewport_c.y;
955 			int int_part = dc_fixpt_floor(
956 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
957 
958 			int_part = int_part > 0 ? int_part : 0;
959 			data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
960 		}
961 
962 		/* Adjust for non-0 viewport offset */
963 		if (data->viewport.y) {
964 			int int_part;
965 
966 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
967 					data->ratios.vert, data->recout.y - recout_full->y));
968 			int_part = dc_fixpt_floor(data->inits.v) - data->viewport.y;
969 			if (int_part < data->taps.v_taps) {
970 				int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
971 							(data->taps.v_taps - int_part) : data->viewport.y;
972 				data->viewport.y -= int_adj;
973 				data->viewport.height += int_adj;
974 				int_part += int_adj;
975 			} else if (int_part > data->taps.v_taps) {
976 				data->viewport.y += int_part - data->taps.v_taps;
977 				data->viewport.height -= int_part - data->taps.v_taps;
978 				int_part = data->taps.v_taps;
979 			}
980 			data->inits.v.value &= 0xffffffff;
981 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
982 		}
983 
984 		if (data->viewport_c.y) {
985 			int int_part;
986 
987 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
988 					data->ratios.vert_c, data->recout.y - recout_full->y));
989 			int_part = dc_fixpt_floor(data->inits.v_c) - data->viewport_c.y;
990 			if (int_part < data->taps.v_taps_c) {
991 				int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
992 						(data->taps.v_taps_c - int_part) : data->viewport_c.y;
993 				data->viewport_c.y -= int_adj;
994 				data->viewport_c.height += int_adj;
995 				int_part += int_adj;
996 			} else if (int_part > data->taps.v_taps_c) {
997 				data->viewport_c.y += int_part - data->taps.v_taps_c;
998 				data->viewport_c.height -= int_part - data->taps.v_taps_c;
999 				int_part = data->taps.v_taps_c;
1000 			}
1001 			data->inits.v_c.value &= 0xffffffff;
1002 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
1003 		}
1004 	} else {
1005 		/* Adjust for non-0 viewport offset */
1006 		if (data->viewport.y) {
1007 			int int_part = dc_fixpt_floor(
1008 					dc_fixpt_sub(data->inits.v, data->ratios.vert));
1009 
1010 			int_part = int_part > 0 ? int_part : 0;
1011 			data->viewport.height += int_part < data->viewport.y ? int_part : data->viewport.y;
1012 			data->viewport.y -= int_part < data->viewport.y ? int_part : data->viewport.y;
1013 		}
1014 		if (data->viewport_c.y) {
1015 			int int_part = dc_fixpt_floor(
1016 					dc_fixpt_sub(data->inits.v_c, data->ratios.vert_c));
1017 
1018 			int_part = int_part > 0 ? int_part : 0;
1019 			data->viewport_c.height += int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
1020 			data->viewport_c.y -= int_part < data->viewport_c.y ? int_part : data->viewport_c.y;
1021 		}
1022 
1023 		/* Adjust for viewport end clip-off */
1024 		if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
1025 			int int_part;
1026 			int end_offset = src.y + src.height
1027 					- data->viewport.y - data->viewport.height;
1028 
1029 			/*
1030 			 * this is init if vp had no offset, keep in mind this is from the
1031 			 * right side of vp due to scan direction
1032 			 */
1033 			data->inits.v = dc_fixpt_add(data->inits.v, dc_fixpt_mul_int(
1034 					data->ratios.vert, data->recout.y - recout_full->y));
1035 			/*
1036 			 * this is the difference between first pixel of viewport available to read
1037 			 * and init position, taking into account scan direction
1038 			 */
1039 			int_part = dc_fixpt_floor(data->inits.v) - end_offset;
1040 			if (int_part < data->taps.v_taps) {
1041 				int int_adj = end_offset >= (data->taps.v_taps - int_part) ?
1042 							(data->taps.v_taps - int_part) : end_offset;
1043 				data->viewport.height += int_adj;
1044 				int_part += int_adj;
1045 			} else if (int_part > data->taps.v_taps) {
1046 				data->viewport.height += int_part - data->taps.v_taps;
1047 				int_part = data->taps.v_taps;
1048 			}
1049 			data->inits.v.value &= 0xffffffff;
1050 			data->inits.v = dc_fixpt_add_int(data->inits.v, int_part);
1051 		}
1052 
1053 		if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
1054 			int int_part;
1055 			int end_offset = (src.y + src.height) / vpc_div
1056 					- data->viewport_c.y - data->viewport_c.height;
1057 
1058 			/*
1059 			 * this is init if vp had no offset, keep in mind this is from the
1060 			 * right side of vp due to scan direction
1061 			 */
1062 			data->inits.v_c = dc_fixpt_add(data->inits.v_c, dc_fixpt_mul_int(
1063 					data->ratios.vert_c, data->recout.y - recout_full->y));
1064 			/*
1065 			 * this is the difference between first pixel of viewport available to read
1066 			 * and init position, taking into account scan direction
1067 			 */
1068 			int_part = dc_fixpt_floor(data->inits.v_c) - end_offset;
1069 			if (int_part < data->taps.v_taps_c) {
1070 				int int_adj = end_offset >= (data->taps.v_taps_c - int_part) ?
1071 							(data->taps.v_taps_c - int_part) : end_offset;
1072 				data->viewport_c.height += int_adj;
1073 				int_part += int_adj;
1074 			} else if (int_part > data->taps.v_taps_c) {
1075 				data->viewport_c.height += int_part - data->taps.v_taps_c;
1076 				int_part = data->taps.v_taps_c;
1077 			}
1078 			data->inits.v_c.value &= 0xffffffff;
1079 			data->inits.v_c = dc_fixpt_add_int(data->inits.v_c, int_part);
1080 		}
1081 	}
1082 
1083 	/* Interlaced inits based on final vert inits */
1084 	data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
1085 	data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
1086 
1087 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
1088 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
1089 		rect_swap_helper(&data->viewport_c);
1090 		rect_swap_helper(&data->viewport);
1091 	}
1092 }
1093 
1094 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
1095 {
1096 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
1097 	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
1098 	struct rect recout_full = { 0 };
1099 	bool res = false;
1100 	DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
	/* Important: scaling ratio calculation requires pixel format,
	 * lb depth calculation requires recout, and taps require scaling ratios.
	 * Inits require viewport, taps, ratios and recout of the split pipe.
	 */
1105 	pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
1106 			pipe_ctx->plane_state->format);
1107 
1108 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1109 		pipe_ctx->stream->dst.height *= 2;
1110 
1111 	calculate_scaling_ratios(pipe_ctx);
1112 
1113 	calculate_viewport(pipe_ctx);
1114 
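	/*
	 * Viewports narrower or shorter than 16 pixels are rejected below;
	 * such sizes are treated as too small for the scaler to be programmed
	 * sensibly.
	 */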
1115 	if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
1116 		return false;
1117 
1118 	calculate_recout(pipe_ctx, &recout_full);
1119 
1120 	/**
1121 	 * Setting line buffer pixel depth to 24bpp yields banding
1122 	 * on certain displays, such as the Sharp 4k
1123 	 */
1124 	pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
1125 
1126 	pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
1127 	pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
1128 
1129 	pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
1130 	pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
1131 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1132 		pipe_ctx->plane_res.scl_data.v_active *= 2;

	/* Taps calculations */
1136 	if (pipe_ctx->plane_res.xfm != NULL)
1137 		res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1138 				pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1139 
1140 	if (pipe_ctx->plane_res.dpp != NULL)
1141 		res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1142 				pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
1143 	if (!res) {
1144 		/* Try 24 bpp linebuffer */
1145 		pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
1146 
1147 		if (pipe_ctx->plane_res.xfm != NULL)
1148 			res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
1149 					pipe_ctx->plane_res.xfm,
1150 					&pipe_ctx->plane_res.scl_data,
1151 					&plane_state->scaling_quality);
1152 
1153 		if (pipe_ctx->plane_res.dpp != NULL)
1154 			res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
1155 					pipe_ctx->plane_res.dpp,
1156 					&pipe_ctx->plane_res.scl_data,
1157 					&plane_state->scaling_quality);
1158 	}
1159 
1160 	if (res)
1161 		/* May need to re-check lb size after this in some obscure scenario */
1162 		calculate_inits_and_adj_vp(pipe_ctx, &recout_full);
1163 
1164 	DC_LOG_SCALER(
1165 				"%s: Viewport:\nheight:%d width:%d x:%d "
1166 				"y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1167 				"y:%d\n",
1168 				__func__,
1169 				pipe_ctx->plane_res.scl_data.viewport.height,
1170 				pipe_ctx->plane_res.scl_data.viewport.width,
1171 				pipe_ctx->plane_res.scl_data.viewport.x,
1172 				pipe_ctx->plane_res.scl_data.viewport.y,
1173 				plane_state->dst_rect.height,
1174 				plane_state->dst_rect.width,
1175 				plane_state->dst_rect.x,
1176 				plane_state->dst_rect.y);
1177 
1178 	if (pipe_ctx->stream->timing.flags.INTERLACE)
1179 		pipe_ctx->stream->dst.height /= 2;
1180 
1181 	return res;
1182 }

enum dc_status resource_build_scaling_params_for_context(
1186 	const struct dc  *dc,
1187 	struct dc_state *context)
1188 {
1189 	int i;
1190 
1191 	for (i = 0; i < MAX_PIPES; i++) {
1192 		if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1193 				context->res_ctx.pipe_ctx[i].stream != NULL)
1194 			if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1195 				return DC_FAIL_SCALING;
1196 	}
1197 
1198 	return DC_OK;
1199 }
1200 
1201 struct pipe_ctx *find_idle_secondary_pipe(
1202 		struct resource_context *res_ctx,
1203 		const struct resource_pool *pool)
1204 {
1205 	int i;
1206 	struct pipe_ctx *secondary_pipe = NULL;
1207 
1208 	/*
1209 	 * search backwards for the second pipe to keep pipe
1210 	 * assignment more consistent
1211 	 */
1212 
1213 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1214 		if (res_ctx->pipe_ctx[i].stream == NULL) {
1215 			secondary_pipe = &res_ctx->pipe_ctx[i];
1216 			secondary_pipe->pipe_idx = i;
1217 			break;
1218 		}
1219 	}

	return secondary_pipe;
1223 }
1224 
1225 struct pipe_ctx *resource_get_head_pipe_for_stream(
1226 		struct resource_context *res_ctx,
1227 		struct dc_stream_state *stream)
1228 {
	int i;

	for (i = 0; i < MAX_PIPES; i++) {
		if (res_ctx->pipe_ctx[i].stream == stream &&
				!res_ctx->pipe_ctx[i].top_pipe)
			return &res_ctx->pipe_ctx[i];
	}
1237 	return NULL;
1238 }
1239 
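/*
 * Return the last pipe in the bottom_pipe chain (the tail) for the given
 * stream, i.e. the pipe a newly attached plane gets linked below, or NULL
 * if the stream has no head pipe.
 */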
static struct pipe_ctx *resource_get_tail_pipe_for_stream(
		struct resource_context *res_ctx,
		struct dc_stream_state *stream)
{
	struct pipe_ctx *tail_pipe;

	tail_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);

	if (!tail_pipe)
		return NULL;

	while (tail_pipe->bottom_pipe)
		tail_pipe = tail_pipe->bottom_pipe;

	return tail_pipe;
}
1259 
1260 /*
1261  * A free_pipe for a stream is defined here as a pipe
1262  * that has no surface attached yet
1263  */
1264 static struct pipe_ctx *acquire_free_pipe_for_stream(
1265 		struct dc_state *context,
1266 		const struct resource_pool *pool,
1267 		struct dc_stream_state *stream)
1268 {
1269 	int i;
1270 	struct resource_context *res_ctx = &context->res_ctx;
1271 
1272 	struct pipe_ctx *head_pipe = NULL;
1273 
1274 	/* Find head pipe, which has the back end set up*/
1275 
1276 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1277 
1278 	if (!head_pipe) {
1279 		ASSERT(0);
1280 		return NULL;
1281 	}
1282 
1283 	if (!head_pipe->plane_state)
1284 		return head_pipe;
1285 
1286 	/* Re-use pipe already acquired for this stream if available*/
1287 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1288 		if (res_ctx->pipe_ctx[i].stream == stream &&
1289 				!res_ctx->pipe_ctx[i].plane_state) {
1290 			return &res_ctx->pipe_ctx[i];
1291 		}
1292 	}
1293 
	/*
	 * At this point we have no reusable pipe for this stream and we need
	 * to acquire an idle one to satisfy the request
	 */
1298 
1299 	if (!pool->funcs->acquire_idle_pipe_for_layer)
1300 		return NULL;
1301 
1302 	return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1303 
1304 }
1305 
1306 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
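/*
 * Steal the bottom half of an existing hsplit: find a pipe that is currently
 * a secondary (split) pipe for some plane, unlink it from its split chain,
 * reset it, reattach its pool resources (tg, hubp, ipp, dpp, opp) and return
 * its index so it can host a new plane for this stream.
 */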
1307 static int acquire_first_split_pipe(
1308 		struct resource_context *res_ctx,
1309 		const struct resource_pool *pool,
1310 		struct dc_stream_state *stream)
1311 {
1312 	int i;
1313 
1314 	for (i = 0; i < pool->pipe_count; i++) {
1315 		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1316 
1317 		if (pipe_ctx->top_pipe &&
1318 				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1319 			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1320 			if (pipe_ctx->bottom_pipe)
1321 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1322 
1323 			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1324 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1325 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1326 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1327 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1328 			pipe_ctx->stream_res.opp = pool->opps[i];
1329 			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1330 			pipe_ctx->pipe_idx = i;
1331 
1332 			pipe_ctx->stream = stream;
1333 			return i;
1334 		}
1335 	}
1336 	return -1;
1337 }
1338 #endif
1339 
1340 bool dc_add_plane_to_context(
1341 		const struct dc *dc,
1342 		struct dc_stream_state *stream,
1343 		struct dc_plane_state *plane_state,
1344 		struct dc_state *context)
1345 {
1346 	int i;
1347 	struct resource_pool *pool = dc->res_pool;
1348 	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1349 	struct dc_stream_status *stream_status = NULL;
1350 
1351 	for (i = 0; i < context->stream_count; i++)
1352 		if (context->streams[i] == stream) {
1353 			stream_status = &context->stream_status[i];
1354 			break;
1355 		}
1356 	if (stream_status == NULL) {
1357 		dm_error("Existing stream not found; failed to attach surface!\n");
1358 		return false;
1359 	}
1360 
1361 
1362 	if (stream_status->plane_count == MAX_SURFACE_NUM) {
1363 		dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1364 				plane_state, MAX_SURFACE_NUM);
1365 		return false;
1366 	}
1367 
1368 	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1369 
1370 	if (!head_pipe) {
1371 		dm_error("Head pipe not found for stream_state %p !\n", stream);
1372 		return false;
1373 	}
1374 
1375 	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1376 
1377 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1378 	if (!free_pipe) {
1379 		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1380 		if (pipe_idx >= 0)
1381 			free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1382 	}
1383 #endif
1384 	if (!free_pipe)
1385 		return false;
1386 
1387 	/* retain new surfaces */
1388 	dc_plane_state_retain(plane_state);
1389 	free_pipe->plane_state = plane_state;
1390 
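	/*
	 * If the plane lands on a pipe other than the head pipe, share the
	 * stream backend (tg, opp, stream encoder, audio, clock source) of the
	 * current tail pipe and link the new pipe in as the bottom of the
	 * split chain.
	 */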
1391 	if (head_pipe != free_pipe) {
1392 
1393 		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1394 		ASSERT(tail_pipe);
1395 
1396 		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1397 		free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1398 		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1399 		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1400 		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1401 		free_pipe->clock_source = tail_pipe->clock_source;
1402 		free_pipe->top_pipe = tail_pipe;
1403 		tail_pipe->bottom_pipe = free_pipe;
1404 	}
1405 
1406 	/* assign new surfaces*/
1407 	stream_status->plane_states[stream_status->plane_count] = plane_state;
1408 
1409 	stream_status->plane_count++;
1410 
1411 	return true;
1412 }
1413 
1414 bool dc_remove_plane_from_context(
1415 		const struct dc *dc,
1416 		struct dc_stream_state *stream,
1417 		struct dc_plane_state *plane_state,
1418 		struct dc_state *context)
1419 {
1420 	int i;
1421 	struct dc_stream_status *stream_status = NULL;
1422 	struct resource_pool *pool = dc->res_pool;
1423 
1424 	for (i = 0; i < context->stream_count; i++)
1425 		if (context->streams[i] == stream) {
1426 			stream_status = &context->stream_status[i];
1427 			break;
1428 		}
1429 
1430 	if (stream_status == NULL) {
1431 		dm_error("Existing stream not found; failed to remove plane.\n");
1432 		return false;
1433 	}
1434 
1435 	/* release pipe for plane*/
1436 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1437 		struct pipe_ctx *pipe_ctx;
1438 
1439 		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1440 			pipe_ctx = &context->res_ctx.pipe_ctx[i];
1441 
1442 			if (pipe_ctx->top_pipe)
1443 				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1444 
			/* The second condition avoids setting the bottom (tail)
			 * pipe's top_pipe to NULL, which would make it look like
			 * a head pipe in subsequent deletes
			 */
1449 			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1450 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1451 
			/*
			 * For a head pipe, detach the surface from the pipe;
			 * for a tail pipe, just zero it out
			 */
1456 			if (!pipe_ctx->top_pipe) {
1457 				pipe_ctx->plane_state = NULL;
1458 				pipe_ctx->bottom_pipe = NULL;
1459 			} else  {
1460 				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1461 			}
1462 		}
1463 	}

	for (i = 0; i < stream_status->plane_count; i++) {
1467 		if (stream_status->plane_states[i] == plane_state) {
1468 
1469 			dc_plane_state_release(stream_status->plane_states[i]);
1470 			break;
1471 		}
1472 	}
1473 
1474 	if (i == stream_status->plane_count) {
1475 		dm_error("Existing plane_state not found; failed to detach it!\n");
1476 		return false;
1477 	}
1478 
1479 	stream_status->plane_count--;
1480 
1481 	/* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1482 	for (; i < stream_status->plane_count; i++)
1483 		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1484 
1485 	stream_status->plane_states[stream_status->plane_count] = NULL;
1486 
1487 	return true;
1488 }
1489 
1490 bool dc_rem_all_planes_for_stream(
1491 		const struct dc *dc,
1492 		struct dc_stream_state *stream,
1493 		struct dc_state *context)
1494 {
1495 	int i, old_plane_count;
1496 	struct dc_stream_status *stream_status = NULL;
1497 	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1498 
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
			break;
		}
1504 
1505 	if (stream_status == NULL) {
1506 		dm_error("Existing stream %p not found!\n", stream);
1507 		return false;
1508 	}
1509 
1510 	old_plane_count = stream_status->plane_count;
1511 
1512 	for (i = 0; i < old_plane_count; i++)
1513 		del_planes[i] = stream_status->plane_states[i];
1514 
1515 	for (i = 0; i < old_plane_count; i++)
1516 		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1517 			return false;
1518 
1519 	return true;
1520 }
1521 
1522 static bool add_all_planes_for_stream(
1523 		const struct dc *dc,
1524 		struct dc_stream_state *stream,
1525 		const struct dc_validation_set set[],
1526 		int set_count,
1527 		struct dc_state *context)
1528 {
1529 	int i, j;
1530 
1531 	for (i = 0; i < set_count; i++)
1532 		if (set[i].stream == stream)
1533 			break;
1534 
1535 	if (i == set_count) {
1536 		dm_error("Stream %p not found in set!\n", stream);
1537 		return false;
1538 	}
1539 
1540 	for (j = 0; j < set[i].plane_count; j++)
1541 		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1542 			return false;
1543 
1544 	return true;
1545 }
1546 
1547 bool dc_add_all_planes_for_stream(
1548 		const struct dc *dc,
1549 		struct dc_stream_state *stream,
1550 		struct dc_plane_state * const *plane_states,
1551 		int plane_count,
1552 		struct dc_state *context)
1553 {
1554 	struct dc_validation_set set;
1555 	int i;
1556 
1557 	set.stream = stream;
1558 	set.plane_count = plane_count;
1559 
1560 	for (i = 0; i < plane_count; i++)
1561 		set.plane_states[i] = plane_states[i];
1562 
1563 	return add_all_planes_for_stream(dc, stream, &set, 1, context);
1564 }

static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1568 	struct dc_stream_state *new_stream)
1569 {
1570 	if (cur_stream == NULL)
1571 		return true;
1572 
1573 	if (memcmp(&cur_stream->hdr_static_metadata,
1574 			&new_stream->hdr_static_metadata,
1575 			sizeof(struct dc_info_packet)) != 0)
1576 		return true;
1577 
1578 	return false;
1579 }
1580 
1581 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1582 		struct dc_stream_state *new_stream)
1583 {
1584 	if (cur_stream == NULL)
1585 		return true;
1586 
	/* If the sink pointer changed, this is a hotplug and we should do
	 * a full HW setup.
	 */
1590 	if (cur_stream->sink != new_stream->sink)
1591 		return true;
1592 
1593 	/* If output color space is changed, need to reprogram info frames */
1594 	if (cur_stream->output_color_space != new_stream->output_color_space)
1595 		return true;
1596 
1597 	return memcmp(
1598 		&cur_stream->timing,
1599 		&new_stream->timing,
1600 		sizeof(struct dc_crtc_timing)) != 0;
1601 }
1602 
1603 static bool are_stream_backends_same(
1604 	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1605 {
1606 	if (stream_a == stream_b)
1607 		return true;
1608 
1609 	if (stream_a == NULL || stream_b == NULL)
1610 		return false;
1611 
1612 	if (is_timing_changed(stream_a, stream_b))
1613 		return false;
1614 
1615 	if (is_hdr_static_meta_changed(stream_a, stream_b))
1616 		return false;
1617 
1618 	return true;
1619 }
1620 
1621 bool dc_is_stream_unchanged(
1622 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1623 {
1624 
1625 	if (!are_stream_backends_same(old_stream, stream))
1626 		return false;
1627 
1628 	return true;
1629 }
1630 
1631 bool dc_is_stream_scaling_unchanged(
1632 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1633 {
1634 	if (old_stream == stream)
1635 		return true;
1636 
1637 	if (old_stream == NULL || stream == NULL)
1638 		return false;
1639 
1640 	if (memcmp(&old_stream->src,
1641 			&stream->src,
1642 			sizeof(struct rect)) != 0)
1643 		return false;
1644 
1645 	if (memcmp(&old_stream->dst,
1646 			&stream->dst,
1647 			sizeof(struct rect)) != 0)
1648 		return false;
1649 
1650 	return true;
1651 }
1652 
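/* Mark the given stream encoder as acquired or released in the resource
 * context.
 */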
1653 static void update_stream_engine_usage(
1654 		struct resource_context *res_ctx,
1655 		const struct resource_pool *pool,
1656 		struct stream_encoder *stream_enc,
1657 		bool acquired)
1658 {
1659 	int i;
1660 
1661 	for (i = 0; i < pool->stream_enc_count; i++) {
1662 		if (pool->stream_enc[i] == stream_enc)
1663 			res_ctx->is_stream_enc_acquired[i] = acquired;
1664 	}
1665 }
1666 
1667 /* TODO: release audio object */
1668 void update_audio_usage(
1669 		struct resource_context *res_ctx,
1670 		const struct resource_pool *pool,
1671 		struct audio *audio,
1672 		bool acquired)
1673 {
1674 	int i;
1675 	for (i = 0; i < pool->audio_count; i++) {
1676 		if (pool->audios[i] == audio)
1677 			res_ctx->is_audio_acquired[i] = acquired;
1678 	}
1679 }
1680 
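/*
 * Claim the first pipe with no stream attached, wire up the per-pipe
 * hardware blocks that share its index (TG, MI/HUBP, IPP, XFM/DPP, OPP)
 * and attach the stream. Returns the pipe index, or -1 if none is free.
 */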
1681 static int acquire_first_free_pipe(
1682 		struct resource_context *res_ctx,
1683 		const struct resource_pool *pool,
1684 		struct dc_stream_state *stream)
1685 {
1686 	int i;
1687 
1688 	for (i = 0; i < pool->pipe_count; i++) {
1689 		if (!res_ctx->pipe_ctx[i].stream) {
1690 			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1691 
1692 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1693 			pipe_ctx->plane_res.mi = pool->mis[i];
1694 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1695 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1696 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1697 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1698 			pipe_ctx->stream_res.opp = pool->opps[i];
1699 			if (pool->dpps[i])
1700 				pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
			pipe_ctx->pipe_idx = i;

			pipe_ctx->stream = stream;
1705 			return i;
1706 		}
1707 	}
1708 	return -1;
1709 }
1710 
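/*
 * Prefer the free stream encoder whose id matches the link encoder's
 * preferred engine; for DP signals, fall back to the last free encoder
 * found (see the comment below for why the preferred one may be taken).
 */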
1711 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1712 		struct resource_context *res_ctx,
1713 		const struct resource_pool *pool,
1714 		struct dc_stream_state *stream)
1715 {
1716 	int i;
1717 	int j = -1;
1718 	struct dc_link *link = stream->sink->link;
1719 
1720 	for (i = 0; i < pool->stream_enc_count; i++) {
1721 		if (!res_ctx->is_stream_enc_acquired[i] &&
1722 				pool->stream_enc[i]) {
			/* Store the first available encoder for the second MST
			 * display in a daisy-chain use case.
			 */
1725 			j = i;
1726 			if (pool->stream_enc[i]->id ==
1727 					link->link_enc->preferred_engine)
1728 				return pool->stream_enc[i];
1729 		}
1730 	}
1731 
	/*
	 * The code below handles cases where the stream encoder is already
	 * acquired:
	 * 1) by the second MST display in a daisy chain, so the preferred
	 *    engine is already taken;
	 * 2) by another link whose preferred engine was already claimed by
	 *    some MST configuration.
	 *
	 * If the signal is of DP type and the preferred engine was not found,
	 * return the last available encoder.
	 *
	 * TODO - This is just a patch up and a generic solution is
	 * required for non DP connectors.
	 */
1744 
1745 	if (j >= 0 && dc_is_dp_signal(stream->signal))
1746 		return pool->stream_enc[j];
1747 
1748 	return NULL;
1749 }
1750 
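/*
 * Pick the audio endpoint whose instance matches the acquired stream
 * encoder's engine id; if none matches, fall back to the first free one.
 */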
1751 static struct audio *find_first_free_audio(
1752 		struct resource_context *res_ctx,
1753 		const struct resource_pool *pool,
1754 		enum engine_id id)
1755 {
	int i;

	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i] &&
				res_ctx->is_stream_enc_acquired[i]) {
			/* There are enough audio endpoints; find the one whose
			 * instance matches the stream encoder's engine id.
			 */
			if (id != i)
				continue;

			return pool->audios[i];
		}
	}

	/* No matching instance found; fall back to first come, first served. */
	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i])
			return pool->audios[i];
	}

	return NULL;
1773 }
1774 
1775 bool resource_is_stream_unchanged(
1776 	struct dc_state *old_context, struct dc_stream_state *stream)
1777 {
1778 	int i;
1779 
1780 	for (i = 0; i < old_context->stream_count; i++) {
1781 		struct dc_stream_state *old_stream = old_context->streams[i];
1782 
		if (are_stream_backends_same(old_stream, stream))
			return true;
1785 	}
1786 
1787 	return false;
1788 }
1789 
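/*
 * Append the stream to the new context (capacity is bounded by the number
 * of timing generators), take a reference on it, and let the per-ASIC
 * add_stream_to_ctx hook claim whatever additional resources it needs.
 */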
1790 enum dc_status dc_add_stream_to_ctx(
1791 		struct dc *dc,
1792 		struct dc_state *new_ctx,
1793 		struct dc_stream_state *stream)
1794 {
1795 	struct dc_context *dc_ctx = dc->ctx;
1796 	enum dc_status res;
1797 
1798 	if (new_ctx->stream_count >= dc->res_pool->timing_generator_count) {
1799 		DC_ERROR("Max streams reached, can't add stream %p !\n", stream);
1800 		return DC_ERROR_UNEXPECTED;
1801 	}
1802 
1803 	new_ctx->streams[new_ctx->stream_count] = stream;
1804 	dc_stream_retain(stream);
1805 	new_ctx->stream_count++;
1806 
1807 	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1808 	if (res != DC_OK)
1809 		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1810 
1811 	return res;
1812 }
1813 
1814 enum dc_status dc_remove_stream_from_ctx(
1815 			struct dc *dc,
1816 			struct dc_state *new_ctx,
1817 			struct dc_stream_state *stream)
1818 {
1819 	int i;
1820 	struct dc_context *dc_ctx = dc->ctx;
1821 	struct pipe_ctx *del_pipe = NULL;
1822 
1823 	/* Release primary pipe */
1824 	for (i = 0; i < MAX_PIPES; i++) {
1825 		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1826 				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1827 			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1828 
1829 			ASSERT(del_pipe->stream_res.stream_enc);
1830 			update_stream_engine_usage(
1831 					&new_ctx->res_ctx,
1832 						dc->res_pool,
1833 					del_pipe->stream_res.stream_enc,
1834 					false);
1835 
1836 			if (del_pipe->stream_res.audio)
1837 				update_audio_usage(
1838 					&new_ctx->res_ctx,
1839 					dc->res_pool,
1840 					del_pipe->stream_res.audio,
1841 					false);
1842 
1843 			resource_unreference_clock_source(&new_ctx->res_ctx,
1844 							  dc->res_pool,
1845 							  del_pipe->clock_source);
1846 
1847 			if (dc->res_pool->funcs->remove_stream_from_ctx)
1848 				dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1849 
1850 			memset(del_pipe, 0, sizeof(*del_pipe));
1851 
1852 			break;
1853 		}
1854 	}
1855 
1856 	if (!del_pipe) {
1857 		DC_ERROR("Pipe not found for stream %p !\n", stream);
1858 		return DC_ERROR_UNEXPECTED;
1859 	}
1860 
1861 	for (i = 0; i < new_ctx->stream_count; i++)
1862 		if (new_ctx->streams[i] == stream)
1863 			break;
1864 
	if (i == new_ctx->stream_count) {
1866 		DC_ERROR("Context doesn't have stream %p !\n", stream);
1867 		return DC_ERROR_UNEXPECTED;
1868 	}
1869 
1870 	dc_stream_release(new_ctx->streams[i]);
1871 	new_ctx->stream_count--;
1872 
1873 	/* Trim back arrays */
1874 	for (; i < new_ctx->stream_count; i++) {
1875 		new_ctx->streams[i] = new_ctx->streams[i + 1];
1876 		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1877 	}
1878 
1879 	new_ctx->streams[new_ctx->stream_count] = NULL;
1880 	memset(
1881 			&new_ctx->stream_status[new_ctx->stream_count],
1882 			0,
1883 			sizeof(new_ctx->stream_status[0]));
1884 
1885 	return DC_OK;
1886 }
1887 
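/*
 * Find an existing non-DP, non-virtual stream whose timing can be
 * synchronized with stream_needs_pll, so the two streams can share a PLL.
 */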
1888 static struct dc_stream_state *find_pll_sharable_stream(
1889 		struct dc_stream_state *stream_needs_pll,
1890 		struct dc_state *context)
1891 {
1892 	int i;
1893 
1894 	for (i = 0; i < context->stream_count; i++) {
1895 		struct dc_stream_state *stream_has_pll = context->streams[i];
1896 
		/* We are looking for a non-DP, non-virtual stream */
1898 		if (resource_are_streams_timing_synchronizable(
1899 			stream_needs_pll, stream_has_pll)
1900 			&& !dc_is_dp_signal(stream_has_pll->signal)
1901 			&& stream_has_pll->sink->link->connector_signal
1902 			!= SIGNAL_TYPE_VIRTUAL)
1903 			return stream_has_pll;
1904 
1905 	}
1906 
1907 	return NULL;
1908 }
1909 
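/*
 * Scale the pixel clock by the deep-color ratio (30/24, 36/24, 48/24)
 * unless the encoding is YCbCr 4:2:2; YCbCr 4:2:0 first halves the clock.
 */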
1910 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1911 {
1912 	uint32_t pix_clk = timing->pix_clk_khz;
1913 	uint32_t normalized_pix_clk = pix_clk;
1914 
1915 	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1916 		pix_clk /= 2;
1917 	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1918 		switch (timing->display_color_depth) {
1919 		case COLOR_DEPTH_888:
1920 			normalized_pix_clk = pix_clk;
1921 			break;
1922 		case COLOR_DEPTH_101010:
1923 			normalized_pix_clk = (pix_clk * 30) / 24;
1924 			break;
		case COLOR_DEPTH_121212:
			normalized_pix_clk = (pix_clk * 36) / 24;
			break;
		case COLOR_DEPTH_161616:
			normalized_pix_clk = (pix_clk * 48) / 24;
			break;
		default:
			ASSERT(0);
			break;
		}
1935 	}
1936 	return normalized_pix_clk;
1937 }
1938 
1939 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1940 {
1941 	/* update actual pixel clock on all streams */
1942 	if (dc_is_hdmi_signal(stream->signal))
1943 		stream->phy_pix_clk = get_norm_pix_clk(
1944 			&stream->timing);
1945 	else
1946 		stream->phy_pix_clk =
1947 			stream->timing.pix_clk_khz;
1948 }
1949 
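/*
 * Acquire the pool resources a stream needs: a free pipe (or a split pipe
 * on DCN), a stream encoder matching the link, an audio endpoint when the
 * sink can take audio, and ABM for embedded panels. The chosen OTG and
 * stream encoder instances are recorded in the stream status.
 */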
1950 enum dc_status resource_map_pool_resources(
1951 		const struct dc  *dc,
1952 		struct dc_state *context,
1953 		struct dc_stream_state *stream)
1954 {
1955 	const struct resource_pool *pool = dc->res_pool;
1956 	int i;
1957 	struct dc_context *dc_ctx = dc->ctx;
1958 	struct pipe_ctx *pipe_ctx = NULL;
1959 	int pipe_idx = -1;
1960 
1961 	/* TODO Check if this is needed */
1962 	/*if (!resource_is_stream_unchanged(old_context, stream)) {
1963 			if (stream != NULL && old_context->streams[i] != NULL) {
1964 				stream->bit_depth_params =
1965 						old_context->streams[i]->bit_depth_params;
1966 				stream->clamping = old_context->streams[i]->clamping;
1967 				continue;
1968 			}
1969 		}
1970 	*/
1971 
1972 	/* acquire new resources */
1973 	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1974 
1975 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1976 	if (pipe_idx < 0)
1977 		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1978 #endif
1979 
1980 	if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
1981 		return DC_NO_CONTROLLER_RESOURCE;
1982 
1983 	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1984 
1985 	pipe_ctx->stream_res.stream_enc =
1986 		find_first_free_match_stream_enc_for_link(
1987 			&context->res_ctx, pool, stream);
1988 
1989 	if (!pipe_ctx->stream_res.stream_enc)
1990 		return DC_NO_STREAM_ENG_RESOURCE;
1991 
1992 	update_stream_engine_usage(
1993 		&context->res_ctx, pool,
1994 		pipe_ctx->stream_res.stream_enc,
1995 		true);
1996 
	/* TODO: Add a check for ASIC audio support and EDID audio */
1998 	if (!stream->sink->converter_disable_audio &&
1999 	    dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
2000 	    stream->audio_info.mode_count) {
		pipe_ctx->stream_res.audio = find_first_free_audio(
				&context->res_ctx, pool,
				pipe_ctx->stream_res.stream_enc->id);
2003 
		/*
		 * Audio is assigned on a first come, first served basis.
		 * Some ASICs have fewer audio resources than pipes.
		 */
2009 		if (pipe_ctx->stream_res.audio)
2010 			update_audio_usage(&context->res_ctx, pool,
2011 					   pipe_ctx->stream_res.audio, true);
2012 	}
2013 
2014 	/* Add ABM to the resource if on EDP */
2015 	if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
2016 		pipe_ctx->stream_res.abm = pool->abm;
2017 
2018 	for (i = 0; i < context->stream_count; i++)
2019 		if (context->streams[i] == stream) {
2020 			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
2021 			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
2022 			return DC_OK;
2023 		}
2024 
2025 	DC_ERROR("Stream %p not found in new ctx!\n", stream);
2026 	return DC_ERROR_UNEXPECTED;
2027 }
2028 
2029 void dc_resource_state_copy_construct_current(
2030 		const struct dc *dc,
2031 		struct dc_state *dst_ctx)
2032 {
2033 	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
2034 }
2035 
2036 
2037 void dc_resource_state_construct(
2038 		const struct dc *dc,
2039 		struct dc_state *dst_ctx)
2040 {
2041 	dst_ctx->dis_clk = dc->res_pool->dccg;
2042 }
2043 
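/*
 * Global validation: run the per-ASIC validate_global hook, move DP streams
 * that cannot share a PLL onto the dedicated DP clock source, rebuild the
 * scaling parameters and finally validate bandwidth.
 */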
2044 enum dc_status dc_validate_global_state(
2045 		struct dc *dc,
2046 		struct dc_state *new_ctx)
2047 {
2048 	enum dc_status result = DC_ERROR_UNEXPECTED;
2049 	int i, j;
2050 
2051 	if (!new_ctx)
2052 		return DC_ERROR_UNEXPECTED;
2053 
2054 	if (dc->res_pool->funcs->validate_global) {
2055 		result = dc->res_pool->funcs->validate_global(dc, new_ctx);
2056 		if (result != DC_OK)
2057 			return result;
2058 	}
2059 
2060 	for (i = 0; i < new_ctx->stream_count; i++) {
2061 		struct dc_stream_state *stream = new_ctx->streams[i];
2062 
2063 		for (j = 0; j < dc->res_pool->pipe_count; j++) {
2064 			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
2065 
2066 			if (pipe_ctx->stream != stream)
2067 				continue;
2068 
2069 			/* Switch to dp clock source only if there is
2070 			 * no non dp stream that shares the same timing
2071 			 * with the dp stream.
2072 			 */
2073 			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
2074 				!find_pll_sharable_stream(stream, new_ctx)) {
2075 
2076 				resource_unreference_clock_source(
2077 						&new_ctx->res_ctx,
2078 						dc->res_pool,
2079 						pipe_ctx->clock_source);
2080 
2081 				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
2082 				resource_reference_clock_source(
2083 						&new_ctx->res_ctx,
2084 						dc->res_pool,
2085 						 pipe_ctx->clock_source);
2086 			}
2087 		}
2088 	}
2089 
2090 	result = resource_build_scaling_params_for_context(dc, new_ctx);
2091 
2092 	if (result == DC_OK)
2093 		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2094 			result = DC_FAIL_BANDWIDTH_VALIDATE;
2095 
2096 	return result;
2097 }
2098 
2099 static void patch_gamut_packet_checksum(
2100 		struct dc_info_packet *gamut_packet)
2101 {
	/* For gamut packets we recalculate the checksum */
2103 	if (gamut_packet->valid) {
2104 		uint8_t chk_sum = 0;
2105 		uint8_t *ptr;
2106 		uint8_t i;
2107 
		/* Start of the Gamut data. */
2109 		ptr = &gamut_packet->sb[3];
2110 
2111 		for (i = 0; i <= gamut_packet->sb[1]; i++)
2112 			chk_sum += ptr[i];
2113 
2114 		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2115 	}
2116 }
2117 
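/* Build the AVI InfoFrame payload, including its checksum, for the stream
 * on this pipe.
 */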
2118 static void set_avi_info_frame(
2119 		struct dc_info_packet *info_packet,
2120 		struct pipe_ctx *pipe_ctx)
2121 {
2122 	struct dc_stream_state *stream = pipe_ctx->stream;
2123 	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2124 	uint32_t pixel_encoding = 0;
2125 	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2126 	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2127 	bool itc = false;
2128 	uint8_t itc_value = 0;
2129 	uint8_t cn0_cn1 = 0;
2130 	unsigned int cn0_cn1_value = 0;
2131 	uint8_t *check_sum = NULL;
2132 	uint8_t byte_index = 0;
2133 	union hdmi_info_packet hdmi_info;
2134 	union display_content_support support = {0};
2135 	unsigned int vic = pipe_ctx->stream->timing.vic;
2136 	enum dc_timing_3d_format format;
2137 
2138 	memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2139 
2140 	color_space = pipe_ctx->stream->output_color_space;
2141 	if (color_space == COLOR_SPACE_UNKNOWN)
2142 		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
			COLOR_SPACE_SRGB : COLOR_SPACE_YCBCR709;
2144 
2145 	/* Initialize header */
2146 	hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
	/* InfoFrameVersion_3 is defined by CEA-861-F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1).
	 */
2149 	hdmi_info.bits.header.version = 2;
2150 	hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2151 
2152 	/*
2153 	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2154 	 * according to HDMI 2.0 spec (Section 10.1)
2155 	 */
2156 
2157 	switch (stream->timing.pixel_encoding) {
2158 	case PIXEL_ENCODING_YCBCR422:
2159 		pixel_encoding = 1;
2160 		break;
2161 
2162 	case PIXEL_ENCODING_YCBCR444:
2163 		pixel_encoding = 2;
2164 		break;
2165 	case PIXEL_ENCODING_YCBCR420:
2166 		pixel_encoding = 3;
2167 		break;
2168 
2169 	case PIXEL_ENCODING_RGB:
2170 	default:
2171 		pixel_encoding = 0;
2172 	}
2173 
2174 	/* Y0_Y1_Y2 : The pixel encoding */
2175 	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2176 	hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2177 
2178 	/* A0 = 1 Active Format Information valid */
2179 	hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2180 
2181 	/* B0, B1 = 3; Bar info data is valid */
2182 	hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2183 
2184 	hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2185 
2186 	/* S0, S1 : Underscan / Overscan */
2187 	/* TODO: un-hardcode scan type */
2188 	scan_type = SCANNING_TYPE_UNDERSCAN;
2189 	hdmi_info.bits.S0_S1 = scan_type;
2190 
2191 	/* C0, C1 : Colorimetry */
2192 	if (color_space == COLOR_SPACE_YCBCR709 ||
2193 			color_space == COLOR_SPACE_YCBCR709_LIMITED)
2194 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2195 	else if (color_space == COLOR_SPACE_YCBCR601 ||
2196 			color_space == COLOR_SPACE_YCBCR601_LIMITED)
2197 		hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
	else
		hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2201 	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2202 			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2203 			color_space == COLOR_SPACE_2020_YCBCR) {
2204 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2205 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2206 	} else if (color_space == COLOR_SPACE_ADOBERGB) {
2207 		hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2208 		hdmi_info.bits.C0_C1   = COLORIMETRY_EXTENDED;
2209 	}
2210 
2211 	/* TODO: un-hardcode aspect ratio */
2212 	aspect = stream->timing.aspect_ratio;
2213 
2214 	switch (aspect) {
2215 	case ASPECT_RATIO_4_3:
2216 	case ASPECT_RATIO_16_9:
2217 		hdmi_info.bits.M0_M1 = aspect;
2218 		break;
2219 
2220 	case ASPECT_RATIO_NO_DATA:
2221 	case ASPECT_RATIO_64_27:
2222 	case ASPECT_RATIO_256_135:
2223 	default:
2224 		hdmi_info.bits.M0_M1 = 0;
2225 	}
2226 
2227 	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2228 	hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2229 
2230 	/* TODO: un-hardcode cn0_cn1 and itc */
2231 
2232 	cn0_cn1 = 0;
2233 	cn0_cn1_value = 0;
2234 
2235 	itc = true;
2236 	itc_value = 1;
2237 
2238 	support = stream->sink->edid_caps.content_support;
2239 
2240 	if (itc) {
2241 		if (!support.bits.valid_content_type) {
2242 			cn0_cn1_value = 0;
2243 		} else {
2244 			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2245 				if (support.bits.graphics_content == 1) {
2246 					cn0_cn1_value = 0;
2247 				}
2248 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2249 				if (support.bits.photo_content == 1) {
2250 					cn0_cn1_value = 1;
2251 				} else {
2252 					cn0_cn1_value = 0;
2253 					itc_value = 0;
2254 				}
2255 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2256 				if (support.bits.cinema_content == 1) {
2257 					cn0_cn1_value = 2;
2258 				} else {
2259 					cn0_cn1_value = 0;
2260 					itc_value = 0;
2261 				}
2262 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2263 				if (support.bits.game_content == 1) {
2264 					cn0_cn1_value = 3;
2265 				} else {
2266 					cn0_cn1_value = 0;
2267 					itc_value = 0;
2268 				}
2269 			}
2270 		}
2271 		hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2272 		hdmi_info.bits.ITC = itc_value;
2273 	}
2274 
2275 	/* TODO : We should handle YCC quantization */
2276 	/* but we do not have matrix calculation */
2277 	if (stream->sink->edid_caps.qs_bit == 1 &&
2278 			stream->sink->edid_caps.qy_bit == 1) {
2279 		if (color_space == COLOR_SPACE_SRGB ||
2280 			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2281 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
2282 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2283 		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2284 					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2285 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
2286 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2287 		} else {
2288 			hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2289 			hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2290 		}
2291 	} else {
2292 		hdmi_info.bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
		hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2294 	}
2295 
	/* VIC */
	format = stream->timing.timing_3d_format;
	/* TODO: add 3D stereo support */
	if (format != TIMING_3D_FORMAT_NONE) {
		/* Per the HDMI spec, the HDMI VIC needs to be converted to a
		 * CEA VIC when 3D is enabled.
		 */
		switch (pipe_ctx->stream->timing.hdmi_vic) {
2302 		case 1:
2303 			vic = 95;
2304 			break;
2305 		case 2:
2306 			vic = 94;
2307 			break;
2308 		case 3:
2309 			vic = 93;
2310 			break;
2311 		case 4:
2312 			vic = 98;
2313 			break;
2314 		default:
2315 			break;
2316 		}
2317 	}
2318 	hdmi_info.bits.VIC0_VIC7 = vic;
2319 
	/* Pixel repetition:
	 * PR0 - PR3 start from 0, whereas the timing's pixel repetition
	 * flag starts from 1.
	 */
2323 	hdmi_info.bits.PR0_PR3 = 0;
2324 
2325 	/* Bar Info
2326 	 * barTop:    Line Number of End of Top Bar.
2327 	 * barBottom: Line Number of Start of Bottom Bar.
2328 	 * barLeft:   Pixel Number of End of Left Bar.
2329 	 * barRight:  Pixel Number of Start of Right Bar. */
2330 	hdmi_info.bits.bar_top = stream->timing.v_border_top;
2331 	hdmi_info.bits.bar_bottom = (stream->timing.v_total
2332 			- stream->timing.v_border_bottom + 1);
2333 	hdmi_info.bits.bar_left  = stream->timing.h_border_left;
2334 	hdmi_info.bits.bar_right = (stream->timing.h_total
2335 			- stream->timing.h_border_right + 1);
2336 
2337 	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2338 	check_sum = &hdmi_info.packet_raw_data.sb[0];
2339 
2340 	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2341 
2342 	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2343 		*check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2344 
2345 	/* one byte complement */
2346 	*check_sum = (uint8_t) (0x100 - *check_sum);
2347 
2348 	/* Store in hw_path_mode */
2349 	info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2350 	info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2351 	info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2352 
2353 	for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2354 		info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2355 
2356 	info_packet->valid = true;
2357 }
2358 
2359 static void set_vendor_info_packet(
2360 		struct dc_info_packet *info_packet,
2361 		struct dc_stream_state *stream)
2362 {
2363 	uint32_t length = 0;
2364 	bool hdmi_vic_mode = false;
2365 	uint8_t checksum = 0;
2366 	uint32_t i = 0;
2367 	enum dc_timing_3d_format format;
	/* TODO: the length can differ depending on packet content,
	 * e.g. 24 for Dolby Vision instead of 5.
	 */
2370 
2371 	info_packet->valid = false;
2372 
2373 	format = stream->timing.timing_3d_format;
2374 	if (stream->view_format == VIEW_3D_FORMAT_NONE)
2375 		format = TIMING_3D_FORMAT_NONE;
2376 
2377 	/* Can be different depending on packet content */
2378 	length = 5;
2379 
2380 	if (stream->timing.hdmi_vic != 0
2381 			&& stream->timing.h_total >= 3840
2382 			&& stream->timing.v_total >= 2160)
2383 		hdmi_vic_mode = true;
2384 
	/* According to the HDMI 1.4a CTS, a VSIF should be sent for both
	 * 3D stereo and HDMI VIC modes; for all other modes no VSIF is sent.
	 */
2388 
2389 	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2390 		return;
2391 
2392 	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2393 	info_packet->sb[1] = 0x03;
2394 	info_packet->sb[2] = 0x0C;
2395 	info_packet->sb[3] = 0x00;
2396 
	/* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
	 * The values for HDMI_Video_Format are:
	 * 0x0 (0b000) - No additional HDMI video format is presented in this
	 * packet
	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
	 * parameter follows
	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
	 * potentially 3D_Ext_Data follows
	 * 0x3..0x7 (0b011..0b111) - reserved for future use
	 */
2406 	if (format != TIMING_3D_FORMAT_NONE)
2407 		info_packet->sb[4] = (2 << 5);
2408 	else if (hdmi_vic_mode)
2409 		info_packet->sb[4] = (1 << 5);
2410 
	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
	 * The values for 3D_Structure are:
	 * 0x0 - Frame Packing
	 * 0x1 - Field Alternative
	 * 0x2 - Line Alternative
	 * 0x3 - Side-by-Side (full)
	 * 0x4 - L + depth
	 * 0x5 - L + depth + graphics + graphics-depth
	 * 0x6 - Top-and-Bottom
	 * 0x7 - Reserved for future use
	 * 0x8 - Side-by-Side (Half)
	 * 0x9..0xE - Reserved for future use
	 * 0xF - Not used
	 */
2425 	switch (format) {
2426 	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2427 	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2428 		info_packet->sb[5] = (0x0 << 4);
2429 		break;
2430 
2431 	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2432 	case TIMING_3D_FORMAT_SBS_SW_PACKED:
2433 		info_packet->sb[5] = (0x8 << 4);
2434 		length = 6;
2435 		break;
2436 
2437 	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2438 	case TIMING_3D_FORMAT_TB_SW_PACKED:
2439 		info_packet->sb[5] = (0x6 << 4);
2440 		break;
2441 
2442 	default:
2443 		break;
2444 	}
2445 
	/* PB5: If PB4 is set to 0x1 (extended resolution format),
	 * fill PB5 with the correct HDMI VIC code.
	 */
2448 	if (hdmi_vic_mode)
2449 		info_packet->sb[5] = stream->timing.hdmi_vic;
2450 
2451 	/* Header */
2452 	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2453 	info_packet->hb1 = 0x01; /* Version */
2454 
2455 	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2456 	info_packet->hb2 = (uint8_t) (length);
2457 
2458 	/* Calculate checksum */
2459 	checksum = 0;
2460 	checksum += info_packet->hb0;
2461 	checksum += info_packet->hb1;
2462 	checksum += info_packet->hb2;
2463 
2464 	for (i = 1; i <= length; i++)
2465 		checksum += info_packet->sb[i];
2466 
2467 	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2468 
2469 	info_packet->valid = true;
2470 }
2471 
2472 static void set_spd_info_packet(
2473 		struct dc_info_packet *info_packet,
2474 		struct dc_stream_state *stream)
2475 {
2476 	/* SPD info packet for FreeSync */
2477 
2478 	unsigned char checksum = 0;
2479 	unsigned int idx, payload_size = 0;
2480 
2481 	/* Check if Freesync is supported. Return if false. If true,
2482 	 * set the corresponding bit in the info packet
2483 	 */
2484 	if (stream->freesync_ctx.supported == false)
2485 		return;
2486 
2487 	if (dc_is_hdmi_signal(stream->signal)) {
2488 
2489 		/* HEADER */
2490 
2491 		/* HB0  = Packet Type = 0x83 (Source Product
2492 		 *	  Descriptor InfoFrame)
2493 		 */
2494 		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2495 
2496 		/* HB1  = Version = 0x01 */
2497 		info_packet->hb1 = 0x01;
2498 
2499 		/* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2500 		info_packet->hb2 = 0x08;
2501 
2502 		payload_size = 0x08;
2503 
2504 	} else if (dc_is_dp_signal(stream->signal)) {
2505 
2506 		/* HEADER */
2507 
2508 		/* HB0  = Secondary-data Packet ID = 0 - Only non-zero
2509 		 *	  when used to associate audio related info packets
2510 		 */
2511 		info_packet->hb0 = 0x00;
2512 
2513 		/* HB1  = Packet Type = 0x83 (Source Product
2514 		 *	  Descriptor InfoFrame)
2515 		 */
2516 		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2517 
2518 		/* HB2  = [Bits 7:0 = Least significant eight bits -
2519 		 *	  For INFOFRAME, the value must be 1Bh]
2520 		 */
2521 		info_packet->hb2 = 0x1B;
2522 
2523 		/* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2524 		 *	  [Bits 1:0 = Most significant two bits = 0x00]
2525 		 */
2526 		info_packet->hb3 = 0x04;
2527 
2528 		payload_size = 0x1B;
2529 	}
2530 
2531 	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2532 	info_packet->sb[1] = 0x1A;
2533 
2534 	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2535 	info_packet->sb[2] = 0x00;
2536 
2537 	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2538 	info_packet->sb[3] = 0x00;
2539 
2540 	/* PB4 = Reserved */
2541 	info_packet->sb[4] = 0x00;
2542 
2543 	/* PB5 = Reserved */
2544 	info_packet->sb[5] = 0x00;
2545 
2546 	/* PB6 = [Bits 7:3 = Reserved] */
2547 	info_packet->sb[6] = 0x00;
2548 
2549 	if (stream->freesync_ctx.supported == true)
2550 		/* PB6 = [Bit 0 = FreeSync Supported] */
2551 		info_packet->sb[6] |= 0x01;
2552 
2553 	if (stream->freesync_ctx.enabled == true)
2554 		/* PB6 = [Bit 1 = FreeSync Enabled] */
2555 		info_packet->sb[6] |= 0x02;
2556 
2557 	if (stream->freesync_ctx.active == true)
2558 		/* PB6 = [Bit 2 = FreeSync Active] */
2559 		info_packet->sb[6] |= 0x04;
2560 
2561 	/* PB7 = FreeSync Minimum refresh rate (Hz) */
2562 	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2563 			min_refresh_in_micro_hz / 1000000);
2564 
2565 	/* PB8 = FreeSync Maximum refresh rate (Hz)
2566 	 *
2567 	 * Note: We do not use the maximum capable refresh rate
2568 	 * of the panel, because we should never go above the field
2569 	 * rate of the mode timing set.
2570 	 */
2571 	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2572 			nominal_refresh_in_micro_hz / 1000000);
2573 
2574 	/* PB9 - PB27  = Reserved */
2575 	for (idx = 9; idx <= 27; idx++)
2576 		info_packet->sb[idx] = 0x00;
2577 
2578 	/* Calculate checksum */
2579 	checksum += info_packet->hb0;
2580 	checksum += info_packet->hb1;
2581 	checksum += info_packet->hb2;
2582 	checksum += info_packet->hb3;
2583 
2584 	for (idx = 1; idx <= payload_size; idx++)
2585 		checksum += info_packet->sb[idx];
2586 
2587 	/* PB0 = Checksum (one byte complement) */
2588 	info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2589 
2590 	info_packet->valid = true;
2591 }
2592 
2593 static void set_hdr_static_info_packet(
2594 		struct dc_info_packet *info_packet,
2595 		struct dc_stream_state *stream)
2596 {
2597 	/* HDR Static Metadata info packet for HDR10 */
2598 
2599 	if (!stream->hdr_static_metadata.valid ||
2600 			stream->use_dynamic_meta)
2601 		return;
2602 
2603 	*info_packet = stream->hdr_static_metadata;
2604 }
2605 
2606 static void set_vsc_info_packet(
2607 		struct dc_info_packet *info_packet,
2608 		struct dc_stream_state *stream)
2609 {
2610 	unsigned int vscPacketRevision = 0;
2611 	unsigned int i;
2612 
	/* VSC packet revision is set to 2 when the DP revision >= 1.2 */
	if (stream->psr_version != 0)
		vscPacketRevision = 2;
2617 
2618 	/* VSC packet not needed based on the features
2619 	 * supported by this DP display
2620 	 */
2621 	if (vscPacketRevision == 0)
2622 		return;
2623 
2624 	if (vscPacketRevision == 0x2) {
2625 		/* Secondary-data Packet ID = 0*/
2626 		info_packet->hb0 = 0x00;
2627 		/* 07h - Packet Type Value indicating Video
2628 		 * Stream Configuration packet
2629 		 */
2630 		info_packet->hb1 = 0x07;
2631 		/* 02h = VSC SDP supporting 3D stereo and PSR
2632 		 * (applies to eDP v1.3 or higher).
2633 		 */
2634 		info_packet->hb2 = 0x02;
2635 		/* 08h = VSC packet supporting 3D stereo + PSR
2636 		 * (HB2 = 02h).
2637 		 */
2638 		info_packet->hb3 = 0x08;
2639 
2640 		for (i = 0; i < 28; i++)
2641 			info_packet->sb[i] = 0;
2642 
2643 		info_packet->valid = true;
2644 	}
2645 
	/* TODO: stereo 3D support and extended pixel encoding / colorimetry */
2647 }
2648 
2649 void dc_resource_state_destruct(struct dc_state *context)
2650 {
2651 	int i, j;
2652 
2653 	for (i = 0; i < context->stream_count; i++) {
2654 		for (j = 0; j < context->stream_status[i].plane_count; j++)
2655 			dc_plane_state_release(
2656 				context->stream_status[i].plane_states[j]);
2657 
2658 		context->stream_status[i].plane_count = 0;
2659 		dc_stream_release(context->streams[i]);
2660 		context->streams[i] = NULL;
2661 	}
2662 }
2663 
2664 /*
2665  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2666  * by the src_ctx
2667  */
2668 void dc_resource_state_copy_construct(
2669 		const struct dc_state *src_ctx,
2670 		struct dc_state *dst_ctx)
2671 {
2672 	int i, j;
2673 	struct kref refcount = dst_ctx->refcount;
2674 
2675 	*dst_ctx = *src_ctx;
2676 
2677 	for (i = 0; i < MAX_PIPES; i++) {
2678 		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2679 
		if (cur_pipe->top_pipe)
			cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];

		if (cur_pipe->bottom_pipe)
			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
	}
2687 
2688 	for (i = 0; i < dst_ctx->stream_count; i++) {
2689 		dc_stream_retain(dst_ctx->streams[i]);
2690 		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2691 			dc_plane_state_retain(
2692 				dst_ctx->stream_status[i].plane_states[j]);
2693 	}
2694 
2695 	/* context refcount should not be overridden */
2696 	dst_ctx->refcount = refcount;
2697 
2698 }
2699 
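/* Return the first clock source in the pool with a zero reference count. */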
2700 struct clock_source *dc_resource_find_first_free_pll(
2701 		struct resource_context *res_ctx,
2702 		const struct resource_pool *pool)
2703 {
2704 	int i;
2705 
2706 	for (i = 0; i < pool->clk_src_count; ++i) {
2707 		if (res_ctx->clock_source_ref_count[i] == 0)
2708 			return pool->clock_sources[i];
2709 	}
2710 
2711 	return NULL;
2712 }
2713 
2714 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2715 {
2716 	enum signal_type signal = SIGNAL_TYPE_NONE;
2717 	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2718 
2719 	/* default all packets to invalid */
2720 	info->avi.valid = false;
2721 	info->gamut.valid = false;
2722 	info->vendor.valid = false;
2723 	info->spd.valid = false;
2724 	info->hdrsmd.valid = false;
2725 	info->vsc.valid = false;
2726 
2727 	signal = pipe_ctx->stream->signal;
2728 
	/* HDMI and DP have different info packets */
2730 	if (dc_is_hdmi_signal(signal)) {
2731 		set_avi_info_frame(&info->avi, pipe_ctx);
2732 
2733 		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2734 
2735 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2736 
2737 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2738 
2739 	} else if (dc_is_dp_signal(signal)) {
2740 		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2741 
2742 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2743 
2744 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2745 	}
2746 
2747 	patch_gamut_packet_checksum(&info->gamut);
2748 }
2749 
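/*
 * Pick a clock source for the head pipe of the stream: DP and virtual
 * signals use the dedicated DP clock source, everything else tries to
 * share an already-used PLL before falling back to the first free one.
 */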
2750 enum dc_status resource_map_clock_resources(
2751 		const struct dc  *dc,
2752 		struct dc_state *context,
2753 		struct dc_stream_state *stream)
2754 {
2755 	/* acquire new resources */
2756 	const struct resource_pool *pool = dc->res_pool;
2757 	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2758 				&context->res_ctx, stream);
2759 
2760 	if (!pipe_ctx)
2761 		return DC_ERROR_UNEXPECTED;
2762 
2763 	if (dc_is_dp_signal(pipe_ctx->stream->signal)
2764 		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2765 		pipe_ctx->clock_source = pool->dp_clock_source;
2766 	else {
2767 		pipe_ctx->clock_source = NULL;
2768 
2769 		if (!dc->config.disable_disp_pll_sharing)
2770 			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2771 				&context->res_ctx,
2772 				pipe_ctx);
2773 
2774 		if (pipe_ctx->clock_source == NULL)
2775 			pipe_ctx->clock_source =
2776 				dc_resource_find_first_free_pll(
2777 					&context->res_ctx,
2778 					pool);
2779 	}
2780 
2781 	if (pipe_ctx->clock_source == NULL)
2782 		return DC_NO_CLOCK_SOURCE_RESOURCE;
2783 
2784 	resource_reference_clock_source(
2785 		&context->res_ctx, pool,
2786 		pipe_ctx->clock_source);
2787 
2788 	return DC_OK;
2789 }
2790 
2791 /*
2792  * Note: We need to disable output if clock sources change,
2793  * since bios does optimization and doesn't apply if changing
2794  * PHY when not already disabled.
2795  */
2796 bool pipe_need_reprogram(
2797 		struct pipe_ctx *pipe_ctx_old,
2798 		struct pipe_ctx *pipe_ctx)
2799 {
2800 	if (!pipe_ctx_old->stream)
2801 		return false;
2802 
2803 	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2804 		return true;
2805 
2806 	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2807 		return true;
2808 
2809 	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2810 		return true;
2811 
2812 	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2813 			&& pipe_ctx_old->stream != pipe_ctx->stream)
2814 		return true;
2815 
2816 	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2817 		return true;
2818 
2819 	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2820 		return true;
2821 
2822 	if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2823 		return true;
2824 
2825 	return false;
2826 }
2827 
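/*
 * Translate the stream's dither option into formatter bit-depth reduction
 * flags: truncation, spatial dithering and temporal (frame modulation)
 * dithering, with DITHER_OPTION_DEFAULT resolved from the color depth.
 */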
2828 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2829 		struct bit_depth_reduction_params *fmt_bit_depth)
2830 {
2831 	enum dc_dither_option option = stream->dither_option;
2832 	enum dc_pixel_encoding pixel_encoding =
2833 			stream->timing.pixel_encoding;
2834 
2835 	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2836 
2837 	if (option == DITHER_OPTION_DEFAULT) {
2838 		switch (stream->timing.display_color_depth) {
2839 		case COLOR_DEPTH_666:
2840 			option = DITHER_OPTION_SPATIAL6;
2841 			break;
2842 		case COLOR_DEPTH_888:
2843 			option = DITHER_OPTION_SPATIAL8;
2844 			break;
2845 		case COLOR_DEPTH_101010:
2846 			option = DITHER_OPTION_SPATIAL10;
2847 			break;
2848 		default:
2849 			option = DITHER_OPTION_DISABLE;
2850 		}
2851 	}
2852 
2853 	if (option == DITHER_OPTION_DISABLE)
2854 		return;
2855 
2856 	if (option == DITHER_OPTION_TRUN6) {
2857 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2858 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2859 	} else if (option == DITHER_OPTION_TRUN8 ||
2860 			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2861 			option == DITHER_OPTION_TRUN8_FM6) {
2862 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2863 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2864 	} else if (option == DITHER_OPTION_TRUN10        ||
2865 			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
2866 			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
2867 			option == DITHER_OPTION_TRUN10_FM8     ||
2868 			option == DITHER_OPTION_TRUN10_FM6     ||
2869 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2870 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2871 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2872 	}
2873 
	/* Special case - the formatter can only reduce by at most 4 bits.
	 * When reducing from 12 to 6 bits, HW recommends using truncation
	 * with round mode (if we did nothing, truncation to 10 bits would
	 * be used). Note that any 12->10 bit reduction is ignored prior to
	 * DCE8, as the input was 10 bits.
	 */
2881 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2882 			option == DITHER_OPTION_SPATIAL6 ||
2883 			option == DITHER_OPTION_FM6) {
2884 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2885 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2886 		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2887 	}
2888 
2889 	/* spatial dither
2890 	 * note that spatial modes 1-3 are never used
2891 	 */
2892 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
2893 			option == DITHER_OPTION_SPATIAL6 ||
2894 			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
2895 			option == DITHER_OPTION_TRUN8_SPATIAL6) {
2896 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2897 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2898 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2899 		fmt_bit_depth->flags.RGB_RANDOM =
2900 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2901 	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
2902 			option == DITHER_OPTION_SPATIAL8 ||
2903 			option == DITHER_OPTION_SPATIAL8_FM6        ||
2904 			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
2905 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2906 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2907 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2908 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2909 		fmt_bit_depth->flags.RGB_RANDOM =
2910 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2911 	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2912 			option == DITHER_OPTION_SPATIAL10 ||
2913 			option == DITHER_OPTION_SPATIAL10_FM8 ||
2914 			option == DITHER_OPTION_SPATIAL10_FM6) {
2915 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2916 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2917 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2918 		fmt_bit_depth->flags.RGB_RANDOM =
2919 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2920 	}
2921 
2922 	if (option == DITHER_OPTION_SPATIAL6 ||
2923 			option == DITHER_OPTION_SPATIAL8 ||
2924 			option == DITHER_OPTION_SPATIAL10) {
2925 		fmt_bit_depth->flags.FRAME_RANDOM = 0;
2926 	} else {
2927 		fmt_bit_depth->flags.FRAME_RANDOM = 1;
2928 	}
2929 
	/* temporal dither */
2933 	if (option == DITHER_OPTION_FM6           ||
2934 			option == DITHER_OPTION_SPATIAL8_FM6     ||
2935 			option == DITHER_OPTION_SPATIAL10_FM6     ||
2936 			option == DITHER_OPTION_TRUN10_FM6     ||
2937 			option == DITHER_OPTION_TRUN8_FM6      ||
2938 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2939 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2940 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2941 	} else if (option == DITHER_OPTION_FM8        ||
2942 			option == DITHER_OPTION_SPATIAL10_FM8  ||
2943 			option == DITHER_OPTION_TRUN10_FM8) {
2944 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2945 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2946 	} else if (option == DITHER_OPTION_FM10) {
2947 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2948 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2949 	}
2950 
2951 	fmt_bit_depth->pixel_encoding = pixel_encoding;
2952 }
2953 
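/*
 * Stream-level validation: check the timing against the timing generator,
 * the output against the link encoder, and then the link's mode/timing
 * limits.
 */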
2954 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2955 {
	struct dc *core_dc = dc;
2957 	struct dc_link *link = stream->sink->link;
2958 	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2959 	enum dc_status res = DC_OK;
2960 
2961 	calculate_phy_pix_clks(stream);
2962 
2963 	if (!tg->funcs->validate_timing(tg, &stream->timing))
2964 		res = DC_FAIL_CONTROLLER_VALIDATE;
2965 
2966 	if (res == DC_OK)
2967 		if (!link->link_enc->funcs->validate_output_with_stream(
2968 						link->link_enc, stream))
2969 			res = DC_FAIL_ENC_VALIDATE;
2970 
2971 	/* TODO: validate audio ASIC caps, encoder */
2972 
2973 	if (res == DC_OK)
2974 		res = dc_link_validate_mode_timing(stream,
2975 		      link,
2976 		      &stream->timing);
2977 
2978 	return res;
2979 }
2980 
2981 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2982 {
2983 	enum dc_status res = DC_OK;
2984 
2985 	/* TODO For now validates pixel format only */
2986 	if (dc->res_pool->funcs->validate_plane)
2987 		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2988 
2989 	return res;
2990 }
2991