1 /*
2 * Copyright 2012-15 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 #include "dm_services.h"
26 
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "core_types.h"
35 #include "set_mode_types.h"
36 #include "virtual/virtual_stream_encoder.h"
37 
38 #include "dce80/dce80_resource.h"
39 #include "dce100/dce100_resource.h"
40 #include "dce110/dce110_resource.h"
41 #include "dce112/dce112_resource.h"
42 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
43 #include "dcn10/dcn10_resource.h"
44 #endif
45 #include "dce120/dce120_resource.h"
46 
47 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
48 {
49 	enum dce_version dc_version = DCE_VERSION_UNKNOWN;
50 	switch (asic_id.chip_family) {
51 
52 	case FAMILY_CI:
53 		dc_version = DCE_VERSION_8_0;
54 		break;
55 	case FAMILY_KV:
56 		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
57 		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
58 		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
59 			dc_version = DCE_VERSION_8_3;
60 		else
61 			dc_version = DCE_VERSION_8_1;
62 		break;
63 	case FAMILY_CZ:
64 		dc_version = DCE_VERSION_11_0;
65 		break;
66 
67 	case FAMILY_VI:
68 		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
69 				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
70 			dc_version = DCE_VERSION_10_0;
71 			break;
72 		}
73 		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
74 				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
75 				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
76 			dc_version = DCE_VERSION_11_2;
77 		}
78 		break;
79 	case FAMILY_AI:
80 		dc_version = DCE_VERSION_12_0;
81 		break;
82 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
83 	case FAMILY_RV:
84 		dc_version = DCN_VERSION_1_0;
85 		break;
86 #endif
87 	default:
88 		dc_version = DCE_VERSION_UNKNOWN;
89 		break;
90 	}
91 	return dc_version;
92 }
93 
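/*
 * Create the resource pool matching the given DCE/DCN version and read the
 * reference (crystal) clock from the VBIOS firmware info. Returns NULL if the
 * version is not supported or pool creation fails.
 */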
94 struct resource_pool *dc_create_resource_pool(
95 				struct dc  *dc,
96 				int num_virtual_links,
97 				enum dce_version dc_version,
98 				struct hw_asic_id asic_id)
99 {
100 	struct resource_pool *res_pool = NULL;
101 
102 	switch (dc_version) {
103 	case DCE_VERSION_8_0:
104 		res_pool = dce80_create_resource_pool(
105 			num_virtual_links, dc);
106 		break;
107 	case DCE_VERSION_8_1:
108 		res_pool = dce81_create_resource_pool(
109 			num_virtual_links, dc);
110 		break;
111 	case DCE_VERSION_8_3:
112 		res_pool = dce83_create_resource_pool(
113 			num_virtual_links, dc);
114 		break;
115 	case DCE_VERSION_10_0:
116 		res_pool = dce100_create_resource_pool(
117 				num_virtual_links, dc);
118 		break;
119 	case DCE_VERSION_11_0:
120 		res_pool = dce110_create_resource_pool(
121 			num_virtual_links, dc, asic_id);
122 		break;
123 	case DCE_VERSION_11_2:
124 		res_pool = dce112_create_resource_pool(
125 			num_virtual_links, dc);
126 		break;
127 	case DCE_VERSION_12_0:
128 		res_pool = dce120_create_resource_pool(
129 			num_virtual_links, dc);
130 		break;
131 
132 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
133 	case DCN_VERSION_1_0:
134 		res_pool = dcn10_create_resource_pool(
135 				num_virtual_links, dc);
136 		break;
137 #endif
138 
139 
140 	default:
141 		break;
142 	}
143 	if (res_pool != NULL) {
144 		struct dc_firmware_info fw_info = { { 0 } };
145 
		if (dc->ctx->dc_bios->funcs->get_firmware_info(
				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK)
			res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
		else
			ASSERT_CRITICAL(false);
151 	}
152 
153 	return res_pool;
154 }
155 
156 void dc_destroy_resource_pool(struct dc  *dc)
157 {
158 	if (dc) {
159 		if (dc->res_pool)
160 			dc->res_pool->funcs->destroy(&dc->res_pool);
161 
162 		kfree(dc->hwseq);
163 	}
164 }
165 
166 static void update_num_audio(
167 	const struct resource_straps *straps,
168 	unsigned int *num_audio,
169 	struct audio_support *aud_support)
170 {
171 	aud_support->dp_audio = true;
172 	aud_support->hdmi_audio_native = false;
173 	aud_support->hdmi_audio_on_dongle = false;
174 
175 	if (straps->hdmi_disable == 0) {
176 		if (straps->dc_pinstraps_audio & 0x2) {
177 			aud_support->hdmi_audio_on_dongle = true;
178 			aud_support->hdmi_audio_native = true;
179 		}
180 	}
181 
182 	switch (straps->audio_stream_number) {
183 	case 0: /* multi streams supported */
184 		break;
185 	case 1: /* multi streams not supported */
186 		*num_audio = 1;
187 		break;
188 	default:
189 		DC_ERR("DC: unexpected audio fuse!\n");
190 	}
191 }
192 
193 bool resource_construct(
194 	unsigned int num_virtual_links,
195 	struct dc  *dc,
196 	struct resource_pool *pool,
197 	const struct resource_create_funcs *create_funcs)
198 {
199 	struct dc_context *ctx = dc->ctx;
200 	const struct resource_caps *caps = pool->res_cap;
201 	int i;
202 	unsigned int num_audio = caps->num_audio;
203 	struct resource_straps straps = {0};
204 
205 	if (create_funcs->read_dce_straps)
206 		create_funcs->read_dce_straps(dc->ctx, &straps);
207 
208 	pool->audio_count = 0;
209 	if (create_funcs->create_audio) {
210 		/* find the total number of streams available via the
211 		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
212 		 * registers (one for each pin) starting from pin 1
213 		 * up to the max number of audio pins.
214 		 * We stop on the first pin where
215 		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
216 		 */
217 		update_num_audio(&straps, &num_audio, &pool->audio_support);
218 		for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
219 			struct audio *aud = create_funcs->create_audio(ctx, i);
220 
221 			if (aud == NULL) {
222 				DC_ERR("DC: failed to create audio!\n");
223 				return false;
224 			}
225 
226 			if (!aud->funcs->endpoint_valid(aud)) {
227 				aud->funcs->destroy(&aud);
228 				break;
229 			}
230 
231 			pool->audios[i] = aud;
232 			pool->audio_count++;
233 		}
234 	}
235 
236 	pool->stream_enc_count = 0;
237 	if (create_funcs->create_stream_encoder) {
238 		for (i = 0; i < caps->num_stream_encoder; i++) {
239 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
240 			if (pool->stream_enc[i] == NULL)
241 				DC_ERR("DC: failed to create stream_encoder!\n");
242 			pool->stream_enc_count++;
243 		}
244 	}
245 	dc->caps.dynamic_audio = false;
	if (pool->audio_count < pool->stream_enc_count)
		dc->caps.dynamic_audio = true;
249 	for (i = 0; i < num_virtual_links; i++) {
250 		pool->stream_enc[pool->stream_enc_count] =
251 			virtual_stream_encoder_create(
252 					ctx, ctx->dc_bios);
253 		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
254 			DC_ERR("DC: failed to create stream_encoder!\n");
255 			return false;
256 		}
257 		pool->stream_enc_count++;
258 	}
259 
260 	dc->hwseq = create_funcs->create_hwseq(ctx);
261 
262 	return true;
263 }
264 
265 
266 void resource_unreference_clock_source(
267 		struct resource_context *res_ctx,
268 		const struct resource_pool *pool,
269 		struct clock_source *clock_source)
270 {
271 	int i;
272 
273 	for (i = 0; i < pool->clk_src_count; i++) {
274 		if (pool->clock_sources[i] != clock_source)
275 			continue;
276 
277 		res_ctx->clock_source_ref_count[i]--;
278 
279 		break;
280 	}
281 
282 	if (pool->dp_clock_source == clock_source)
283 		res_ctx->dp_clock_source_ref_count--;
284 }
285 
286 void resource_reference_clock_source(
287 		struct resource_context *res_ctx,
288 		const struct resource_pool *pool,
289 		struct clock_source *clock_source)
290 {
291 	int i;
292 	for (i = 0; i < pool->clk_src_count; i++) {
293 		if (pool->clock_sources[i] != clock_source)
294 			continue;
295 
296 		res_ctx->clock_source_ref_count[i]++;
297 		break;
298 	}
299 
300 	if (pool->dp_clock_source == clock_source)
301 		res_ctx->dp_clock_source_ref_count++;
302 }
303 
304 bool resource_are_streams_timing_synchronizable(
305 	struct dc_stream_state *stream1,
306 	struct dc_stream_state *stream2)
307 {
308 	if (stream1->timing.h_total != stream2->timing.h_total)
309 		return false;
310 
311 	if (stream1->timing.v_total != stream2->timing.v_total)
312 		return false;
313 
314 	if (stream1->timing.h_addressable
315 				!= stream2->timing.h_addressable)
316 		return false;
317 
318 	if (stream1->timing.v_addressable
319 				!= stream2->timing.v_addressable)
320 		return false;
321 
322 	if (stream1->timing.pix_clk_khz
323 				!= stream2->timing.pix_clk_khz)
324 		return false;
325 
326 	if (stream1->phy_pix_clk != stream2->phy_pix_clk
327 			&& (!dc_is_dp_signal(stream1->signal)
328 			|| !dc_is_dp_signal(stream2->signal)))
329 		return false;
330 
331 	return true;
332 }
333 
334 static bool is_sharable_clk_src(
335 	const struct pipe_ctx *pipe_with_clk_src,
336 	const struct pipe_ctx *pipe)
337 {
338 	if (pipe_with_clk_src->clock_source == NULL)
339 		return false;
340 
341 	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
342 		return false;
343 
344 	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
345 		return false;
346 
347 	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
348 			&& dc_is_dvi_signal(pipe->stream->signal))
349 		return false;
350 
351 	if (dc_is_hdmi_signal(pipe->stream->signal)
352 			&& dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
353 		return false;
354 
355 	if (!resource_are_streams_timing_synchronizable(
356 			pipe_with_clk_src->stream, pipe->stream))
357 		return false;
358 
359 	return true;
360 }
361 
362 struct clock_source *resource_find_used_clk_src_for_sharing(
363 					struct resource_context *res_ctx,
364 					struct pipe_ctx *pipe_ctx)
365 {
366 	int i;
367 
368 	for (i = 0; i < MAX_PIPES; i++) {
369 		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
370 			return res_ctx->pipe_ctx[i].clock_source;
371 	}
372 
373 	return NULL;
374 }
375 
376 static enum pixel_format convert_pixel_format_to_dalsurface(
377 		enum surface_pixel_format surface_pixel_format)
378 {
379 	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
380 
381 	switch (surface_pixel_format) {
382 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
383 		dal_pixel_format = PIXEL_FORMAT_INDEX8;
384 		break;
385 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
386 		dal_pixel_format = PIXEL_FORMAT_RGB565;
387 		break;
388 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
389 		dal_pixel_format = PIXEL_FORMAT_RGB565;
390 		break;
391 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
392 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
393 		break;
394 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
395 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
396 		break;
397 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
398 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
399 		break;
400 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
401 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
402 		break;
403 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
404 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
405 		break;
406 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
407 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
408 		dal_pixel_format = PIXEL_FORMAT_FP16;
409 		break;
410 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
411 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
412 		dal_pixel_format = PIXEL_FORMAT_420BPP8;
413 		break;
414 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
415 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
416 		dal_pixel_format = PIXEL_FORMAT_420BPP10;
417 		break;
418 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
419 	default:
420 		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
421 		break;
422 	}
423 	return dal_pixel_format;
424 }
425 
426 static void rect_swap_helper(struct rect *rect)
427 {
428 	uint32_t temp = 0;
429 
430 	temp = rect->height;
431 	rect->height = rect->width;
432 	rect->width = temp;
433 
434 	temp = rect->x;
435 	rect->x = rect->y;
436 	rect->y = temp;
437 }
438 
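/*
 * Derive the plane viewport (the region of the surface that will be read)
 * from the intersection of the stream source rectangle and the plane clip
 * rectangle, mapped back into surface space. Also computes the rounded-down
 * chroma viewport for 4:2:0 formats and halves the viewport for hsplit pipes.
 */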
439 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
440 {
441 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
442 	const struct dc_stream_state *stream = pipe_ctx->stream;
443 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
444 	struct rect surf_src = plane_state->src_rect;
445 	struct rect clip = { 0 };
446 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
447 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
448 	bool pri_split = pipe_ctx->bottom_pipe &&
449 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
450 	bool sec_split = pipe_ctx->top_pipe &&
451 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
452 
453 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
454 		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
455 		pri_split = false;
456 		sec_split = false;
457 	}
458 
459 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
460 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
461 		rect_swap_helper(&surf_src);
462 
	/* The actual clip is the intersection of the stream source rectangle
	 * and the plane clip rectangle.
	 */
466 	clip.x = stream->src.x > plane_state->clip_rect.x ?
467 			stream->src.x : plane_state->clip_rect.x;
468 
469 	clip.width = stream->src.x + stream->src.width <
470 			plane_state->clip_rect.x + plane_state->clip_rect.width ?
471 			stream->src.x + stream->src.width - clip.x :
472 			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
473 
474 	clip.y = stream->src.y > plane_state->clip_rect.y ?
475 			stream->src.y : plane_state->clip_rect.y;
476 
477 	clip.height = stream->src.y + stream->src.height <
478 			plane_state->clip_rect.y + plane_state->clip_rect.height ?
479 			stream->src.y + stream->src.height - clip.y :
480 			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
481 
482 	/* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
483 	 * num_pixels = clip.num_pix * scl_ratio
484 	 */
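	/* For example (illustrative numbers only): with surf_src 3840 wide,
	 * dst_rect 1920 wide at x = 0 and clip.x = 960, the horizontal scale
	 * ratio is 3840/1920 = 2, so viewport.x = 0 + (960 - 0) * 2 = 1920.
	 */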
485 	data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
486 			surf_src.width / plane_state->dst_rect.width;
487 	data->viewport.width = clip.width *
488 			surf_src.width / plane_state->dst_rect.width;
489 
490 	data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
491 			surf_src.height / plane_state->dst_rect.height;
492 	data->viewport.height = clip.height *
493 			surf_src.height / plane_state->dst_rect.height;
494 
495 	/* Round down, compensate in init */
496 	data->viewport_c.x = data->viewport.x / vpc_div;
497 	data->viewport_c.y = data->viewport.y / vpc_div;
498 	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
499 			dal_fixed31_32_half : dal_fixed31_32_zero;
500 	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
501 			dal_fixed31_32_half : dal_fixed31_32_zero;
502 	/* Round up, assume original video size always even dimensions */
503 	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
504 	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
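	/* Illustrative 4:2:0 example (vpc_div == 2): viewport.x = 101 rounds
	 * down to viewport_c.x = 50 with a 0.5 chroma init to compensate,
	 * while viewport.width = 1921 rounds up to viewport_c.width = 961.
	 */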
505 
506 	/* Handle hsplit */
507 	if (pri_split || sec_split) {
508 		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
509 		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
510 					(plane_state->rotation == ROTATION_ANGLE_180);
511 
512 		if (plane_state->rotation == ROTATION_ANGLE_90
513 				|| plane_state->rotation == ROTATION_ANGLE_270)
514 			/* Secondary_pipe XOR Rotation_270 */
515 			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
516 
517 		if (right_view) {
518 			data->viewport.width /= 2;
519 			data->viewport_c.width /= 2;
520 			data->viewport.x +=  data->viewport.width;
521 			data->viewport_c.x +=  data->viewport_c.width;
522 			/* Ceil offset pipe */
523 			data->viewport.width += data->viewport.width % 2;
524 			data->viewport_c.width += data->viewport_c.width % 2;
525 		} else {
526 			data->viewport.width /= 2;
527 			data->viewport_c.width /= 2;
528 		}
529 	}
530 
531 	if (plane_state->rotation == ROTATION_ANGLE_90 ||
532 			plane_state->rotation == ROTATION_ANGLE_270) {
533 		rect_swap_helper(&data->viewport_c);
534 		rect_swap_helper(&data->viewport);
535 	}
536 }
537 
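/*
 * Compute the recout: the rectangle on the stream destination that the plane
 * actually covers, i.e. the clipped plane destination scaled by the stream
 * dst/src ratio. recout_skip reports how far the clipped recout starts past
 * the unclipped ("full") recout origin; it is consumed later when computing
 * the scaler inits.
 */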
538 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
539 {
540 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
541 	const struct dc_stream_state *stream = pipe_ctx->stream;
542 	struct rect surf_src = plane_state->src_rect;
543 	struct rect surf_clip = plane_state->clip_rect;
544 	int recout_full_x, recout_full_y;
545 
546 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
547 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
548 		rect_swap_helper(&surf_src);
549 
550 	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
551 	if (stream->src.x < surf_clip.x)
552 		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
553 			- stream->src.x) * stream->dst.width
554 						/ stream->src.width;
555 
556 	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
557 			stream->dst.width / stream->src.width;
558 	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
559 			stream->dst.x + stream->dst.width)
560 		pipe_ctx->plane_res.scl_data.recout.width =
561 			stream->dst.x + stream->dst.width
562 						- pipe_ctx->plane_res.scl_data.recout.x;
563 
564 	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
565 	if (stream->src.y < surf_clip.y)
566 		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
567 			- stream->src.y) * stream->dst.height
568 						/ stream->src.height;
569 
570 	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
571 			stream->dst.height / stream->src.height;
572 	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
573 			stream->dst.y + stream->dst.height)
574 		pipe_ctx->plane_res.scl_data.recout.height =
575 			stream->dst.y + stream->dst.height
576 						- pipe_ctx->plane_res.scl_data.recout.y;
577 
578 	/* Handle h & vsplit */
579 	if (pipe_ctx->top_pipe && pipe_ctx->top_pipe->plane_state ==
580 		pipe_ctx->plane_state) {
581 		if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
582 			pipe_ctx->plane_res.scl_data.recout.height /= 2;
583 			pipe_ctx->plane_res.scl_data.recout.y += pipe_ctx->plane_res.scl_data.recout.height;
584 			/* Floor primary pipe, ceil 2ndary pipe */
585 			pipe_ctx->plane_res.scl_data.recout.height += pipe_ctx->plane_res.scl_data.recout.height % 2;
586 		} else {
587 			pipe_ctx->plane_res.scl_data.recout.width /= 2;
588 			pipe_ctx->plane_res.scl_data.recout.x += pipe_ctx->plane_res.scl_data.recout.width;
589 			pipe_ctx->plane_res.scl_data.recout.width += pipe_ctx->plane_res.scl_data.recout.width % 2;
590 		}
591 	} else if (pipe_ctx->bottom_pipe &&
592 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state) {
593 		if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
594 			pipe_ctx->plane_res.scl_data.recout.height /= 2;
595 		else
596 			pipe_ctx->plane_res.scl_data.recout.width /= 2;
597 	}
598 
	/* Unclipped recout offset = stream dst offset
	 *	+ (surface dst offset - stream src offset) / stream scaling ratio
	 *	- surface src offset / full scaling ratio
	 */
603 	recout_full_x = stream->dst.x + (plane_state->dst_rect.x -  stream->src.x)
604 					* stream->dst.width / stream->src.width -
605 			surf_src.x * plane_state->dst_rect.width / surf_src.width
606 					* stream->dst.width / stream->src.width;
607 	recout_full_y = stream->dst.y + (plane_state->dst_rect.y -  stream->src.y)
608 					* stream->dst.height / stream->src.height -
609 			surf_src.y * plane_state->dst_rect.height / surf_src.height
610 					* stream->dst.height / stream->src.height;
611 
612 	recout_skip->width = pipe_ctx->plane_res.scl_data.recout.x - recout_full_x;
613 	recout_skip->height = pipe_ctx->plane_res.scl_data.recout.y - recout_full_y;
614 }
615 
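/*
 * Scaling ratios are surface source size over plane destination size,
 * multiplied by the stream src/dst ratio. Chroma ratios are halved for 4:2:0
 * formats, and the horizontal/vertical ratio is doubled for side-by-side /
 * top-and-bottom 3D formats respectively.
 */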
616 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
617 {
618 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
619 	const struct dc_stream_state *stream = pipe_ctx->stream;
620 	struct rect surf_src = plane_state->src_rect;
621 	const int in_w = stream->src.width;
622 	const int in_h = stream->src.height;
623 	const int out_w = stream->dst.width;
624 	const int out_h = stream->dst.height;
625 
626 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
627 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
628 		rect_swap_helper(&surf_src);
629 
630 	pipe_ctx->plane_res.scl_data.ratios.horz = dal_fixed31_32_from_fraction(
631 					surf_src.width,
632 					plane_state->dst_rect.width);
633 	pipe_ctx->plane_res.scl_data.ratios.vert = dal_fixed31_32_from_fraction(
634 					surf_src.height,
635 					plane_state->dst_rect.height);
636 
637 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
638 		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
639 	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
640 		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
641 
642 	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
643 		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
644 	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
645 		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
646 
647 	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
648 	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
649 
650 	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
651 			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
652 		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
653 		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
654 	}
655 }
656 
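/*
 * Compute the scaler init (initial filter phase) values from the ratios, taps
 * and the truncated chroma offsets carried over from calculate_viewport, then
 * adjust the viewport so the filter taps have valid source pixels to read at
 * the edges and the init values stay within the tap count.
 */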
657 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
658 {
659 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
660 	struct rect src = pipe_ctx->plane_state->src_rect;
661 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
662 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
663 
664 
665 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
666 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
667 		rect_swap_helper(&src);
668 		rect_swap_helper(&data->viewport_c);
669 		rect_swap_helper(&data->viewport);
670 	}
671 
672 	/*
673 	 * Init calculated according to formula:
674 	 * 	init = (scaling_ratio + number_of_taps + 1) / 2
675 	 * 	init_bot = init + scaling_ratio
676 	 * 	init_c = init + truncated_vp_c_offset(from calculate viewport)
677 	 */
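	/* For example (illustrative numbers only): a 2.0 horizontal ratio with
	 * 4 horizontal taps gives init.h = (2.0 + 4 + 1) / 2 = 3.5.
	 */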
678 	data->inits.h = dal_fixed31_32_div_int(
679 			dal_fixed31_32_add_int(data->ratios.horz, data->taps.h_taps + 1), 2);
680 
681 	data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_div_int(
682 			dal_fixed31_32_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2));
683 
684 	data->inits.v = dal_fixed31_32_div_int(
685 			dal_fixed31_32_add_int(data->ratios.vert, data->taps.v_taps + 1), 2);
686 
687 	data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_div_int(
688 			dal_fixed31_32_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2));
689 
690 
691 	/* Adjust for viewport end clip-off */
692 	if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
693 		int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
694 		int int_part = dal_fixed31_32_floor(
695 				dal_fixed31_32_sub(data->inits.h, data->ratios.horz));
696 
697 		int_part = int_part > 0 ? int_part : 0;
698 		data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
699 	}
700 	if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
701 		int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
702 		int int_part = dal_fixed31_32_floor(
703 				dal_fixed31_32_sub(data->inits.v, data->ratios.vert));
704 
705 		int_part = int_part > 0 ? int_part : 0;
706 		data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
707 	}
708 	if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
709 		int vp_clip = (src.x + src.width) / vpc_div -
710 				data->viewport_c.width - data->viewport_c.x;
711 		int int_part = dal_fixed31_32_floor(
712 				dal_fixed31_32_sub(data->inits.h_c, data->ratios.horz_c));
713 
714 		int_part = int_part > 0 ? int_part : 0;
715 		data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
716 	}
717 	if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
718 		int vp_clip = (src.y + src.height) / vpc_div -
719 				data->viewport_c.height - data->viewport_c.y;
720 		int int_part = dal_fixed31_32_floor(
721 				dal_fixed31_32_sub(data->inits.v_c, data->ratios.vert_c));
722 
723 		int_part = int_part > 0 ? int_part : 0;
724 		data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
725 	}
726 
727 	/* Adjust for non-0 viewport offset */
728 	if (data->viewport.x) {
729 		int int_part;
730 
731 		data->inits.h = dal_fixed31_32_add(data->inits.h, dal_fixed31_32_mul_int(
732 				data->ratios.horz, recout_skip->width));
733 		int_part = dal_fixed31_32_floor(data->inits.h) - data->viewport.x;
734 		if (int_part < data->taps.h_taps) {
735 			int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
736 						(data->taps.h_taps - int_part) : data->viewport.x;
737 			data->viewport.x -= int_adj;
738 			data->viewport.width += int_adj;
739 			int_part += int_adj;
740 		} else if (int_part > data->taps.h_taps) {
741 			data->viewport.x += int_part - data->taps.h_taps;
742 			data->viewport.width -= int_part - data->taps.h_taps;
743 			int_part = data->taps.h_taps;
744 		}
745 		data->inits.h.value &= 0xffffffff;
746 		data->inits.h = dal_fixed31_32_add_int(data->inits.h, int_part);
747 	}
748 
749 	if (data->viewport_c.x) {
750 		int int_part;
751 
752 		data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_mul_int(
753 				data->ratios.horz_c, recout_skip->width));
754 		int_part = dal_fixed31_32_floor(data->inits.h_c) - data->viewport_c.x;
755 		if (int_part < data->taps.h_taps_c) {
756 			int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
757 					(data->taps.h_taps_c - int_part) : data->viewport_c.x;
758 			data->viewport_c.x -= int_adj;
759 			data->viewport_c.width += int_adj;
760 			int_part += int_adj;
761 		} else if (int_part > data->taps.h_taps_c) {
762 			data->viewport_c.x += int_part - data->taps.h_taps_c;
763 			data->viewport_c.width -= int_part - data->taps.h_taps_c;
764 			int_part = data->taps.h_taps_c;
765 		}
766 		data->inits.h_c.value &= 0xffffffff;
767 		data->inits.h_c = dal_fixed31_32_add_int(data->inits.h_c, int_part);
768 	}
769 
770 	if (data->viewport.y) {
771 		int int_part;
772 
773 		data->inits.v = dal_fixed31_32_add(data->inits.v, dal_fixed31_32_mul_int(
774 				data->ratios.vert, recout_skip->height));
775 		int_part = dal_fixed31_32_floor(data->inits.v) - data->viewport.y;
776 		if (int_part < data->taps.v_taps) {
777 			int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
778 						(data->taps.v_taps - int_part) : data->viewport.y;
779 			data->viewport.y -= int_adj;
780 			data->viewport.height += int_adj;
781 			int_part += int_adj;
782 		} else if (int_part > data->taps.v_taps) {
783 			data->viewport.y += int_part - data->taps.v_taps;
784 			data->viewport.height -= int_part - data->taps.v_taps;
785 			int_part = data->taps.v_taps;
786 		}
787 		data->inits.v.value &= 0xffffffff;
788 		data->inits.v = dal_fixed31_32_add_int(data->inits.v, int_part);
789 	}
790 
791 	if (data->viewport_c.y) {
792 		int int_part;
793 
794 		data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_mul_int(
795 				data->ratios.vert_c, recout_skip->height));
796 		int_part = dal_fixed31_32_floor(data->inits.v_c) - data->viewport_c.y;
797 		if (int_part < data->taps.v_taps_c) {
798 			int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
799 					(data->taps.v_taps_c - int_part) : data->viewport_c.y;
800 			data->viewport_c.y -= int_adj;
801 			data->viewport_c.height += int_adj;
802 			int_part += int_adj;
803 		} else if (int_part > data->taps.v_taps_c) {
804 			data->viewport_c.y += int_part - data->taps.v_taps_c;
805 			data->viewport_c.height -= int_part - data->taps.v_taps_c;
806 			int_part = data->taps.v_taps_c;
807 		}
808 		data->inits.v_c.value &= 0xffffffff;
809 		data->inits.v_c = dal_fixed31_32_add_int(data->inits.v_c, int_part);
810 	}
811 
812 	/* Interlaced inits based on final vert inits */
813 	data->inits.v_bot = dal_fixed31_32_add(data->inits.v, data->ratios.vert);
814 	data->inits.v_c_bot = dal_fixed31_32_add(data->inits.v_c, data->ratios.vert_c);
815 
816 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
817 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
818 		rect_swap_helper(&data->viewport_c);
819 		rect_swap_helper(&data->viewport);
820 	}
821 }
822 
823 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
824 {
825 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
826 	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
827 	struct view recout_skip = { 0 };
828 	bool res = false;
829 
	/* Ordering matters here: the scaling ratio calculation requires the
	 * pixel format, line buffer depth calculation requires the recout,
	 * and tap calculation requires the scaling ratios. Init calculation
	 * requires the viewport, taps, ratios and the recout of the split pipe.
	 */
834 	pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
835 			pipe_ctx->plane_state->format);
836 
837 	calculate_scaling_ratios(pipe_ctx);
838 
839 	calculate_viewport(pipe_ctx);
840 
841 	if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
842 		return false;
843 
844 	calculate_recout(pipe_ctx, &recout_skip);
845 
	/*
	 * Setting the line buffer pixel depth to 24bpp yields banding
	 * on certain displays, such as the Sharp 4K, so default to 30bpp.
	 */
850 	pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
851 
852 	pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable;
853 	pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable;
854 
855 	/* Taps calculations */
856 	res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
857 		pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
858 
859 	if (!res) {
860 		/* Try 24 bpp linebuffer */
861 		pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
862 
863 		res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
864 			pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
865 	}
866 
867 	if (res)
868 		/* May need to re-check lb size after this in some obscure scenario */
869 		calculate_inits_and_adj_vp(pipe_ctx, &recout_skip);
870 
871 	dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
872 				"%s: Viewport:\nheight:%d width:%d x:%d "
873 				"y:%d\n dst_rect:\nheight:%d width:%d x:%d "
874 				"y:%d\n",
875 				__func__,
876 				pipe_ctx->plane_res.scl_data.viewport.height,
877 				pipe_ctx->plane_res.scl_data.viewport.width,
878 				pipe_ctx->plane_res.scl_data.viewport.x,
879 				pipe_ctx->plane_res.scl_data.viewport.y,
880 				plane_state->dst_rect.height,
881 				plane_state->dst_rect.width,
882 				plane_state->dst_rect.x,
883 				plane_state->dst_rect.y);
884 
885 	return res;
886 }
887 
888 
889 enum dc_status resource_build_scaling_params_for_context(
890 	const struct dc  *dc,
891 	struct dc_state *context)
892 {
893 	int i;
894 
895 	for (i = 0; i < MAX_PIPES; i++) {
896 		if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
897 				context->res_ctx.pipe_ctx[i].stream != NULL)
898 			if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
899 				return DC_FAIL_SCALING;
900 	}
901 
902 	return DC_OK;
903 }
904 
905 struct pipe_ctx *find_idle_secondary_pipe(
906 		struct resource_context *res_ctx,
907 		const struct resource_pool *pool)
908 {
909 	int i;
910 	struct pipe_ctx *secondary_pipe = NULL;
911 
912 	/*
913 	 * search backwards for the second pipe to keep pipe
914 	 * assignment more consistent
915 	 */
916 
917 	for (i = pool->pipe_count - 1; i >= 0; i--) {
918 		if (res_ctx->pipe_ctx[i].stream == NULL) {
919 			secondary_pipe = &res_ctx->pipe_ctx[i];
920 			secondary_pipe->pipe_idx = i;
921 			break;
922 		}
923 	}
924 
925 
926 	return secondary_pipe;
927 }
928 
929 struct pipe_ctx *resource_get_head_pipe_for_stream(
930 		struct resource_context *res_ctx,
931 		struct dc_stream_state *stream)
932 {
933 	int i;
	for (i = 0; i < MAX_PIPES; i++) {
		if (res_ctx->pipe_ctx[i].stream == stream &&
				!res_ctx->pipe_ctx[i].top_pipe)
			return &res_ctx->pipe_ctx[i];
	}
941 	return NULL;
942 }
943 
944 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
945 		struct resource_context *res_ctx,
946 		struct dc_stream_state *stream)
947 {
	struct pipe_ctx *tail_pipe;

	tail_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);

	if (!tail_pipe)
		return NULL;

	/* Walk the bottom_pipe chain down to the last pipe for this stream */
	while (tail_pipe->bottom_pipe)
		tail_pipe = tail_pipe->bottom_pipe;

	return tail_pipe;
962 }
963 
964 /*
965  * A free_pipe for a stream is defined here as a pipe
966  * that has no surface attached yet
967  */
968 static struct pipe_ctx *acquire_free_pipe_for_stream(
969 		struct dc_state *context,
970 		const struct resource_pool *pool,
971 		struct dc_stream_state *stream)
972 {
973 	int i;
974 	struct resource_context *res_ctx = &context->res_ctx;
975 
976 	struct pipe_ctx *head_pipe = NULL;
977 
978 	/* Find head pipe, which has the back end set up*/
979 
980 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
981 
	if (!head_pipe) {
		ASSERT(0);
		return NULL;
	}
984 
985 	if (!head_pipe->plane_state)
986 		return head_pipe;
987 
988 	/* Re-use pipe already acquired for this stream if available*/
989 	for (i = pool->pipe_count - 1; i >= 0; i--) {
990 		if (res_ctx->pipe_ctx[i].stream == stream &&
991 				!res_ctx->pipe_ctx[i].plane_state) {
992 			return &res_ctx->pipe_ctx[i];
993 		}
994 	}
995 
996 	/*
997 	 * At this point we have no re-useable pipe for this stream and we need
998 	 * to acquire an idle one to satisfy the request
999 	 */
1000 
1001 	if (!pool->funcs->acquire_idle_pipe_for_layer)
1002 		return NULL;
1003 
1004 	return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1005 
1006 }
1007 
1008 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1009 static int acquire_first_split_pipe(
1010 		struct resource_context *res_ctx,
1011 		const struct resource_pool *pool,
1012 		struct dc_stream_state *stream)
1013 {
1014 	int i;
1015 
1016 	for (i = 0; i < pool->pipe_count; i++) {
1017 		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1018 
1019 		if (pipe_ctx->top_pipe &&
1020 				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1021 			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1022 			if (pipe_ctx->bottom_pipe)
1023 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1024 
1025 			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1026 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1027 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1028 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1029 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1030 			pipe_ctx->stream_res.opp = pool->opps[i];
1031 			pipe_ctx->pipe_idx = i;
1032 
1033 			pipe_ctx->stream = stream;
1034 			return i;
1035 		}
1036 	}
1037 	return -1;
1038 }
1039 #endif
1040 
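/*
 * Attach a plane to a stream in the given context: the plane is retained,
 * assigned to a free pipe (the head pipe if it has no plane yet, otherwise an
 * idle or split pipe chained below the current tail pipe) and appended to the
 * stream's plane_states array.
 */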
1041 bool dc_add_plane_to_context(
1042 		const struct dc *dc,
1043 		struct dc_stream_state *stream,
1044 		struct dc_plane_state *plane_state,
1045 		struct dc_state *context)
1046 {
1047 	int i;
1048 	struct resource_pool *pool = dc->res_pool;
1049 	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1050 	struct dc_stream_status *stream_status = NULL;
1051 
1052 	for (i = 0; i < context->stream_count; i++)
1053 		if (context->streams[i] == stream) {
1054 			stream_status = &context->stream_status[i];
1055 			break;
1056 		}
1057 	if (stream_status == NULL) {
1058 		dm_error("Existing stream not found; failed to attach surface!\n");
1059 		return false;
1060 	}
1061 
1062 
1063 	if (stream_status->plane_count == MAX_SURFACE_NUM) {
1064 		dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1065 				plane_state, MAX_SURFACE_NUM);
1066 		return false;
1067 	}
1068 
1069 	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1070 
1071 	if (!head_pipe) {
1072 		dm_error("Head pipe not found for stream_state %p !\n", stream);
1073 		return false;
1074 	}
1075 
1076 	/* retain new surfaces */
1077 	dc_plane_state_retain(plane_state);
1078 
1079 	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1080 
1081 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1082 	if (!free_pipe) {
1083 		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1084 		if (pipe_idx >= 0)
1085 			free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1086 	}
1087 #endif
	if (!free_pipe) {
		/* No pipe available; drop the reference taken above */
		dc_plane_state_release(plane_state);
		return false;
	}
1092 
1093 	free_pipe->plane_state = plane_state;
1094 
1095 	if (head_pipe != free_pipe) {
1096 
1097 		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1098 		ASSERT(tail_pipe);
1099 
1100 		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1101 		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1102 		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1103 		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1104 		free_pipe->clock_source = tail_pipe->clock_source;
1105 		free_pipe->top_pipe = tail_pipe;
1106 		tail_pipe->bottom_pipe = free_pipe;
1107 	}
1108 
1109 	/* assign new surfaces*/
1110 	stream_status->plane_states[stream_status->plane_count] = plane_state;
1111 
1112 	stream_status->plane_count++;
1113 
1114 	return true;
1115 }
1116 
1117 bool dc_remove_plane_from_context(
1118 		const struct dc *dc,
1119 		struct dc_stream_state *stream,
1120 		struct dc_plane_state *plane_state,
1121 		struct dc_state *context)
1122 {
1123 	int i;
1124 	struct dc_stream_status *stream_status = NULL;
1125 	struct resource_pool *pool = dc->res_pool;
1126 
1127 	for (i = 0; i < context->stream_count; i++)
1128 		if (context->streams[i] == stream) {
1129 			stream_status = &context->stream_status[i];
1130 			break;
1131 		}
1132 
1133 	if (stream_status == NULL) {
1134 		dm_error("Existing stream not found; failed to remove plane.\n");
1135 		return false;
1136 	}
1137 
1138 	/* release pipe for plane*/
1139 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1140 		struct pipe_ctx *pipe_ctx;
1141 
1142 		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1143 			pipe_ctx = &context->res_ctx.pipe_ctx[i];
1144 
1145 			if (pipe_ctx->top_pipe)
1146 				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1147 
			/* The second condition avoids setting the tail pipe's
			 * top_pipe to NULL, which would make it look like a
			 * head pipe in subsequent deletes.
			 */
1152 			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1153 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1154 
			/*
			 * For a head pipe, detach the surface from the pipe;
			 * for a tail pipe, just zero the whole pipe_ctx out.
			 */
1159 			if (!pipe_ctx->top_pipe) {
1160 				pipe_ctx->plane_state = NULL;
1161 				pipe_ctx->bottom_pipe = NULL;
1162 			} else  {
1163 				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1164 			}
1165 		}
1166 	}
1167 
1168 
1169 	for (i = 0; i < stream_status->plane_count; i++) {
1170 		if (stream_status->plane_states[i] == plane_state) {
1171 
1172 			dc_plane_state_release(stream_status->plane_states[i]);
1173 			break;
1174 		}
1175 	}
1176 
1177 	if (i == stream_status->plane_count) {
1178 		dm_error("Existing plane_state not found; failed to detach it!\n");
1179 		return false;
1180 	}
1181 
1182 	stream_status->plane_count--;
1183 
	/* Trim back the array, shifting the remaining planes down over the
	 * one that was just released.
	 */
	for (; i < stream_status->plane_count; i++)
		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1187 
1188 	stream_status->plane_states[stream_status->plane_count] = NULL;
1189 
1190 	return true;
1191 }
1192 
1193 bool dc_rem_all_planes_for_stream(
1194 		const struct dc *dc,
1195 		struct dc_stream_state *stream,
1196 		struct dc_state *context)
1197 {
1198 	int i, old_plane_count;
1199 	struct dc_stream_status *stream_status = NULL;
1200 	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1201 
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
			break;
		}
1207 
1208 	if (stream_status == NULL) {
1209 		dm_error("Existing stream %p not found!\n", stream);
1210 		return false;
1211 	}
1212 
1213 	old_plane_count = stream_status->plane_count;
1214 
1215 	for (i = 0; i < old_plane_count; i++)
1216 		del_planes[i] = stream_status->plane_states[i];
1217 
1218 	for (i = 0; i < old_plane_count; i++)
1219 		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1220 			return false;
1221 
1222 	return true;
1223 }
1224 
1225 static bool add_all_planes_for_stream(
1226 		const struct dc *dc,
1227 		struct dc_stream_state *stream,
1228 		const struct dc_validation_set set[],
1229 		int set_count,
1230 		struct dc_state *context)
1231 {
1232 	int i, j;
1233 
1234 	for (i = 0; i < set_count; i++)
1235 		if (set[i].stream == stream)
1236 			break;
1237 
1238 	if (i == set_count) {
1239 		dm_error("Stream %p not found in set!\n", stream);
1240 		return false;
1241 	}
1242 
1243 	for (j = 0; j < set[i].plane_count; j++)
1244 		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1245 			return false;
1246 
1247 	return true;
1248 }
1249 
1250 bool dc_add_all_planes_for_stream(
1251 		const struct dc *dc,
1252 		struct dc_stream_state *stream,
1253 		struct dc_plane_state * const *plane_states,
1254 		int plane_count,
1255 		struct dc_state *context)
1256 {
1257 	struct dc_validation_set set;
1258 	int i;
1259 
1260 	set.stream = stream;
1261 	set.plane_count = plane_count;
1262 
1263 	for (i = 0; i < plane_count; i++)
1264 		set.plane_states[i] = plane_states[i];
1265 
1266 	return add_all_planes_for_stream(dc, stream, &set, 1, context);
1267 }
1268 
1269 
1270 
1271 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1272 		struct dc_stream_state *new_stream)
1273 {
1274 	if (cur_stream == NULL)
1275 		return true;
1276 
1277 	/* If sink pointer changed, it means this is a hotplug, we should do
1278 	 * full hw setting.
1279 	 */
1280 	if (cur_stream->sink != new_stream->sink)
1281 		return true;
1282 
1283 	/* If output color space is changed, need to reprogram info frames */
1284 	if (cur_stream->output_color_space != new_stream->output_color_space)
1285 		return true;
1286 
1287 	return memcmp(
1288 		&cur_stream->timing,
1289 		&new_stream->timing,
1290 		sizeof(struct dc_crtc_timing)) != 0;
1291 }
1292 
1293 static bool are_stream_backends_same(
1294 	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1295 {
1296 	if (stream_a == stream_b)
1297 		return true;
1298 
1299 	if (stream_a == NULL || stream_b == NULL)
1300 		return false;
1301 
1302 	if (is_timing_changed(stream_a, stream_b))
1303 		return false;
1304 
1305 	return true;
1306 }
1307 
1308 bool dc_is_stream_unchanged(
1309 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1310 {
1311 
1312 	if (!are_stream_backends_same(old_stream, stream))
1313 		return false;
1314 
1315 	return true;
1316 }
1317 
1318 /* Maximum TMDS single link pixel clock 165MHz */
1319 #define TMDS_MAX_PIXEL_CLOCK_IN_KHZ 165000
1320 
1321 static void update_stream_engine_usage(
1322 		struct resource_context *res_ctx,
1323 		const struct resource_pool *pool,
1324 		struct stream_encoder *stream_enc,
1325 		bool acquired)
1326 {
1327 	int i;
1328 
1329 	for (i = 0; i < pool->stream_enc_count; i++) {
1330 		if (pool->stream_enc[i] == stream_enc)
1331 			res_ctx->is_stream_enc_acquired[i] = acquired;
1332 	}
1333 }
1334 
1335 /* TODO: release audio object */
1336 void update_audio_usage(
1337 		struct resource_context *res_ctx,
1338 		const struct resource_pool *pool,
1339 		struct audio *audio,
1340 		bool acquired)
1341 {
1342 	int i;
1343 	for (i = 0; i < pool->audio_count; i++) {
1344 		if (pool->audios[i] == audio)
1345 			res_ctx->is_audio_acquired[i] = acquired;
1346 	}
1347 }
1348 
1349 static int acquire_first_free_pipe(
1350 		struct resource_context *res_ctx,
1351 		const struct resource_pool *pool,
1352 		struct dc_stream_state *stream)
1353 {
1354 	int i;
1355 
1356 	for (i = 0; i < pool->pipe_count; i++) {
1357 		if (!res_ctx->pipe_ctx[i].stream) {
1358 			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1359 
1360 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1361 			pipe_ctx->plane_res.mi = pool->mis[i];
1362 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1363 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1364 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1365 			pipe_ctx->stream_res.opp = pool->opps[i];
1366 			pipe_ctx->pipe_idx = i;
1367 
1368 
1369 			pipe_ctx->stream = stream;
1370 			return i;
1371 		}
1372 	}
1373 	return -1;
1374 }
1375 
1376 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1377 		struct resource_context *res_ctx,
1378 		const struct resource_pool *pool,
1379 		struct dc_stream_state *stream)
1380 {
1381 	int i;
1382 	int j = -1;
1383 	struct dc_link *link = stream->sink->link;
1384 
1385 	for (i = 0; i < pool->stream_enc_count; i++) {
1386 		if (!res_ctx->is_stream_enc_acquired[i] &&
1387 				pool->stream_enc[i]) {
1388 			/* Store first available for MST second display
1389 			 * in daisy chain use case */
1390 			j = i;
1391 			if (pool->stream_enc[i]->id ==
1392 					link->link_enc->preferred_engine)
1393 				return pool->stream_enc[i];
1394 		}
1395 	}
1396 
	/*
	 * The preferred engine may already be acquired:
	 * 1) by the second MST display in a daisy chain, or
	 * 2) by another link whose preferred engine was taken by an MST
	 *    configuration.
	 *
	 * If the signal is DP and the preferred engine was not found, return
	 * the last available stream encoder instead.
	 *
	 * TODO: this is just a patch-up; a generic solution is required for
	 * non-DP connectors.
	 */
1409 
1410 	if (j >= 0 && dc_is_dp_signal(stream->signal))
1411 		return pool->stream_enc[j];
1412 
1413 	return NULL;
1414 }
1415 
static struct audio *find_first_free_audio(
		struct resource_context *res_ctx,
		const struct resource_pool *pool)
{
	int i;

	/* Prefer an audio endpoint whose index matches an acquired stream encoder */
	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i] &&
				res_ctx->is_stream_enc_acquired[i])
			return pool->audios[i];
	}

	/* No match found; fall back to first-come, first-served */
	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i])
			return pool->audios[i];
	}

	return NULL;
}
1434 
1435 bool resource_is_stream_unchanged(
1436 	struct dc_state *old_context, struct dc_stream_state *stream)
1437 {
1438 	int i;
1439 
1440 	for (i = 0; i < old_context->stream_count; i++) {
1441 		struct dc_stream_state *old_stream = old_context->streams[i];
1442 
1443 		if (are_stream_backends_same(old_stream, stream))
1444 				return true;
1445 	}
1446 
1447 	return false;
1448 }
1449 
1450 enum dc_status dc_add_stream_to_ctx(
1451 		struct dc *dc,
1452 		struct dc_state *new_ctx,
1453 		struct dc_stream_state *stream)
1454 {
1455 	struct dc_context *dc_ctx = dc->ctx;
1456 	enum dc_status res;
1457 
1458 	if (new_ctx->stream_count >= dc->res_pool->pipe_count) {
		DC_ERROR("Max streams reached, can't add stream %p!\n", stream);
1460 		return DC_ERROR_UNEXPECTED;
1461 	}
1462 
1463 	new_ctx->streams[new_ctx->stream_count] = stream;
1464 	dc_stream_retain(stream);
1465 	new_ctx->stream_count++;
1466 
1467 	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1468 	if (res != DC_OK)
1469 		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1470 
1471 	return res;
1472 }
1473 
enum dc_status dc_remove_stream_from_ctx(
1475 			struct dc *dc,
1476 			struct dc_state *new_ctx,
1477 			struct dc_stream_state *stream)
1478 {
1479 	int i;
1480 	struct dc_context *dc_ctx = dc->ctx;
1481 	struct pipe_ctx *del_pipe = NULL;
1482 
1483 	/* Release primary pipe */
1484 	for (i = 0; i < MAX_PIPES; i++) {
1485 		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1486 				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1487 			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1488 
1489 			ASSERT(del_pipe->stream_res.stream_enc);
1490 			update_stream_engine_usage(
1491 					&new_ctx->res_ctx,
1492 						dc->res_pool,
1493 					del_pipe->stream_res.stream_enc,
1494 					false);
1495 
1496 			if (del_pipe->stream_res.audio)
1497 				update_audio_usage(
1498 					&new_ctx->res_ctx,
1499 					dc->res_pool,
1500 					del_pipe->stream_res.audio,
1501 					false);
1502 
1503 			resource_unreference_clock_source(&new_ctx->res_ctx,
1504 							  dc->res_pool,
1505 							  del_pipe->clock_source);
1506 
1507 			memset(del_pipe, 0, sizeof(*del_pipe));
1508 
1509 			break;
1510 		}
1511 	}
1512 
1513 	if (!del_pipe) {
1514 		DC_ERROR("Pipe not found for stream %p !\n", stream);
1515 		return DC_ERROR_UNEXPECTED;
1516 	}
1517 
1518 	for (i = 0; i < new_ctx->stream_count; i++)
1519 		if (new_ctx->streams[i] == stream)
1520 			break;
1521 
1522 	if (new_ctx->streams[i] != stream) {
1523 		DC_ERROR("Context doesn't have stream %p !\n", stream);
1524 		return DC_ERROR_UNEXPECTED;
1525 	}
1526 
1527 	dc_stream_release(new_ctx->streams[i]);
1528 	new_ctx->stream_count--;
1529 
1530 	/* Trim back arrays */
1531 	for (; i < new_ctx->stream_count; i++) {
1532 		new_ctx->streams[i] = new_ctx->streams[i + 1];
1533 		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1534 	}
1535 
1536 	new_ctx->streams[new_ctx->stream_count] = NULL;
1537 	memset(
1538 			&new_ctx->stream_status[new_ctx->stream_count],
1539 			0,
1540 			sizeof(new_ctx->stream_status[0]));
1541 
1542 	return DC_OK;
1543 }
1544 
1545 static void copy_pipe_ctx(
1546 	const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
1547 {
1548 	struct dc_plane_state *plane_state = to_pipe_ctx->plane_state;
1549 	struct dc_stream_state *stream = to_pipe_ctx->stream;
1550 
1551 	*to_pipe_ctx = *from_pipe_ctx;
1552 	to_pipe_ctx->stream = stream;
1553 	if (plane_state != NULL)
1554 		to_pipe_ctx->plane_state = plane_state;
1555 }
1556 
1557 static struct dc_stream_state *find_pll_sharable_stream(
1558 		struct dc_stream_state *stream_needs_pll,
1559 		struct dc_state *context)
1560 {
1561 	int i;
1562 
1563 	for (i = 0; i < context->stream_count; i++) {
1564 		struct dc_stream_state *stream_has_pll = context->streams[i];
1565 
		/* We are looking for a non-DP, non-virtual stream */
1567 		if (resource_are_streams_timing_synchronizable(
1568 			stream_needs_pll, stream_has_pll)
1569 			&& !dc_is_dp_signal(stream_has_pll->signal)
1570 			&& stream_has_pll->sink->link->connector_signal
1571 			!= SIGNAL_TYPE_VIRTUAL)
1572 			return stream_has_pll;
1573 
1574 	}
1575 
1576 	return NULL;
1577 }
1578 
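/*
 * Normalize the nominal pixel clock for HDMI: 4:2:0 halves it, and deep color
 * (for non-4:2:2 encodings) scales it by bpc/8. For example, a 10-bit
 * 300000 kHz stream normalizes to 300000 * 30 / 24 = 375000 kHz.
 */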
1579 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1580 {
1581 	uint32_t pix_clk = timing->pix_clk_khz;
1582 	uint32_t normalized_pix_clk = pix_clk;
1583 
1584 	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1585 		pix_clk /= 2;
1586 	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1587 		switch (timing->display_color_depth) {
1588 		case COLOR_DEPTH_888:
1589 			normalized_pix_clk = pix_clk;
1590 			break;
1591 		case COLOR_DEPTH_101010:
1592 			normalized_pix_clk = (pix_clk * 30) / 24;
1593 			break;
1594 		case COLOR_DEPTH_121212:
1595 			normalized_pix_clk = (pix_clk * 36) / 24;
1596 		break;
1597 		case COLOR_DEPTH_161616:
1598 			normalized_pix_clk = (pix_clk * 48) / 24;
1599 		break;
1600 		default:
1601 			ASSERT(0);
1602 		break;
1603 		}
1604 	}
1605 	return normalized_pix_clk;
1606 }
1607 
1608 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1609 {
1610 	/* update actual pixel clock on all streams */
1611 	if (dc_is_hdmi_signal(stream->signal))
1612 		stream->phy_pix_clk = get_norm_pix_clk(
1613 			&stream->timing);
1614 	else
1615 		stream->phy_pix_clk =
1616 			stream->timing.pix_clk_khz;
1617 }
1618 
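/*
 * Map pool resources to a stream: acquire a free pipe (or, on DCN, a split
 * pipe), pick a stream encoder matching the link's preferred engine and, if
 * the sink is audio capable, an audio endpoint, then record the OTG and
 * stream encoder instances in the stream status.
 */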
1619 enum dc_status resource_map_pool_resources(
1620 		const struct dc  *dc,
1621 		struct dc_state *context,
1622 		struct dc_stream_state *stream)
1623 {
1624 	const struct resource_pool *pool = dc->res_pool;
1625 	int i;
1626 	struct dc_context *dc_ctx = dc->ctx;
1627 	struct pipe_ctx *pipe_ctx = NULL;
1628 	int pipe_idx = -1;
1629 
1630 	/* TODO Check if this is needed */
1631 	/*if (!resource_is_stream_unchanged(old_context, stream)) {
1632 			if (stream != NULL && old_context->streams[i] != NULL) {
1633 				stream->bit_depth_params =
1634 						old_context->streams[i]->bit_depth_params;
1635 				stream->clamping = old_context->streams[i]->clamping;
1636 				continue;
1637 			}
1638 		}
1639 	*/
1640 
1641 	/* acquire new resources */
1642 	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1643 
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
	if (pipe_idx < 0)
		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
#endif

	if (pipe_idx < 0)
		return DC_NO_CONTROLLER_RESOURCE;
1649 
1650 	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1651 
1652 	pipe_ctx->stream_res.stream_enc =
1653 		find_first_free_match_stream_enc_for_link(
1654 			&context->res_ctx, pool, stream);
1655 
1656 	if (!pipe_ctx->stream_res.stream_enc)
1657 		return DC_NO_STREAM_ENG_RESOURCE;
1658 
1659 	update_stream_engine_usage(
1660 		&context->res_ctx, pool,
1661 		pipe_ctx->stream_res.stream_enc,
1662 		true);
1663 
1664 	/* TODO: Add check if ASIC support and EDID audio */
1665 	if (!stream->sink->converter_disable_audio &&
1666 	    dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1667 	    stream->audio_info.mode_count) {
1668 		pipe_ctx->stream_res.audio = find_first_free_audio(
1669 		&context->res_ctx, pool);
1670 
		/*
		 * Audio is assigned on a first-come, first-served basis.
		 * Some ASICs have fewer audio resources than pipes, so
		 * stream_res.audio may legitimately be NULL here.
		 */
1676 		if (pipe_ctx->stream_res.audio)
1677 			update_audio_usage(&context->res_ctx, pool,
1678 					   pipe_ctx->stream_res.audio, true);
1679 	}
1680 
1681 	for (i = 0; i < context->stream_count; i++)
1682 		if (context->streams[i] == stream) {
1683 			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1684 			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1685 			return DC_OK;
1686 		}
1687 
1688 	DC_ERROR("Stream %p not found in new ctx!\n", stream);
1689 	return DC_ERROR_UNEXPECTED;
1690 }
1691 
1692 /* first stream in the context is used to populate the rest */
1693 void validate_guaranteed_copy_streams(
1694 		struct dc_state *context,
1695 		int max_streams)
1696 {
1697 	int i;
1698 
1699 	for (i = 1; i < max_streams; i++) {
1700 		context->streams[i] = context->streams[0];
1701 
1702 		copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
1703 			      &context->res_ctx.pipe_ctx[i]);
1704 		context->res_ctx.pipe_ctx[i].stream =
1705 				context->res_ctx.pipe_ctx[0].stream;
1706 
1707 		dc_stream_retain(context->streams[i]);
1708 		context->stream_count++;
1709 	}
1710 }
1711 
1712 void dc_resource_state_copy_construct_current(
1713 		const struct dc *dc,
1714 		struct dc_state *dst_ctx)
1715 {
1716 	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
1717 }
1718 
1719 
1720 void dc_resource_state_construct(
1721 		const struct dc *dc,
1722 		struct dc_state *dst_ctx)
1723 {
1724 	dst_ctx->dis_clk = dc->res_pool->display_clock;
1725 }
1726 
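/*
 * Global validation of a new state: run the ASIC-specific validate_global
 * hook, move DP streams that cannot share a PLL onto the dedicated DP clock
 * source, rebuild the scaling parameters and finally check bandwidth.
 */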
1727 enum dc_status dc_validate_global_state(
1728 		struct dc *dc,
1729 		struct dc_state *new_ctx)
1730 {
1731 	enum dc_status result = DC_ERROR_UNEXPECTED;
1732 	int i, j;
1733 
	if (dc->res_pool->funcs->validate_global) {
		result = dc->res_pool->funcs->validate_global(dc, new_ctx);
		if (result != DC_OK)
			return result;
	}
1739 
1740 	for (i = 0; new_ctx && i < new_ctx->stream_count; i++) {
1741 		struct dc_stream_state *stream = new_ctx->streams[i];
1742 
1743 		for (j = 0; j < dc->res_pool->pipe_count; j++) {
1744 			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
1745 
1746 			if (pipe_ctx->stream != stream)
1747 				continue;
1748 
			/* Switch to the DP clock source only if there is
			 * no non-DP stream that shares the same timing
			 * with the DP stream.
			 */
1753 			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1754 				!find_pll_sharable_stream(stream, new_ctx)) {
1755 
1756 				resource_unreference_clock_source(
1757 						&new_ctx->res_ctx,
1758 						dc->res_pool,
1759 						pipe_ctx->clock_source);
1760 
1761 				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
1762 				resource_reference_clock_source(
1763 						&new_ctx->res_ctx,
1764 						dc->res_pool,
						pipe_ctx->clock_source);
1766 			}
1767 		}
1768 	}
1769 
1770 	result = resource_build_scaling_params_for_context(dc, new_ctx);
1771 
1772 	if (result == DC_OK)
1773 		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
1774 			result = DC_FAIL_BANDWIDTH_VALIDATE;
1775 
1776 	return result;
1777 }
1778 
1779 static void patch_gamut_packet_checksum(
1780 		struct encoder_info_packet *gamut_packet)
1781 {
1782 	/* For gamut we recalc checksum */
1783 	if (gamut_packet->valid) {
1784 		uint8_t chk_sum = 0;
1785 		uint8_t *ptr;
1786 		uint8_t i;
1787 
		/* Start of the Gamut data. */
1789 		ptr = &gamut_packet->sb[3];
1790 
1791 		for (i = 0; i <= gamut_packet->sb[1]; i++)
1792 			chk_sum += ptr[i];
1793 
1794 		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
1795 	}
1796 }
1797 
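/*
 * Build the AVI InfoFrame (CEA-861/HDMI): pixel encoding, colorimetry,
 * aspect ratio, content type, quantization range, VIC and bar info are
 * derived from the stream timing and the sink's EDID caps, and the one-byte
 * complement checksum is computed over the header and payload.
 */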
1798 static void set_avi_info_frame(
1799 		struct encoder_info_packet *info_packet,
1800 		struct pipe_ctx *pipe_ctx)
1801 {
1802 	struct dc_stream_state *stream = pipe_ctx->stream;
1803 	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1804 	struct info_frame info_frame = { {0} };
1805 	uint32_t pixel_encoding = 0;
1806 	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1807 	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1808 	bool itc = false;
1809 	uint8_t itc_value = 0;
1810 	uint8_t cn0_cn1 = 0;
1811 	unsigned int cn0_cn1_value = 0;
1812 	uint8_t *check_sum = NULL;
1813 	uint8_t byte_index = 0;
1814 	union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi;
1815 	union display_content_support support = {0};
1816 	unsigned int vic = pipe_ctx->stream->timing.vic;
1817 	enum dc_timing_3d_format format;
1818 
1819 	color_space = pipe_ctx->stream->output_color_space;
1820 	if (color_space == COLOR_SPACE_UNKNOWN)
		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
			COLOR_SPACE_SRGB : COLOR_SPACE_YCBCR709;
1823 
1824 	/* Initialize header */
1825 	hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1)
	 */
1828 	hdmi_info->bits.header.version = 2;
1829 	hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
1830 
1831 	/*
1832 	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1833 	 * according to HDMI 2.0 spec (Section 10.1)
1834 	 */
1835 
1836 	switch (stream->timing.pixel_encoding) {
1837 	case PIXEL_ENCODING_YCBCR422:
1838 		pixel_encoding = 1;
1839 		break;
1840 
1841 	case PIXEL_ENCODING_YCBCR444:
1842 		pixel_encoding = 2;
1843 		break;
1844 	case PIXEL_ENCODING_YCBCR420:
1845 		pixel_encoding = 3;
1846 		break;
1847 
1848 	case PIXEL_ENCODING_RGB:
1849 	default:
1850 		pixel_encoding = 0;
1851 	}
1852 
1853 	/* Y0_Y1_Y2 : The pixel encoding */
1854 	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1855 	hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding;
1856 
1857 	/* A0 = 1 Active Format Information valid */
1858 	hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID;
1859 
1860 	/* B0, B1 = 3; Bar info data is valid */
1861 	hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID;
1862 
1863 	hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
1864 
1865 	/* S0, S1 : Underscan / Overscan */
1866 	/* TODO: un-hardcode scan type */
1867 	scan_type = SCANNING_TYPE_UNDERSCAN;
1868 	hdmi_info->bits.S0_S1 = scan_type;
1869 
1870 	/* C0, C1 : Colorimetry */
1871 	if (color_space == COLOR_SPACE_YCBCR709 ||
1872 			color_space == COLOR_SPACE_YCBCR709_LIMITED)
1873 		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709;
1874 	else if (color_space == COLOR_SPACE_YCBCR601 ||
1875 			color_space == COLOR_SPACE_YCBCR601_LIMITED)
1876 		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601;
	else
		hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA;
1880 	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
1881 			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
1882 			color_space == COLOR_SPACE_2020_YCBCR) {
1883 		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
1884 		hdmi_info->bits.C0_C1   = COLORIMETRY_EXTENDED;
1885 	} else if (color_space == COLOR_SPACE_ADOBERGB) {
1886 		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
1887 		hdmi_info->bits.C0_C1   = COLORIMETRY_EXTENDED;
1888 	}
1889 
1890 	/* TODO: un-hardcode aspect ratio */
1891 	aspect = stream->timing.aspect_ratio;
1892 
1893 	switch (aspect) {
1894 	case ASPECT_RATIO_4_3:
1895 	case ASPECT_RATIO_16_9:
1896 		hdmi_info->bits.M0_M1 = aspect;
1897 		break;
1898 
1899 	case ASPECT_RATIO_NO_DATA:
1900 	case ASPECT_RATIO_64_27:
1901 	case ASPECT_RATIO_256_135:
1902 	default:
1903 		hdmi_info->bits.M0_M1 = 0;
1904 	}
1905 
1906 	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
1907 	hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
1908 
1909 	/* TODO: un-hardcode cn0_cn1 and itc */
1910 
1911 	cn0_cn1 = 0;
1912 	cn0_cn1_value = 0;
1913 
1914 	itc = true;
1915 	itc_value = 1;
1916 
1917 	support = stream->sink->edid_caps.content_support;
1918 
1919 	if (itc) {
1920 		if (!support.bits.valid_content_type) {
1921 			cn0_cn1_value = 0;
1922 		} else {
1923 			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
1924 				if (support.bits.graphics_content == 1) {
1925 					cn0_cn1_value = 0;
1926 				}
1927 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
1928 				if (support.bits.photo_content == 1) {
1929 					cn0_cn1_value = 1;
1930 				} else {
1931 					cn0_cn1_value = 0;
1932 					itc_value = 0;
1933 				}
1934 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
1935 				if (support.bits.cinema_content == 1) {
1936 					cn0_cn1_value = 2;
1937 				} else {
1938 					cn0_cn1_value = 0;
1939 					itc_value = 0;
1940 				}
1941 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
1942 				if (support.bits.game_content == 1) {
1943 					cn0_cn1_value = 3;
1944 				} else {
1945 					cn0_cn1_value = 0;
1946 					itc_value = 0;
1947 				}
1948 			}
1949 		}
1950 		hdmi_info->bits.CN0_CN1 = cn0_cn1_value;
1951 		hdmi_info->bits.ITC = itc_value;
1952 	}
1953 
1954 	/* TODO : We should handle YCC quantization */
1955 	/* but we do not have matrix calculation */
1956 	if (stream->sink->edid_caps.qs_bit == 1 &&
1957 			stream->sink->edid_caps.qy_bit == 1) {
1958 		if (color_space == COLOR_SPACE_SRGB ||
1959 			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
1960 			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
1961 			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
1962 		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
1963 					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
1964 			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
1965 			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
1966 		} else {
1967 			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
1968 			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
1969 		}
1970 	} else {
1971 		hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
1972 		hdmi_info->bits.YQ0_YQ1   = YYC_QUANTIZATION_LIMITED_RANGE;
1973 	}
1974 
	/* VIC */
	format = stream->timing.timing_3d_format;
	/* TODO: add 3D stereo support */
	if (format != TIMING_3D_FORMAT_NONE) {
		/* Per the HDMI spec, the HDMI VIC needs to be converted to a
		 * CEA VIC when 3D is enabled.
		 */
1980 		switch (pipe_ctx->stream->timing.hdmi_vic) {
1981 		case 1:
1982 			vic = 95;
1983 			break;
1984 		case 2:
1985 			vic = 94;
1986 			break;
1987 		case 3:
1988 			vic = 93;
1989 			break;
1990 		case 4:
1991 			vic = 98;
1992 			break;
1993 		default:
1994 			break;
1995 		}
1996 	}
1997 	hdmi_info->bits.VIC0_VIC7 = vic;
1998 
	/* Pixel repetition
	 * PR0 - PR3 start from 0, whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition starts from 1.
	 */
2002 	hdmi_info->bits.PR0_PR3 = 0;
2003 
2004 	/* Bar Info
2005 	 * barTop:    Line Number of End of Top Bar.
2006 	 * barBottom: Line Number of Start of Bottom Bar.
2007 	 * barLeft:   Pixel Number of End of Left Bar.
2008 	 * barRight:  Pixel Number of Start of Right Bar. */
2009 	hdmi_info->bits.bar_top = stream->timing.v_border_top;
2010 	hdmi_info->bits.bar_bottom = (stream->timing.v_total
2011 			- stream->timing.v_border_bottom + 1);
2012 	hdmi_info->bits.bar_left  = stream->timing.h_border_left;
2013 	hdmi_info->bits.bar_right = (stream->timing.h_total
2014 			- stream->timing.h_border_right + 1);
2015 
2016 	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2017 	check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
2018 
2019 	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2020 
2021 	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2022 		*check_sum += hdmi_info->packet_raw_data.sb[byte_index];
2023 
2024 	/* one byte complement */
2025 	*check_sum = (uint8_t) (0x100 - *check_sum);
2026 
2027 	/* Store in hw_path_mode */
2028 	info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
2029 	info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
2030 	info_packet->hb2 = hdmi_info->packet_raw_data.hb2;
2031 
2032 	for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
2033 				info_packet_hdmi.packet_raw_data.sb); byte_index++)
2034 		info_packet->sb[byte_index] = info_frame.avi_info_packet.
2035 				info_packet_hdmi.packet_raw_data.sb[byte_index];
2036 
2037 	info_packet->valid = true;
2038 }
2039 
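/*
 * Build the HDMI Vendor Specific InfoFrame (VSIF). It is only emitted for
 * 3D stereo timings or HDMI-VIC (4K) modes; the payload carries the HDMI
 * IEEE OUI (0x000C03), the HDMI_Video_Format field and either the
 * 3D_Structure or the HDMI VIC code.
 */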
2040 static void set_vendor_info_packet(
2041 		struct encoder_info_packet *info_packet,
2042 		struct dc_stream_state *stream)
2043 {
2044 	uint32_t length = 0;
2045 	bool hdmi_vic_mode = false;
2046 	uint8_t checksum = 0;
2047 	uint32_t i = 0;
2048 	enum dc_timing_3d_format format;
	/* Can be different depending on packet content. TODO:
	 * unsigned int length = pPathMode->dolbyVision ? 24 : 5;
	 */
2051 
2052 	info_packet->valid = false;
2053 
2054 	format = stream->timing.timing_3d_format;
2055 	if (stream->view_format == VIEW_3D_FORMAT_NONE)
2056 		format = TIMING_3D_FORMAT_NONE;
2057 
2058 	/* Can be different depending on packet content */
2059 	length = 5;
2060 
2061 	if (stream->timing.hdmi_vic != 0
2062 			&& stream->timing.h_total >= 3840
2063 			&& stream->timing.v_total >= 2160)
2064 		hdmi_vic_mode = true;
2065 
	/* According to HDMI 1.4a CTS, VSIF should be sent
	 * for both 3D stereo and HDMI VIC modes.
	 * For all other modes, no VSIF is sent.
	 */
2069 
2070 	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2071 		return;
2072 
2073 	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2074 	info_packet->sb[1] = 0x03;
2075 	info_packet->sb[2] = 0x0C;
2076 	info_packet->sb[3] = 0x00;
2077 
	/* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
2079 	 * The value for HDMI_Video_Format are:
2080 	 * 0x0 (0b000) - No additional HDMI video format is presented in this
2081 	 * packet
2082 	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2083 	 * parameter follows
2084 	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2085 	 * potentially 3D_Ext_Data follows
2086 	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
2087 	if (format != TIMING_3D_FORMAT_NONE)
2088 		info_packet->sb[4] = (2 << 5);
2089 	else if (hdmi_vic_mode)
2090 		info_packet->sb[4] = (1 << 5);
2091 
2092 	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
2094 	 * The value for 3D_Structure are:
2095 	 * 0x0 - Frame Packing
2096 	 * 0x1 - Field Alternative
2097 	 * 0x2 - Line Alternative
2098 	 * 0x3 - Side-by-Side (full)
2099 	 * 0x4 - L + depth
2100 	 * 0x5 - L + depth + graphics + graphics-depth
2101 	 * 0x6 - Top-and-Bottom
2102 	 * 0x7 - Reserved for future use
2103 	 * 0x8 - Side-by-Side (Half)
2104 	 * 0x9..0xE - Reserved for future use
2105 	 * 0xF - Not used */
2106 	switch (format) {
2107 	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2108 	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2109 		info_packet->sb[5] = (0x0 << 4);
2110 		break;
2111 
2112 	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2113 	case TIMING_3D_FORMAT_SBS_SW_PACKED:
2114 		info_packet->sb[5] = (0x8 << 4);
2115 		length = 6;
2116 		break;
2117 
2118 	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2119 	case TIMING_3D_FORMAT_TB_SW_PACKED:
2120 		info_packet->sb[5] = (0x6 << 4);
2121 		break;
2122 
2123 	default:
2124 		break;
2125 	}
2126 
	/* PB5: If PB4 is set to 0x1 (extended resolution format),
	 * fill PB5 with the correct HDMI VIC code.
	 */
2129 	if (hdmi_vic_mode)
2130 		info_packet->sb[5] = stream->timing.hdmi_vic;
2131 
2132 	/* Header */
2133 	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2134 	info_packet->hb1 = 0x01; /* Version */
2135 
2136 	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2137 	info_packet->hb2 = (uint8_t) (length);
2138 
2139 	/* Calculate checksum */
2140 	checksum = 0;
2141 	checksum += info_packet->hb0;
2142 	checksum += info_packet->hb1;
2143 	checksum += info_packet->hb2;
2144 
2145 	for (i = 1; i <= length; i++)
2146 		checksum += info_packet->sb[i];
2147 
2148 	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2149 
2150 	info_packet->valid = true;
2151 }
2152 
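/*
 * Build the SPD/FreeSync InfoFrame. The header differs between HDMI
 * (type 0x83, length 8) and DP (SDP with packet type 0x83 in HB1); the
 * payload carries the AMD IEEE OUI, the FreeSync supported/enabled/active
 * flags and the minimum/maximum refresh rates in Hz.
 */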
2153 static void set_spd_info_packet(
2154 		struct encoder_info_packet *info_packet,
2155 		struct dc_stream_state *stream)
2156 {
2157 	/* SPD info packet for FreeSync */
2158 
2159 	unsigned char checksum = 0;
2160 	unsigned int idx, payload_size = 0;
2161 
2162 	/* Check if Freesync is supported. Return if false. If true,
2163 	 * set the corresponding bit in the info packet
2164 	 */
2165 	if (stream->freesync_ctx.supported == false)
2166 		return;
2167 
2168 	if (dc_is_hdmi_signal(stream->signal)) {
2169 
2170 		/* HEADER */
2171 
2172 		/* HB0  = Packet Type = 0x83 (Source Product
2173 		 *	  Descriptor InfoFrame)
2174 		 */
2175 		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2176 
2177 		/* HB1  = Version = 0x01 */
2178 		info_packet->hb1 = 0x01;
2179 
2180 		/* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2181 		info_packet->hb2 = 0x08;
2182 
2183 		payload_size = 0x08;
2184 
2185 	} else if (dc_is_dp_signal(stream->signal)) {
2186 
2187 		/* HEADER */
2188 
2189 		/* HB0  = Secondary-data Packet ID = 0 - Only non-zero
2190 		 *	  when used to associate audio related info packets
2191 		 */
2192 		info_packet->hb0 = 0x00;
2193 
2194 		/* HB1  = Packet Type = 0x83 (Source Product
2195 		 *	  Descriptor InfoFrame)
2196 		 */
2197 		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2198 
2199 		/* HB2  = [Bits 7:0 = Least significant eight bits -
2200 		 *	  For INFOFRAME, the value must be 1Bh]
2201 		 */
2202 		info_packet->hb2 = 0x1B;
2203 
2204 		/* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2205 		 *	  [Bits 1:0 = Most significant two bits = 0x00]
2206 		 */
2207 		info_packet->hb3 = 0x04;
2208 
2209 		payload_size = 0x1B;
2210 	}
2211 
2212 	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2213 	info_packet->sb[1] = 0x1A;
2214 
2215 	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2216 	info_packet->sb[2] = 0x00;
2217 
2218 	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2219 	info_packet->sb[3] = 0x00;
2220 
2221 	/* PB4 = Reserved */
2222 	info_packet->sb[4] = 0x00;
2223 
2224 	/* PB5 = Reserved */
2225 	info_packet->sb[5] = 0x00;
2226 
2227 	/* PB6 = [Bits 7:3 = Reserved] */
2228 	info_packet->sb[6] = 0x00;
2229 
2230 	if (stream->freesync_ctx.supported == true)
2231 		/* PB6 = [Bit 0 = FreeSync Supported] */
2232 		info_packet->sb[6] |= 0x01;
2233 
2234 	if (stream->freesync_ctx.enabled == true)
2235 		/* PB6 = [Bit 1 = FreeSync Enabled] */
2236 		info_packet->sb[6] |= 0x02;
2237 
2238 	if (stream->freesync_ctx.active == true)
2239 		/* PB6 = [Bit 2 = FreeSync Active] */
2240 		info_packet->sb[6] |= 0x04;
2241 
2242 	/* PB7 = FreeSync Minimum refresh rate (Hz) */
2243 	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2244 			min_refresh_in_micro_hz / 1000000);
2245 
2246 	/* PB8 = FreeSync Maximum refresh rate (Hz)
2247 	 *
2248 	 * Note: We do not use the maximum capable refresh rate
2249 	 * of the panel, because we should never go above the field
2250 	 * rate of the mode timing set.
2251 	 */
2252 	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2253 			nominal_refresh_in_micro_hz / 1000000);
2254 
2255 	/* PB9 - PB27  = Reserved */
2256 	for (idx = 9; idx <= 27; idx++)
2257 		info_packet->sb[idx] = 0x00;
2258 
2259 	/* Calculate checksum */
2260 	checksum += info_packet->hb0;
2261 	checksum += info_packet->hb1;
2262 	checksum += info_packet->hb2;
2263 	checksum += info_packet->hb3;
2264 
2265 	for (idx = 1; idx <= payload_size; idx++)
2266 		checksum += info_packet->sb[idx];
2267 
2268 	/* PB0 = Checksum (one byte complement) */
2269 	info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2270 
2271 	info_packet->valid = true;
2272 }
2273 
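/*
 * Build the HDR static metadata packet (the HDMI Dynamic Range and
 * Mastering InfoFrame, type 0x87, or the equivalent DP SDP) from the
 * plane's hdr_static_ctx: mastering display chromaticities, min/max
 * luminance, MaxCLL and MaxFALL.
 */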
2274 static void set_hdr_static_info_packet(
2275 		struct encoder_info_packet *info_packet,
2276 		struct dc_plane_state *plane_state,
2277 		struct dc_stream_state *stream)
2278 {
2279 	uint16_t i = 0;
2280 	enum signal_type signal = stream->signal;
2281 	struct dc_hdr_static_metadata hdr_metadata;
2282 	uint32_t data;
2283 
2284 	if (!plane_state)
2285 		return;
2286 
2287 	hdr_metadata = plane_state->hdr_static_ctx;
2288 
2289 	if (!hdr_metadata.hdr_supported)
2290 		return;
2291 
2292 	if (dc_is_hdmi_signal(signal)) {
2293 		info_packet->valid = true;
2294 
2295 		info_packet->hb0 = 0x87;
2296 		info_packet->hb1 = 0x01;
2297 		info_packet->hb2 = 0x1A;
2298 		i = 1;
2299 	} else if (dc_is_dp_signal(signal)) {
2300 		info_packet->valid = true;
2301 
2302 		info_packet->hb0 = 0x00;
2303 		info_packet->hb1 = 0x87;
2304 		info_packet->hb2 = 0x1D;
2305 		info_packet->hb3 = (0x13 << 2);
2306 		i = 2;
2307 	}
2308 
2309 	data = hdr_metadata.is_hdr;
2310 	info_packet->sb[i++] = data ? 0x02 : 0x00;
2311 	info_packet->sb[i++] = 0x00;
2312 
2313 	data = hdr_metadata.chromaticity_green_x / 2;
2314 	info_packet->sb[i++] = data & 0xFF;
2315 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2316 
2317 	data = hdr_metadata.chromaticity_green_y / 2;
2318 	info_packet->sb[i++] = data & 0xFF;
2319 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2320 
2321 	data = hdr_metadata.chromaticity_blue_x / 2;
2322 	info_packet->sb[i++] = data & 0xFF;
2323 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2324 
2325 	data = hdr_metadata.chromaticity_blue_y / 2;
2326 	info_packet->sb[i++] = data & 0xFF;
2327 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2328 
2329 	data = hdr_metadata.chromaticity_red_x / 2;
2330 	info_packet->sb[i++] = data & 0xFF;
2331 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2332 
2333 	data = hdr_metadata.chromaticity_red_y / 2;
2334 	info_packet->sb[i++] = data & 0xFF;
2335 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2336 
2337 	data = hdr_metadata.chromaticity_white_point_x / 2;
2338 	info_packet->sb[i++] = data & 0xFF;
2339 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2340 
2341 	data = hdr_metadata.chromaticity_white_point_y / 2;
2342 	info_packet->sb[i++] = data & 0xFF;
2343 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2344 
2345 	data = hdr_metadata.max_luminance;
2346 	info_packet->sb[i++] = data & 0xFF;
2347 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2348 
2349 	data = hdr_metadata.min_luminance;
2350 	info_packet->sb[i++] = data & 0xFF;
2351 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2352 
2353 	data = hdr_metadata.maximum_content_light_level;
2354 	info_packet->sb[i++] = data & 0xFF;
2355 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2356 
2357 	data = hdr_metadata.maximum_frame_average_light_level;
2358 	info_packet->sb[i++] = data & 0xFF;
2359 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2360 
2361 	if (dc_is_hdmi_signal(signal)) {
2362 		uint32_t checksum = 0;
2363 
2364 		checksum += info_packet->hb0;
2365 		checksum += info_packet->hb1;
2366 		checksum += info_packet->hb2;
2367 
2368 		for (i = 1; i <= info_packet->hb2; i++)
2369 			checksum += info_packet->sb[i];
2370 
2371 		info_packet->sb[0] = 0x100 - checksum;
2372 	} else if (dc_is_dp_signal(signal)) {
2373 		info_packet->sb[0] = 0x01;
2374 		info_packet->sb[1] = 0x1A;
2375 	}
2376 }
2377 
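/*
 * Build the DP VSC SDP. Only revision 2 (3D stereo + PSR, eDP 1.3 or
 * higher) is currently emitted, and only when PSR is enabled on the link.
 */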
2378 static void set_vsc_info_packet(
2379 		struct encoder_info_packet *info_packet,
2380 		struct dc_stream_state *stream)
2381 {
2382 	unsigned int vscPacketRevision = 0;
2383 	unsigned int i;
2384 
2385 	if (stream->sink->link->psr_enabled) {
2386 		vscPacketRevision = 2;
2387 	}
2388 
2389 	/* VSC packet not needed based on the features
2390 	 * supported by this DP display
2391 	 */
2392 	if (vscPacketRevision == 0)
2393 		return;
2394 
2395 	if (vscPacketRevision == 0x2) {
2396 		/* Secondary-data Packet ID = 0*/
2397 		info_packet->hb0 = 0x00;
2398 		/* 07h - Packet Type Value indicating Video
2399 		 * Stream Configuration packet
2400 		 */
2401 		info_packet->hb1 = 0x07;
2402 		/* 02h = VSC SDP supporting 3D stereo and PSR
2403 		 * (applies to eDP v1.3 or higher).
2404 		 */
2405 		info_packet->hb2 = 0x02;
2406 		/* 08h = VSC packet supporting 3D stereo + PSR
2407 		 * (HB2 = 02h).
2408 		 */
2409 		info_packet->hb3 = 0x08;
2410 
2411 		for (i = 0; i < 28; i++)
2412 			info_packet->sb[i] = 0;
2413 
2414 		info_packet->valid = true;
2415 	}
2416 
2417 	/*TODO: stereo 3D support and extend pixel encoding colorimetry*/
2418 }
2419 
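/* Release every plane and stream reference held by the context. */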
2420 void dc_resource_state_destruct(struct dc_state *context)
2421 {
2422 	int i, j;
2423 
2424 	for (i = 0; i < context->stream_count; i++) {
2425 		for (j = 0; j < context->stream_status[i].plane_count; j++)
2426 			dc_plane_state_release(
2427 				context->stream_status[i].plane_states[j]);
2428 
2429 		context->stream_status[i].plane_count = 0;
2430 		dc_stream_release(context->streams[i]);
2431 		context->streams[i] = NULL;
2432 	}
2433 }
2434 
2435 /*
2436  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2437  * by the src_ctx
2438  */
2439 void dc_resource_state_copy_construct(
2440 		const struct dc_state *src_ctx,
2441 		struct dc_state *dst_ctx)
2442 {
2443 	int i, j;
2444 	struct kref refcount = dst_ctx->refcount;
2445 
2446 	*dst_ctx = *src_ctx;
2447 
2448 	for (i = 0; i < MAX_PIPES; i++) {
2449 		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2450 
2451 		if (cur_pipe->top_pipe)
2452 			cur_pipe->top_pipe =  &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2453 
2454 		if (cur_pipe->bottom_pipe)
2455 			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2456 
2457 	}
2458 
2459 	for (i = 0; i < dst_ctx->stream_count; i++) {
2460 		dc_stream_retain(dst_ctx->streams[i]);
2461 		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2462 			dc_plane_state_retain(
2463 				dst_ctx->stream_status[i].plane_states[j]);
2464 	}
2465 
2466 	/* context refcount should not be overridden */
2467 	dst_ctx->refcount = refcount;
2468 
2469 }
2470 
2471 struct clock_source *dc_resource_find_first_free_pll(
2472 		struct resource_context *res_ctx,
2473 		const struct resource_pool *pool)
2474 {
2475 	int i;
2476 
2477 	for (i = 0; i < pool->clk_src_count; ++i) {
2478 		if (res_ctx->clock_source_ref_count[i] == 0)
2479 			return pool->clock_sources[i];
2480 	}
2481 
2482 	return NULL;
2483 }
2484 
2485 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2486 {
2487 	enum signal_type signal = SIGNAL_TYPE_NONE;
2488 	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2489 
2490 	/* default all packets to invalid */
2491 	info->avi.valid = false;
2492 	info->gamut.valid = false;
2493 	info->vendor.valid = false;
2494 	info->spd.valid = false;
2495 	info->hdrsmd.valid = false;
2496 	info->vsc.valid = false;
2497 
2498 	signal = pipe_ctx->stream->signal;
2499 
	/* HDMI and DP have different info packets */
2501 	if (dc_is_hdmi_signal(signal)) {
2502 		set_avi_info_frame(&info->avi, pipe_ctx);
2503 
2504 		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2505 
2506 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2507 
2508 		set_hdr_static_info_packet(&info->hdrsmd,
2509 				pipe_ctx->plane_state, pipe_ctx->stream);
2510 
2511 	} else if (dc_is_dp_signal(signal)) {
2512 		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2513 
2514 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2515 
2516 		set_hdr_static_info_packet(&info->hdrsmd,
2517 				pipe_ctx->plane_state, pipe_ctx->stream);
2518 	}
2519 
2520 	patch_gamut_packet_checksum(&info->gamut);
2521 }
2522 
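/*
 * Pick a clock source for the stream's head pipe: DP and virtual signals
 * use the dedicated DP clock source; other signals first try to share an
 * already-used PLL (unless PLL sharing is disabled) and otherwise take the
 * first free PLL from the pool.
 */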
2523 enum dc_status resource_map_clock_resources(
2524 		const struct dc  *dc,
2525 		struct dc_state *context,
2526 		struct dc_stream_state *stream)
2527 {
2528 	/* acquire new resources */
2529 	const struct resource_pool *pool = dc->res_pool;
2530 	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2531 				&context->res_ctx, stream);
2532 
2533 	if (!pipe_ctx)
2534 		return DC_ERROR_UNEXPECTED;
2535 
2536 	if (dc_is_dp_signal(pipe_ctx->stream->signal)
2537 		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2538 		pipe_ctx->clock_source = pool->dp_clock_source;
2539 	else {
2540 		pipe_ctx->clock_source = NULL;
2541 
2542 		if (!dc->config.disable_disp_pll_sharing)
2543 			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2544 				&context->res_ctx,
2545 				pipe_ctx);
2546 
2547 		if (pipe_ctx->clock_source == NULL)
2548 			pipe_ctx->clock_source =
2549 				dc_resource_find_first_free_pll(
2550 					&context->res_ctx,
2551 					pool);
2552 	}
2553 
2554 	if (pipe_ctx->clock_source == NULL)
2555 		return DC_NO_CLOCK_SOURCE_RESOURCE;
2556 
2557 	resource_reference_clock_source(
2558 		&context->res_ctx, pool,
2559 		pipe_ctx->clock_source);
2560 
2561 	return DC_OK;
2562 }
2563 
/*
 * Note: We need to disable the output if the clock source changes, since
 * the BIOS performs an optimization and does not apply the change when
 * reprogramming a PHY that has not been disabled first.
 */
2569 bool pipe_need_reprogram(
2570 		struct pipe_ctx *pipe_ctx_old,
2571 		struct pipe_ctx *pipe_ctx)
2572 {
2573 	if (!pipe_ctx_old->stream)
2574 		return false;
2575 
2576 	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2577 		return true;
2578 
2579 	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2580 		return true;
2581 
2582 	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2583 		return true;
2584 
2585 	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2586 			&& pipe_ctx_old->stream != pipe_ctx->stream)
2587 		return true;
2588 
2589 	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2590 		return true;
2591 
2592 	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2593 		return true;
2594 
2595 
2596 	return false;
2597 }
2598 
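/*
 * Translate the stream's dither option into FMT bit-depth-reduction
 * settings: truncation depth/mode, spatial dither depth and randomness,
 * and temporal (frame modulation) dither depth.
 */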
2599 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2600 		struct bit_depth_reduction_params *fmt_bit_depth)
2601 {
2602 	enum dc_dither_option option = stream->dither_option;
2603 	enum dc_pixel_encoding pixel_encoding =
2604 			stream->timing.pixel_encoding;
2605 
2606 	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2607 
2608 	if (option == DITHER_OPTION_DEFAULT) {
2609 		switch (stream->timing.display_color_depth) {
2610 		case COLOR_DEPTH_666:
2611 			option = DITHER_OPTION_SPATIAL6;
2612 			break;
2613 		case COLOR_DEPTH_888:
2614 			option = DITHER_OPTION_SPATIAL8;
2615 			break;
2616 		case COLOR_DEPTH_101010:
2617 			option = DITHER_OPTION_SPATIAL10;
2618 			break;
2619 		default:
2620 			option = DITHER_OPTION_DISABLE;
2621 		}
2622 	}
2623 
2624 	if (option == DITHER_OPTION_DISABLE)
2625 		return;
2626 
2627 	if (option == DITHER_OPTION_TRUN6) {
2628 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2629 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2630 	} else if (option == DITHER_OPTION_TRUN8 ||
2631 			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2632 			option == DITHER_OPTION_TRUN8_FM6) {
2633 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2634 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2635 	} else if (option == DITHER_OPTION_TRUN10        ||
2636 			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
2637 			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
2638 			option == DITHER_OPTION_TRUN10_FM8     ||
2639 			option == DITHER_OPTION_TRUN10_FM6     ||
2640 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2641 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2642 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2643 	}
2644 
2645 	/* special case - Formatter can only reduce by 4 bits at most.
2646 	 * When reducing from 12 to 6 bits,
2647 	 * HW recommends we use trunc with round mode
2648 	 * (if we did nothing, trunc to 10 bits would be used)
2649 	 * note that any 12->10 bit reduction is ignored prior to DCE8,
2650 	 * as the input was 10 bits.
2651 	 */
2652 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2653 			option == DITHER_OPTION_SPATIAL6 ||
2654 			option == DITHER_OPTION_FM6) {
2655 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2656 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2657 		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2658 	}
2659 
2660 	/* spatial dither
2661 	 * note that spatial modes 1-3 are never used
2662 	 */
2663 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
2664 			option == DITHER_OPTION_SPATIAL6 ||
2665 			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
2666 			option == DITHER_OPTION_TRUN8_SPATIAL6) {
2667 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2668 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2669 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2670 		fmt_bit_depth->flags.RGB_RANDOM =
2671 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2672 	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
2673 			option == DITHER_OPTION_SPATIAL8 ||
2674 			option == DITHER_OPTION_SPATIAL8_FM6        ||
2675 			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
2676 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2677 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2678 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2679 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2680 		fmt_bit_depth->flags.RGB_RANDOM =
2681 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2682 	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2683 			option == DITHER_OPTION_SPATIAL10 ||
2684 			option == DITHER_OPTION_SPATIAL10_FM8 ||
2685 			option == DITHER_OPTION_SPATIAL10_FM6) {
2686 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2687 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2688 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2689 		fmt_bit_depth->flags.RGB_RANDOM =
2690 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2691 	}
2692 
2693 	if (option == DITHER_OPTION_SPATIAL6 ||
2694 			option == DITHER_OPTION_SPATIAL8 ||
2695 			option == DITHER_OPTION_SPATIAL10) {
2696 		fmt_bit_depth->flags.FRAME_RANDOM = 0;
2697 	} else {
2698 		fmt_bit_depth->flags.FRAME_RANDOM = 1;
2699 	}
2700 
	/* temporal dither */
2704 	if (option == DITHER_OPTION_FM6           ||
2705 			option == DITHER_OPTION_SPATIAL8_FM6     ||
2706 			option == DITHER_OPTION_SPATIAL10_FM6     ||
2707 			option == DITHER_OPTION_TRUN10_FM6     ||
2708 			option == DITHER_OPTION_TRUN8_FM6      ||
2709 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2710 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2711 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2712 	} else if (option == DITHER_OPTION_FM8        ||
2713 			option == DITHER_OPTION_SPATIAL10_FM8  ||
2714 			option == DITHER_OPTION_TRUN10_FM8) {
2715 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2716 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2717 	} else if (option == DITHER_OPTION_FM10) {
2718 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2719 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2720 	}
2721 
2722 	fmt_bit_depth->pixel_encoding = pixel_encoding;
2723 }
2724 
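/*
 * Stream-level validation: normalize the PHY pixel clock, then check the
 * timing against the timing generator, the link encoder and the link's
 * mode-timing limits.
 */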
2725 bool dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2726 {
2727 	struct dc  *core_dc = dc;
2728 	struct dc_link *link = stream->sink->link;
2729 	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2730 	enum dc_status res = DC_OK;
2731 
2732 	calculate_phy_pix_clks(stream);
2733 
2734 	if (!tg->funcs->validate_timing(tg, &stream->timing))
2735 		res = DC_FAIL_CONTROLLER_VALIDATE;
2736 
2737 	if (res == DC_OK)
2738 		if (!link->link_enc->funcs->validate_output_with_stream(
2739 						link->link_enc, stream))
2740 			res = DC_FAIL_ENC_VALIDATE;
2741 
2742 	/* TODO: validate audio ASIC caps, encoder */
2743 
2744 	if (res == DC_OK)
2745 		res = dc_link_validate_mode_timing(stream,
2746 		      link,
2747 		      &stream->timing);
2748 
2749 	return res == DC_OK;
2750 }
2751 
2752 bool dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2753 {
2754 	/* TODO For now validates pixel format only */
2755 	if (dc->res_pool->funcs->validate_plane)
2756 		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps) == DC_OK;
2757 
2758 	return true;
2759 }
2760