1 /*
2  * Copyright 2012-15 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: AMD
23  *
24  */
25 #include "dm_services.h"
26 
27 #include "resource.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
31 #include "opp.h"
32 #include "timing_generator.h"
33 #include "transform.h"
34 #include "dpp.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 
39 #include "dce80/dce80_resource.h"
40 #include "dce100/dce100_resource.h"
41 #include "dce110/dce110_resource.h"
42 #include "dce112/dce112_resource.h"
43 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
44 #include "dcn10/dcn10_resource.h"
45 #endif
46 #include "dce120/dce120_resource.h"
47 
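/*
 * Map the hardware ASIC id (chip family + internal revision) to the DCE/DCN
 * display engine version used to select the matching resource pool.
 */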
48 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
49 {
	enum dce_version dc_version = DCE_VERSION_UNKNOWN;

	switch (asic_id.chip_family) {
53 	case FAMILY_CI:
54 		dc_version = DCE_VERSION_8_0;
55 		break;
56 	case FAMILY_KV:
57 		if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
58 		    ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
59 		    ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
60 			dc_version = DCE_VERSION_8_3;
61 		else
62 			dc_version = DCE_VERSION_8_1;
63 		break;
64 	case FAMILY_CZ:
65 		dc_version = DCE_VERSION_11_0;
66 		break;
67 
68 	case FAMILY_VI:
69 		if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
70 				ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
71 			dc_version = DCE_VERSION_10_0;
72 			break;
73 		}
74 		if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
75 				ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
76 				ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
77 			dc_version = DCE_VERSION_11_2;
78 		}
79 		break;
80 	case FAMILY_AI:
81 		dc_version = DCE_VERSION_12_0;
82 		break;
83 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
84 	case FAMILY_RV:
85 		dc_version = DCN_VERSION_1_0;
86 		break;
87 #endif
88 	default:
89 		dc_version = DCE_VERSION_UNKNOWN;
90 		break;
91 	}
92 	return dc_version;
93 }
94 
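/*
 * Create the resource pool for the detected display engine version and cache
 * the reference clock (crystal frequency) reported by the video BIOS.
 */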
95 struct resource_pool *dc_create_resource_pool(
96 				struct dc  *dc,
97 				int num_virtual_links,
98 				enum dce_version dc_version,
99 				struct hw_asic_id asic_id)
100 {
101 	struct resource_pool *res_pool = NULL;
102 
103 	switch (dc_version) {
104 	case DCE_VERSION_8_0:
105 		res_pool = dce80_create_resource_pool(
106 			num_virtual_links, dc);
107 		break;
108 	case DCE_VERSION_8_1:
109 		res_pool = dce81_create_resource_pool(
110 			num_virtual_links, dc);
111 		break;
112 	case DCE_VERSION_8_3:
113 		res_pool = dce83_create_resource_pool(
114 			num_virtual_links, dc);
115 		break;
116 	case DCE_VERSION_10_0:
117 		res_pool = dce100_create_resource_pool(
118 				num_virtual_links, dc);
119 		break;
120 	case DCE_VERSION_11_0:
121 		res_pool = dce110_create_resource_pool(
122 			num_virtual_links, dc, asic_id);
123 		break;
124 	case DCE_VERSION_11_2:
125 		res_pool = dce112_create_resource_pool(
126 			num_virtual_links, dc);
127 		break;
128 	case DCE_VERSION_12_0:
129 		res_pool = dce120_create_resource_pool(
130 			num_virtual_links, dc);
131 		break;
132 
133 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
134 	case DCN_VERSION_1_0:
135 		res_pool = dcn10_create_resource_pool(
136 				num_virtual_links, dc);
137 		break;
#endif

	default:
142 		break;
143 	}
144 	if (res_pool != NULL) {
145 		struct dc_firmware_info fw_info = { { 0 } };
146 
		if (dc->ctx->dc_bios->funcs->get_firmware_info(
				dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK)
			res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
		else
			ASSERT_CRITICAL(false);
152 	}
153 
154 	return res_pool;
155 }
156 
157 void dc_destroy_resource_pool(struct dc  *dc)
158 {
159 	if (dc) {
160 		if (dc->res_pool)
161 			dc->res_pool->funcs->destroy(&dc->res_pool);
162 
163 		kfree(dc->hwseq);
164 	}
165 }
166 
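/*
 * Derive HDMI/DP audio support from the DCE pin straps and clamp the usable
 * audio endpoint count when the fuses report single-stream audio.
 */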
167 static void update_num_audio(
168 	const struct resource_straps *straps,
169 	unsigned int *num_audio,
170 	struct audio_support *aud_support)
171 {
172 	aud_support->dp_audio = true;
173 	aud_support->hdmi_audio_native = false;
174 	aud_support->hdmi_audio_on_dongle = false;
175 
176 	if (straps->hdmi_disable == 0) {
177 		if (straps->dc_pinstraps_audio & 0x2) {
178 			aud_support->hdmi_audio_on_dongle = true;
179 			aud_support->hdmi_audio_native = true;
180 		}
181 	}
182 
183 	switch (straps->audio_stream_number) {
184 	case 0: /* multi streams supported */
185 		break;
186 	case 1: /* multi streams not supported */
187 		*num_audio = 1;
188 		break;
189 	default:
190 		DC_ERR("DC: unexpected audio fuse!\n");
191 	}
192 }
193 
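/*
 * Construct the parts of a resource pool that are common to all ASICs:
 * audio endpoints, stream encoders (plus virtual encoders for virtual links)
 * and the hardware sequencer.
 */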
194 bool resource_construct(
195 	unsigned int num_virtual_links,
196 	struct dc  *dc,
197 	struct resource_pool *pool,
198 	const struct resource_create_funcs *create_funcs)
199 {
200 	struct dc_context *ctx = dc->ctx;
201 	const struct resource_caps *caps = pool->res_cap;
202 	int i;
203 	unsigned int num_audio = caps->num_audio;
204 	struct resource_straps straps = {0};
205 
206 	if (create_funcs->read_dce_straps)
207 		create_funcs->read_dce_straps(dc->ctx, &straps);
208 
209 	pool->audio_count = 0;
210 	if (create_funcs->create_audio) {
211 		/* find the total number of streams available via the
212 		 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
213 		 * registers (one for each pin) starting from pin 1
214 		 * up to the max number of audio pins.
215 		 * We stop on the first pin where
216 		 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
217 		 */
218 		update_num_audio(&straps, &num_audio, &pool->audio_support);
219 		for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
220 			struct audio *aud = create_funcs->create_audio(ctx, i);
221 
222 			if (aud == NULL) {
223 				DC_ERR("DC: failed to create audio!\n");
224 				return false;
225 			}
226 
227 			if (!aud->funcs->endpoint_valid(aud)) {
228 				aud->funcs->destroy(&aud);
229 				break;
230 			}
231 
232 			pool->audios[i] = aud;
233 			pool->audio_count++;
234 		}
235 	}
236 
237 	pool->stream_enc_count = 0;
238 	if (create_funcs->create_stream_encoder) {
239 		for (i = 0; i < caps->num_stream_encoder; i++) {
240 			pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
241 			if (pool->stream_enc[i] == NULL)
242 				DC_ERR("DC: failed to create stream_encoder!\n");
243 			pool->stream_enc_count++;
244 		}
245 	}
	dc->caps.dynamic_audio = false;
	if (pool->audio_count < pool->stream_enc_count)
		dc->caps.dynamic_audio = true;
250 	for (i = 0; i < num_virtual_links; i++) {
251 		pool->stream_enc[pool->stream_enc_count] =
252 			virtual_stream_encoder_create(
253 					ctx, ctx->dc_bios);
254 		if (pool->stream_enc[pool->stream_enc_count] == NULL) {
255 			DC_ERR("DC: failed to create stream_encoder!\n");
256 			return false;
257 		}
258 		pool->stream_enc_count++;
259 	}
260 
261 	dc->hwseq = create_funcs->create_hwseq(ctx);
262 
263 	return true;
264 }
265 
266 
267 void resource_unreference_clock_source(
268 		struct resource_context *res_ctx,
269 		const struct resource_pool *pool,
270 		struct clock_source *clock_source)
271 {
272 	int i;
273 
274 	for (i = 0; i < pool->clk_src_count; i++) {
275 		if (pool->clock_sources[i] != clock_source)
276 			continue;
277 
278 		res_ctx->clock_source_ref_count[i]--;
279 
280 		break;
281 	}
282 
283 	if (pool->dp_clock_source == clock_source)
284 		res_ctx->dp_clock_source_ref_count--;
285 }
286 
287 void resource_reference_clock_source(
288 		struct resource_context *res_ctx,
289 		const struct resource_pool *pool,
290 		struct clock_source *clock_source)
291 {
292 	int i;
293 	for (i = 0; i < pool->clk_src_count; i++) {
294 		if (pool->clock_sources[i] != clock_source)
295 			continue;
296 
297 		res_ctx->clock_source_ref_count[i]++;
298 		break;
299 	}
300 
301 	if (pool->dp_clock_source == clock_source)
302 		res_ctx->dp_clock_source_ref_count++;
303 }
304 
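/*
 * Two streams can be timing synchronized when their h/v totals, addressable
 * sizes and pixel clocks match; their phy pixel clocks must also match unless
 * both streams are DP.
 */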
305 bool resource_are_streams_timing_synchronizable(
306 	struct dc_stream_state *stream1,
307 	struct dc_stream_state *stream2)
308 {
309 	if (stream1->timing.h_total != stream2->timing.h_total)
310 		return false;
311 
312 	if (stream1->timing.v_total != stream2->timing.v_total)
313 		return false;
314 
315 	if (stream1->timing.h_addressable
316 				!= stream2->timing.h_addressable)
317 		return false;
318 
319 	if (stream1->timing.v_addressable
320 				!= stream2->timing.v_addressable)
321 		return false;
322 
323 	if (stream1->timing.pix_clk_khz
324 				!= stream2->timing.pix_clk_khz)
325 		return false;
326 
327 	if (stream1->phy_pix_clk != stream2->phy_pix_clk
328 			&& (!dc_is_dp_signal(stream1->signal)
329 			|| !dc_is_dp_signal(stream2->signal)))
330 		return false;
331 
332 	return true;
333 }
334 
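/*
 * A pipe's clock source may be shared only when it drives a non-DP,
 * non-virtual signal, the two signals are not an HDMI/DVI mix and the stream
 * timings are synchronizable.
 */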
335 static bool is_sharable_clk_src(
336 	const struct pipe_ctx *pipe_with_clk_src,
337 	const struct pipe_ctx *pipe)
338 {
339 	if (pipe_with_clk_src->clock_source == NULL)
340 		return false;
341 
342 	if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
343 		return false;
344 
345 	if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
346 		return false;
347 
348 	if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
349 			&& dc_is_dvi_signal(pipe->stream->signal))
350 		return false;
351 
352 	if (dc_is_hdmi_signal(pipe->stream->signal)
353 			&& dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
354 		return false;
355 
356 	if (!resource_are_streams_timing_synchronizable(
357 			pipe_with_clk_src->stream, pipe->stream))
358 		return false;
359 
360 	return true;
361 }
362 
363 struct clock_source *resource_find_used_clk_src_for_sharing(
364 					struct resource_context *res_ctx,
365 					struct pipe_ctx *pipe_ctx)
366 {
367 	int i;
368 
369 	for (i = 0; i < MAX_PIPES; i++) {
370 		if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
371 			return res_ctx->pipe_ctx[i].clock_source;
372 	}
373 
374 	return NULL;
375 }
376 
377 static enum pixel_format convert_pixel_format_to_dalsurface(
378 		enum surface_pixel_format surface_pixel_format)
379 {
380 	enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
381 
382 	switch (surface_pixel_format) {
383 	case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
384 		dal_pixel_format = PIXEL_FORMAT_INDEX8;
385 		break;
386 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
387 		dal_pixel_format = PIXEL_FORMAT_RGB565;
388 		break;
389 	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
390 		dal_pixel_format = PIXEL_FORMAT_RGB565;
391 		break;
392 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
393 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
394 		break;
395 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
396 		dal_pixel_format = PIXEL_FORMAT_ARGB8888;
397 		break;
398 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
399 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
400 		break;
401 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
402 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
403 		break;
404 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
405 		dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
406 		break;
407 	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
408 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
409 		dal_pixel_format = PIXEL_FORMAT_FP16;
410 		break;
411 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
412 	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
413 		dal_pixel_format = PIXEL_FORMAT_420BPP8;
414 		break;
415 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
416 	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
417 		dal_pixel_format = PIXEL_FORMAT_420BPP10;
418 		break;
419 	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
420 	default:
421 		dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
422 		break;
423 	}
424 	return dal_pixel_format;
425 }
426 
427 static void rect_swap_helper(struct rect *rect)
428 {
429 	swap(rect->height, rect->width);
430 	swap(rect->x, rect->y);
431 }
432 
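/*
 * Compute the viewport (the region of the surface read by this pipe) as the
 * intersection of the stream source and surface clip rect mapped back into
 * surface space, including the chroma viewport and split-pipe adjustments.
 */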
433 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
434 {
435 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
436 	const struct dc_stream_state *stream = pipe_ctx->stream;
437 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
438 	struct rect surf_src = plane_state->src_rect;
439 	struct rect clip = { 0 };
440 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
441 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
442 	bool pri_split = pipe_ctx->bottom_pipe &&
443 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
444 	bool sec_split = pipe_ctx->top_pipe &&
445 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
446 
447 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
448 		stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
449 		pri_split = false;
450 		sec_split = false;
451 	}
452 
453 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
454 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
455 		rect_swap_helper(&surf_src);
456 
457 	/* The actual clip is an intersection between stream
458 	 * source and surface clip
459 	 */
460 	clip.x = stream->src.x > plane_state->clip_rect.x ?
461 			stream->src.x : plane_state->clip_rect.x;
462 
463 	clip.width = stream->src.x + stream->src.width <
464 			plane_state->clip_rect.x + plane_state->clip_rect.width ?
465 			stream->src.x + stream->src.width - clip.x :
			plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x;
467 
468 	clip.y = stream->src.y > plane_state->clip_rect.y ?
469 			stream->src.y : plane_state->clip_rect.y;
470 
471 	clip.height = stream->src.y + stream->src.height <
472 			plane_state->clip_rect.y + plane_state->clip_rect.height ?
473 			stream->src.y + stream->src.height - clip.y :
			plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y;
475 
476 	/* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
477 	 * num_pixels = clip.num_pix * scl_ratio
478 	 */
479 	data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
480 			surf_src.width / plane_state->dst_rect.width;
481 	data->viewport.width = clip.width *
482 			surf_src.width / plane_state->dst_rect.width;
483 
484 	data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
485 			surf_src.height / plane_state->dst_rect.height;
486 	data->viewport.height = clip.height *
487 			surf_src.height / plane_state->dst_rect.height;
488 
489 	/* Round down, compensate in init */
490 	data->viewport_c.x = data->viewport.x / vpc_div;
491 	data->viewport_c.y = data->viewport.y / vpc_div;
492 	data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
493 			dal_fixed31_32_half : dal_fixed31_32_zero;
494 	data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
495 			dal_fixed31_32_half : dal_fixed31_32_zero;
496 	/* Round up, assume original video size always even dimensions */
497 	data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
498 	data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
499 
500 	/* Handle hsplit */
501 	if (sec_split) {
		data->viewport.x += data->viewport.width / 2;
		data->viewport_c.x += data->viewport_c.width / 2;
504 		/* Ceil offset pipe */
505 		data->viewport.width = (data->viewport.width + 1) / 2;
506 		data->viewport_c.width = (data->viewport_c.width + 1) / 2;
507 	} else if (pri_split) {
508 		data->viewport.width /= 2;
509 		data->viewport_c.width /= 2;
510 	}
511 
512 	if (plane_state->rotation == ROTATION_ANGLE_90 ||
513 			plane_state->rotation == ROTATION_ANGLE_270) {
514 		rect_swap_helper(&data->viewport_c);
515 		rect_swap_helper(&data->viewport);
516 	}
517 }
518 
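/*
 * Compute the recout (the region of the stream destination covered by the
 * clipped surface) together with recout_skip, the offset of this pipe's
 * recout from the full unclipped recout, which the scaler init calculation
 * needs later.
 */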
519 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
520 {
521 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
522 	const struct dc_stream_state *stream = pipe_ctx->stream;
523 	struct rect surf_src = plane_state->src_rect;
524 	struct rect surf_clip = plane_state->clip_rect;
525 	int recout_full_x, recout_full_y;
526 	bool pri_split = pipe_ctx->bottom_pipe &&
527 			pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
528 	bool sec_split = pipe_ctx->top_pipe &&
529 			pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
530 	bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
531 
532 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
533 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
534 		rect_swap_helper(&surf_src);
535 
536 	pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
537 	if (stream->src.x < surf_clip.x)
538 		pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
539 			- stream->src.x) * stream->dst.width
540 						/ stream->src.width;
541 
542 	pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
543 			stream->dst.width / stream->src.width;
544 	if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
545 			stream->dst.x + stream->dst.width)
546 		pipe_ctx->plane_res.scl_data.recout.width =
547 			stream->dst.x + stream->dst.width
548 						- pipe_ctx->plane_res.scl_data.recout.x;
549 
550 	pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
551 	if (stream->src.y < surf_clip.y)
552 		pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
553 			- stream->src.y) * stream->dst.height
554 						/ stream->src.height;
555 
556 	pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
557 			stream->dst.height / stream->src.height;
558 	if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
559 			stream->dst.y + stream->dst.height)
560 		pipe_ctx->plane_res.scl_data.recout.height =
561 			stream->dst.y + stream->dst.height
562 						- pipe_ctx->plane_res.scl_data.recout.y;
563 
564 	/* Handle h & vsplit */
565 	if (sec_split && top_bottom_split) {
566 		pipe_ctx->plane_res.scl_data.recout.y +=
567 				pipe_ctx->plane_res.scl_data.recout.height / 2;
568 		/* Floor primary pipe, ceil 2ndary pipe */
569 		pipe_ctx->plane_res.scl_data.recout.height =
570 				(pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
571 	} else if (pri_split && top_bottom_split)
572 		pipe_ctx->plane_res.scl_data.recout.height /= 2;
573 	else if (pri_split || sec_split) {
574 		/* HMirror XOR Secondary_pipe XOR Rotation_180 */
575 		bool right_view = (sec_split != plane_state->horizontal_mirror) !=
576 					(plane_state->rotation == ROTATION_ANGLE_180);
577 
578 		if (plane_state->rotation == ROTATION_ANGLE_90
579 				|| plane_state->rotation == ROTATION_ANGLE_270)
580 			/* Secondary_pipe XOR Rotation_270 */
581 			right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
582 
583 		if (right_view) {
584 			pipe_ctx->plane_res.scl_data.recout.x +=
585 					pipe_ctx->plane_res.scl_data.recout.width / 2;
586 			/* Ceil offset pipe */
587 			pipe_ctx->plane_res.scl_data.recout.width =
588 					(pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
589 		} else {
590 			pipe_ctx->plane_res.scl_data.recout.width /= 2;
591 		}
592 	}
	/* Unclipped recout offset = stream dst offset +
	 *	(surf dst offset - stream src offset) / stream scaling ratio -
	 *	surf src offset / full scaling ratio
	 */
597 	recout_full_x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
598 					* stream->dst.width / stream->src.width -
599 			surf_src.x * plane_state->dst_rect.width / surf_src.width
600 					* stream->dst.width / stream->src.width;
601 	recout_full_y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
602 					* stream->dst.height / stream->src.height -
603 			surf_src.y * plane_state->dst_rect.height / surf_src.height
604 					* stream->dst.height / stream->src.height;
605 
606 	recout_skip->width = pipe_ctx->plane_res.scl_data.recout.x - recout_full_x;
607 	recout_skip->height = pipe_ctx->plane_res.scl_data.recout.y - recout_full_y;
608 }
609 
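/*
 * Compute the scaling ratios as source size over destination size, combining
 * surface-to-destination and stream source-to-destination scaling; chroma
 * ratios are halved for 4:2:0 formats.
 */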
610 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
611 {
612 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
613 	const struct dc_stream_state *stream = pipe_ctx->stream;
614 	struct rect surf_src = plane_state->src_rect;
615 	const int in_w = stream->src.width;
616 	const int in_h = stream->src.height;
617 	const int out_w = stream->dst.width;
618 	const int out_h = stream->dst.height;
619 
620 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
621 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
622 		rect_swap_helper(&surf_src);
623 
624 	pipe_ctx->plane_res.scl_data.ratios.horz = dal_fixed31_32_from_fraction(
625 					surf_src.width,
626 					plane_state->dst_rect.width);
627 	pipe_ctx->plane_res.scl_data.ratios.vert = dal_fixed31_32_from_fraction(
628 					surf_src.height,
629 					plane_state->dst_rect.height);
630 
631 	if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
632 		pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
633 	else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
634 		pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
635 
636 	pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
637 		pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
638 	pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
639 		pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
640 
641 	pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
642 	pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
643 
644 	if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
645 			|| pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
646 		pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
647 		pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
648 	}
649 }
650 
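/*
 * Compute the scaler init phases (see the formula in the comment below) and
 * grow or shift the viewport so enough pixels are fetched for the filter
 * taps, taking rotation, mirroring and split pipes into account.
 */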
651 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
652 {
653 	struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
654 	struct rect src = pipe_ctx->plane_state->src_rect;
655 	int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
656 			|| data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
657 	bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
658 
659 	/*
660 	 * Need to calculate the scan direction for viewport to make adjustments
661 	 */
662 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
663 		flip_vert_scan_dir = true;
664 		flip_horz_scan_dir = true;
665 	} else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
666 		flip_vert_scan_dir = true;
667 	else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
668 		flip_horz_scan_dir = true;
669 	if (pipe_ctx->plane_state->horizontal_mirror)
670 		flip_horz_scan_dir = !flip_horz_scan_dir;
671 
672 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
673 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
674 		rect_swap_helper(&src);
675 		rect_swap_helper(&data->viewport_c);
676 		rect_swap_helper(&data->viewport);
677 	}
678 
679 	/*
680 	 * Init calculated according to formula:
681 	 * 	init = (scaling_ratio + number_of_taps + 1) / 2
682 	 * 	init_bot = init + scaling_ratio
683 	 * 	init_c = init + truncated_vp_c_offset(from calculate viewport)
684 	 */
685 	data->inits.h = dal_fixed31_32_div_int(
686 			dal_fixed31_32_add_int(data->ratios.horz, data->taps.h_taps + 1), 2);
687 
688 	data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_div_int(
689 			dal_fixed31_32_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2));
690 
691 	data->inits.v = dal_fixed31_32_div_int(
692 			dal_fixed31_32_add_int(data->ratios.vert, data->taps.v_taps + 1), 2);
693 
694 	data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_div_int(
695 			dal_fixed31_32_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2));
696 
697 
698 	/* Adjust for viewport end clip-off */
699 	if ((data->viewport.x + data->viewport.width) < (src.x + src.width)) {
700 		int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
701 		int int_part = dal_fixed31_32_floor(
702 				dal_fixed31_32_sub(data->inits.h, data->ratios.horz));
703 
704 		int_part = int_part > 0 ? int_part : 0;
705 		data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
706 	}
707 	if ((data->viewport.y + data->viewport.height) < (src.y + src.height)) {
708 		int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
709 		int int_part = dal_fixed31_32_floor(
710 				dal_fixed31_32_sub(data->inits.v, data->ratios.vert));
711 
712 		int_part = int_part > 0 ? int_part : 0;
713 		data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
714 	}
715 	if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div) {
716 		int vp_clip = (src.x + src.width) / vpc_div -
717 				data->viewport_c.width - data->viewport_c.x;
718 		int int_part = dal_fixed31_32_floor(
719 				dal_fixed31_32_sub(data->inits.h_c, data->ratios.horz_c));
720 
721 		int_part = int_part > 0 ? int_part : 0;
722 		data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
723 	}
724 	if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div) {
725 		int vp_clip = (src.y + src.height) / vpc_div -
726 				data->viewport_c.height - data->viewport_c.y;
727 		int int_part = dal_fixed31_32_floor(
728 				dal_fixed31_32_sub(data->inits.v_c, data->ratios.vert_c));
729 
730 		int_part = int_part > 0 ? int_part : 0;
731 		data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
732 	}
733 
734 	/* Adjust for non-0 viewport offset */
735 	if (data->viewport.x && !flip_horz_scan_dir) {
736 		int int_part;
737 
738 		data->inits.h = dal_fixed31_32_add(data->inits.h, dal_fixed31_32_mul_int(
739 				data->ratios.horz, recout_skip->width));
740 		int_part = dal_fixed31_32_floor(data->inits.h) - data->viewport.x;
741 		if (int_part < data->taps.h_taps) {
742 			int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
743 						(data->taps.h_taps - int_part) : data->viewport.x;
744 			data->viewport.x -= int_adj;
745 			data->viewport.width += int_adj;
746 			int_part += int_adj;
747 		} else if (int_part > data->taps.h_taps) {
748 			data->viewport.x += int_part - data->taps.h_taps;
749 			data->viewport.width -= int_part - data->taps.h_taps;
750 			int_part = data->taps.h_taps;
751 		}
752 		data->inits.h.value &= 0xffffffff;
753 		data->inits.h = dal_fixed31_32_add_int(data->inits.h, int_part);
754 	}
755 
756 	if (data->viewport_c.x && !flip_horz_scan_dir) {
757 		int int_part;
758 
759 		data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_mul_int(
760 				data->ratios.horz_c, recout_skip->width));
761 		int_part = dal_fixed31_32_floor(data->inits.h_c) - data->viewport_c.x;
762 		if (int_part < data->taps.h_taps_c) {
763 			int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
764 					(data->taps.h_taps_c - int_part) : data->viewport_c.x;
765 			data->viewport_c.x -= int_adj;
766 			data->viewport_c.width += int_adj;
767 			int_part += int_adj;
768 		} else if (int_part > data->taps.h_taps_c) {
769 			data->viewport_c.x += int_part - data->taps.h_taps_c;
770 			data->viewport_c.width -= int_part - data->taps.h_taps_c;
771 			int_part = data->taps.h_taps_c;
772 		}
773 		data->inits.h_c.value &= 0xffffffff;
774 		data->inits.h_c = dal_fixed31_32_add_int(data->inits.h_c, int_part);
775 	}
776 
777 	if (data->viewport.y && !flip_vert_scan_dir) {
778 		int int_part;
779 
780 		data->inits.v = dal_fixed31_32_add(data->inits.v, dal_fixed31_32_mul_int(
781 				data->ratios.vert, recout_skip->height));
782 		int_part = dal_fixed31_32_floor(data->inits.v) - data->viewport.y;
783 		if (int_part < data->taps.v_taps) {
784 			int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
785 						(data->taps.v_taps - int_part) : data->viewport.y;
786 			data->viewport.y -= int_adj;
787 			data->viewport.height += int_adj;
788 			int_part += int_adj;
789 		} else if (int_part > data->taps.v_taps) {
790 			data->viewport.y += int_part - data->taps.v_taps;
791 			data->viewport.height -= int_part - data->taps.v_taps;
792 			int_part = data->taps.v_taps;
793 		}
794 		data->inits.v.value &= 0xffffffff;
795 		data->inits.v = dal_fixed31_32_add_int(data->inits.v, int_part);
796 	}
797 
798 	if (data->viewport_c.y && !flip_vert_scan_dir) {
799 		int int_part;
800 
801 		data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_mul_int(
802 				data->ratios.vert_c, recout_skip->height));
803 		int_part = dal_fixed31_32_floor(data->inits.v_c) - data->viewport_c.y;
804 		if (int_part < data->taps.v_taps_c) {
805 			int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
806 					(data->taps.v_taps_c - int_part) : data->viewport_c.y;
807 			data->viewport_c.y -= int_adj;
808 			data->viewport_c.height += int_adj;
809 			int_part += int_adj;
810 		} else if (int_part > data->taps.v_taps_c) {
811 			data->viewport_c.y += int_part - data->taps.v_taps_c;
812 			data->viewport_c.height -= int_part - data->taps.v_taps_c;
813 			int_part = data->taps.v_taps_c;
814 		}
815 		data->inits.v_c.value &= 0xffffffff;
816 		data->inits.v_c = dal_fixed31_32_add_int(data->inits.v_c, int_part);
817 	}
818 
819 	/* Interlaced inits based on final vert inits */
820 	data->inits.v_bot = dal_fixed31_32_add(data->inits.v, data->ratios.vert);
821 	data->inits.v_c_bot = dal_fixed31_32_add(data->inits.v_c, data->ratios.vert_c);
822 
823 	if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
824 			pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
825 		rect_swap_helper(&data->viewport_c);
826 		rect_swap_helper(&data->viewport);
827 	}
828 }
829 
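/*
 * Build all scaler programming for a pipe: pixel format, scaling ratios,
 * viewport, recout, line buffer depth, tap counts and init phases.
 * Returns false if the viewport is too small or no valid tap configuration
 * can be found.
 */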
830 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
831 {
832 	const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
833 	struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
834 	struct view recout_skip = { 0 };
835 	bool res = false;
836 
	/* Important: the calculations below have ordering dependencies.
	 * Scaling ratio calculation requires the pixel format,
	 * lb depth calculation requires recout, and taps require scaling ratios.
	 * Inits require viewport, taps, ratios and recout of the split pipe.
	 */
841 	pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
842 			pipe_ctx->plane_state->format);
843 
844 	calculate_scaling_ratios(pipe_ctx);
845 
846 	calculate_viewport(pipe_ctx);
847 
848 	if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
849 		return false;
850 
851 	calculate_recout(pipe_ctx, &recout_skip);
852 
853 	/**
854 	 * Setting line buffer pixel depth to 24bpp yields banding
855 	 * on certain displays, such as the Sharp 4k
856 	 */
857 	pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
858 
859 	pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
860 	pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
861 
862 	pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
863 	pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
864 
865 
866 	/* Taps calculations */
867 	if (pipe_ctx->plane_res.xfm != NULL)
868 		res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
869 				pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
870 
871 	if (pipe_ctx->plane_res.dpp != NULL)
872 		res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
873 				pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
874 	if (!res) {
875 		/* Try 24 bpp linebuffer */
876 		pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
877 
878 		if (pipe_ctx->plane_res.xfm != NULL)
879 			res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
880 					pipe_ctx->plane_res.xfm,
881 					&pipe_ctx->plane_res.scl_data,
882 					&plane_state->scaling_quality);
883 
884 		if (pipe_ctx->plane_res.dpp != NULL)
885 			res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
886 					pipe_ctx->plane_res.dpp,
887 					&pipe_ctx->plane_res.scl_data,
888 					&plane_state->scaling_quality);
889 	}
890 
891 	if (res)
892 		/* May need to re-check lb size after this in some obscure scenario */
893 		calculate_inits_and_adj_vp(pipe_ctx, &recout_skip);
894 
895 	dm_logger_write(pipe_ctx->stream->ctx->logger, LOG_SCALER,
896 				"%s: Viewport:\nheight:%d width:%d x:%d "
897 				"y:%d\n dst_rect:\nheight:%d width:%d x:%d "
898 				"y:%d\n",
899 				__func__,
900 				pipe_ctx->plane_res.scl_data.viewport.height,
901 				pipe_ctx->plane_res.scl_data.viewport.width,
902 				pipe_ctx->plane_res.scl_data.viewport.x,
903 				pipe_ctx->plane_res.scl_data.viewport.y,
904 				plane_state->dst_rect.height,
905 				plane_state->dst_rect.width,
906 				plane_state->dst_rect.x,
907 				plane_state->dst_rect.y);
908 
909 	return res;
910 }
911 
912 
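/*
 * Build scaling parameters for every pipe in the context that has both a
 * plane and a stream attached; return DC_FAIL_SCALING if any of them cannot
 * be scaled.
 */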
913 enum dc_status resource_build_scaling_params_for_context(
914 	const struct dc  *dc,
915 	struct dc_state *context)
916 {
917 	int i;
918 
919 	for (i = 0; i < MAX_PIPES; i++) {
920 		if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
921 				context->res_ctx.pipe_ctx[i].stream != NULL)
922 			if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
923 				return DC_FAIL_SCALING;
924 	}
925 
926 	return DC_OK;
927 }
928 
929 struct pipe_ctx *find_idle_secondary_pipe(
930 		struct resource_context *res_ctx,
931 		const struct resource_pool *pool)
932 {
933 	int i;
934 	struct pipe_ctx *secondary_pipe = NULL;
935 
936 	/*
937 	 * search backwards for the second pipe to keep pipe
938 	 * assignment more consistent
939 	 */
940 
941 	for (i = pool->pipe_count - 1; i >= 0; i--) {
942 		if (res_ctx->pipe_ctx[i].stream == NULL) {
943 			secondary_pipe = &res_ctx->pipe_ctx[i];
944 			secondary_pipe->pipe_idx = i;
945 			break;
946 		}
947 	}
948 
949 
950 	return secondary_pipe;
951 }
952 
953 struct pipe_ctx *resource_get_head_pipe_for_stream(
954 		struct resource_context *res_ctx,
955 		struct dc_stream_state *stream)
956 {
	int i;

	for (i = 0; i < MAX_PIPES; i++) {
		if (res_ctx->pipe_ctx[i].stream == stream &&
				!res_ctx->pipe_ctx[i].top_pipe)
			return &res_ctx->pipe_ctx[i];
	}
965 	return NULL;
966 }
967 
968 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
969 		struct resource_context *res_ctx,
970 		struct dc_stream_state *stream)
971 {
	struct pipe_ctx *head_pipe, *tail_pipe;

	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);

	if (!head_pipe)
		return NULL;

	/* Walk down the bottom_pipe chain; the last pipe in it is the tail. */
	tail_pipe = head_pipe;
	while (tail_pipe->bottom_pipe)
		tail_pipe = tail_pipe->bottom_pipe;

	return tail_pipe;
986 }
987 
988 /*
989  * A free_pipe for a stream is defined here as a pipe
990  * that has no surface attached yet
991  */
992 static struct pipe_ctx *acquire_free_pipe_for_stream(
993 		struct dc_state *context,
994 		const struct resource_pool *pool,
995 		struct dc_stream_state *stream)
996 {
997 	int i;
998 	struct resource_context *res_ctx = &context->res_ctx;
999 
1000 	struct pipe_ctx *head_pipe = NULL;
1001 
	/* Find the head pipe, which has the back end set up */
1003 
1004 	head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1005 
1006 	if (!head_pipe) {
1007 		ASSERT(0);
1008 		return NULL;
1009 	}
1010 
1011 	if (!head_pipe->plane_state)
1012 		return head_pipe;
1013 
	/* Re-use a pipe already acquired for this stream, if available */
1015 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1016 		if (res_ctx->pipe_ctx[i].stream == stream &&
1017 				!res_ctx->pipe_ctx[i].plane_state) {
1018 			return &res_ctx->pipe_ctx[i];
1019 		}
1020 	}
1021 
1022 	/*
1023 	 * At this point we have no re-useable pipe for this stream and we need
1024 	 * to acquire an idle one to satisfy the request
1025 	 */
1026 
1027 	if (!pool->funcs->acquire_idle_pipe_for_layer)
1028 		return NULL;
1029 
1030 	return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
1031 
1032 }
1033 
1034 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
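/*
 * Take over the secondary pipe of an existing hsplit pair: unlink it from the
 * split chain and re-initialize it as a standalone pipe for the given stream.
 */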
1035 static int acquire_first_split_pipe(
1036 		struct resource_context *res_ctx,
1037 		const struct resource_pool *pool,
1038 		struct dc_stream_state *stream)
1039 {
1040 	int i;
1041 
1042 	for (i = 0; i < pool->pipe_count; i++) {
1043 		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1044 
1045 		if (pipe_ctx->top_pipe &&
1046 				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
1047 			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1048 			if (pipe_ctx->bottom_pipe)
1049 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1050 
1051 			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1052 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1053 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1054 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1055 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1056 			pipe_ctx->stream_res.opp = pool->opps[i];
1057 			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1058 			pipe_ctx->pipe_idx = i;
1059 
1060 			pipe_ctx->stream = stream;
1061 			return i;
1062 		}
1063 	}
1064 	return -1;
1065 }
1066 #endif
1067 
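/*
 * Attach a plane to a stream within the given state: acquire a free (or, on
 * DCN, a split) pipe, retain the plane and link the new pipe to the tail of
 * the stream's pipe chain.
 */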
1068 bool dc_add_plane_to_context(
1069 		const struct dc *dc,
1070 		struct dc_stream_state *stream,
1071 		struct dc_plane_state *plane_state,
1072 		struct dc_state *context)
1073 {
1074 	int i;
1075 	struct resource_pool *pool = dc->res_pool;
1076 	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1077 	struct dc_stream_status *stream_status = NULL;
1078 
1079 	for (i = 0; i < context->stream_count; i++)
1080 		if (context->streams[i] == stream) {
1081 			stream_status = &context->stream_status[i];
1082 			break;
1083 		}
1084 	if (stream_status == NULL) {
1085 		dm_error("Existing stream not found; failed to attach surface!\n");
1086 		return false;
1087 	}
1088 
1089 
1090 	if (stream_status->plane_count == MAX_SURFACE_NUM) {
		dm_error("Surface: cannot attach plane_state %p! Maximum is: %d\n",
1092 				plane_state, MAX_SURFACE_NUM);
1093 		return false;
1094 	}
1095 
1096 	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1097 
1098 	if (!head_pipe) {
1099 		dm_error("Head pipe not found for stream_state %p !\n", stream);
1100 		return false;
1101 	}
1102 
1103 	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1104 
1105 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1106 	if (!free_pipe) {
1107 		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1108 		if (pipe_idx >= 0)
1109 			free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1110 	}
1111 #endif
1112 	if (!free_pipe)
1113 		return false;
1114 
1115 	/* retain new surfaces */
1116 	dc_plane_state_retain(plane_state);
1117 	free_pipe->plane_state = plane_state;
1118 
1119 	if (head_pipe != free_pipe) {
1120 
1121 		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1122 		ASSERT(tail_pipe);
1123 
1124 		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1125 		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1126 		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1127 		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1128 		free_pipe->clock_source = tail_pipe->clock_source;
1129 		free_pipe->top_pipe = tail_pipe;
1130 		tail_pipe->bottom_pipe = free_pipe;
1131 	}
1132 
1133 	/* assign new surfaces*/
1134 	stream_status->plane_states[stream_status->plane_count] = plane_state;
1135 
1136 	stream_status->plane_count++;
1137 
1138 	return true;
1139 }
1140 
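/*
 * Detach a plane from a stream within the given state: release the pipes that
 * carried it, drop the plane reference and trim the stream's plane_states
 * array.
 */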
1141 bool dc_remove_plane_from_context(
1142 		const struct dc *dc,
1143 		struct dc_stream_state *stream,
1144 		struct dc_plane_state *plane_state,
1145 		struct dc_state *context)
1146 {
1147 	int i;
1148 	struct dc_stream_status *stream_status = NULL;
1149 	struct resource_pool *pool = dc->res_pool;
1150 
1151 	for (i = 0; i < context->stream_count; i++)
1152 		if (context->streams[i] == stream) {
1153 			stream_status = &context->stream_status[i];
1154 			break;
1155 		}
1156 
1157 	if (stream_status == NULL) {
1158 		dm_error("Existing stream not found; failed to remove plane.\n");
1159 		return false;
1160 	}
1161 
1162 	/* release pipe for plane*/
1163 	for (i = pool->pipe_count - 1; i >= 0; i--) {
1164 		struct pipe_ctx *pipe_ctx;
1165 
1166 		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
1167 			pipe_ctx = &context->res_ctx.pipe_ctx[i];
1168 
1169 			if (pipe_ctx->top_pipe)
1170 				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1171 
			/* The second condition avoids setting a NULL top pipe on
			 * the tail pipe, which would make it look like a head pipe
			 * in subsequent deletes.
			 */
1176 			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1177 				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1178 
1179 			/*
1180 			 * For head pipe detach surfaces from pipe for tail
1181 			 * pipe just zero it out
1182 			 */
1183 			if (!pipe_ctx->top_pipe) {
1184 				pipe_ctx->plane_state = NULL;
1185 				pipe_ctx->bottom_pipe = NULL;
			} else {
1187 				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1188 			}
1189 		}
1190 	}
1191 
1192 
1193 	for (i = 0; i < stream_status->plane_count; i++) {
1194 		if (stream_status->plane_states[i] == plane_state) {
1195 
1196 			dc_plane_state_release(stream_status->plane_states[i]);
1197 			break;
1198 		}
1199 	}
1200 
1201 	if (i == stream_status->plane_count) {
1202 		dm_error("Existing plane_state not found; failed to detach it!\n");
1203 		return false;
1204 	}
1205 
1206 	stream_status->plane_count--;
1207 
1208 	/* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1209 	for (; i < stream_status->plane_count; i++)
1210 		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1211 
1212 	stream_status->plane_states[stream_status->plane_count] = NULL;
1213 
1214 	return true;
1215 }
1216 
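/*
 * Detach every plane currently attached to the stream in the given state.
 */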
1217 bool dc_rem_all_planes_for_stream(
1218 		const struct dc *dc,
1219 		struct dc_stream_state *stream,
1220 		struct dc_state *context)
1221 {
1222 	int i, old_plane_count;
1223 	struct dc_stream_status *stream_status = NULL;
1224 	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1225 
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
			break;
		}
1231 
1232 	if (stream_status == NULL) {
1233 		dm_error("Existing stream %p not found!\n", stream);
1234 		return false;
1235 	}
1236 
1237 	old_plane_count = stream_status->plane_count;
1238 
1239 	for (i = 0; i < old_plane_count; i++)
1240 		del_planes[i] = stream_status->plane_states[i];
1241 
1242 	for (i = 0; i < old_plane_count; i++)
1243 		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1244 			return false;
1245 
1246 	return true;
1247 }
1248 
1249 static bool add_all_planes_for_stream(
1250 		const struct dc *dc,
1251 		struct dc_stream_state *stream,
1252 		const struct dc_validation_set set[],
1253 		int set_count,
1254 		struct dc_state *context)
1255 {
1256 	int i, j;
1257 
1258 	for (i = 0; i < set_count; i++)
1259 		if (set[i].stream == stream)
1260 			break;
1261 
1262 	if (i == set_count) {
1263 		dm_error("Stream %p not found in set!\n", stream);
1264 		return false;
1265 	}
1266 
1267 	for (j = 0; j < set[i].plane_count; j++)
1268 		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1269 			return false;
1270 
1271 	return true;
1272 }
1273 
1274 bool dc_add_all_planes_for_stream(
1275 		const struct dc *dc,
1276 		struct dc_stream_state *stream,
1277 		struct dc_plane_state * const *plane_states,
1278 		int plane_count,
1279 		struct dc_state *context)
1280 {
1281 	struct dc_validation_set set;
1282 	int i;
1283 
1284 	set.stream = stream;
1285 	set.plane_count = plane_count;
1286 
1287 	for (i = 0; i < plane_count; i++)
1288 		set.plane_states[i] = plane_states[i];
1289 
1290 	return add_all_planes_for_stream(dc, stream, &set, 1, context);
1291 }
1292 
1293 
1294 
1295 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1296 		struct dc_stream_state *new_stream)
1297 {
1298 	if (cur_stream == NULL)
1299 		return true;
1300 
	/* If the sink pointer changed, this is a hotplug and we should do a
	 * full HW setting.
	 */
1304 	if (cur_stream->sink != new_stream->sink)
1305 		return true;
1306 
1307 	/* If output color space is changed, need to reprogram info frames */
1308 	if (cur_stream->output_color_space != new_stream->output_color_space)
1309 		return true;
1310 
1311 	return memcmp(
1312 		&cur_stream->timing,
1313 		&new_stream->timing,
1314 		sizeof(struct dc_crtc_timing)) != 0;
1315 }
1316 
1317 static bool are_stream_backends_same(
1318 	struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1319 {
1320 	if (stream_a == stream_b)
1321 		return true;
1322 
1323 	if (stream_a == NULL || stream_b == NULL)
1324 		return false;
1325 
1326 	if (is_timing_changed(stream_a, stream_b))
1327 		return false;
1328 
1329 	return true;
1330 }
1331 
1332 bool dc_is_stream_unchanged(
1333 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1334 {
1335 
1336 	if (!are_stream_backends_same(old_stream, stream))
1337 		return false;
1338 
1339 	return true;
1340 }
1341 
1342 bool dc_is_stream_scaling_unchanged(
1343 	struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1344 {
1345 	if (old_stream == stream)
1346 		return true;
1347 
1348 	if (old_stream == NULL || stream == NULL)
1349 		return false;
1350 
1351 	if (memcmp(&old_stream->src,
1352 			&stream->src,
1353 			sizeof(struct rect)) != 0)
1354 		return false;
1355 
1356 	if (memcmp(&old_stream->dst,
1357 			&stream->dst,
1358 			sizeof(struct rect)) != 0)
1359 		return false;
1360 
1361 	return true;
1362 }
1363 
1364 static void update_stream_engine_usage(
1365 		struct resource_context *res_ctx,
1366 		const struct resource_pool *pool,
1367 		struct stream_encoder *stream_enc,
1368 		bool acquired)
1369 {
1370 	int i;
1371 
1372 	for (i = 0; i < pool->stream_enc_count; i++) {
1373 		if (pool->stream_enc[i] == stream_enc)
1374 			res_ctx->is_stream_enc_acquired[i] = acquired;
1375 	}
1376 }
1377 
1378 /* TODO: release audio object */
1379 void update_audio_usage(
1380 		struct resource_context *res_ctx,
1381 		const struct resource_pool *pool,
1382 		struct audio *audio,
1383 		bool acquired)
1384 {
1385 	int i;
1386 	for (i = 0; i < pool->audio_count; i++) {
1387 		if (pool->audios[i] == audio)
1388 			res_ctx->is_audio_acquired[i] = acquired;
1389 	}
1390 }
1391 
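/*
 * Claim the first pipe without a stream and seed it with the fixed per-index
 * resources (timing generator, plane and output resources) from the pool.
 */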
1392 static int acquire_first_free_pipe(
1393 		struct resource_context *res_ctx,
1394 		const struct resource_pool *pool,
1395 		struct dc_stream_state *stream)
1396 {
1397 	int i;
1398 
1399 	for (i = 0; i < pool->pipe_count; i++) {
1400 		if (!res_ctx->pipe_ctx[i].stream) {
1401 			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1402 
1403 			pipe_ctx->stream_res.tg = pool->timing_generators[i];
1404 			pipe_ctx->plane_res.mi = pool->mis[i];
1405 			pipe_ctx->plane_res.hubp = pool->hubps[i];
1406 			pipe_ctx->plane_res.ipp = pool->ipps[i];
1407 			pipe_ctx->plane_res.xfm = pool->transforms[i];
1408 			pipe_ctx->plane_res.dpp = pool->dpps[i];
1409 			pipe_ctx->stream_res.opp = pool->opps[i];
1410 			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1411 			pipe_ctx->pipe_idx = i;
1412 
1413 
1414 			pipe_ctx->stream = stream;
1415 			return i;
1416 		}
1417 	}
1418 	return -1;
1419 }
1420 
1421 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1422 		struct resource_context *res_ctx,
1423 		const struct resource_pool *pool,
1424 		struct dc_stream_state *stream)
1425 {
1426 	int i;
1427 	int j = -1;
1428 	struct dc_link *link = stream->sink->link;
1429 
1430 	for (i = 0; i < pool->stream_enc_count; i++) {
1431 		if (!res_ctx->is_stream_enc_acquired[i] &&
1432 				pool->stream_enc[i]) {
			/* Store the first available encoder for the second MST
			 * display in a daisy-chain use case.
			 */
1435 			j = i;
1436 			if (pool->stream_enc[i]->id ==
1437 					link->link_enc->preferred_engine)
1438 				return pool->stream_enc[i];
1439 		}
1440 	}
1441 
1442 	/*
1443 	 * below can happen in cases when stream encoder is acquired:
1444 	 * 1) for second MST display in chain, so preferred engine already
1445 	 * acquired;
1446 	 * 2) for another link, which preferred engine already acquired by any
1447 	 * MST configuration.
1448 	 *
1449 	 * If signal is of DP type and preferred engine not found, return last available
1450 	 *
1451 	 * TODO - This is just a patch up and a generic solution is
1452 	 * required for non DP connectors.
1453 	 */
1454 
1455 	if (j >= 0 && dc_is_dp_signal(stream->signal))
1456 		return pool->stream_enc[j];
1457 
1458 	return NULL;
1459 }
1460 
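/*
 * Prefer the audio endpoint whose index matches the stream encoder engine id;
 * otherwise hand out the first free endpoint.
 */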
1461 static struct audio *find_first_free_audio(
1462 		struct resource_context *res_ctx,
1463 		const struct resource_pool *pool,
1464 		enum engine_id id)
1465 {
	int i;

	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i] &&
				res_ctx->is_stream_enc_acquired[i]) {
			/* We have enough audio endpoints; pick the one whose
			 * instance matches the stream encoder engine id.
			 */
			if (id != i)
				continue;

			return pool->audios[i];
		}
	}

	/* No matching instance found; fall back to first come, first served. */
	for (i = 0; i < pool->audio_count; i++) {
		if (!res_ctx->is_audio_acquired[i])
			return pool->audios[i];
	}

	return NULL;
1483 }
1484 
1485 bool resource_is_stream_unchanged(
1486 	struct dc_state *old_context, struct dc_stream_state *stream)
1487 {
1488 	int i;
1489 
1490 	for (i = 0; i < old_context->stream_count; i++) {
1491 		struct dc_stream_state *old_stream = old_context->streams[i];
1492 
1493 		if (are_stream_backends_same(old_stream, stream))
1494 				return true;
1495 	}
1496 
1497 	return false;
1498 }
1499 
1500 enum dc_status dc_add_stream_to_ctx(
1501 		struct dc *dc,
1502 		struct dc_state *new_ctx,
1503 		struct dc_stream_state *stream)
1504 {
1505 	struct dc_context *dc_ctx = dc->ctx;
1506 	enum dc_status res;
1507 
1508 	if (new_ctx->stream_count >= dc->res_pool->pipe_count) {
		DC_ERROR("Max streams reached, cannot add stream %p !\n", stream);
1510 		return DC_ERROR_UNEXPECTED;
1511 	}
1512 
1513 	new_ctx->streams[new_ctx->stream_count] = stream;
1514 	dc_stream_retain(stream);
1515 	new_ctx->stream_count++;
1516 
1517 	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1518 	if (res != DC_OK)
1519 		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
1520 
1521 	return res;
1522 }
1523 
1524 enum dc_status dc_remove_stream_from_ctx(
1525 			struct dc *dc,
1526 			struct dc_state *new_ctx,
1527 			struct dc_stream_state *stream)
1528 {
1529 	int i;
1530 	struct dc_context *dc_ctx = dc->ctx;
1531 	struct pipe_ctx *del_pipe = NULL;
1532 
1533 	/* Release primary pipe */
1534 	for (i = 0; i < MAX_PIPES; i++) {
1535 		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1536 				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1537 			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1538 
1539 			ASSERT(del_pipe->stream_res.stream_enc);
			update_stream_engine_usage(
					&new_ctx->res_ctx,
					dc->res_pool,
					del_pipe->stream_res.stream_enc,
					false);
1545 
1546 			if (del_pipe->stream_res.audio)
1547 				update_audio_usage(
1548 					&new_ctx->res_ctx,
1549 					dc->res_pool,
1550 					del_pipe->stream_res.audio,
1551 					false);
1552 
1553 			resource_unreference_clock_source(&new_ctx->res_ctx,
1554 							  dc->res_pool,
1555 							  del_pipe->clock_source);
1556 
1557 			memset(del_pipe, 0, sizeof(*del_pipe));
1558 
1559 			break;
1560 		}
1561 	}
1562 
1563 	if (!del_pipe) {
1564 		DC_ERROR("Pipe not found for stream %p !\n", stream);
1565 		return DC_ERROR_UNEXPECTED;
1566 	}
1567 
1568 	for (i = 0; i < new_ctx->stream_count; i++)
1569 		if (new_ctx->streams[i] == stream)
1570 			break;
1571 
	if (i == new_ctx->stream_count) {
1573 		DC_ERROR("Context doesn't have stream %p !\n", stream);
1574 		return DC_ERROR_UNEXPECTED;
1575 	}
1576 
1577 	dc_stream_release(new_ctx->streams[i]);
1578 	new_ctx->stream_count--;
1579 
1580 	/* Trim back arrays */
1581 	for (; i < new_ctx->stream_count; i++) {
1582 		new_ctx->streams[i] = new_ctx->streams[i + 1];
1583 		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1584 	}
1585 
1586 	new_ctx->streams[new_ctx->stream_count] = NULL;
1587 	memset(
1588 			&new_ctx->stream_status[new_ctx->stream_count],
1589 			0,
1590 			sizeof(new_ctx->stream_status[0]));
1591 
1592 	return DC_OK;
1593 }
1594 
1595 static void copy_pipe_ctx(
1596 	const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
1597 {
1598 	struct dc_plane_state *plane_state = to_pipe_ctx->plane_state;
1599 	struct dc_stream_state *stream = to_pipe_ctx->stream;
1600 
1601 	*to_pipe_ctx = *from_pipe_ctx;
1602 	to_pipe_ctx->stream = stream;
1603 	if (plane_state != NULL)
1604 		to_pipe_ctx->plane_state = plane_state;
1605 }
1606 
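/*
 * Look for an existing non-DP, non-virtual stream whose timing can be
 * synchronized with the given stream so that its PLL can be shared.
 */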
1607 static struct dc_stream_state *find_pll_sharable_stream(
1608 		struct dc_stream_state *stream_needs_pll,
1609 		struct dc_state *context)
1610 {
1611 	int i;
1612 
1613 	for (i = 0; i < context->stream_count; i++) {
1614 		struct dc_stream_state *stream_has_pll = context->streams[i];
1615 
		/* We are looking for a non-DP, non-virtual stream */
1617 		if (resource_are_streams_timing_synchronizable(
1618 			stream_needs_pll, stream_has_pll)
1619 			&& !dc_is_dp_signal(stream_has_pll->signal)
1620 			&& stream_has_pll->sink->link->connector_signal
1621 			!= SIGNAL_TYPE_VIRTUAL)
1622 			return stream_has_pll;
1623 
1624 	}
1625 
1626 	return NULL;
1627 }
1628 
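/*
 * Normalize the pixel clock: halve it for 4:2:0 pixel encoding and scale it
 * up for deep color depths (relative to 24 bpp). Used to derive the HDMI phy
 * pixel clock.
 */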
1629 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1630 {
1631 	uint32_t pix_clk = timing->pix_clk_khz;
1632 	uint32_t normalized_pix_clk = pix_clk;
1633 
1634 	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1635 		pix_clk /= 2;
1636 	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1637 		switch (timing->display_color_depth) {
1638 		case COLOR_DEPTH_888:
1639 			normalized_pix_clk = pix_clk;
1640 			break;
1641 		case COLOR_DEPTH_101010:
1642 			normalized_pix_clk = (pix_clk * 30) / 24;
1643 			break;
1644 		case COLOR_DEPTH_121212:
1645 			normalized_pix_clk = (pix_clk * 36) / 24;
1646 		break;
1647 		case COLOR_DEPTH_161616:
1648 			normalized_pix_clk = (pix_clk * 48) / 24;
1649 		break;
1650 		default:
1651 			ASSERT(0);
1652 		break;
1653 		}
1654 	}
1655 	return normalized_pix_clk;
1656 }
1657 
1658 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1659 {
1660 	/* update actual pixel clock on all streams */
1661 	if (dc_is_hdmi_signal(stream->signal))
1662 		stream->phy_pix_clk = get_norm_pix_clk(
1663 			&stream->timing);
1664 	else
1665 		stream->phy_pix_clk =
1666 			stream->timing.pix_clk_khz;
1667 }
1668 
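/*
 * Acquire the back-end resources for a stream: a free pipe (or a split pipe
 * on DCN), a stream encoder matching the link's preferred engine and, when
 * applicable, an audio endpoint, then record them in the stream status.
 */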
1669 enum dc_status resource_map_pool_resources(
1670 		const struct dc  *dc,
1671 		struct dc_state *context,
1672 		struct dc_stream_state *stream)
1673 {
1674 	const struct resource_pool *pool = dc->res_pool;
1675 	int i;
1676 	struct dc_context *dc_ctx = dc->ctx;
1677 	struct pipe_ctx *pipe_ctx = NULL;
1678 	int pipe_idx = -1;
1679 
1680 	/* TODO Check if this is needed */
1681 	/*if (!resource_is_stream_unchanged(old_context, stream)) {
1682 			if (stream != NULL && old_context->streams[i] != NULL) {
1683 				stream->bit_depth_params =
1684 						old_context->streams[i]->bit_depth_params;
1685 				stream->clamping = old_context->streams[i]->clamping;
1686 				continue;
1687 			}
1688 		}
1689 	*/
1690 
1691 	/* acquire new resources */
1692 	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1693 
1694 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1695 	if (pipe_idx < 0)
1696 		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1697 #endif
1698 
1699 	if (pipe_idx < 0)
1700 		return DC_NO_CONTROLLER_RESOURCE;
1701 
1702 	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
1703 
1704 	pipe_ctx->stream_res.stream_enc =
1705 		find_first_free_match_stream_enc_for_link(
1706 			&context->res_ctx, pool, stream);
1707 
1708 	if (!pipe_ctx->stream_res.stream_enc)
1709 		return DC_NO_STREAM_ENG_RESOURCE;
1710 
1711 	update_stream_engine_usage(
1712 		&context->res_ctx, pool,
1713 		pipe_ctx->stream_res.stream_enc,
1714 		true);
1715 
1716 	/* TODO: Add a check for ASIC audio support and EDID audio capability */
1717 	if (!stream->sink->converter_disable_audio &&
1718 	    dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
1719 	    stream->audio_info.mode_count) {
1720 		pipe_ctx->stream_res.audio = find_first_free_audio(
1721 		&context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
1722 
1723 		/*
1724 		 * Audio is assigned on a first-come, first-served
1725 		 * basis. Some ASICs have fewer audio resources
1726 		 * than pipes.
1727 		 */
1728 		if (pipe_ctx->stream_res.audio)
1729 			update_audio_usage(&context->res_ctx, pool,
1730 					   pipe_ctx->stream_res.audio, true);
1731 	}
1732 
1733 	for (i = 0; i < context->stream_count; i++)
1734 		if (context->streams[i] == stream) {
1735 			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
1736 			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
1737 			return DC_OK;
1738 		}
1739 
1740 	DC_ERROR("Stream %p not found in new ctx!\n", stream);
1741 	return DC_ERROR_UNEXPECTED;
1742 }
1743 
1744 /* The first stream in the context is used to populate the rest */
1745 void validate_guaranteed_copy_streams(
1746 		struct dc_state *context,
1747 		int max_streams)
1748 {
1749 	int i;
1750 
1751 	for (i = 1; i < max_streams; i++) {
1752 		context->streams[i] = context->streams[0];
1753 
1754 		copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
1755 			      &context->res_ctx.pipe_ctx[i]);
1756 		context->res_ctx.pipe_ctx[i].stream =
1757 				context->res_ctx.pipe_ctx[0].stream;
1758 
1759 		dc_stream_retain(context->streams[i]);
1760 		context->stream_count++;
1761 	}
1762 }
1763 
1764 void dc_resource_state_copy_construct_current(
1765 		const struct dc *dc,
1766 		struct dc_state *dst_ctx)
1767 {
1768 	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
1769 }
1770 
1771 
1772 void dc_resource_state_construct(
1773 		const struct dc *dc,
1774 		struct dc_state *dst_ctx)
1775 {
1776 	dst_ctx->dis_clk = dc->res_pool->display_clock;
1777 }
1778 
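/*
 * Global validation of a new state: run the ASIC-specific
 * validate_global hook if one exists, move DP streams that cannot share
 * a PLL with a non-DP stream onto the dedicated DP clock source,
 * rebuild the scaling parameters and finally check bandwidth.
 */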
1779 enum dc_status dc_validate_global_state(
1780 		struct dc *dc,
1781 		struct dc_state *new_ctx)
1782 {
1783 	enum dc_status result = DC_ERROR_UNEXPECTED;
1784 	int i, j;
1785 
1786 	if (!new_ctx)
1787 		return DC_ERROR_UNEXPECTED;
1788 
1789 	if (dc->res_pool->funcs->validate_global) {
1790 			result = dc->res_pool->funcs->validate_global(dc, new_ctx);
1791 			if (result != DC_OK)
1792 				return result;
1793 	}
1794 
1795 	for (i = 0; i < new_ctx->stream_count; i++) {
1796 		struct dc_stream_state *stream = new_ctx->streams[i];
1797 
1798 		for (j = 0; j < dc->res_pool->pipe_count; j++) {
1799 			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
1800 
1801 			if (pipe_ctx->stream != stream)
1802 				continue;
1803 
1804 			/* Switch to dp clock source only if there is
1805 			 * no non dp stream that shares the same timing
1806 			 * with the dp stream.
1807 			 */
1808 			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
1809 				!find_pll_sharable_stream(stream, new_ctx)) {
1810 
1811 				resource_unreference_clock_source(
1812 						&new_ctx->res_ctx,
1813 						dc->res_pool,
1814 						pipe_ctx->clock_source);
1815 
1816 				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
1817 				resource_reference_clock_source(
1818 						&new_ctx->res_ctx,
1819 						dc->res_pool,
1820 						 pipe_ctx->clock_source);
1821 			}
1822 		}
1823 	}
1824 
1825 	result = resource_build_scaling_params_for_context(dc, new_ctx);
1826 
1827 	if (result == DC_OK)
1828 		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
1829 			result = DC_FAIL_BANDWIDTH_VALIDATE;
1830 
1831 	return result;
1832 }
1833 
1834 static void patch_gamut_packet_checksum(
1835 		struct encoder_info_packet *gamut_packet)
1836 {
1837 	/* For gamut packets we recalculate the checksum */
1838 	if (gamut_packet->valid) {
1839 		uint8_t chk_sum = 0;
1840 		uint8_t *ptr;
1841 		uint8_t i;
1842 
1843 		/* Start of the gamut data. */
1844 		ptr = &gamut_packet->sb[3];
1845 
1846 		for (i = 0; i <= gamut_packet->sb[1]; i++)
1847 			chk_sum += ptr[i];
1848 
1849 		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
1850 	}
1851 }
1852 
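/*
 * Build the HDMI AVI InfoFrame for a pipe: pixel encoding, colorimetry,
 * aspect ratio, quantization range, VIC and bar info are derived from
 * the stream, and the one-byte-complement checksum is written into
 * byte 0 of the payload.
 */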
1853 static void set_avi_info_frame(
1854 		struct encoder_info_packet *info_packet,
1855 		struct pipe_ctx *pipe_ctx)
1856 {
1857 	struct dc_stream_state *stream = pipe_ctx->stream;
1858 	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
1859 	struct info_frame info_frame = { {0} };
1860 	uint32_t pixel_encoding = 0;
1861 	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
1862 	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
1863 	bool itc = false;
1864 	uint8_t itc_value = 0;
1865 	uint8_t cn0_cn1 = 0;
1866 	unsigned int cn0_cn1_value = 0;
1867 	uint8_t *check_sum = NULL;
1868 	uint8_t byte_index = 0;
1869 	union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi;
1870 	union display_content_support support = {0};
1871 	unsigned int vic = pipe_ctx->stream->timing.vic;
1872 	enum dc_timing_3d_format format;
1873 
1874 	color_space = pipe_ctx->stream->output_color_space;
1875 	if (color_space == COLOR_SPACE_UNKNOWN)
1876 		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
1877 			COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709;
1878 
1879 	/* Initialize header */
1880 	hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
1881 	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
1882 	 * not be used in HDMI 2.0 (Section 10.1) */
1883 	hdmi_info->bits.header.version = 2;
1884 	hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
1885 
1886 	/*
1887 	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
1888 	 * according to HDMI 2.0 spec (Section 10.1)
1889 	 */
1890 
1891 	switch (stream->timing.pixel_encoding) {
1892 	case PIXEL_ENCODING_YCBCR422:
1893 		pixel_encoding = 1;
1894 		break;
1895 
1896 	case PIXEL_ENCODING_YCBCR444:
1897 		pixel_encoding = 2;
1898 		break;
1899 	case PIXEL_ENCODING_YCBCR420:
1900 		pixel_encoding = 3;
1901 		break;
1902 
1903 	case PIXEL_ENCODING_RGB:
1904 	default:
1905 		pixel_encoding = 0;
1906 	}
1907 
1908 	/* Y0_Y1_Y2 : The pixel encoding */
1909 	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
1910 	hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding;
1911 
1912 	/* A0 = 1 Active Format Information valid */
1913 	hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID;
1914 
1915 	/* B0, B1 = 3; Bar info data is valid */
1916 	hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID;
1917 
1918 	hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
1919 
1920 	/* S0, S1 : Underscan / Overscan */
1921 	/* TODO: un-hardcode scan type */
1922 	scan_type = SCANNING_TYPE_UNDERSCAN;
1923 	hdmi_info->bits.S0_S1 = scan_type;
1924 
1925 	/* C0, C1 : Colorimetry */
1926 	if (color_space == COLOR_SPACE_YCBCR709 ||
1927 			color_space == COLOR_SPACE_YCBCR709_LIMITED)
1928 		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709;
1929 	else if (color_space == COLOR_SPACE_YCBCR601 ||
1930 			color_space == COLOR_SPACE_YCBCR601_LIMITED)
1931 		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601;
1932 	else {
1933 		hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA;
1934 	}
1935 	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
1936 			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
1937 			color_space == COLOR_SPACE_2020_YCBCR) {
1938 		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
1939 		hdmi_info->bits.C0_C1   = COLORIMETRY_EXTENDED;
1940 	} else if (color_space == COLOR_SPACE_ADOBERGB) {
1941 		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
1942 		hdmi_info->bits.C0_C1   = COLORIMETRY_EXTENDED;
1943 	}
1944 
1945 	/* TODO: un-hardcode aspect ratio */
1946 	aspect = stream->timing.aspect_ratio;
1947 
1948 	switch (aspect) {
1949 	case ASPECT_RATIO_4_3:
1950 	case ASPECT_RATIO_16_9:
1951 		hdmi_info->bits.M0_M1 = aspect;
1952 		break;
1953 
1954 	case ASPECT_RATIO_NO_DATA:
1955 	case ASPECT_RATIO_64_27:
1956 	case ASPECT_RATIO_256_135:
1957 	default:
1958 		hdmi_info->bits.M0_M1 = 0;
1959 	}
1960 
1961 	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
1962 	hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
1963 
1964 	/* TODO: un-hardcode cn0_cn1 and itc */
1965 
1966 	cn0_cn1 = 0;
1967 	cn0_cn1_value = 0;
1968 
1969 	itc = true;
1970 	itc_value = 1;
1971 
1972 	support = stream->sink->edid_caps.content_support;
1973 
1974 	if (itc) {
1975 		if (!support.bits.valid_content_type) {
1976 			cn0_cn1_value = 0;
1977 		} else {
1978 			if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
1979 				if (support.bits.graphics_content == 1) {
1980 					cn0_cn1_value = 0;
1981 				}
1982 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
1983 				if (support.bits.photo_content == 1) {
1984 					cn0_cn1_value = 1;
1985 				} else {
1986 					cn0_cn1_value = 0;
1987 					itc_value = 0;
1988 				}
1989 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
1990 				if (support.bits.cinema_content == 1) {
1991 					cn0_cn1_value = 2;
1992 				} else {
1993 					cn0_cn1_value = 0;
1994 					itc_value = 0;
1995 				}
1996 			} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
1997 				if (support.bits.game_content == 1) {
1998 					cn0_cn1_value = 3;
1999 				} else {
2000 					cn0_cn1_value = 0;
2001 					itc_value = 0;
2002 				}
2003 			}
2004 		}
2005 		hdmi_info->bits.CN0_CN1 = cn0_cn1_value;
2006 		hdmi_info->bits.ITC = itc_value;
2007 	}
2008 
2009 	/* TODO: We should handle YCC quantization,
2010 	 * but we do not have the matrix calculation */
2011 	if (stream->sink->edid_caps.qs_bit == 1 &&
2012 			stream->sink->edid_caps.qy_bit == 1) {
2013 		if (color_space == COLOR_SPACE_SRGB ||
2014 			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2015 			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_FULL_RANGE;
2016 			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2017 		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2018 					color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2019 			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_LIMITED_RANGE;
2020 			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2021 		} else {
2022 			hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2023 			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2024 		}
2025 	} else {
2026 		hdmi_info->bits.Q0_Q1   = RGB_QUANTIZATION_DEFAULT_RANGE;
2027 		hdmi_info->bits.YQ0_YQ1   = YYC_QUANTIZATION_LIMITED_RANGE;
2028 	}
2029 
2030 	/* VIC */
2031 	format = stream->timing.timing_3d_format;
2032 	/* TODO: add 3D stereo support */
2033 	if (format != TIMING_3D_FORMAT_NONE) {
2034 		/* Per the HDMI spec, the HDMI VIC needs to be converted to a CEA VIC when 3D is enabled */
2035 		switch (pipe_ctx->stream->timing.hdmi_vic) {
2036 		case 1:
2037 			vic = 95;
2038 			break;
2039 		case 2:
2040 			vic = 94;
2041 			break;
2042 		case 3:
2043 			vic = 93;
2044 			break;
2045 		case 4:
2046 			vic = 98;
2047 			break;
2048 		default:
2049 			break;
2050 		}
2051 	}
2052 	hdmi_info->bits.VIC0_VIC7 = vic;
2053 
2054 	/* Pixel repetition
2055 	 * PR0 - PR3 starts from 0, whereas pHwPathMode->mode.timing.flags.pixel
2056 	 * repetition starts from 1 */
2057 	hdmi_info->bits.PR0_PR3 = 0;
2058 
2059 	/* Bar Info
2060 	 * barTop:    Line Number of End of Top Bar.
2061 	 * barBottom: Line Number of Start of Bottom Bar.
2062 	 * barLeft:   Pixel Number of End of Left Bar.
2063 	 * barRight:  Pixel Number of Start of Right Bar. */
2064 	hdmi_info->bits.bar_top = stream->timing.v_border_top;
2065 	hdmi_info->bits.bar_bottom = (stream->timing.v_total
2066 			- stream->timing.v_border_bottom + 1);
2067 	hdmi_info->bits.bar_left  = stream->timing.h_border_left;
2068 	hdmi_info->bits.bar_right = (stream->timing.h_total
2069 			- stream->timing.h_border_right + 1);
2070 
2071 	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2072 	check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
2073 
2074 	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2075 
2076 	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2077 		*check_sum += hdmi_info->packet_raw_data.sb[byte_index];
2078 
2079 	/* one byte complement */
2080 	*check_sum = (uint8_t) (0x100 - *check_sum);
2081 
2082 	/* Store in hw_path_mode */
2083 	info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
2084 	info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
2085 	info_packet->hb2 = hdmi_info->packet_raw_data.hb2;
2086 
2087 	for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
2088 				info_packet_hdmi.packet_raw_data.sb); byte_index++)
2089 		info_packet->sb[byte_index] = info_frame.avi_info_packet.
2090 				info_packet_hdmi.packet_raw_data.sb[byte_index];
2091 
2092 	info_packet->valid = true;
2093 }
2094 
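/*
 * Build the HDMI Vendor-Specific InfoFrame (VSIF). It is only emitted
 * for 3D stereo timings or HDMI-VIC (4K) modes; PB4/PB5 carry either
 * the 3D_Structure code or the HDMI VIC, and the payload length is
 * 5 bytes (6 for side-by-side half).
 */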
2095 static void set_vendor_info_packet(
2096 		struct encoder_info_packet *info_packet,
2097 		struct dc_stream_state *stream)
2098 {
2099 	uint32_t length = 0;
2100 	bool hdmi_vic_mode = false;
2101 	uint8_t checksum = 0;
2102 	uint32_t i = 0;
2103 	enum dc_timing_3d_format format;
2104 	/* TODO: Can be different depending on packet content, e.g.
2105 	 * unsigned int length = pPathMode->dolbyVision ? 24 : 5; */
2106 
2107 	info_packet->valid = false;
2108 
2109 	format = stream->timing.timing_3d_format;
2110 	if (stream->view_format == VIEW_3D_FORMAT_NONE)
2111 		format = TIMING_3D_FORMAT_NONE;
2112 
2113 	/* Can be different depending on packet content */
2114 	length = 5;
2115 
2116 	if (stream->timing.hdmi_vic != 0
2117 			&& stream->timing.h_total >= 3840
2118 			&& stream->timing.v_total >= 2160)
2119 		hdmi_vic_mode = true;
2120 
2121 	/* According to the HDMI 1.4a CTS, a VSIF should be sent
2122 	 * for both 3D stereo and HDMI VIC modes.
2123 	 * For all other modes, no VSIF is sent. */
2124 
2125 	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
2126 		return;
2127 
2128 	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
2129 	info_packet->sb[1] = 0x03;
2130 	info_packet->sb[2] = 0x0C;
2131 	info_packet->sb[3] = 0x00;
2132 
2133 	/* PB4: 5 lower bits = 0 (reserved). 3 higher bits = HDMI_Video_Format.
2134 	 * The value for HDMI_Video_Format are:
2135 	 * 0x0 (0b000) - No additional HDMI video format is presented in this
2136 	 * packet
2137 	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
2138 	 * parameter follows
2139 	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
2140 	 * potentially 3D_Ext_Data follows
2141 	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
2142 	if (format != TIMING_3D_FORMAT_NONE)
2143 		info_packet->sb[4] = (2 << 5);
2144 	else if (hdmi_vic_mode)
2145 		info_packet->sb[4] = (1 << 5);
2146 
2147 	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
2148 	 * 4 lower bits = 0 (reserved). 4 higher bits = 3D_Structure.
2149 	 * The value for 3D_Structure are:
2150 	 * 0x0 - Frame Packing
2151 	 * 0x1 - Field Alternative
2152 	 * 0x2 - Line Alternative
2153 	 * 0x3 - Side-by-Side (full)
2154 	 * 0x4 - L + depth
2155 	 * 0x5 - L + depth + graphics + graphics-depth
2156 	 * 0x6 - Top-and-Bottom
2157 	 * 0x7 - Reserved for future use
2158 	 * 0x8 - Side-by-Side (Half)
2159 	 * 0x9..0xE - Reserved for future use
2160 	 * 0xF - Not used */
2161 	switch (format) {
2162 	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
2163 	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
2164 		info_packet->sb[5] = (0x0 << 4);
2165 		break;
2166 
2167 	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
2168 	case TIMING_3D_FORMAT_SBS_SW_PACKED:
2169 		info_packet->sb[5] = (0x8 << 4);
2170 		length = 6;
2171 		break;
2172 
2173 	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
2174 	case TIMING_3D_FORMAT_TB_SW_PACKED:
2175 		info_packet->sb[5] = (0x6 << 4);
2176 		break;
2177 
2178 	default:
2179 		break;
2180 	}
2181 
2182 	/* PB5: If PB4 is set to 0x1 (extended resolution format)
2183 	 * fill PB5 with the correct HDMI VIC code */
2184 	if (hdmi_vic_mode)
2185 		info_packet->sb[5] = stream->timing.hdmi_vic;
2186 
2187 	/* Header */
2188 	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
2189 	info_packet->hb1 = 0x01; /* Version */
2190 
2191 	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
2192 	info_packet->hb2 = (uint8_t) (length);
2193 
2194 	/* Calculate checksum */
2195 	checksum = 0;
2196 	checksum += info_packet->hb0;
2197 	checksum += info_packet->hb1;
2198 	checksum += info_packet->hb2;
2199 
2200 	for (i = 1; i <= length; i++)
2201 		checksum += info_packet->sb[i];
2202 
2203 	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
2204 
2205 	info_packet->valid = true;
2206 }
2207 
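/*
 * Build the SPD InfoFrame used for FreeSync signalling. The header
 * differs between HDMI (type 0x83, length 8) and DP (SDP with a 27-byte
 * payload); the body carries the AMD OUI, the supported/enabled/active
 * flags and the minimum and nominal refresh rates, converted from
 * micro-Hz to Hz (e.g. 48000000 / 1000000 = 48 Hz).
 */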
2208 static void set_spd_info_packet(
2209 		struct encoder_info_packet *info_packet,
2210 		struct dc_stream_state *stream)
2211 {
2212 	/* SPD info packet for FreeSync */
2213 
2214 	unsigned char checksum = 0;
2215 	unsigned int idx, payload_size = 0;
2216 
2217 	/* Check if Freesync is supported. Return if false. If true,
2218 	 * set the corresponding bit in the info packet
2219 	 */
2220 	if (stream->freesync_ctx.supported == false)
2221 		return;
2222 
2223 	if (dc_is_hdmi_signal(stream->signal)) {
2224 
2225 		/* HEADER */
2226 
2227 		/* HB0  = Packet Type = 0x83 (Source Product
2228 		 *	  Descriptor InfoFrame)
2229 		 */
2230 		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
2231 
2232 		/* HB1  = Version = 0x01 */
2233 		info_packet->hb1 = 0x01;
2234 
2235 		/* HB2  = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
2236 		info_packet->hb2 = 0x08;
2237 
2238 		payload_size = 0x08;
2239 
2240 	} else if (dc_is_dp_signal(stream->signal)) {
2241 
2242 		/* HEADER */
2243 
2244 		/* HB0  = Secondary-data Packet ID = 0 - Only non-zero
2245 		 *	  when used to associate audio related info packets
2246 		 */
2247 		info_packet->hb0 = 0x00;
2248 
2249 		/* HB1  = Packet Type = 0x83 (Source Product
2250 		 *	  Descriptor InfoFrame)
2251 		 */
2252 		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
2253 
2254 		/* HB2  = [Bits 7:0 = Least significant eight bits -
2255 		 *	  For INFOFRAME, the value must be 1Bh]
2256 		 */
2257 		info_packet->hb2 = 0x1B;
2258 
2259 		/* HB3  = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
2260 		 *	  [Bits 1:0 = Most significant two bits = 0x00]
2261 		 */
2262 		info_packet->hb3 = 0x04;
2263 
2264 		payload_size = 0x1B;
2265 	}
2266 
2267 	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
2268 	info_packet->sb[1] = 0x1A;
2269 
2270 	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
2271 	info_packet->sb[2] = 0x00;
2272 
2273 	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
2274 	info_packet->sb[3] = 0x00;
2275 
2276 	/* PB4 = Reserved */
2277 	info_packet->sb[4] = 0x00;
2278 
2279 	/* PB5 = Reserved */
2280 	info_packet->sb[5] = 0x00;
2281 
2282 	/* PB6 = [Bits 7:3 = Reserved] */
2283 	info_packet->sb[6] = 0x00;
2284 
2285 	if (stream->freesync_ctx.supported == true)
2286 		/* PB6 = [Bit 0 = FreeSync Supported] */
2287 		info_packet->sb[6] |= 0x01;
2288 
2289 	if (stream->freesync_ctx.enabled == true)
2290 		/* PB6 = [Bit 1 = FreeSync Enabled] */
2291 		info_packet->sb[6] |= 0x02;
2292 
2293 	if (stream->freesync_ctx.active == true)
2294 		/* PB6 = [Bit 2 = FreeSync Active] */
2295 		info_packet->sb[6] |= 0x04;
2296 
2297 	/* PB7 = FreeSync Minimum refresh rate (Hz) */
2298 	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
2299 			min_refresh_in_micro_hz / 1000000);
2300 
2301 	/* PB8 = FreeSync Maximum refresh rate (Hz)
2302 	 *
2303 	 * Note: We do not use the maximum capable refresh rate
2304 	 * of the panel, because we should never go above the field
2305 	 * rate of the mode timing set.
2306 	 */
2307 	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
2308 			nominal_refresh_in_micro_hz / 1000000);
2309 
2310 	/* PB9 - PB27  = Reserved */
2311 	for (idx = 9; idx <= 27; idx++)
2312 		info_packet->sb[idx] = 0x00;
2313 
2314 	/* Calculate checksum */
2315 	checksum += info_packet->hb0;
2316 	checksum += info_packet->hb1;
2317 	checksum += info_packet->hb2;
2318 	checksum += info_packet->hb3;
2319 
2320 	for (idx = 1; idx <= payload_size; idx++)
2321 		checksum += info_packet->sb[idx];
2322 
2323 	/* PB0 = Checksum (one byte complement) */
2324 	info_packet->sb[0] = (unsigned char) (0x100 - checksum);
2325 
2326 	info_packet->valid = true;
2327 }
2328 
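/*
 * Build the HDR static metadata packet (HDMI DRM InfoFrame type 0x87,
 * or the equivalent DP SDP). Chromaticity coordinates, white point,
 * min/max luminance, MaxCLL and MaxFALL are packed as little-endian
 * 16-bit values; nothing is sent if the stream does not carry HDR
 * metadata.
 */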
2329 static void set_hdr_static_info_packet(
2330 		struct encoder_info_packet *info_packet,
2331 		struct dc_stream_state *stream)
2332 {
2333 	uint16_t i = 0;
2334 	enum signal_type signal = stream->signal;
2335 	uint32_t data;
2336 
2337 	if (!stream->hdr_static_metadata.hdr_supported)
2338 		return;
2339 
2340 	if (dc_is_hdmi_signal(signal)) {
2341 		info_packet->valid = true;
2342 
2343 		info_packet->hb0 = 0x87;
2344 		info_packet->hb1 = 0x01;
2345 		info_packet->hb2 = 0x1A;
2346 		i = 1;
2347 	} else if (dc_is_dp_signal(signal)) {
2348 		info_packet->valid = true;
2349 
2350 		info_packet->hb0 = 0x00;
2351 		info_packet->hb1 = 0x87;
2352 		info_packet->hb2 = 0x1D;
2353 		info_packet->hb3 = (0x13 << 2);
2354 		i = 2;
2355 	}
2356 
2357 	data = stream->hdr_static_metadata.is_hdr;
2358 	info_packet->sb[i++] = data ? 0x02 : 0x00;
2359 	info_packet->sb[i++] = 0x00;
2360 
2361 	data = stream->hdr_static_metadata.chromaticity_green_x / 2;
2362 	info_packet->sb[i++] = data & 0xFF;
2363 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2364 
2365 	data = stream->hdr_static_metadata.chromaticity_green_y / 2;
2366 	info_packet->sb[i++] = data & 0xFF;
2367 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2368 
2369 	data = stream->hdr_static_metadata.chromaticity_blue_x / 2;
2370 	info_packet->sb[i++] = data & 0xFF;
2371 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2372 
2373 	data = stream->hdr_static_metadata.chromaticity_blue_y / 2;
2374 	info_packet->sb[i++] = data & 0xFF;
2375 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2376 
2377 	data = stream->hdr_static_metadata.chromaticity_red_x / 2;
2378 	info_packet->sb[i++] = data & 0xFF;
2379 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2380 
2381 	data = stream->hdr_static_metadata.chromaticity_red_y / 2;
2382 	info_packet->sb[i++] = data & 0xFF;
2383 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2384 
2385 	data = stream->hdr_static_metadata.chromaticity_white_point_x / 2;
2386 	info_packet->sb[i++] = data & 0xFF;
2387 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2388 
2389 	data = stream->hdr_static_metadata.chromaticity_white_point_y / 2;
2390 	info_packet->sb[i++] = data & 0xFF;
2391 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2392 
2393 	data = stream->hdr_static_metadata.max_luminance;
2394 	info_packet->sb[i++] = data & 0xFF;
2395 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2396 
2397 	data = stream->hdr_static_metadata.min_luminance;
2398 	info_packet->sb[i++] = data & 0xFF;
2399 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2400 
2401 	data = stream->hdr_static_metadata.maximum_content_light_level;
2402 	info_packet->sb[i++] = data & 0xFF;
2403 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2404 
2405 	data = stream->hdr_static_metadata.maximum_frame_average_light_level;
2406 	info_packet->sb[i++] = data & 0xFF;
2407 	info_packet->sb[i++] = (data & 0xFF00) >> 8;
2408 
2409 	if (dc_is_hdmi_signal(signal)) {
2410 		uint32_t checksum = 0;
2411 
2412 		checksum += info_packet->hb0;
2413 		checksum += info_packet->hb1;
2414 		checksum += info_packet->hb2;
2415 
2416 		for (i = 1; i <= info_packet->hb2; i++)
2417 			checksum += info_packet->sb[i];
2418 
2419 		info_packet->sb[0] = 0x100 - checksum;
2420 	} else if (dc_is_dp_signal(signal)) {
2421 		info_packet->sb[0] = 0x01;
2422 		info_packet->sb[1] = 0x1A;
2423 	}
2424 }
2425 
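/*
 * Build the DP VSC SDP. Only revision 2 (3D stereo + PSR support,
 * eDP 1.3 or higher) is emitted, and only when PSR is enabled on the
 * link; otherwise the packet is left invalid.
 */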
2426 static void set_vsc_info_packet(
2427 		struct encoder_info_packet *info_packet,
2428 		struct dc_stream_state *stream)
2429 {
2430 	unsigned int vscPacketRevision = 0;
2431 	unsigned int i;
2432 
2433 	if (stream->sink->link->psr_enabled) {
2434 		vscPacketRevision = 2;
2435 	}
2436 
2437 	/* VSC packet not needed based on the features
2438 	 * supported by this DP display
2439 	 */
2440 	if (vscPacketRevision == 0)
2441 		return;
2442 
2443 	if (vscPacketRevision == 0x2) {
2444 		/* Secondary-data Packet ID = 0*/
2445 		info_packet->hb0 = 0x00;
2446 		/* 07h - Packet Type Value indicating Video
2447 		 * Stream Configuration packet
2448 		 */
2449 		info_packet->hb1 = 0x07;
2450 		/* 02h = VSC SDP supporting 3D stereo and PSR
2451 		 * (applies to eDP v1.3 or higher).
2452 		 */
2453 		info_packet->hb2 = 0x02;
2454 		/* 08h = VSC packet supporting 3D stereo + PSR
2455 		 * (HB2 = 02h).
2456 		 */
2457 		info_packet->hb3 = 0x08;
2458 
2459 		for (i = 0; i < 28; i++)
2460 			info_packet->sb[i] = 0;
2461 
2462 		info_packet->valid = true;
2463 	}
2464 
2465 	/*TODO: stereo 3D support and extend pixel encoding colorimetry*/
2466 }
2467 
2468 void dc_resource_state_destruct(struct dc_state *context)
2469 {
2470 	int i, j;
2471 
2472 	for (i = 0; i < context->stream_count; i++) {
2473 		for (j = 0; j < context->stream_status[i].plane_count; j++)
2474 			dc_plane_state_release(
2475 				context->stream_status[i].plane_states[j]);
2476 
2477 		context->stream_status[i].plane_count = 0;
2478 		dc_stream_release(context->streams[i]);
2479 		context->streams[i] = NULL;
2480 	}
2481 }
2482 
2483 /*
2484  * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2485  * by the src_ctx
2486  */
2487 void dc_resource_state_copy_construct(
2488 		const struct dc_state *src_ctx,
2489 		struct dc_state *dst_ctx)
2490 {
2491 	int i, j;
2492 	struct kref refcount = dst_ctx->refcount;
2493 
2494 	*dst_ctx = *src_ctx;
2495 
2496 	for (i = 0; i < MAX_PIPES; i++) {
2497 		struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2498 
2499 		if (cur_pipe->top_pipe)
2500 			cur_pipe->top_pipe =  &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2501 
2502 		if (cur_pipe->bottom_pipe)
2503 			cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2504 
2505 	}
2506 
2507 	for (i = 0; i < dst_ctx->stream_count; i++) {
2508 		dc_stream_retain(dst_ctx->streams[i]);
2509 		for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2510 			dc_plane_state_retain(
2511 				dst_ctx->stream_status[i].plane_states[j]);
2512 	}
2513 
2514 	/* context refcount should not be overridden */
2515 	dst_ctx->refcount = refcount;
2516 
2517 }
2518 
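/*
 * Return the first clock source in the pool with no references, or NULL
 * if every PLL is already in use.
 */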
2519 struct clock_source *dc_resource_find_first_free_pll(
2520 		struct resource_context *res_ctx,
2521 		const struct resource_pool *pool)
2522 {
2523 	int i;
2524 
2525 	for (i = 0; i < pool->clk_src_count; ++i) {
2526 		if (res_ctx->clock_source_ref_count[i] == 0)
2527 			return pool->clock_sources[i];
2528 	}
2529 
2530 	return NULL;
2531 }
2532 
2533 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2534 {
2535 	enum signal_type signal = SIGNAL_TYPE_NONE;
2536 	struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2537 
2538 	/* default all packets to invalid */
2539 	info->avi.valid = false;
2540 	info->gamut.valid = false;
2541 	info->vendor.valid = false;
2542 	info->spd.valid = false;
2543 	info->hdrsmd.valid = false;
2544 	info->vsc.valid = false;
2545 
2546 	signal = pipe_ctx->stream->signal;
2547 
2548 	/* HDMI and DP have different info packets */
2549 	if (dc_is_hdmi_signal(signal)) {
2550 		set_avi_info_frame(&info->avi, pipe_ctx);
2551 
2552 		set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2553 
2554 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2555 
2556 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2557 
2558 	} else if (dc_is_dp_signal(signal)) {
2559 		set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2560 
2561 		set_spd_info_packet(&info->spd, pipe_ctx->stream);
2562 
2563 		set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2564 	}
2565 
2566 	patch_gamut_packet_checksum(&info->gamut);
2567 }
2568 
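/*
 * Pick a clock source for the stream's head pipe: DP and virtual
 * signals use the dedicated DP clock source; other signals first try to
 * share an already-used PLL (unless PLL sharing is disabled in the dc
 * config) and fall back to the first free PLL. The chosen source has
 * its reference count incremented.
 */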
2569 enum dc_status resource_map_clock_resources(
2570 		const struct dc  *dc,
2571 		struct dc_state *context,
2572 		struct dc_stream_state *stream)
2573 {
2574 	/* acquire new resources */
2575 	const struct resource_pool *pool = dc->res_pool;
2576 	struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2577 				&context->res_ctx, stream);
2578 
2579 	if (!pipe_ctx)
2580 		return DC_ERROR_UNEXPECTED;
2581 
2582 	if (dc_is_dp_signal(pipe_ctx->stream->signal)
2583 		|| pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2584 		pipe_ctx->clock_source = pool->dp_clock_source;
2585 	else {
2586 		pipe_ctx->clock_source = NULL;
2587 
2588 		if (!dc->config.disable_disp_pll_sharing)
2589 			pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2590 				&context->res_ctx,
2591 				pipe_ctx);
2592 
2593 		if (pipe_ctx->clock_source == NULL)
2594 			pipe_ctx->clock_source =
2595 				dc_resource_find_first_free_pll(
2596 					&context->res_ctx,
2597 					pool);
2598 	}
2599 
2600 	if (pipe_ctx->clock_source == NULL)
2601 		return DC_NO_CLOCK_SOURCE_RESOURCE;
2602 
2603 	resource_reference_clock_source(
2604 		&context->res_ctx, pool,
2605 		pipe_ctx->clock_source);
2606 
2607 	return DC_OK;
2608 }
2609 
2610 /*
2611  * Note: We need to disable the output if the clock source changes,
2612  * since the BIOS applies an optimization and does not reprogram the
2613  * PHY unless it is already disabled.
2614  */
2615 bool pipe_need_reprogram(
2616 		struct pipe_ctx *pipe_ctx_old,
2617 		struct pipe_ctx *pipe_ctx)
2618 {
2619 	if (!pipe_ctx_old->stream)
2620 		return false;
2621 
2622 	if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2623 		return true;
2624 
2625 	if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2626 		return true;
2627 
2628 	if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2629 		return true;
2630 
2631 	if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2632 			&& pipe_ctx_old->stream != pipe_ctx->stream)
2633 		return true;
2634 
2635 	if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2636 		return true;
2637 
2638 	if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2639 		return true;
2640 
2641 
2642 	return false;
2643 }
2644 
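/*
 * Translate a stream's dither_option into formatter bit-depth-reduction
 * flags: truncation depth, spatial dither depth/randomness and temporal
 * (frame-modulation) depth. For example,
 * DITHER_OPTION_TRUN10_SPATIAL8_FM6 enables truncation to 10 bits,
 * 8-bit spatial dithering and 6-bit temporal modulation.
 */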
2645 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2646 		struct bit_depth_reduction_params *fmt_bit_depth)
2647 {
2648 	enum dc_dither_option option = stream->dither_option;
2649 	enum dc_pixel_encoding pixel_encoding =
2650 			stream->timing.pixel_encoding;
2651 
2652 	memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2653 
2654 	if (option == DITHER_OPTION_DEFAULT) {
2655 		switch (stream->timing.display_color_depth) {
2656 		case COLOR_DEPTH_666:
2657 			option = DITHER_OPTION_SPATIAL6;
2658 			break;
2659 		case COLOR_DEPTH_888:
2660 			option = DITHER_OPTION_SPATIAL8;
2661 			break;
2662 		case COLOR_DEPTH_101010:
2663 			option = DITHER_OPTION_SPATIAL10;
2664 			break;
2665 		default:
2666 			option = DITHER_OPTION_DISABLE;
2667 		}
2668 	}
2669 
2670 	if (option == DITHER_OPTION_DISABLE)
2671 		return;
2672 
2673 	if (option == DITHER_OPTION_TRUN6) {
2674 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2675 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2676 	} else if (option == DITHER_OPTION_TRUN8 ||
2677 			option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2678 			option == DITHER_OPTION_TRUN8_FM6) {
2679 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2680 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2681 	} else if (option == DITHER_OPTION_TRUN10        ||
2682 			option == DITHER_OPTION_TRUN10_SPATIAL6   ||
2683 			option == DITHER_OPTION_TRUN10_SPATIAL8   ||
2684 			option == DITHER_OPTION_TRUN10_FM8     ||
2685 			option == DITHER_OPTION_TRUN10_FM6     ||
2686 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2687 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2688 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2689 	}
2690 
2691 	/* Special case - the formatter can only reduce by at most 4 bits.
2692 	 * When reducing from 12 to 6 bits,
2693 	 * HW recommends we use truncation with round mode
2694 	 * (if we did nothing, truncation to 10 bits would be used).
2695 	 * Note that any 12->10 bit reduction is ignored prior to DCE8,
2696 	 * as the input was 10 bits.
2697 	 */
2698 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2699 			option == DITHER_OPTION_SPATIAL6 ||
2700 			option == DITHER_OPTION_FM6) {
2701 		fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2702 		fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2703 		fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2704 	}
2705 
2706 	/* spatial dither
2707 	 * note that spatial modes 1-3 are never used
2708 	 */
2709 	if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM            ||
2710 			option == DITHER_OPTION_SPATIAL6 ||
2711 			option == DITHER_OPTION_TRUN10_SPATIAL6      ||
2712 			option == DITHER_OPTION_TRUN8_SPATIAL6) {
2713 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2714 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2715 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2716 		fmt_bit_depth->flags.RGB_RANDOM =
2717 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2718 	} else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM            ||
2719 			option == DITHER_OPTION_SPATIAL8 ||
2720 			option == DITHER_OPTION_SPATIAL8_FM6        ||
2721 			option == DITHER_OPTION_TRUN10_SPATIAL8      ||
2722 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2723 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2724 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2725 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2726 		fmt_bit_depth->flags.RGB_RANDOM =
2727 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2728 	} else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2729 			option == DITHER_OPTION_SPATIAL10 ||
2730 			option == DITHER_OPTION_SPATIAL10_FM8 ||
2731 			option == DITHER_OPTION_SPATIAL10_FM6) {
2732 		fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2733 		fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2734 		fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2735 		fmt_bit_depth->flags.RGB_RANDOM =
2736 				(pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2737 	}
2738 
2739 	if (option == DITHER_OPTION_SPATIAL6 ||
2740 			option == DITHER_OPTION_SPATIAL8 ||
2741 			option == DITHER_OPTION_SPATIAL10) {
2742 		fmt_bit_depth->flags.FRAME_RANDOM = 0;
2743 	} else {
2744 		fmt_bit_depth->flags.FRAME_RANDOM = 1;
2745 	}
2746 
2747 	/*
2748 	 * Temporal dither
2749 	 */
2750 	if (option == DITHER_OPTION_FM6           ||
2751 			option == DITHER_OPTION_SPATIAL8_FM6     ||
2752 			option == DITHER_OPTION_SPATIAL10_FM6     ||
2753 			option == DITHER_OPTION_TRUN10_FM6     ||
2754 			option == DITHER_OPTION_TRUN8_FM6      ||
2755 			option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2756 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2757 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2758 	} else if (option == DITHER_OPTION_FM8        ||
2759 			option == DITHER_OPTION_SPATIAL10_FM8  ||
2760 			option == DITHER_OPTION_TRUN10_FM8) {
2761 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2762 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2763 	} else if (option == DITHER_OPTION_FM10) {
2764 		fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2765 		fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2766 	}
2767 
2768 	fmt_bit_depth->pixel_encoding = pixel_encoding;
2769 }
2770 
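/*
 * Stream-level validation: update the stream's phy pixel clock, then
 * check the timing against the first timing generator, the link
 * encoder's output capabilities and the link's mode timing limits.
 */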
2771 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2772 {
2773 	struct dc  *core_dc = dc;
2774 	struct dc_link *link = stream->sink->link;
2775 	struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2776 	enum dc_status res = DC_OK;
2777 
2778 	calculate_phy_pix_clks(stream);
2779 
2780 	if (!tg->funcs->validate_timing(tg, &stream->timing))
2781 		res = DC_FAIL_CONTROLLER_VALIDATE;
2782 
2783 	if (res == DC_OK)
2784 		if (!link->link_enc->funcs->validate_output_with_stream(
2785 						link->link_enc, stream))
2786 			res = DC_FAIL_ENC_VALIDATE;
2787 
2788 	/* TODO: validate audio ASIC caps, encoder */
2789 
2790 	if (res == DC_OK)
2791 		res = dc_link_validate_mode_timing(stream,
2792 		      link,
2793 		      &stream->timing);
2794 
2795 	return res;
2796 }
2797 
2798 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2799 {
2800 	enum dc_status res = DC_OK;
2801 
2802 	/* TODO For now validates pixel format only */
2803 	if (dc->res_pool->funcs->validate_plane)
2804 		return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2805 
2806 	return res;
2807 }
2808