/*
 * Copyright 2012-17 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dcn20_hubp.h"

#include "dm_services.h"
#include "dce_calcs.h"
#include "reg_helper.h"
#include "basics/conversion.h"

#define REG(reg)\
	hubp2->hubp_regs->reg

#define CTX \
	hubp2->base.ctx

#undef FN
#define FN(reg_name, field_name) \
	hubp2->hubp_shift->field_name, hubp2->hubp_mask->field_name

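/*
 * Note: the REG, CTX and FN macros above feed this HUBP instance's register,
 * shift and mask tables into the generic reg_helper.h macros, so a call such
 * as REG_UPDATE(DCHUBP_CNTL, HUBP_BLANK_EN, 1) resolves the register offset
 * and the field shift/mask for this particular pipe.
 */
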
void hubp2_set_vm_system_aperture_settings(struct hubp *hubp,
		struct vm_system_aperture_param *apt)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	PHYSICAL_ADDRESS_LOC mc_vm_apt_default;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_low;
	PHYSICAL_ADDRESS_LOC mc_vm_apt_high;

	// The format of default addr is 48:12 of the 48 bit addr
	mc_vm_apt_default.quad_part = apt->sys_default.quad_part >> 12;

	// The format of high/low are 48:18 of the 48 bit addr
	mc_vm_apt_low.quad_part = apt->sys_low.quad_part >> 18;
	mc_vm_apt_high.quad_part = apt->sys_high.quad_part >> 18;

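	/*
	 * Example with an illustrative address: sys_default =
	 * 0x0000_1234_5678_9000 >> 12 = 0x1_2345_6789, i.e. the 4KB page frame
	 * number; low/high use a coarser 256KB (>> 18) granularity.
	 */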
	REG_UPDATE_2(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB,
		DCN_VM_SYSTEM_APERTURE_DEFAULT_SYSTEM, 1, /* 1 = system physical memory */
		DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_MSB, mc_vm_apt_default.high_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, 0,
			DCN_VM_SYSTEM_APERTURE_DEFAULT_ADDR_LSB, mc_vm_apt_default.low_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_LOW_ADDR, 0,
			MC_VM_SYSTEM_APERTURE_LOW_ADDR, mc_vm_apt_low.quad_part);

	REG_SET(DCN_VM_SYSTEM_APERTURE_HIGH_ADDR, 0,
			MC_VM_SYSTEM_APERTURE_HIGH_ADDR, mc_vm_apt_high.quad_part);

	REG_SET_2(DCN_VM_MX_L1_TLB_CNTL, 0,
			ENABLE_L1_TLB, 1,
			SYSTEM_ACCESS_MODE, 0x3);
}

void hubp2_program_deadline(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* DLG - Per hubp */
	REG_SET_2(BLANK_OFFSET_0, 0,
		REFCYC_H_BLANK_END, dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, dlg_attr->dlg_vblank_end);

	REG_SET(BLANK_OFFSET_1, 0,
		MIN_DST_Y_NEXT_START, dlg_attr->min_dst_y_next_start);

	REG_SET(DST_DIMENSIONS, 0,
		REFCYC_PER_HTOTAL, dlg_attr->refcyc_per_htotal);

	REG_SET_2(DST_AFTER_SCALER, 0,
		REFCYC_X_AFTER_SCALER, dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, dlg_attr->dst_y_after_scaler);

	REG_SET(REF_FREQ_TO_PIX_FREQ, 0,
		REF_FREQ_TO_PIX_FREQ, dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_SET(VBLANK_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_L, dlg_attr->refcyc_per_pte_group_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_SET(NOM_PARAMETERS_0, 0,
			DST_Y_PER_PTE_ROW_NOM_L, dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_SET(NOM_PARAMETERS_1, 0,
			REFCYC_PER_PTE_GROUP_NOM_L, dlg_attr->refcyc_per_pte_group_nom_l);

	REG_SET(NOM_PARAMETERS_4, 0,
		DST_Y_PER_META_ROW_NOM_L, dlg_attr->dst_y_per_meta_row_nom_l);

	REG_SET(NOM_PARAMETERS_5, 0,
		REFCYC_PER_META_CHUNK_NOM_L, dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_SET_2(PER_LINE_DELIVERY, 0,
		REFCYC_PER_LINE_DELIVERY_L, dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, dlg_attr->refcyc_per_line_delivery_c);

	REG_SET(VBLANK_PARAMETERS_2, 0,
		REFCYC_PER_PTE_GROUP_VBLANK_C, dlg_attr->refcyc_per_pte_group_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_SET(NOM_PARAMETERS_2, 0,
			DST_Y_PER_PTE_ROW_NOM_C, dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_SET(NOM_PARAMETERS_3, 0,
			REFCYC_PER_PTE_GROUP_NOM_C, dlg_attr->refcyc_per_pte_group_nom_c);

	REG_SET(NOM_PARAMETERS_6, 0,
		DST_Y_PER_META_ROW_NOM_C, dlg_attr->dst_y_per_meta_row_nom_c);

	REG_SET(NOM_PARAMETERS_7, 0,
		REFCYC_PER_META_CHUNK_NOM_C, dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_SET_2(DCN_TTU_QOS_WM, 0,
		QoS_LEVEL_LOW_WM, ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, ttu_attr->qos_level_high_wm);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_SET_3(DCN_SURF0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_l);

	REG_SET_3(DCN_SURF1_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_c);

	REG_SET_3(DCN_CUR0_TTU_CNTL0, 0,
		REFCYC_PER_REQ_DELIVERY, ttu_attr->refcyc_per_req_delivery_cur0,
		QoS_LEVEL_FIXED, ttu_attr->qos_level_fixed_cur0,
		QoS_RAMP_DISABLE, ttu_attr->qos_ramp_disable_cur0);

	REG_SET(FLIP_PARAMETERS_1, 0,
		REFCYC_PER_PTE_GROUP_FLIP_L, dlg_attr->refcyc_per_pte_group_flip_l);
}

void hubp2_vready_at_or_After_vsync(struct hubp *hubp,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	uint32_t value = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	/* disable_dlg_test_mode: set bit 8 (the 9th bit) to 1 to disable "dv" mode */
	REG_WRITE(HUBPREQ_DEBUG_DB, 1 << 8);
	/*
	 * if (VSTARTUP_START - (VREADY_OFFSET + VUPDATE_WIDTH + VUPDATE_OFFSET) / htotal
	 *		<= OTG_V_BLANK_END)
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 1
	 * else
	 *	Set HUBP_VREADY_AT_OR_AFTER_VSYNC = 0
	 */
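	/*
	 * Illustrative (made-up) timing: vstartup_start = 100, vready_offset = 10,
	 * vupdate_width = 20, vupdate_offset = 10 and htotal = 40 gives
	 * 100 - (10 + 20 + 10) / 40 = 99; with vblank_end = 50 this is > 50,
	 * so HUBP_VREADY_AT_OR_AFTER_VSYNC stays 0.
	 */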
	if ((pipe_dest->vstartup_start - (pipe_dest->vready_offset + pipe_dest->vupdate_width
		+ pipe_dest->vupdate_offset) / pipe_dest->htotal) <= pipe_dest->vblank_end)
		value = 1;
	else
		value = 0;
	REG_UPDATE(DCHUBP_CNTL, HUBP_VREADY_AT_OR_AFTER_VSYNC, value);
}

void hubp2_program_requestor(
		struct hubp *hubp,
		struct _vcs_dpi_display_rq_regs_st *rq_regs)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, rq_regs->plane1_base_address);
	REG_SET_4(DCN_EXPANSION_MODE, 0,
			DRQ_EXPANSION_MODE, rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, rq_regs->crq_expansion_mode);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG, 0,
		CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, rq_regs->rq_regs_l.pte_row_height_linear);
	REG_SET_8(DCHUBP_REQ_SIZE_CONFIG_C, 0,
		CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, rq_regs->rq_regs_c.pte_row_height_linear);
}

static void hubp2_setup(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr,
		struct _vcs_dpi_display_rq_regs_st *rq_regs,
		struct _vcs_dpi_display_pipe_dest_params_st *pipe_dest)
{
	/* OTG is locked when this function is called. Registers are double
	 * buffered, so disabling the requestors is not needed.
	 */

	hubp2_vready_at_or_After_vsync(hubp, pipe_dest);
	hubp2_program_requestor(hubp, rq_regs);
	hubp2_program_deadline(hubp, dlg_attr, ttu_attr);
}

void hubp2_setup_interdependent(
		struct hubp *hubp,
		struct _vcs_dpi_display_dlg_regs_st *dlg_attr,
		struct _vcs_dpi_display_ttu_regs_st *ttu_attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_SET_2(PREFETCH_SETTINGS, 0,
			DST_Y_PREFETCH, dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, dlg_attr->vratio_prefetch);

	REG_SET(PREFETCH_SETTINGS_C, 0,
			VRATIO_PREFETCH_C, dlg_attr->vratio_prefetch_c);

	REG_SET_2(VBLANK_PARAMETERS_0, 0,
		DST_Y_PER_VM_VBLANK, dlg_attr->dst_y_per_vm_vblank,
		DST_Y_PER_ROW_VBLANK, dlg_attr->dst_y_per_row_vblank);

	REG_SET_2(FLIP_PARAMETERS_0, 0,
		DST_Y_PER_VM_FLIP, dlg_attr->dst_y_per_vm_flip,
		DST_Y_PER_ROW_FLIP, dlg_attr->dst_y_per_row_flip);

	REG_SET(VBLANK_PARAMETERS_3, 0,
		REFCYC_PER_META_CHUNK_VBLANK_L, dlg_attr->refcyc_per_meta_chunk_vblank_l);

	REG_SET(VBLANK_PARAMETERS_4, 0,
		REFCYC_PER_META_CHUNK_VBLANK_C, dlg_attr->refcyc_per_meta_chunk_vblank_c);

	REG_SET(FLIP_PARAMETERS_2, 0,
		REFCYC_PER_META_CHUNK_FLIP_L, dlg_attr->refcyc_per_meta_chunk_flip_l);

	REG_SET_2(PER_LINE_DELIVERY_PRE, 0,
		REFCYC_PER_LINE_DELIVERY_PRE_L, dlg_attr->refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_SET(DCN_SURF0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_l);
	REG_SET(DCN_SURF1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE,
		ttu_attr->refcyc_per_req_delivery_pre_c);
	REG_SET(DCN_CUR0_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur0);
	REG_SET(DCN_CUR1_TTU_CNTL1, 0,
		REFCYC_PER_REQ_DELIVERY_PRE, ttu_attr->refcyc_per_req_delivery_pre_cur1);

	REG_SET_2(DCN_GLOBAL_TTU_CNTL, 0,
		MIN_TTU_VBLANK, ttu_attr->min_ttu_vblank,
		QoS_LEVEL_FLIP, ttu_attr->qos_level_flip);
}

/* In DCN2 (GFX10), the following GFX fields are deprecated. They can be set but they will not be used:
 *	NUM_BANKS
 *	NUM_SE
 *	NUM_RB_PER_SE
 *	RB_ALIGNED
 * Other things can be defaulted, since they never change:
 *	PIPE_ALIGNED = 0
 *	META_LINEAR = 0
 * In GFX10, only these apply:
 *	PIPE_INTERLEAVE
 *	NUM_PIPES
 *	MAX_COMPRESSED_FRAGS
 *	SW_MODE
 */
static void hubp2_program_tiling(
	struct dcn20_hubp *hubp2,
	const union dc_tiling_info *info,
	const enum surface_pixel_format pixel_format)
{
	REG_UPDATE_3(DCSURF_ADDR_CONFIG,
			NUM_PIPES, log_2(info->gfx9.num_pipes),
			PIPE_INTERLEAVE, info->gfx9.pipe_interleave,
			MAX_COMPRESSED_FRAGS, log_2(info->gfx9.max_compressed_frags));
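	/*
	 * NUM_PIPES and MAX_COMPRESSED_FRAGS above are programmed as log2 of
	 * the actual count; e.g. an 8-pipe configuration programs
	 * NUM_PIPES = log_2(8) = 3.
	 */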

	REG_UPDATE_4(DCSURF_TILING_CONFIG,
			SW_MODE, info->gfx9.swizzle,
			META_LINEAR, 0,
			RB_ALIGNED, 0,
			PIPE_ALIGNED, 0);
}

void hubp2_program_size(
	struct hubp *hubp,
	enum surface_pixel_format format,
	const struct plane_size *plane_size,
	struct dc_plane_dcc_param *dcc)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t pitch, meta_pitch, pitch_c, meta_pitch_c;
	bool use_pitch_c = false;

	/* Program data and meta surface pitch (calculation from addrlib)
	 * 444 or 420 luma
	 */
	use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN
		&& format < SURFACE_PIXEL_FORMAT_SUBSAMPLE_END;
	if (use_pitch_c) {
		ASSERT(plane_size->chroma_pitch != 0);
		/* Chroma pitch zero can cause system hang! */

		pitch = plane_size->surface_pitch - 1;
		meta_pitch = dcc->meta_pitch - 1;
		pitch_c = plane_size->chroma_pitch - 1;
		meta_pitch_c = dcc->meta_pitch_c - 1;
	} else {
		pitch = plane_size->surface_pitch - 1;
		meta_pitch = dcc->meta_pitch - 1;
		pitch_c = 0;
		meta_pitch_c = 0;
	}

	if (!dcc->enable) {
		meta_pitch = 0;
		meta_pitch_c = 0;
	}

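	/*
	 * The pitch/meta pitch fields below are programmed as (pitch - 1); e.g.
	 * a surface_pitch of 1920 (illustrative value) is written as 1919.
	 */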
	REG_UPDATE_2(DCSURF_SURFACE_PITCH,
			PITCH, pitch, META_PITCH, meta_pitch);

	use_pitch_c = format >= SURFACE_PIXEL_FORMAT_VIDEO_BEGIN;
	if (use_pitch_c)
		REG_UPDATE_2(DCSURF_SURFACE_PITCH_C,
			PITCH_C, pitch_c, META_PITCH_C, meta_pitch_c);
}

void hubp2_program_rotation(
	struct hubp *hubp,
	enum dc_rotation_angle rotation,
	bool horizontal_mirror)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t mirror;

	if (horizontal_mirror)
		mirror = 1;
	else
		mirror = 0;

	/* Program rotation angle and horizontal mirror */
	if (rotation == ROTATION_ANGLE_0)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 0,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_90)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 1,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_180)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 2,
				H_MIRROR_EN, mirror);
	else if (rotation == ROTATION_ANGLE_270)
		REG_UPDATE_2(DCSURF_SURFACE_CONFIG,
				ROTATION_ANGLE, 3,
				H_MIRROR_EN, mirror);
}

void hubp2_dcc_control(struct hubp *hubp, bool enable,
		enum hubp_ind_block_size independent_64b_blks)
{
	uint32_t dcc_en = enable ? 1 : 0;
	uint32_t dcc_ind_64b_blk = independent_64b_blks ? 1 : 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
			PRIMARY_SURFACE_DCC_EN, dcc_en,
			PRIMARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk,
			SECONDARY_SURFACE_DCC_EN, dcc_en,
			SECONDARY_SURFACE_DCC_IND_64B_BLK, dcc_ind_64b_blk);
}

void hubp2_program_pixel_format(
	struct hubp *hubp,
	enum surface_pixel_format format)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t red_bar = 3;
	uint32_t blue_bar = 2;

	/* swap for ABGR format */
	if (format == SURFACE_PIXEL_FORMAT_GRPH_ABGR8888
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS
			|| format == SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F) {
		red_bar = 2;
		blue_bar = 3;
	}

	REG_UPDATE_2(HUBPRET_CONTROL,
			CROSSBAR_SRC_CB_B, blue_bar,
			CROSSBAR_SRC_CR_R, red_bar);

	/* Mapping is same as ipp programming (cnvc) */

	switch (format)	{
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 1);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 3);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 8);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 10);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 22);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
	case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F: /* we use crossbar already */
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 24);
		break;

	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 65);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 64);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 67);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 66);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_AYCrCb8888:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 12);
		break;
#if defined(CONFIG_DRM_AMD_DC_DCN2_0)
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FIX:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 112);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FIX:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 113);
		break;
	case SURFACE_PIXEL_FORMAT_VIDEO_ACrYCb2101010:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 114);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_RGB111110_FLOAT:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 118);
		break;
	case SURFACE_PIXEL_FORMAT_GRPH_BGR101111_FLOAT:
		REG_UPDATE(DCSURF_SURFACE_CONFIG,
				SURFACE_PIXEL_FORMAT, 119);
		break;
#endif
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	/* don't see the need to program the xbar in DCN 1.0 */
}

void hubp2_program_surface_config(
	struct hubp *hubp,
	enum surface_pixel_format format,
	union dc_tiling_info *tiling_info,
	struct plane_size *plane_size,
	enum dc_rotation_angle rotation,
	struct dc_plane_dcc_param *dcc,
	bool horizontal_mirror,
	unsigned int compat_level)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	hubp2_dcc_control(hubp, dcc->enable, dcc->independent_64b_blks);
	hubp2_program_tiling(hubp2, tiling_info, format);
	hubp2_program_size(hubp, format, plane_size, dcc);
	hubp2_program_rotation(hubp, rotation, horizontal_mirror);
	hubp2_program_pixel_format(hubp, format);
}

enum cursor_lines_per_chunk hubp2_get_lines_per_chunk(
	unsigned int cursor_width,
	enum dc_cursor_color_format cursor_mode)
{
	enum cursor_lines_per_chunk line_per_chunk = CURSOR_LINE_PER_CHUNK_16;

	if (cursor_mode == CURSOR_MODE_MONO)
		line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
	else if (cursor_mode == CURSOR_MODE_COLOR_1BIT_AND ||
		 cursor_mode == CURSOR_MODE_COLOR_PRE_MULTIPLIED_ALPHA ||
		 cursor_mode == CURSOR_MODE_COLOR_UN_PRE_MULTIPLIED_ALPHA) {
		if (cursor_width >= 1   && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 33  && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 65  && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
	} else if (cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_PRE_MULTIPLIED ||
		   cursor_mode == CURSOR_MODE_COLOR_64BIT_FP_UN_PRE_MULTIPLIED) {
		if (cursor_width >= 1   && cursor_width <= 16)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_16;
		else if (cursor_width >= 17  && cursor_width <= 32)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_8;
		else if (cursor_width >= 33  && cursor_width <= 64)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_4;
		else if (cursor_width >= 65 && cursor_width <= 128)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_2;
		else if (cursor_width >= 129 && cursor_width <= 256)
			line_per_chunk = CURSOR_LINE_PER_CHUNK_1;
	}

	return line_per_chunk;
}
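
/*
 * For example, a 64 pixel wide pre-multiplied-alpha color cursor falls in the
 * 33..64 bucket above and is returned as CURSOR_LINE_PER_CHUNK_8.
 */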

void hubp2_cursor_set_attributes(
		struct hubp *hubp,
		const struct dc_cursor_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	enum cursor_pitch hw_pitch = hubp1_get_cursor_pitch(attr->pitch);
	enum cursor_lines_per_chunk lpc = hubp2_get_lines_per_chunk(
			attr->width, attr->color_format);

	hubp->curs_attr = *attr;

	REG_UPDATE(CURSOR_SURFACE_ADDRESS_HIGH,
			CURSOR_SURFACE_ADDRESS_HIGH, attr->address.high_part);
	REG_UPDATE(CURSOR_SURFACE_ADDRESS,
			CURSOR_SURFACE_ADDRESS, attr->address.low_part);

	REG_UPDATE_2(CURSOR_SIZE,
			CURSOR_WIDTH, attr->width,
			CURSOR_HEIGHT, attr->height);

	REG_UPDATE_4(CURSOR_CONTROL,
			CURSOR_MODE, attr->color_format,
			CURSOR_2X_MAGNIFY, attr->attribute_flags.bits.ENABLE_MAGNIFICATION,
			CURSOR_PITCH, hw_pitch,
			CURSOR_LINES_PER_CHUNK, lpc);

	REG_SET_2(CURSOR_SETTINGS, 0,
			/* no shift of the cursor HDL schedule */
			CURSOR0_DST_Y_OFFSET, 0,
			/* used to shift the cursor chunk request deadline */
			CURSOR0_CHUNK_HDL_ADJUST, 3);
}

void hubp2_dmdata_set_attributes(
		struct hubp *hubp,
		const struct dc_dmdata_attributes *attr)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	if (attr->dmdata_mode == DMDATA_HW_MODE) {
		/* set to HW mode */
		REG_UPDATE(DMDATA_CNTL,
				DMDATA_MODE, 1);

		/* for DMDATA flip, need to use SURFACE_UPDATE_LOCK */
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 1);

		/* toggle DMDATA_UPDATED and set repeat and size */
		REG_UPDATE(DMDATA_CNTL,
				DMDATA_UPDATED, 0);
		REG_UPDATE_3(DMDATA_CNTL,
				DMDATA_UPDATED, 1,
				DMDATA_REPEAT, attr->dmdata_repeat,
				DMDATA_SIZE, attr->dmdata_size);

		/* set DMDATA address */
		REG_WRITE(DMDATA_ADDRESS_LOW, attr->address.low_part);
		REG_UPDATE(DMDATA_ADDRESS_HIGH,
				DMDATA_ADDRESS_HIGH, attr->address.high_part);

		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_UPDATE_LOCK, 0);
	} else {
		/* set to SW mode before loading data */
		REG_SET(DMDATA_CNTL, 0,
				DMDATA_MODE, 0);
		/* toggle DMDATA_SW_UPDATED to start loading sequence */
		REG_UPDATE(DMDATA_SW_CNTL,
				DMDATA_SW_UPDATED, 0);
		REG_UPDATE_3(DMDATA_SW_CNTL,
				DMDATA_SW_UPDATED, 1,
				DMDATA_SW_REPEAT, attr->dmdata_repeat,
				DMDATA_SW_SIZE, attr->dmdata_size);
		/* load data into hubp dmdata buffer */
		hubp2_dmdata_load(hubp, attr->dmdata_size, attr->dmdata_sw_data);
	}

	/* Note that DL_DELTA must be programmed if we want to use TTU mode */
	REG_SET_3(DMDATA_QOS_CNTL, 0,
			DMDATA_QOS_MODE, attr->dmdata_qos_mode,
			DMDATA_QOS_LEVEL, attr->dmdata_qos_level,
			DMDATA_DL_DELTA, attr->dmdata_dl_delta);
}

void hubp2_dmdata_load(
		struct hubp *hubp,
		uint32_t dmdata_sw_size,
		const uint32_t *dmdata_sw_data)
{
	int i;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	/* load dmdata into HUBP buffer in SW mode */
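	/* dmdata_sw_size appears to be in bytes (hence the /4); each write pushes one 32-bit dword */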
	for (i = 0; i < dmdata_sw_size / 4; i++)
		REG_WRITE(DMDATA_SW_DATA, dmdata_sw_data[i]);
}

bool hubp2_dmdata_status_done(struct hubp *hubp)
{
	uint32_t status;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_GET(DMDATA_STATUS, DMDATA_DONE, &status);
	return (status == 1);
}

bool hubp2_program_surface_flip_and_addr(
	struct hubp *hubp,
	const struct dc_plane_address *address,
	bool flip_immediate)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	// program flip type
	REG_UPDATE(DCSURF_FLIP_CONTROL,
			SURFACE_FLIP_TYPE, flip_immediate);

	// program VMID register
	REG_UPDATE(VMID_SETTINGS_0,
			VMID, address->vmid);

	if (address->type == PLN_ADDR_TYPE_GRPH_STEREO) {
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x1);
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x1);
	} else {
		// turn off stereo if not in stereo
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_MODE_FOR_STEREOSYNC, 0x0);
		REG_UPDATE(DCSURF_FLIP_CONTROL, SURFACE_FLIP_IN_STEREOSYNC, 0x0);
	}

	/* HW automatically latches the rest of the address registers on a write
	 * to DCSURF_PRIMARY_SURFACE_ADDRESS if SURFACE_UPDATE_LOCK is not used.
	 *
	 * Program the high part first and then the low address; order matters!
	 */
	switch (address->type) {
	case PLN_ADDR_TYPE_GRAPHICS:
		/* DCN1.0 does not support const color
		 * TODO: program DCHUBBUB_RET_PATH_DCC_CFGx_0/1
		 * based on address->grph.dcc_const_color
		 * x = 0, 2, 4, 6 for pipe 0, 1, 2, 3 for rgb and luma
		 * x = 1, 3, 5, 7 for pipe 0, 1, 2, 3 for chroma
		 */

		if (address->grph.addr.quad_part == 0)
			break;

		REG_UPDATE_2(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface);

		if (address->grph.meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->grph.meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->grph.meta_addr.low_part);
		}

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->grph.addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->grph.addr.low_part);
		break;
	case PLN_ADDR_TYPE_VIDEO_PROGRESSIVE:
		if (address->video_progressive.luma_addr.quad_part == 0
				|| address->video_progressive.chroma_addr.quad_part == 0)
			break;

		REG_UPDATE_4(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_SURFACE_TMZ_C, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface);

		if (address->video_progressive.luma_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH_C, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH_C,
					address->video_progressive.chroma_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_C, 0,
					PRIMARY_META_SURFACE_ADDRESS_C,
					address->video_progressive.chroma_meta_addr.low_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->video_progressive.luma_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->video_progressive.luma_meta_addr.low_part);
		}

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH_C, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH_C,
				address->video_progressive.chroma_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_C, 0,
				PRIMARY_SURFACE_ADDRESS_C,
				address->video_progressive.chroma_addr.low_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->video_progressive.luma_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->video_progressive.luma_addr.low_part);
		break;
	case PLN_ADDR_TYPE_GRPH_STEREO:
		if (address->grph_stereo.left_addr.quad_part == 0)
			break;
		if (address->grph_stereo.right_addr.quad_part == 0)
			break;

		REG_UPDATE_8(DCSURF_SURFACE_CONTROL,
				PRIMARY_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_SURFACE_TMZ_C, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ, address->tmz_surface,
				PRIMARY_META_SURFACE_TMZ_C, address->tmz_surface,
				SECONDARY_SURFACE_TMZ, address->tmz_surface,
				SECONDARY_SURFACE_TMZ_C, address->tmz_surface,
				SECONDARY_META_SURFACE_TMZ, address->tmz_surface,
				SECONDARY_META_SURFACE_TMZ_C, address->tmz_surface);

		if (address->grph_stereo.right_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS_HIGH, 0,
					SECONDARY_META_SURFACE_ADDRESS_HIGH,
					address->grph_stereo.right_meta_addr.high_part);

			REG_SET(DCSURF_SECONDARY_META_SURFACE_ADDRESS, 0,
					SECONDARY_META_SURFACE_ADDRESS,
					address->grph_stereo.right_meta_addr.low_part);
		}
		if (address->grph_stereo.left_meta_addr.quad_part != 0) {
			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS_HIGH, 0,
					PRIMARY_META_SURFACE_ADDRESS_HIGH,
					address->grph_stereo.left_meta_addr.high_part);

			REG_SET(DCSURF_PRIMARY_META_SURFACE_ADDRESS, 0,
					PRIMARY_META_SURFACE_ADDRESS,
					address->grph_stereo.left_meta_addr.low_part);
		}

		REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS_HIGH, 0,
				SECONDARY_SURFACE_ADDRESS_HIGH,
				address->grph_stereo.right_addr.high_part);

		REG_SET(DCSURF_SECONDARY_SURFACE_ADDRESS, 0,
				SECONDARY_SURFACE_ADDRESS,
				address->grph_stereo.right_addr.low_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS_HIGH, 0,
				PRIMARY_SURFACE_ADDRESS_HIGH,
				address->grph_stereo.left_addr.high_part);

		REG_SET(DCSURF_PRIMARY_SURFACE_ADDRESS, 0,
				PRIMARY_SURFACE_ADDRESS,
				address->grph_stereo.left_addr.low_part);
		break;
	default:
		BREAK_TO_DEBUGGER();
		break;
	}

	hubp->request_address = *address;

	return true;
}

void hubp2_enable_triplebuffer(
	struct hubp *hubp,
	bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t triple_buffer_en = 0;
	bool tri_buffer_en;

	REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en);
	tri_buffer_en = (triple_buffer_en == 1);
	if (tri_buffer_en != enable) {
		REG_UPDATE(DCSURF_FLIP_CONTROL2,
			SURFACE_TRIPLE_BUFFER_ENABLE, enable ? DC_TRIPLEBUFFER_ENABLE : DC_TRIPLEBUFFER_DISABLE);
	}
}

bool hubp2_is_triplebuffer_enabled(
	struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t triple_buffer_en = 0;

	REG_GET(DCSURF_FLIP_CONTROL2, SURFACE_TRIPLE_BUFFER_ENABLE, &triple_buffer_en);

	return (bool)triple_buffer_en;
}

void hubp2_set_flip_control_surface_gsl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCSURF_FLIP_CONTROL2, SURFACE_GSL_ENABLE, enable ? 1 : 0);
}

bool hubp2_is_flip_pending(struct hubp *hubp)
{
	uint32_t flip_pending = 0;
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dc_plane_address earliest_inuse_address;

	REG_GET(DCSURF_FLIP_CONTROL,
			SURFACE_FLIP_PENDING, &flip_pending);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
			SURFACE_EARLIEST_INUSE_ADDRESS, &earliest_inuse_address.grph.addr.low_part);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
			SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &earliest_inuse_address.grph.addr.high_part);

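	/*
	 * A flip is still pending if HW reports SURFACE_FLIP_PENDING, or if the
	 * surface address currently in use by HW has not yet switched to the
	 * most recently requested address.
	 */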
	if (flip_pending)
		return true;

	if (earliest_inuse_address.grph.addr.quad_part != hubp->request_address.grph.addr.quad_part)
		return true;

	return false;
}

void hubp2_set_blank(struct hubp *hubp, bool blank)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t blank_en = blank ? 1 : 0;

	REG_UPDATE_2(DCHUBP_CNTL,
			HUBP_BLANK_EN, blank_en,
			HUBP_TTU_DISABLE, blank_en);

	if (blank) {
		uint32_t reg_val = REG_READ(DCHUBP_CNTL);

		if (reg_val) {
			/* init sequence workaround: in case HUBP is
			 * power gated, this wait would time out.
			 *
			 * We just wrote reg_val to non-zero; if it stays 0,
			 * it means HUBP is gated.
			 */
			REG_WAIT(DCHUBP_CNTL,
					HUBP_NO_OUTSTANDING_REQ, 1,
					1, 200);
		}

		hubp->mpcc_id = 0xf;
		hubp->opp_id = OPP_ID_INVALID;
	}
}

void hubp2_cursor_set_position(
		struct hubp *hubp,
		const struct dc_cursor_position *pos,
		const struct dc_cursor_mi_param *param)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	int src_x_offset = pos->x - pos->x_hotspot - param->viewport.x;
	int src_y_offset = pos->y - pos->y_hotspot - param->viewport.y;
	int x_hotspot = pos->x_hotspot;
	int y_hotspot = pos->y_hotspot;
	int cursor_height = (int)hubp->curs_attr.height;
	int cursor_width = (int)hubp->curs_attr.width;
	uint32_t dst_x_offset;
	uint32_t cur_en = pos->enable ? 1 : 0;

	/*
	 * Guard against cursor_set_position() being called with invalid
	 * attributes
	 *
	 * TODO: Look at combining cursor_set_position() and
	 * cursor_set_attributes() into cursor_update()
	 */
	if (hubp->curs_attr.address.quad_part == 0)
		return;

	// Rotated cursor width/height and hotspot tweaks for offset calculation
	if (param->rotation == ROTATION_ANGLE_90 || param->rotation == ROTATION_ANGLE_270) {
		swap(cursor_height, cursor_width);
		if (param->rotation == ROTATION_ANGLE_90) {
			src_x_offset = pos->x - pos->y_hotspot - param->viewport.x;
			src_y_offset = pos->y - pos->x_hotspot - param->viewport.y;
		}
	} else if (param->rotation == ROTATION_ANGLE_180) {
		src_x_offset = pos->x - param->viewport.x;
		src_y_offset = pos->y - param->viewport.y;
	}

	if (param->mirror) {
		x_hotspot = param->viewport.width - x_hotspot;
		src_x_offset = param->viewport.x + param->viewport.width - src_x_offset;
	}

	dst_x_offset = (src_x_offset >= 0) ? src_x_offset : 0;
	dst_x_offset *= param->ref_clk_khz;
	dst_x_offset /= param->pixel_clk_khz;

	ASSERT(param->h_scale_ratio.value);

	if (param->h_scale_ratio.value)
		dst_x_offset = dc_fixpt_floor(dc_fixpt_div(
				dc_fixpt_from_int(dst_x_offset),
				param->h_scale_ratio));
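
	/*
	 * Illustrative numbers: with src_x_offset = 100, ref_clk_khz = 24000 and
	 * pixel_clk_khz = 148500, dst_x_offset = 100 * 24000 / 148500 = 16
	 * (integer math), which is then divided by the horizontal scale ratio.
	 */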

	if (src_x_offset >= (int)param->viewport.width)
		cur_en = 0;  /* not visible beyond right edge */

	if (src_x_offset + cursor_width <= 0)
		cur_en = 0;  /* not visible beyond left edge */

	if (src_y_offset >= (int)param->viewport.height)
		cur_en = 0;  /* not visible beyond bottom edge */

	if (src_y_offset + cursor_height <= 0)
		cur_en = 0;  /* not visible beyond top edge */

	if (cur_en && REG_READ(CURSOR_SURFACE_ADDRESS) == 0)
		hubp->funcs->set_cursor_attributes(hubp, &hubp->curs_attr);

	REG_UPDATE(CURSOR_CONTROL,
			CURSOR_ENABLE, cur_en);

	REG_SET_2(CURSOR_POSITION, 0,
			CURSOR_X_POSITION, pos->x,
			CURSOR_Y_POSITION, pos->y);

	REG_SET_2(CURSOR_HOT_SPOT, 0,
			CURSOR_HOT_SPOT_X, x_hotspot,
			CURSOR_HOT_SPOT_Y, y_hotspot);

	REG_SET(CURSOR_DST_OFFSET, 0,
			CURSOR_DST_X_OFFSET, dst_x_offset);
	/* TODO Handle surface pixel formats other than 4:4:4 */
}

void hubp2_clk_cntl(struct hubp *hubp, bool enable)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	uint32_t clk_enable = enable ? 1 : 0;

	REG_UPDATE(HUBP_CLK_CNTL, HUBP_CLOCK_ENABLE, clk_enable);
}

void hubp2_vtg_sel(struct hubp *hubp, uint32_t otg_inst)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCHUBP_CNTL, HUBP_VTG_SEL, otg_inst);
}

void hubp2_clear_underflow(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);

	REG_UPDATE(DCHUBP_CNTL, HUBP_UNDERFLOW_CLEAR, 1);
}

void hubp2_read_state_common(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dcn_hubp_state *s = &hubp2->state;
	struct _vcs_dpi_display_dlg_regs_st *dlg_attr = &s->dlg_attr;
	struct _vcs_dpi_display_ttu_regs_st *ttu_attr = &s->ttu_attr;
	struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;

	/* Requester */
	REG_GET(HUBPRET_CONTROL,
			DET_BUF_PLANE1_BASE_ADDRESS, &rq_regs->plane1_base_address);
	REG_GET_4(DCN_EXPANSION_MODE,
			DRQ_EXPANSION_MODE, &rq_regs->drq_expansion_mode,
			PRQ_EXPANSION_MODE, &rq_regs->prq_expansion_mode,
			MRQ_EXPANSION_MODE, &rq_regs->mrq_expansion_mode,
			CRQ_EXPANSION_MODE, &rq_regs->crq_expansion_mode);

	/* DLG - Per hubp */
	REG_GET_2(BLANK_OFFSET_0,
		REFCYC_H_BLANK_END, &dlg_attr->refcyc_h_blank_end,
		DLG_V_BLANK_END, &dlg_attr->dlg_vblank_end);

	REG_GET(BLANK_OFFSET_1,
		MIN_DST_Y_NEXT_START, &dlg_attr->min_dst_y_next_start);

	REG_GET(DST_DIMENSIONS,
		REFCYC_PER_HTOTAL, &dlg_attr->refcyc_per_htotal);

	REG_GET_2(DST_AFTER_SCALER,
		REFCYC_X_AFTER_SCALER, &dlg_attr->refcyc_x_after_scaler,
		DST_Y_AFTER_SCALER, &dlg_attr->dst_y_after_scaler);

	if (REG(PREFETCH_SETTINS))
		REG_GET_2(PREFETCH_SETTINS,
			DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);
	else
		REG_GET_2(PREFETCH_SETTINGS,
			DST_Y_PREFETCH, &dlg_attr->dst_y_prefetch,
			VRATIO_PREFETCH, &dlg_attr->vratio_prefetch);

	REG_GET_2(VBLANK_PARAMETERS_0,
		DST_Y_PER_VM_VBLANK, &dlg_attr->dst_y_per_vm_vblank,
		DST_Y_PER_ROW_VBLANK, &dlg_attr->dst_y_per_row_vblank);

	REG_GET(REF_FREQ_TO_PIX_FREQ,
		REF_FREQ_TO_PIX_FREQ, &dlg_attr->ref_freq_to_pix_freq);

	/* DLG - Per luma/chroma */
	REG_GET(VBLANK_PARAMETERS_1,
		REFCYC_PER_PTE_GROUP_VBLANK_L, &dlg_attr->refcyc_per_pte_group_vblank_l);

	REG_GET(VBLANK_PARAMETERS_3,
		REFCYC_PER_META_CHUNK_VBLANK_L, &dlg_attr->refcyc_per_meta_chunk_vblank_l);

	if (REG(NOM_PARAMETERS_0))
		REG_GET(NOM_PARAMETERS_0,
			DST_Y_PER_PTE_ROW_NOM_L, &dlg_attr->dst_y_per_pte_row_nom_l);

	if (REG(NOM_PARAMETERS_1))
		REG_GET(NOM_PARAMETERS_1,
			REFCYC_PER_PTE_GROUP_NOM_L, &dlg_attr->refcyc_per_pte_group_nom_l);

	REG_GET(NOM_PARAMETERS_4,
		DST_Y_PER_META_ROW_NOM_L, &dlg_attr->dst_y_per_meta_row_nom_l);

	REG_GET(NOM_PARAMETERS_5,
		REFCYC_PER_META_CHUNK_NOM_L, &dlg_attr->refcyc_per_meta_chunk_nom_l);

	REG_GET_2(PER_LINE_DELIVERY_PRE,
		REFCYC_PER_LINE_DELIVERY_PRE_L, &dlg_attr->refcyc_per_line_delivery_pre_l,
		REFCYC_PER_LINE_DELIVERY_PRE_C, &dlg_attr->refcyc_per_line_delivery_pre_c);

	REG_GET_2(PER_LINE_DELIVERY,
		REFCYC_PER_LINE_DELIVERY_L, &dlg_attr->refcyc_per_line_delivery_l,
		REFCYC_PER_LINE_DELIVERY_C, &dlg_attr->refcyc_per_line_delivery_c);

	if (REG(PREFETCH_SETTINS_C))
		REG_GET(PREFETCH_SETTINS_C,
			VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);
	else
		REG_GET(PREFETCH_SETTINGS_C,
			VRATIO_PREFETCH_C, &dlg_attr->vratio_prefetch_c);

	REG_GET(VBLANK_PARAMETERS_2,
		REFCYC_PER_PTE_GROUP_VBLANK_C, &dlg_attr->refcyc_per_pte_group_vblank_c);

	REG_GET(VBLANK_PARAMETERS_4,
		REFCYC_PER_META_CHUNK_VBLANK_C, &dlg_attr->refcyc_per_meta_chunk_vblank_c);

	if (REG(NOM_PARAMETERS_2))
		REG_GET(NOM_PARAMETERS_2,
			DST_Y_PER_PTE_ROW_NOM_C, &dlg_attr->dst_y_per_pte_row_nom_c);

	if (REG(NOM_PARAMETERS_3))
		REG_GET(NOM_PARAMETERS_3,
			REFCYC_PER_PTE_GROUP_NOM_C, &dlg_attr->refcyc_per_pte_group_nom_c);

	REG_GET(NOM_PARAMETERS_6,
		DST_Y_PER_META_ROW_NOM_C, &dlg_attr->dst_y_per_meta_row_nom_c);

	REG_GET(NOM_PARAMETERS_7,
		REFCYC_PER_META_CHUNK_NOM_C, &dlg_attr->refcyc_per_meta_chunk_nom_c);

	/* TTU - per hubp */
	REG_GET_2(DCN_TTU_QOS_WM,
		QoS_LEVEL_LOW_WM, &ttu_attr->qos_level_low_wm,
		QoS_LEVEL_HIGH_WM, &ttu_attr->qos_level_high_wm);

	REG_GET_2(DCN_GLOBAL_TTU_CNTL,
		MIN_TTU_VBLANK, &ttu_attr->min_ttu_vblank,
		QoS_LEVEL_FLIP, &ttu_attr->qos_level_flip);

	/* TTU - per luma/chroma */
	/* Assumed surf0 is luma and 1 is chroma */

	REG_GET_3(DCN_SURF0_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_l,
		QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_l,
		QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_l);

	REG_GET(DCN_SURF0_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE,
		&ttu_attr->refcyc_per_req_delivery_pre_l);

	REG_GET_3(DCN_SURF1_TTU_CNTL0,
		REFCYC_PER_REQ_DELIVERY, &ttu_attr->refcyc_per_req_delivery_c,
		QoS_LEVEL_FIXED, &ttu_attr->qos_level_fixed_c,
		QoS_RAMP_DISABLE, &ttu_attr->qos_ramp_disable_c);

	REG_GET(DCN_SURF1_TTU_CNTL1,
		REFCYC_PER_REQ_DELIVERY_PRE,
		&ttu_attr->refcyc_per_req_delivery_pre_c);

	/* Rest of hubp */
	REG_GET(DCSURF_SURFACE_CONFIG,
			SURFACE_PIXEL_FORMAT, &s->pixel_format);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE_HIGH,
			SURFACE_EARLIEST_INUSE_ADDRESS_HIGH, &s->inuse_addr_hi);

	REG_GET(DCSURF_SURFACE_EARLIEST_INUSE,
			SURFACE_EARLIEST_INUSE_ADDRESS, &s->inuse_addr_lo);

	REG_GET_2(DCSURF_PRI_VIEWPORT_DIMENSION,
			PRI_VIEWPORT_WIDTH, &s->viewport_width,
			PRI_VIEWPORT_HEIGHT, &s->viewport_height);

	REG_GET_2(DCSURF_SURFACE_CONFIG,
			ROTATION_ANGLE, &s->rotation_angle,
			H_MIRROR_EN, &s->h_mirror_en);

	REG_GET(DCSURF_TILING_CONFIG,
			SW_MODE, &s->sw_mode);

	REG_GET(DCSURF_SURFACE_CONTROL,
			PRIMARY_SURFACE_DCC_EN, &s->dcc_en);

	REG_GET_3(DCHUBP_CNTL,
			HUBP_BLANK_EN, &s->blank_en,
			HUBP_TTU_DISABLE, &s->ttu_disable,
			HUBP_UNDERFLOW_STATUS, &s->underflow_status);

	REG_GET(DCN_GLOBAL_TTU_CNTL,
			MIN_TTU_VBLANK, &s->min_ttu_vblank);

	REG_GET_2(DCN_TTU_QOS_WM,
			QoS_LEVEL_LOW_WM, &s->qos_level_low_wm,
			QoS_LEVEL_HIGH_WM, &s->qos_level_high_wm);
}

void hubp2_read_state(struct hubp *hubp)
{
	struct dcn20_hubp *hubp2 = TO_DCN20_HUBP(hubp);
	struct dcn_hubp_state *s = &hubp2->state;
	struct _vcs_dpi_display_rq_regs_st *rq_regs = &s->rq_regs;

	hubp2_read_state_common(hubp);

	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG,
		CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size,
		MIN_CHUNK_SIZE, &rq_regs->rq_regs_l.min_chunk_size,
		META_CHUNK_SIZE, &rq_regs->rq_regs_l.meta_chunk_size,
		MIN_META_CHUNK_SIZE, &rq_regs->rq_regs_l.min_meta_chunk_size,
		DPTE_GROUP_SIZE, &rq_regs->rq_regs_l.dpte_group_size,
		MPTE_GROUP_SIZE, &rq_regs->rq_regs_l.mpte_group_size,
		SWATH_HEIGHT, &rq_regs->rq_regs_l.swath_height,
		PTE_ROW_HEIGHT_LINEAR, &rq_regs->rq_regs_l.pte_row_height_linear);

	REG_GET_8(DCHUBP_REQ_SIZE_CONFIG_C,
		CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size,
		MIN_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_chunk_size,
		META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.meta_chunk_size,
		MIN_META_CHUNK_SIZE_C, &rq_regs->rq_regs_c.min_meta_chunk_size,
		DPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.dpte_group_size,
		MPTE_GROUP_SIZE_C, &rq_regs->rq_regs_c.mpte_group_size,
		SWATH_HEIGHT_C, &rq_regs->rq_regs_c.swath_height,
		PTE_ROW_HEIGHT_LINEAR_C, &rq_regs->rq_regs_c.pte_row_height_linear);
}

static struct hubp_funcs dcn20_hubp_funcs = {
	.hubp_enable_tripleBuffer = hubp2_enable_triplebuffer,
	.hubp_is_triplebuffer_enabled = hubp2_is_triplebuffer_enabled,
	.hubp_program_surface_flip_and_addr = hubp2_program_surface_flip_and_addr,
	.hubp_program_surface_config = hubp2_program_surface_config,
	.hubp_is_flip_pending = hubp2_is_flip_pending,
	.hubp_setup = hubp2_setup,
	.hubp_setup_interdependent = hubp2_setup_interdependent,
	.hubp_set_vm_system_aperture_settings = hubp2_set_vm_system_aperture_settings,
	.set_blank = hubp2_set_blank,
	.dcc_control = hubp2_dcc_control,
	.mem_program_viewport = min_set_viewport,
	.set_cursor_attributes	= hubp2_cursor_set_attributes,
	.set_cursor_position	= hubp2_cursor_set_position,
	.hubp_clk_cntl = hubp2_clk_cntl,
	.hubp_vtg_sel = hubp2_vtg_sel,
	.dmdata_set_attributes = hubp2_dmdata_set_attributes,
	.dmdata_load = hubp2_dmdata_load,
	.dmdata_status_done = hubp2_dmdata_status_done,
	.hubp_read_state = hubp2_read_state,
	.hubp_clear_underflow = hubp2_clear_underflow,
	.hubp_set_flip_control_surface_gsl = hubp2_set_flip_control_surface_gsl,
	.hubp_init = hubp1_init,
};

bool hubp2_construct(
	struct dcn20_hubp *hubp2,
	struct dc_context *ctx,
	uint32_t inst,
	const struct dcn_hubp2_registers *hubp_regs,
	const struct dcn_hubp2_shift *hubp_shift,
	const struct dcn_hubp2_mask *hubp_mask)
{
	hubp2->base.funcs = &dcn20_hubp_funcs;
	hubp2->base.ctx = ctx;
	hubp2->hubp_regs = hubp_regs;
	hubp2->hubp_shift = hubp_shift;
	hubp2->hubp_mask = hubp_mask;
	hubp2->base.inst = inst;
	hubp2->base.opp_id = OPP_ID_INVALID;
	hubp2->base.mpcc_id = 0xf;

	return true;
}
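
/*
 * Typical usage (illustrative sketch, not part of this file): a DCN2 resource
 * constructor allocates a dcn20_hubp and hands it per-instance register,
 * shift and mask tables, roughly:
 *
 *	struct dcn20_hubp *hubp2 = kzalloc(sizeof(*hubp2), GFP_KERNEL);
 *
 *	if (hubp2 && hubp2_construct(hubp2, ctx, inst,
 *			&hubp_regs[inst], &hubp_shift, &hubp_mask))
 *		return &hubp2->base;
 *
 * where hubp_regs, hubp_shift and hubp_mask are the caller's own tables.
 */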