// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2022-2023 Qualcomm Innovation Center, Inc. All rights reserved.
 * Copyright (c) 2015-2018, The Linux Foundation. All rights reserved.
 */
#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__

#include "msm_drv.h"
#include "dpu_kms.h"
#include "dpu_hw_mdss.h"
#include "dpu_hw_util.h"

/* use a file-scoped static variable for debugfs access */
static u32 dpu_hw_util_log_mask = DPU_DBG_MASK_NONE;
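
/*
 * The mask is exported through dpu_hw_util_get_log_mask_ptr() so the driver
 * can expose it as a debugfs knob.  A minimal sketch (node name and parent
 * dentry are illustrative, not mandated by this file):
 *
 *	debugfs_create_u32("hw_log_mask", 0600, parent,
 *			   dpu_hw_util_get_log_mask_ptr());
 */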

/* DPU_SCALER_QSEED3 */
#define QSEED3_HW_VERSION		0x00
#define QSEED3_OP_MODE			0x04
#define QSEED3_RGB2Y_COEFF		0x08
#define QSEED3_PHASE_INIT		0x0C
#define QSEED3_PHASE_STEP_Y_H		0x10
#define QSEED3_PHASE_STEP_Y_V		0x14
#define QSEED3_PHASE_STEP_UV_H		0x18
#define QSEED3_PHASE_STEP_UV_V		0x1C
#define QSEED3_PRELOAD			0x20
#define QSEED3_DE_SHARPEN		0x24
#define QSEED3_DE_SHARPEN_CTL		0x28
#define QSEED3_DE_SHAPE_CTL		0x2C
#define QSEED3_DE_THRESHOLD		0x30
#define QSEED3_DE_ADJUST_DATA_0		0x34
#define QSEED3_DE_ADJUST_DATA_1		0x38
#define QSEED3_DE_ADJUST_DATA_2		0x3C
#define QSEED3_SRC_SIZE_Y_RGB_A		0x40
#define QSEED3_SRC_SIZE_UV		0x44
#define QSEED3_DST_SIZE			0x48
#define QSEED3_COEF_LUT_CTRL		0x4C
#define QSEED3_COEF_LUT_SWAP_BIT	0
#define QSEED3_COEF_LUT_DIR_BIT		1
#define QSEED3_COEF_LUT_Y_CIR_BIT	2
#define QSEED3_COEF_LUT_UV_CIR_BIT	3
#define QSEED3_COEF_LUT_Y_SEP_BIT	4
#define QSEED3_COEF_LUT_UV_SEP_BIT	5
#define QSEED3_BUFFER_CTRL		0x50
#define QSEED3_CLK_CTRL0		0x54
#define QSEED3_CLK_CTRL1		0x58
#define QSEED3_CLK_STATUS		0x5C
#define QSEED3_PHASE_INIT_Y_H		0x90
#define QSEED3_PHASE_INIT_Y_V		0x94
#define QSEED3_PHASE_INIT_UV_H		0x98
#define QSEED3_PHASE_INIT_UV_V		0x9C
#define QSEED3_COEF_LUT			0x100
#define QSEED3_FILTERS			5
#define QSEED3_LUT_REGIONS		4
#define QSEED3_CIRCULAR_LUTS		9
#define QSEED3_SEPARABLE_LUTS		10
#define QSEED3_LUT_SIZE			60
#define QSEED3_ENABLE			2
#define QSEED3_DIR_LUT_SIZE		(200 * sizeof(u32))
#define QSEED3_CIR_LUT_SIZE \
	(QSEED3_LUT_SIZE * QSEED3_CIRCULAR_LUTS * sizeof(u32))
#define QSEED3_SEP_LUT_SIZE \
	(QSEED3_LUT_SIZE * QSEED3_SEPARABLE_LUTS * sizeof(u32))
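
/*
 * QSEED3_DIR/CIR/SEP_LUT_SIZE are byte sizes and are validated against the
 * dir_len/cir_len/sep_len fields of struct dpu_hw_scaler3_cfg before the
 * corresponding coefficient tables are programmed, while QSEED3_LUT_SIZE
 * counts the u32 entries of a single circular/separable LUT.
 */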

/* DPU_SCALER_QSEED3LITE */
#define QSEED3LITE_COEF_LUT_Y_SEP_BIT	4
#define QSEED3LITE_COEF_LUT_UV_SEP_BIT	5
#define QSEED3LITE_COEF_LUT_CTRL	0x4C
#define QSEED3LITE_COEF_LUT_SWAP_BIT	0
#define QSEED3LITE_DIR_FILTER_WEIGHT	0x60
#define QSEED3LITE_FILTERS		2
#define QSEED3LITE_SEPARABLE_LUTS	10
#define QSEED3LITE_LUT_SIZE		33
#define QSEED3LITE_SEP_LUT_SIZE \
	(QSEED3LITE_LUT_SIZE * QSEED3LITE_SEPARABLE_LUTS * sizeof(u32))

/* QOS_LUT */
#define QOS_DANGER_LUT			0x00
#define QOS_SAFE_LUT			0x04
#define QOS_CREQ_LUT			0x08
#define QOS_QOS_CTRL			0x0C
#define QOS_CREQ_LUT_0			0x14
#define QOS_CREQ_LUT_1			0x18

/* QOS_QOS_CTRL */
#define QOS_QOS_CTRL_DANGER_SAFE_EN	BIT(0)
#define QOS_QOS_CTRL_DANGER_VBLANK_MASK	GENMASK(5, 4)
#define QOS_QOS_CTRL_VBLANK_EN		BIT(16)
#define QOS_QOS_CTRL_CREQ_VBLANK_MASK	GENMASK(21, 20)

void dpu_reg_write(struct dpu_hw_blk_reg_map *c,
		u32 reg_off,
		u32 val,
		const char *name)
{
	/* don't need to mutex protect this */
	if (c->log_mask & dpu_hw_util_log_mask)
		DPU_DEBUG_DRIVER("[%s:0x%X] <= 0x%X\n",
				name, reg_off, val);
	writel_relaxed(val, c->blk_addr + reg_off);
}

int dpu_reg_read(struct dpu_hw_blk_reg_map *c, u32 reg_off)
{
	return readl_relaxed(c->blk_addr + reg_off);
}

u32 *dpu_hw_util_get_log_mask_ptr(void)
{
	return &dpu_hw_util_log_mask;
}

static void _dpu_hw_setup_scaler3_lut(struct dpu_hw_blk_reg_map *c,
		struct dpu_hw_scaler3_cfg *scaler3_cfg, u32 offset)
{
	int i, j, filter;
	int config_lut = 0x0;
	unsigned long lut_flags;
	u32 lut_addr, lut_offset, lut_len;
	u32 *lut[QSEED3_FILTERS] = {NULL, NULL, NULL, NULL, NULL};
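	/*
	 * lut[0] holds the directional LUT, lut[1]/lut[2] the Y/UV circular
	 * LUTs and lut[3]/lut[4] the Y/UV separable LUTs.  Each off_tbl entry
	 * below is { count, register offset }, where count is the number of
	 * 4-dword groups written to that region (hence the "<< 2" on lut_len).
	 */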
	static const uint32_t off_tbl[QSEED3_FILTERS][QSEED3_LUT_REGIONS][2] = {
		{{18, 0x000}, {12, 0x120}, {12, 0x1E0}, {8, 0x2A0} },
		{{6, 0x320}, {3, 0x3E0}, {3, 0x440}, {3, 0x4A0} },
		{{6, 0x500}, {3, 0x5c0}, {3, 0x620}, {3, 0x680} },
		{{6, 0x380}, {3, 0x410}, {3, 0x470}, {3, 0x4d0} },
		{{6, 0x560}, {3, 0x5f0}, {3, 0x650}, {3, 0x6b0} },
	};

	lut_flags = (unsigned long) scaler3_cfg->lut_flag;
	if (test_bit(QSEED3_COEF_LUT_DIR_BIT, &lut_flags) &&
		(scaler3_cfg->dir_len == QSEED3_DIR_LUT_SIZE)) {
		lut[0] = scaler3_cfg->dir_lut;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_Y_CIR_BIT, &lut_flags) &&
		(scaler3_cfg->y_rgb_cir_lut_idx < QSEED3_CIRCULAR_LUTS) &&
		(scaler3_cfg->cir_len == QSEED3_CIR_LUT_SIZE)) {
		lut[1] = scaler3_cfg->cir_lut +
			scaler3_cfg->y_rgb_cir_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_UV_CIR_BIT, &lut_flags) &&
		(scaler3_cfg->uv_cir_lut_idx < QSEED3_CIRCULAR_LUTS) &&
		(scaler3_cfg->cir_len == QSEED3_CIR_LUT_SIZE)) {
		lut[2] = scaler3_cfg->cir_lut +
			scaler3_cfg->uv_cir_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_Y_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->y_rgb_sep_lut_idx < QSEED3_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3_SEP_LUT_SIZE)) {
		lut[3] = scaler3_cfg->sep_lut +
			scaler3_cfg->y_rgb_sep_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_UV_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->uv_sep_lut_idx < QSEED3_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3_SEP_LUT_SIZE)) {
		lut[4] = scaler3_cfg->sep_lut +
			scaler3_cfg->uv_sep_lut_idx * QSEED3_LUT_SIZE;
		config_lut = 1;
	}

	if (config_lut) {
		for (filter = 0; filter < QSEED3_FILTERS; filter++) {
			if (!lut[filter])
				continue;
			lut_offset = 0;
			for (i = 0; i < QSEED3_LUT_REGIONS; i++) {
				lut_addr = QSEED3_COEF_LUT + offset
					+ off_tbl[filter][i][1];
				lut_len = off_tbl[filter][i][0] << 2;
				for (j = 0; j < lut_len; j++) {
					DPU_REG_WRITE(c,
						lut_addr,
						(lut[filter])[lut_offset++]);
					lut_addr += 4;
				}
			}
		}
	}

	if (test_bit(QSEED3_COEF_LUT_SWAP_BIT, &lut_flags))
		DPU_REG_WRITE(c, QSEED3_COEF_LUT_CTRL + offset, BIT(0));

}

static void _dpu_hw_setup_scaler3lite_lut(struct dpu_hw_blk_reg_map *c,
		struct dpu_hw_scaler3_cfg *scaler3_cfg, u32 offset)
{
	int j, filter;
	int config_lut = 0x0;
	unsigned long lut_flags;
	u32 lut_addr, lut_offset;
	u32 *lut[QSEED3LITE_FILTERS] = {NULL, NULL};
	static const uint32_t off_tbl[QSEED3LITE_FILTERS] = { 0x000, 0x200 };

	DPU_REG_WRITE(c, QSEED3LITE_DIR_FILTER_WEIGHT + offset, scaler3_cfg->dir_weight);

	if (!scaler3_cfg->sep_lut)
		return;

	lut_flags = (unsigned long) scaler3_cfg->lut_flag;
	if (test_bit(QSEED3_COEF_LUT_Y_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->y_rgb_sep_lut_idx < QSEED3LITE_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3LITE_SEP_LUT_SIZE)) {
		lut[0] = scaler3_cfg->sep_lut +
			scaler3_cfg->y_rgb_sep_lut_idx * QSEED3LITE_LUT_SIZE;
		config_lut = 1;
	}
	if (test_bit(QSEED3_COEF_LUT_UV_SEP_BIT, &lut_flags) &&
		(scaler3_cfg->uv_sep_lut_idx < QSEED3LITE_SEPARABLE_LUTS) &&
		(scaler3_cfg->sep_len == QSEED3LITE_SEP_LUT_SIZE)) {
		lut[1] = scaler3_cfg->sep_lut +
			scaler3_cfg->uv_sep_lut_idx * QSEED3LITE_LUT_SIZE;
		config_lut = 1;
	}

	if (config_lut) {
		for (filter = 0; filter < QSEED3LITE_FILTERS; filter++) {
			if (!lut[filter])
				continue;
			lut_offset = 0;
			lut_addr = QSEED3_COEF_LUT + offset + off_tbl[filter];
			for (j = 0; j < QSEED3LITE_LUT_SIZE; j++) {
				DPU_REG_WRITE(c,
					lut_addr,
					(lut[filter])[lut_offset++]);
				lut_addr += 4;
			}
		}
	}

	if (test_bit(QSEED3_COEF_LUT_SWAP_BIT, &lut_flags))
		DPU_REG_WRITE(c, QSEED3_COEF_LUT_CTRL + offset, BIT(0));

}

static void _dpu_hw_setup_scaler3_de(struct dpu_hw_blk_reg_map *c,
		struct dpu_hw_scaler3_de_cfg *de_cfg, u32 offset)
{
	u32 sharp_lvl, sharp_ctl, shape_ctl, de_thr;
	u32 adjust_a, adjust_b, adjust_c;

	if (!de_cfg->enable)
		return;

	sharp_lvl = (de_cfg->sharpen_level1 & 0x1FF) |
		((de_cfg->sharpen_level2 & 0x1FF) << 16);

	sharp_ctl = ((de_cfg->limit & 0xF) << 9) |
		((de_cfg->prec_shift & 0x7) << 13) |
		((de_cfg->clip & 0x7) << 16);

	shape_ctl = (de_cfg->thr_quiet & 0xFF) |
		((de_cfg->thr_dieout & 0x3FF) << 16);

	de_thr = (de_cfg->thr_low & 0x3FF) |
		((de_cfg->thr_high & 0x3FF) << 16);

	adjust_a = (de_cfg->adjust_a[0] & 0x3FF) |
		((de_cfg->adjust_a[1] & 0x3FF) << 10) |
		((de_cfg->adjust_a[2] & 0x3FF) << 20);

	adjust_b = (de_cfg->adjust_b[0] & 0x3FF) |
		((de_cfg->adjust_b[1] & 0x3FF) << 10) |
		((de_cfg->adjust_b[2] & 0x3FF) << 20);

	adjust_c = (de_cfg->adjust_c[0] & 0x3FF) |
		((de_cfg->adjust_c[1] & 0x3FF) << 10) |
		((de_cfg->adjust_c[2] & 0x3FF) << 20);

	DPU_REG_WRITE(c, QSEED3_DE_SHARPEN + offset, sharp_lvl);
	DPU_REG_WRITE(c, QSEED3_DE_SHARPEN_CTL + offset, sharp_ctl);
	DPU_REG_WRITE(c, QSEED3_DE_SHAPE_CTL + offset, shape_ctl);
	DPU_REG_WRITE(c, QSEED3_DE_THRESHOLD + offset, de_thr);
	DPU_REG_WRITE(c, QSEED3_DE_ADJUST_DATA_0 + offset, adjust_a);
	DPU_REG_WRITE(c, QSEED3_DE_ADJUST_DATA_1 + offset, adjust_b);
	DPU_REG_WRITE(c, QSEED3_DE_ADJUST_DATA_2 + offset, adjust_c);

}

void dpu_hw_setup_scaler3(struct dpu_hw_blk_reg_map *c,
		struct dpu_hw_scaler3_cfg *scaler3_cfg,
		u32 scaler_offset, u32 scaler_version,
		const struct dpu_format *format)
{
	u32 op_mode = 0;
	u32 phase_init, preload, src_y_rgb, src_uv, dst;

	if (!scaler3_cfg->enable)
		goto end;

	op_mode |= BIT(0);
	op_mode |= (scaler3_cfg->y_rgb_filter_cfg & 0x3) << 16;

	if (format && DPU_FORMAT_IS_YUV(format)) {
		op_mode |= BIT(12);
		op_mode |= (scaler3_cfg->uv_filter_cfg & 0x3) << 24;
	}

	op_mode |= (scaler3_cfg->blend_cfg & 1) << 31;
	op_mode |= (scaler3_cfg->dir_en) ? BIT(4) : 0;
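	/*
	 * op_mode so far: BIT(0) scaler enable, bits 17:16 Y/RGB filter
	 * select, BIT(12) plus bits 25:24 for the UV path on YUV formats,
	 * BIT(31) blend config and BIT(4) directional scaling.  BIT(8)
	 * (detail enhancer) and the format/alpha dependent bits are added
	 * further below before op_mode is finally written at "end:".
	 */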

	preload =
		((scaler3_cfg->preload_x[0] & 0x7F) << 0) |
		((scaler3_cfg->preload_y[0] & 0x7F) << 8) |
		((scaler3_cfg->preload_x[1] & 0x7F) << 16) |
		((scaler3_cfg->preload_y[1] & 0x7F) << 24);

	src_y_rgb = (scaler3_cfg->src_width[0] & 0x1FFFF) |
		((scaler3_cfg->src_height[0] & 0x1FFFF) << 16);

	src_uv = (scaler3_cfg->src_width[1] & 0x1FFFF) |
		((scaler3_cfg->src_height[1] & 0x1FFFF) << 16);

	dst = (scaler3_cfg->dst_width & 0x1FFFF) |
		((scaler3_cfg->dst_height & 0x1FFFF) << 16);

	if (scaler3_cfg->de.enable) {
		_dpu_hw_setup_scaler3_de(c, &scaler3_cfg->de, scaler_offset);
		op_mode |= BIT(8);
	}

	if (scaler3_cfg->lut_flag) {
		if (scaler_version < 0x2004)
			_dpu_hw_setup_scaler3_lut(c, scaler3_cfg, scaler_offset);
		else
			_dpu_hw_setup_scaler3lite_lut(c, scaler3_cfg, scaler_offset);
	}

	if (scaler_version == 0x1002) {
		phase_init =
			((scaler3_cfg->init_phase_x[0] & 0x3F) << 0) |
			((scaler3_cfg->init_phase_y[0] & 0x3F) << 8) |
			((scaler3_cfg->init_phase_x[1] & 0x3F) << 16) |
			((scaler3_cfg->init_phase_y[1] & 0x3F) << 24);
		DPU_REG_WRITE(c, QSEED3_PHASE_INIT + scaler_offset, phase_init);
	} else {
		DPU_REG_WRITE(c, QSEED3_PHASE_INIT_Y_H + scaler_offset,
			scaler3_cfg->init_phase_x[0] & 0x1FFFFF);
		DPU_REG_WRITE(c, QSEED3_PHASE_INIT_Y_V + scaler_offset,
			scaler3_cfg->init_phase_y[0] & 0x1FFFFF);
		DPU_REG_WRITE(c, QSEED3_PHASE_INIT_UV_H + scaler_offset,
			scaler3_cfg->init_phase_x[1] & 0x1FFFFF);
		DPU_REG_WRITE(c, QSEED3_PHASE_INIT_UV_V + scaler_offset,
			scaler3_cfg->init_phase_y[1] & 0x1FFFFF);
	}

	DPU_REG_WRITE(c, QSEED3_PHASE_STEP_Y_H + scaler_offset,
		scaler3_cfg->phase_step_x[0] & 0xFFFFFF);

	DPU_REG_WRITE(c, QSEED3_PHASE_STEP_Y_V + scaler_offset,
		scaler3_cfg->phase_step_y[0] & 0xFFFFFF);

	DPU_REG_WRITE(c, QSEED3_PHASE_STEP_UV_H + scaler_offset,
		scaler3_cfg->phase_step_x[1] & 0xFFFFFF);

	DPU_REG_WRITE(c, QSEED3_PHASE_STEP_UV_V + scaler_offset,
		scaler3_cfg->phase_step_y[1] & 0xFFFFFF);

	DPU_REG_WRITE(c, QSEED3_PRELOAD + scaler_offset, preload);

	DPU_REG_WRITE(c, QSEED3_SRC_SIZE_Y_RGB_A + scaler_offset, src_y_rgb);

	DPU_REG_WRITE(c, QSEED3_SRC_SIZE_UV + scaler_offset, src_uv);

	DPU_REG_WRITE(c, QSEED3_DST_SIZE + scaler_offset, dst);

end:
	if (format && !DPU_FORMAT_IS_DX(format))
		op_mode |= BIT(14);

	if (format && format->alpha_enable) {
		op_mode |= BIT(10);
		if (scaler_version == 0x1002)
			op_mode |= (scaler3_cfg->alpha_filter_cfg & 0x1) << 30;
		else
			op_mode |= (scaler3_cfg->alpha_filter_cfg & 0x3) << 29;
	}

	DPU_REG_WRITE(c, QSEED3_OP_MODE + scaler_offset, op_mode);
}

u32 dpu_hw_get_scaler3_ver(struct dpu_hw_blk_reg_map *c,
		u32 scaler_offset)
{
	return DPU_REG_READ(c, QSEED3_HW_VERSION + scaler_offset);
}

void dpu_hw_csc_setup(struct dpu_hw_blk_reg_map *c,
		u32 csc_reg_off,
		const struct dpu_csc_cfg *data, bool csc10)
{
	static const u32 matrix_shift = 7;
	u32 clamp_shift = csc10 ? 16 : 8;
	u32 val;

	/* matrix coeff - convert S15.16 to S4.9 */
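	/*
	 * e.g. 1.5 in S15.16 is 0x00018000; shifting right by matrix_shift
	 * (7) drops the extra fraction bits and gives 0x300, i.e. 1.5 * 512
	 * in S4.9, and the 0x1FFF mask keeps the 13-bit coefficient field.
	 */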
	val = ((data->csc_mv[0] >> matrix_shift) & 0x1FFF) |
		(((data->csc_mv[1] >> matrix_shift) & 0x1FFF) << 16);
	DPU_REG_WRITE(c, csc_reg_off, val);
	val = ((data->csc_mv[2] >> matrix_shift) & 0x1FFF) |
		(((data->csc_mv[3] >> matrix_shift) & 0x1FFF) << 16);
	DPU_REG_WRITE(c, csc_reg_off + 0x4, val);
	val = ((data->csc_mv[4] >> matrix_shift) & 0x1FFF) |
		(((data->csc_mv[5] >> matrix_shift) & 0x1FFF) << 16);
	DPU_REG_WRITE(c, csc_reg_off + 0x8, val);
	val = ((data->csc_mv[6] >> matrix_shift) & 0x1FFF) |
		(((data->csc_mv[7] >> matrix_shift) & 0x1FFF) << 16);
	DPU_REG_WRITE(c, csc_reg_off + 0xc, val);
	val = (data->csc_mv[8] >> matrix_shift) & 0x1FFF;
	DPU_REG_WRITE(c, csc_reg_off + 0x10, val);

	/* Pre clamp */
	val = (data->csc_pre_lv[0] << clamp_shift) | data->csc_pre_lv[1];
	DPU_REG_WRITE(c, csc_reg_off + 0x14, val);
	val = (data->csc_pre_lv[2] << clamp_shift) | data->csc_pre_lv[3];
	DPU_REG_WRITE(c, csc_reg_off + 0x18, val);
	val = (data->csc_pre_lv[4] << clamp_shift) | data->csc_pre_lv[5];
	DPU_REG_WRITE(c, csc_reg_off + 0x1c, val);

	/* Post clamp */
	val = (data->csc_post_lv[0] << clamp_shift) | data->csc_post_lv[1];
	DPU_REG_WRITE(c, csc_reg_off + 0x20, val);
	val = (data->csc_post_lv[2] << clamp_shift) | data->csc_post_lv[3];
	DPU_REG_WRITE(c, csc_reg_off + 0x24, val);
	val = (data->csc_post_lv[4] << clamp_shift) | data->csc_post_lv[5];
	DPU_REG_WRITE(c, csc_reg_off + 0x28, val);

	/* Pre-Bias */
	DPU_REG_WRITE(c, csc_reg_off + 0x2c, data->csc_pre_bv[0]);
	DPU_REG_WRITE(c, csc_reg_off + 0x30, data->csc_pre_bv[1]);
	DPU_REG_WRITE(c, csc_reg_off + 0x34, data->csc_pre_bv[2]);

	/* Post-Bias */
	DPU_REG_WRITE(c, csc_reg_off + 0x38, data->csc_post_bv[0]);
	DPU_REG_WRITE(c, csc_reg_off + 0x3c, data->csc_post_bv[1]);
	DPU_REG_WRITE(c, csc_reg_off + 0x40, data->csc_post_bv[2]);
}

/**
 * _dpu_hw_get_qos_lut - get LUT mapping based on fill level
 * @tbl: Pointer to LUT table
 * @total_fl: fill level
 * Return: LUT setting corresponding to the fill level
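 *
 * Example (hypothetical table): with entries {.fl = 4, .lut = A},
 * {.fl = 10, .lut = B} and {.fl = 0, .lut = C}, a total_fl of 6 returns B,
 * while a total_fl of 12 matches no entry and falls back to C because a
 * trailing entry with .fl == 0 acts as the default.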
 */
u64 _dpu_hw_get_qos_lut(const struct dpu_qos_lut_tbl *tbl,
		u32 total_fl)
{
	int i;

	if (!tbl || !tbl->nentry || !tbl->entries)
		return 0;

	for (i = 0; i < tbl->nentry; i++)
		if (total_fl <= tbl->entries[i].fl)
			return tbl->entries[i].lut;

	/* if last fl is zero, use as default */
	if (!tbl->entries[i-1].fl)
		return tbl->entries[i-1].lut;

	return 0;
}

void _dpu_hw_setup_qos_lut(struct dpu_hw_blk_reg_map *c, u32 offset,
		bool qos_8lvl,
		const struct dpu_hw_qos_cfg *cfg)
{
	DPU_REG_WRITE(c, offset + QOS_DANGER_LUT, cfg->danger_lut);
	DPU_REG_WRITE(c, offset + QOS_SAFE_LUT, cfg->safe_lut);

	if (qos_8lvl) {
		DPU_REG_WRITE(c, offset + QOS_CREQ_LUT_0, cfg->creq_lut);
		DPU_REG_WRITE(c, offset + QOS_CREQ_LUT_1, cfg->creq_lut >> 32);
	} else {
		DPU_REG_WRITE(c, offset + QOS_CREQ_LUT, cfg->creq_lut);
	}

	DPU_REG_WRITE(c, offset + QOS_QOS_CTRL,
		cfg->danger_safe_en ? QOS_QOS_CTRL_DANGER_SAFE_EN : 0);
}

/*
 * note: Aside from encoders, input_sel should be set to 0x0 by default
 */
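/*
 * Illustrative flow for the MISR helpers below (offsets and variable names
 * are the caller's, not fixed by this file): program the block with
 * dpu_hw_setup_misr(c, ctrl_off, 0x0), wait for the configured number of
 * frames, then read the signature:
 *
 *	ret = dpu_hw_collect_misr(c, ctrl_off, sig_off, &crc);
 *
 * -ENODATA means the MISR was never enabled, -EINVAL that no signature is
 * ready yet (or that misr_value is NULL).
 */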
void dpu_hw_setup_misr(struct dpu_hw_blk_reg_map *c,
		u32 misr_ctrl_offset, u8 input_sel)
{
	u32 config = 0;

	DPU_REG_WRITE(c, misr_ctrl_offset, MISR_CTRL_STATUS_CLEAR);

	/* Clear old MISR value (in case it's read before a new value is calculated) */
	wmb();

	config = MISR_FRAME_COUNT | MISR_CTRL_ENABLE | MISR_CTRL_FREE_RUN_MASK |
		((input_sel & 0xF) << 24);
	DPU_REG_WRITE(c, misr_ctrl_offset, config);
}

int dpu_hw_collect_misr(struct dpu_hw_blk_reg_map *c,
		u32 misr_ctrl_offset,
		u32 misr_signature_offset,
		u32 *misr_value)
{
	u32 ctrl = 0;

	if (!misr_value)
		return -EINVAL;

	ctrl = DPU_REG_READ(c, misr_ctrl_offset);

	if (!(ctrl & MISR_CTRL_ENABLE))
		return -ENODATA;

	if (!(ctrl & MISR_CTRL_STATUS))
		return -EINVAL;

	*misr_value = DPU_REG_READ(c, misr_signature_offset);

	return 0;
}

#define CDP_ENABLE			BIT(0)
#define CDP_UBWC_META_ENABLE		BIT(1)
#define CDP_TILE_AMORTIZE_ENABLE	BIT(2)
#define CDP_PRELOAD_AHEAD_64		BIT(3)

void dpu_setup_cdp(struct dpu_hw_blk_reg_map *c, u32 offset,
		const struct dpu_format *fmt, bool enable)
{
	u32 cdp_cntl = CDP_PRELOAD_AHEAD_64;

	if (enable)
		cdp_cntl |= CDP_ENABLE;
	if (DPU_FORMAT_IS_UBWC(fmt))
		cdp_cntl |= CDP_UBWC_META_ENABLE;
	if (DPU_FORMAT_IS_UBWC(fmt) ||
	    DPU_FORMAT_IS_TILE(fmt))
		cdp_cntl |= CDP_TILE_AMORTIZE_ENABLE;

	DPU_REG_WRITE(c, offset, cdp_cntl);
}