/*
 * Copyright (c) 2014, The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>

#include "mdp5_kms.h"

static struct mdp5_kms *get_kms(struct drm_encoder *encoder)
{
	struct msm_drm_private *priv = encoder->dev->dev_private;
	return to_mdp5_kms(to_mdp_kms(priv->kms));
}

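/*
 * Bus scaling is only available with the downstream msm_bus driver; when
 * DOWNSTREAM_CONFIG_MSM_BUS_SCALING is not defined, the bs_*() helpers
 * below compile to empty stubs.
 */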
#ifdef DOWNSTREAM_CONFIG_MSM_BUS_SCALING
#include <mach/board.h>
#include <mach/msm_bus.h>
#include <mach/msm_bus_board.h>
#define MDP_BUS_VECTOR_ENTRY(ab_val, ib_val)		\
	{						\
		.src = MSM_BUS_MASTER_MDP_PORT0,	\
		.dst = MSM_BUS_SLAVE_EBI_CH0,		\
		.ab = (ab_val),				\
		.ib = (ib_val),				\
	}

static struct msm_bus_vectors mdp_bus_vectors[] = {
	MDP_BUS_VECTOR_ENTRY(0, 0),
	MDP_BUS_VECTOR_ENTRY(2000000000, 2000000000),
};
static struct msm_bus_paths mdp_bus_usecases[] = { {
		.num_paths = 1,
		.vectors = &mdp_bus_vectors[0],
}, {
		.num_paths = 1,
		.vectors = &mdp_bus_vectors[1],
} };
static struct msm_bus_scale_pdata mdp_bus_scale_table = {
	.usecase = mdp_bus_usecases,
	.num_usecases = ARRAY_SIZE(mdp_bus_usecases),
	.name = "mdss_mdp",
};

static void bs_init(struct mdp5_encoder *mdp5_encoder)
{
	mdp5_encoder->bsc = msm_bus_scale_register_client(
			&mdp_bus_scale_table);
	DBG("bus scale client: %08x", mdp5_encoder->bsc);
}

static void bs_fini(struct mdp5_encoder *mdp5_encoder)
{
	if (mdp5_encoder->bsc) {
		msm_bus_scale_unregister_client(mdp5_encoder->bsc);
		mdp5_encoder->bsc = 0;
	}
}

static void bs_set(struct mdp5_encoder *mdp5_encoder, int idx)
{
	if (mdp5_encoder->bsc) {
		DBG("set bus scaling: %d", idx);
		/* HACK: scaling down, and then immediately back up
		 * seems to leave things broken (underflow).. so
		 * never disable:
		 */
		idx = 1;
		msm_bus_scale_client_update_request(mdp5_encoder->bsc, idx);
	}
}
#else
static void bs_init(struct mdp5_encoder *mdp5_encoder) {}
static void bs_fini(struct mdp5_encoder *mdp5_encoder) {}
static void bs_set(struct mdp5_encoder *mdp5_encoder, int idx) {}
#endif

static void mdp5_encoder_destroy(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	bs_fini(mdp5_encoder);
	drm_encoder_cleanup(encoder);
	kfree(mdp5_encoder);
}

static const struct drm_encoder_funcs mdp5_encoder_funcs = {
	.destroy = mdp5_encoder_destroy,
};

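/*
 * Program the INTF timing registers (sync pulse width/period, display and
 * active start/end, sync polarities and panel format) for a video mode
 * interface, based on the adjusted mode.
 */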
static void mdp5_vid_encoder_mode_set(struct drm_encoder *encoder,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adjusted_mode)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_kms *mdp5_kms = get_kms(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_connector *connector;
	int intf = mdp5_encoder->intf->num;
	uint32_t dtv_hsync_skew, vsync_period, vsync_len, ctrl_pol;
	uint32_t display_v_start, display_v_end;
	uint32_t hsync_start_x, hsync_end_x;
	uint32_t format = 0x2100;
	unsigned long flags;

	mode = adjusted_mode;

	DBG("set mode: %d:\"%s\" %d %d %d %d %d %d %d %d %d %d 0x%x 0x%x",
			mode->base.id, mode->name,
			mode->vrefresh, mode->clock,
			mode->hdisplay, mode->hsync_start,
			mode->hsync_end, mode->htotal,
			mode->vdisplay, mode->vsync_start,
			mode->vsync_end, mode->vtotal,
			mode->type, mode->flags);

	ctrl_pol = 0;

	/* DSI controller cannot handle active-low sync signals. */
	if (mdp5_encoder->intf->type != INTF_DSI) {
		if (mode->flags & DRM_MODE_FLAG_NHSYNC)
			ctrl_pol |= MDP5_INTF_POLARITY_CTL_HSYNC_LOW;
		if (mode->flags & DRM_MODE_FLAG_NVSYNC)
			ctrl_pol |= MDP5_INTF_POLARITY_CTL_VSYNC_LOW;
	}
	/* probably need to get DATA_EN polarity from panel.. */

	dtv_hsync_skew = 0;  /* get this from panel? */

	/* Get color format from panel, default is 8bpc */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder == encoder) {
			switch (connector->display_info.bpc) {
			case 4:
				format |= 0;
				break;
			case 5:
				format |= 0x15;
				break;
			case 6:
				format |= 0x2A;
				break;
			case 8:
			default:
				format |= 0x3F;
				break;
			}
			break;
		}
	}

	hsync_start_x = (mode->htotal - mode->hsync_start);
	hsync_end_x = mode->htotal - (mode->hsync_start - mode->hdisplay) - 1;

	vsync_period = mode->vtotal * mode->htotal;
	vsync_len = (mode->vsync_end - mode->vsync_start) * mode->htotal;
	display_v_start = (mode->vtotal - mode->vsync_start) * mode->htotal + dtv_hsync_skew;
	display_v_end = vsync_period - ((mode->vsync_start - mode->vdisplay) * mode->htotal) + dtv_hsync_skew - 1;

	/*
	 * For edp only:
	 * DISPLAY_V_START = (VBP * HCYCLE) + HBP
	 * DISPLAY_V_END = (VBP + VACTIVE) * HCYCLE - 1 - HFP
	 */
	if (mdp5_encoder->intf->type == INTF_eDP) {
		display_v_start += mode->htotal - mode->hsync_start;
		display_v_end -= mode->hsync_start - mode->hdisplay;
	}

	spin_lock_irqsave(&mdp5_encoder->intf_lock, flags);

	mdp5_write(mdp5_kms, REG_MDP5_INTF_HSYNC_CTL(intf),
			MDP5_INTF_HSYNC_CTL_PULSEW(mode->hsync_end - mode->hsync_start) |
			MDP5_INTF_HSYNC_CTL_PERIOD(mode->htotal));
	mdp5_write(mdp5_kms, REG_MDP5_INTF_VSYNC_PERIOD_F0(intf), vsync_period);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_VSYNC_LEN_F0(intf), vsync_len);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_DISPLAY_HCTL(intf),
			MDP5_INTF_DISPLAY_HCTL_START(hsync_start_x) |
			MDP5_INTF_DISPLAY_HCTL_END(hsync_end_x));
	mdp5_write(mdp5_kms, REG_MDP5_INTF_DISPLAY_VSTART_F0(intf), display_v_start);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_DISPLAY_VEND_F0(intf), display_v_end);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_BORDER_COLOR(intf), 0);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_UNDERFLOW_COLOR(intf), 0xff);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_HSYNC_SKEW(intf), dtv_hsync_skew);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_POLARITY_CTL(intf), ctrl_pol);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_ACTIVE_HCTL(intf),
			MDP5_INTF_ACTIVE_HCTL_START(0) |
			MDP5_INTF_ACTIVE_HCTL_END(0));
	mdp5_write(mdp5_kms, REG_MDP5_INTF_ACTIVE_VSTART_F0(intf), 0);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_ACTIVE_VEND_F0(intf), 0);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_PANEL_FORMAT(intf), format);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_FRAME_LINE_COUNT_EN(intf), 0x3);  /* frame+line? */

	spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags);

	mdp5_crtc_set_pipeline(encoder->crtc);
}

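/*
 * Disable a video mode interface: stop the timing engine, flush the CTL,
 * and wait for a vblank so the disable has latched in hardware before the
 * vsync source goes away.
 */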
static void mdp5_vid_encoder_disable(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_kms *mdp5_kms = get_kms(encoder);
	struct mdp5_ctl *ctl = mdp5_encoder->ctl;
	struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(encoder->crtc);
	struct mdp5_hw_mixer *mixer = mdp5_crtc_get_mixer(encoder->crtc);
	struct mdp5_interface *intf = mdp5_encoder->intf;
	int intfn = mdp5_encoder->intf->num;
	unsigned long flags;

	if (WARN_ON(!mdp5_encoder->enabled))
		return;

	mdp5_ctl_set_encoder_state(ctl, pipeline, false);

	spin_lock_irqsave(&mdp5_encoder->intf_lock, flags);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_TIMING_ENGINE_EN(intfn), 0);
	spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags);
	mdp5_ctl_commit(ctl, pipeline, mdp_ctl_flush_mask_encoder(intf), true);

	/*
	 * Wait for a vsync so we know that ENABLE=0 has latched before
	 * the (connector) source of the vsyncs gets disabled.  Otherwise
	 * we end up in a funny state if we re-enable before the disable
	 * latches, with the result that some of the settings for the new
	 * modeset (like the new scanout buffer) don't latch properly.
	 */
	mdp_irq_wait(&mdp5_kms->base, intf2vblank(mixer, intf));

	bs_set(mdp5_encoder, 0);

	mdp5_encoder->enabled = false;
}

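/*
 * Enable a video mode interface: bump the bus bandwidth vote, start the
 * timing engine, and flush and enable the CTL path feeding this interface.
 */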
static void mdp5_vid_encoder_enable(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_kms *mdp5_kms = get_kms(encoder);
	struct mdp5_ctl *ctl = mdp5_encoder->ctl;
	struct mdp5_interface *intf = mdp5_encoder->intf;
	struct mdp5_pipeline *pipeline = mdp5_crtc_get_pipeline(encoder->crtc);
	int intfn = intf->num;
	unsigned long flags;

	if (WARN_ON(mdp5_encoder->enabled))
		return;

	bs_set(mdp5_encoder, 1);
	spin_lock_irqsave(&mdp5_encoder->intf_lock, flags);
	mdp5_write(mdp5_kms, REG_MDP5_INTF_TIMING_ENGINE_EN(intfn), 1);
	spin_unlock_irqrestore(&mdp5_encoder->intf_lock, flags);
	mdp5_ctl_commit(ctl, pipeline, mdp_ctl_flush_mask_encoder(intf), true);

	mdp5_ctl_set_encoder_state(ctl, pipeline, true);

	mdp5_encoder->enabled = true;
}

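/*
 * The encoder hooks below dispatch to either the command mode (DSI) or the
 * video mode implementation, depending on how the interface is configured.
 */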
static void mdp5_encoder_mode_set(struct drm_encoder *encoder,
				  struct drm_display_mode *mode,
				  struct drm_display_mode *adjusted_mode)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_interface *intf = mdp5_encoder->intf;

	if (intf->mode == MDP5_INTF_DSI_MODE_COMMAND)
		mdp5_cmd_encoder_mode_set(encoder, mode, adjusted_mode);
	else
		mdp5_vid_encoder_mode_set(encoder, mode, adjusted_mode);
}

static void mdp5_encoder_disable(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_interface *intf = mdp5_encoder->intf;

	if (intf->mode == MDP5_INTF_DSI_MODE_COMMAND)
		mdp5_cmd_encoder_disable(encoder);
	else
		mdp5_vid_encoder_disable(encoder);
}

static void mdp5_encoder_enable(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_interface *intf = mdp5_encoder->intf;
	/* this isn't right I think */
	struct drm_crtc_state *cstate = encoder->crtc->state;

	mdp5_encoder_mode_set(encoder, &cstate->mode, &cstate->adjusted_mode);

	if (intf->mode == MDP5_INTF_DSI_MODE_COMMAND)
		mdp5_cmd_encoder_enable(encoder);
	else
		mdp5_vid_encoder_enable(encoder);
}

static int mdp5_encoder_atomic_check(struct drm_encoder *encoder,
				     struct drm_crtc_state *crtc_state,
				     struct drm_connector_state *conn_state)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_crtc_state *mdp5_cstate = to_mdp5_crtc_state(crtc_state);
	struct mdp5_interface *intf = mdp5_encoder->intf;
	struct mdp5_ctl *ctl = mdp5_encoder->ctl;

	mdp5_cstate->ctl = ctl;
	mdp5_cstate->pipeline.intf = intf;

	/*
	 * This is a bit awkward, but we want to flush the CTL and hit the
	 * START bit at most once for an atomic update.  In the non-full-
	 * modeset case, this is done from crtc->atomic_flush(), but that
	 * is too early in the case of full modeset, in which case we
	 * defer to encoder->enable().  But we need to *know* whether
	 * encoder->enable() will be called to do this:
	 */
	if (drm_atomic_crtc_needs_modeset(crtc_state))
		mdp5_cstate->defer_start = true;

	return 0;
}

static const struct drm_encoder_helper_funcs mdp5_encoder_helper_funcs = {
	.disable = mdp5_encoder_disable,
	.enable = mdp5_encoder_enable,
	.atomic_check = mdp5_encoder_atomic_check,
};

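/* Read back the current scanline from the interface's line counter */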
int mdp5_encoder_get_linecount(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_kms *mdp5_kms = get_kms(encoder);
	int intf = mdp5_encoder->intf->num;

	return mdp5_read(mdp5_kms, REG_MDP5_INTF_LINE_COUNT(intf));
}

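/* Read back the interface's hardware frame counter */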
u32 mdp5_encoder_get_framecount(struct drm_encoder *encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_kms *mdp5_kms = get_kms(encoder);
	int intf = mdp5_encoder->intf->num;

	return mdp5_read(mdp5_kms, REG_MDP5_INTF_FRAME_COUNT(intf));
}

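/*
 * Configure split display (e.g. dual DSI) operation: slave the second
 * interface's timing generator to the master's enable signal and pair the
 * two CTL paths.
 */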
int mdp5_vid_encoder_set_split_display(struct drm_encoder *encoder,
				       struct drm_encoder *slave_encoder)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_encoder *mdp5_slave_enc = to_mdp5_encoder(slave_encoder);
	struct mdp5_kms *mdp5_kms;
	struct device *dev;
	int intf_num;
	u32 data = 0;

	if (!encoder || !slave_encoder)
		return -EINVAL;

	mdp5_kms = get_kms(encoder);
	intf_num = mdp5_encoder->intf->num;

	/* Switch slave encoder's TimingGen Sync mode,
	 * to use the master's enable signal for the slave encoder.
	 */
	if (intf_num == 1)
		data |= MDP5_SPLIT_DPL_LOWER_INTF2_TG_SYNC;
	else if (intf_num == 2)
		data |= MDP5_SPLIT_DPL_LOWER_INTF1_TG_SYNC;
	else
		return -EINVAL;

	dev = &mdp5_kms->pdev->dev;
	/* Make sure clocks are on when connectors call this function. */
	pm_runtime_get_sync(dev);

	/* Dumb Panel, Sync mode */
	mdp5_write(mdp5_kms, REG_MDP5_SPLIT_DPL_UPPER, 0);
	mdp5_write(mdp5_kms, REG_MDP5_SPLIT_DPL_LOWER, data);
	mdp5_write(mdp5_kms, REG_MDP5_SPLIT_DPL_EN, 1);

	mdp5_ctl_pair(mdp5_encoder->ctl, mdp5_slave_enc->ctl, true);

	pm_runtime_put_sync(dev);

	return 0;
}

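/*
 * Switch a DSI interface between command mode and video mode; non-DSI
 * interfaces are reset to MDP5_INTF_MODE_NONE.
 */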
void mdp5_encoder_set_intf_mode(struct drm_encoder *encoder, bool cmd_mode)
{
	struct mdp5_encoder *mdp5_encoder = to_mdp5_encoder(encoder);
	struct mdp5_interface *intf = mdp5_encoder->intf;

	/* TODO: Expand this to set writeback modes too */
	if (cmd_mode) {
		WARN_ON(intf->type != INTF_DSI);
		intf->mode = MDP5_INTF_DSI_MODE_COMMAND;
	} else {
		if (intf->type == INTF_DSI)
			intf->mode = MDP5_INTF_DSI_MODE_VIDEO;
		else
			intf->mode = MDP5_INTF_MODE_NONE;
	}
}

/* initialize encoder */
struct drm_encoder *mdp5_encoder_init(struct drm_device *dev,
				      struct mdp5_interface *intf,
				      struct mdp5_ctl *ctl)
{
	struct drm_encoder *encoder = NULL;
	struct mdp5_encoder *mdp5_encoder;
	int enc_type = (intf->type == INTF_DSI) ?
		DRM_MODE_ENCODER_DSI : DRM_MODE_ENCODER_TMDS;
	int ret;

	mdp5_encoder = kzalloc(sizeof(*mdp5_encoder), GFP_KERNEL);
	if (!mdp5_encoder) {
		ret = -ENOMEM;
		goto fail;
	}

	encoder = &mdp5_encoder->base;
	mdp5_encoder->ctl = ctl;
	mdp5_encoder->intf = intf;

	spin_lock_init(&mdp5_encoder->intf_lock);

	drm_encoder_init(dev, encoder, &mdp5_encoder_funcs, enc_type, NULL);

	drm_encoder_helper_add(encoder, &mdp5_encoder_helper_funcs);

	bs_init(mdp5_encoder);

	return encoder;

fail:
	if (encoder)
		mdp5_encoder_destroy(encoder);

	return ERR_PTR(ret);
}