// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2019 Spreadtrum Communications Inc.

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dma/sprd-dma.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>
#include <sound/compress_driver.h>

#include "sprd-pcm-dma.h"

#define SPRD_COMPR_DMA_CHANS		2

/* Default values if userspace does not set them */
#define SPRD_COMPR_MIN_FRAGMENT_SIZE	SZ_8K
#define SPRD_COMPR_MAX_FRAGMENT_SIZE	SZ_128K
#define SPRD_COMPR_MIN_NUM_FRAGMENTS	4
#define SPRD_COMPR_MAX_NUM_FRAGMENTS	64

/* DSP FIFO empty watermark and size */
#define SPRD_COMPR_MCDT_EMPTY_WMK	0
#define SPRD_COMPR_MCDT_FIFO_SIZE	512

/* Stage 0 IRAM buffer size definition */
#define SPRD_COMPR_IRAM_BUF_SIZE	SZ_32K
#define SPRD_COMPR_IRAM_INFO_SIZE	(sizeof(struct sprd_compr_playinfo))
#define SPRD_COMPR_IRAM_LINKLIST_SIZE	(1024 - SPRD_COMPR_IRAM_INFO_SIZE)
#define SPRD_COMPR_IRAM_SIZE		(SPRD_COMPR_IRAM_BUF_SIZE + \
					 SPRD_COMPR_IRAM_INFO_SIZE + \
					 SPRD_COMPR_IRAM_LINKLIST_SIZE)
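
/*
 * Resulting stage 0 IRAM layout, as set up in sprd_platform_compr_open()
 * (offsets from iram_buffer.addr):
 *   [0, IRAM_BUF_SIZE)                       audio data drained by DMA 0
 *   [IRAM_BUF_SIZE, +IRAM_LINKLIST_SIZE)     DMA 0 link-list configuration
 *   [IRAM_BUF_SIZE + IRAM_LINKLIST_SIZE, ..) DSP play information
 */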

/* Stage 1 DDR buffer size definition */
#define SPRD_COMPR_AREA_BUF_SIZE	SZ_2M
#define SPRD_COMPR_AREA_LINKLIST_SIZE	1024
#define SPRD_COMPR_AREA_SIZE		(SPRD_COMPR_AREA_BUF_SIZE + \
					 SPRD_COMPR_AREA_LINKLIST_SIZE)
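
/*
 * Resulting stage 1 DDR layout (offsets from compr_buffer.addr):
 *   [0, AREA_BUF_SIZE)          compressed data ring filled by the copy() op
 *   [AREA_BUF_SIZE, AREA_SIZE)  DMA 1 link-list configuration
 */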

struct sprd_compr_dma {
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	dma_addr_t phys;
	void *virt;
	int trans_len;
};

/*
 * The Spreadtrum audio compress offload mode uses a 2-stage DMA transfer to
 * save power. Two DMA channels are requested: one source channel and one
 * destination channel. Once the source channel's transaction is done, it
 * triggers the destination channel's transaction automatically by a hardware
 * signal.
 *
 * For the 2-stage DMA transfer, two buffers are allocated: an IRAM buffer
 * (always powered) and a DDR buffer. The source channel transfers data from
 * the IRAM buffer to the DSP FIFO for decoding/encoding. Once the IRAM buffer
 * has been drained, the destination channel starts to transfer data from the
 * DDR buffer to the IRAM buffer.
 *
 * Since the DSP FIFO is only 512B, the IRAM buffer is 32K and the DDR buffer
 * is much larger at 2M. The AP system therefore only needs to wake up to
 * refill the IRAM buffer from DDR once the 32K of IRAM data has been
 * transferred; the rest of the time it can stay suspended to save power.
 */
struct sprd_compr_stream {
	struct snd_compr_stream *cstream;
	struct sprd_compr_ops *compr_ops;
	struct sprd_compr_dma dma[SPRD_COMPR_DMA_CHANS];

	/* Number of DMA engine channels */
	int num_channels;

	/* Stage 0 IRAM buffer */
	struct snd_dma_buffer iram_buffer;
	/* Stage 1 DDR buffer */
	struct snd_dma_buffer compr_buffer;

	/* DSP play information IRAM buffer */
	dma_addr_t info_phys;
	void *info_area;
	int info_size;

	/* Data size copied to IRAM buffer */
	int copied_total;
	/* Total received data size from userspace */
	int received_total;
	/* Stage 0 IRAM buffer received data size */
	int received_stage0;
	/* Stage 1 DDR buffer received data size */
	int received_stage1;
	/* Stage 1 DDR buffer pointer */
	int stage1_pointer;
};

static int sprd_platform_compr_trigger(struct snd_compr_stream *cstream,
				       int cmd);

static void sprd_platform_compr_drain_notify(void *arg)
{
	struct snd_compr_stream *cstream = arg;
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;

	memset(stream->info_area, 0, sizeof(struct sprd_compr_playinfo));

	snd_compr_drain_notify(cstream);
}

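/*
 * Completion callback for the stage 1 (DDR -> IRAM) DMA channel; it is only
 * registered on channel 1 in sprd_platform_compr_dma_config(). Each
 * completion means another trans_len bytes have landed in the IRAM buffer,
 * so advance copied_total and tell the core a fragment has elapsed.
 */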
static void sprd_platform_compr_dma_complete(void *data)
{
	struct snd_compr_stream *cstream = data;
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct sprd_compr_dma *dma = &stream->dma[1];

	/* Update data size copied to IRAM buffer */
	stream->copied_total += dma->trans_len;
	if (stream->copied_total > stream->received_total)
		stream->copied_total = stream->received_total;

	snd_compr_fragment_elapsed(cstream);
}

static int sprd_platform_compr_dma_config(struct snd_compr_stream *cstream,
					  struct snd_compr_params *params,
					  int channel)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	struct sprd_compr_data *data = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0));
	struct sprd_pcm_dma_params *dma_params = data->dma_params;
	struct sprd_compr_dma *dma = &stream->dma[channel];
	struct dma_slave_config config = { };
	struct sprd_dma_linklist link = { };
	enum dma_transfer_direction dir;
	struct scatterlist *sg, *sgt;
	enum dma_slave_buswidth bus_width;
	int period, period_cnt, sg_num = 2;
	dma_addr_t src_addr, dst_addr;
	unsigned long flags;
	int ret, j;

	if (!dma_params) {
		dev_err(dev, "no dma parameters setting\n");
		return -EINVAL;
	}

	dma->chan = dma_request_slave_channel(dev,
					      dma_params->chan_name[channel]);
	if (!dma->chan) {
		dev_err(dev, "failed to request dma channel\n");
		return -ENODEV;
	}

	sgt = sg = devm_kcalloc(dev, sg_num, sizeof(*sg), GFP_KERNEL);
	if (!sg) {
		ret = -ENOMEM;
		goto sg_err;
	}

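	/*
	 * Channel 0 drains the IRAM buffer into the DSP FIFO; its period is
	 * derived from the MCDT FIFO size minus the empty watermark, scaled
	 * by 4 to match the 4-byte bus width (2048 bytes with the defaults
	 * above). Channel 1 refills the IRAM buffer from the DDR buffer one
	 * fragment at a time, using the fragment size and count requested by
	 * userspace.
	 */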
	switch (channel) {
	case 0:
		bus_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		period = (SPRD_COMPR_MCDT_FIFO_SIZE - SPRD_COMPR_MCDT_EMPTY_WMK) * 4;
		period_cnt = params->buffer.fragment_size / period;
		src_addr = stream->iram_buffer.addr;
		dst_addr = dma_params->dev_phys[channel];
		flags = SPRD_DMA_FLAGS(SPRD_DMA_SRC_CHN1,
				       SPRD_DMA_TRANS_DONE_TRG,
				       SPRD_DMA_FRAG_REQ,
				       SPRD_DMA_TRANS_INT);
		break;

	case 1:
		bus_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		period = params->buffer.fragment_size;
		period_cnt = params->buffer.fragments;
		src_addr = stream->compr_buffer.addr;
		dst_addr = stream->iram_buffer.addr;
		flags = SPRD_DMA_FLAGS(SPRD_DMA_DST_CHN1,
				       SPRD_DMA_TRANS_DONE_TRG,
				       SPRD_DMA_FRAG_REQ,
				       SPRD_DMA_TRANS_INT);
		break;

	default:
		ret = -EINVAL;
		goto config_err;
	}

	dma->trans_len = period * period_cnt;

	config.src_maxburst = period;
	config.src_addr_width = bus_width;
	config.dst_addr_width = bus_width;
	if (cstream->direction == SND_COMPRESS_PLAYBACK) {
		config.src_addr = src_addr;
		config.dst_addr = dst_addr;
		dir = DMA_MEM_TO_DEV;
	} else {
		config.src_addr = dst_addr;
		config.dst_addr = src_addr;
		dir = DMA_DEV_TO_MEM;
	}

	sg_init_table(sgt, sg_num);
	for (j = 0; j < sg_num; j++, sgt++) {
		sg_dma_len(sgt) = dma->trans_len;
		sg_dma_address(sgt) = dst_addr;
	}

	/*
	 * Configure the link-list address for the DMA engine link-list
	 * mode.
	 */
	link.virt_addr = (unsigned long)dma->virt;
	link.phy_addr = dma->phys;

	ret = dmaengine_slave_config(dma->chan, &config);
	if (ret) {
		dev_err(dev,
			"failed to set slave configuration: %d\n", ret);
		goto config_err;
	}

	/*
	 * The DMA request mode, interrupt mode, channel mode and channel
	 * trigger mode are all configured via these flags.
	 */
	dma->desc = dma->chan->device->device_prep_slave_sg(dma->chan, sg,
							    sg_num, dir,
							    flags, &link);
	if (!dma->desc) {
		dev_err(dev, "failed to prepare slave sg\n");
		ret = -ENOMEM;
		goto config_err;
	}

	/* Only a channel 1 transfer can wake up the AP system. */
	if (!params->no_wake_mode && channel == 1) {
		dma->desc->callback = sprd_platform_compr_dma_complete;
		dma->desc->callback_param = cstream;
	}

	devm_kfree(dev, sg);

	return 0;

config_err:
	devm_kfree(dev, sg);
sg_err:
	dma_release_channel(dma->chan);
	return ret;
}

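/*
 * Stage 1 (DDR -> IRAM) is configured before stage 0 (IRAM -> DSP FIFO); on
 * failure the already-requested DMA channels are released again through the
 * config_err/params_err labels before returning the error.
 */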
static int sprd_platform_compr_set_params(struct snd_compr_stream *cstream,
					  struct snd_compr_params *params)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	struct sprd_compr_params compr_params = { };
	int ret;

	/*
	 * Configure the DMA engine 2-stage transfer mode. Channel 1 is set as
	 * the destination channel and channel 0 as the source channel, which
	 * means that once the source channel's transaction is done, it
	 * triggers the destination channel's transaction automatically.
	 */
	ret = sprd_platform_compr_dma_config(cstream, params, 1);
	if (ret) {
		dev_err(dev, "failed to config stage 1 DMA: %d\n", ret);
		return ret;
	}

	ret = sprd_platform_compr_dma_config(cstream, params, 0);
	if (ret) {
		dev_err(dev, "failed to config stage 0 DMA: %d\n", ret);
		goto config_err;
	}

	compr_params.direction = cstream->direction;
	compr_params.sample_rate = params->codec.sample_rate;
	compr_params.channels = stream->num_channels;
	compr_params.info_phys = stream->info_phys;
	compr_params.info_size = stream->info_size;
	compr_params.rate = params->codec.bit_rate;
	compr_params.format = params->codec.id;

	ret = stream->compr_ops->set_params(cstream->direction, &compr_params);
	if (ret) {
		dev_err(dev, "failed to set parameters: %d\n", ret);
		goto params_err;
	}

	return 0;

params_err:
	dma_release_channel(stream->dma[0].chan);
config_err:
	dma_release_channel(stream->dma[1].chan);
	return ret;
}

static int sprd_platform_compr_open(struct snd_compr_stream *cstream)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	struct sprd_compr_data *data = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0));
	struct sprd_compr_stream *stream;
	struct sprd_compr_callback cb;
	int stream_id = cstream->direction, ret;

	ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	stream = devm_kzalloc(dev, sizeof(*stream), GFP_KERNEL);
	if (!stream)
		return -ENOMEM;

	stream->cstream = cstream;
	stream->num_channels = 2;
	stream->compr_ops = data->ops;

	/*
	 * Allocate the stage 0 IRAM buffer, which also holds the DMA 0
	 * link-list configuration and the DSP play information.
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV_IRAM, dev,
				  SPRD_COMPR_IRAM_SIZE, &stream->iram_buffer);
	if (ret < 0)
		goto err_iram;

	/* Used to save the link-list configuration for DMA 0. */
	stream->dma[0].virt = stream->iram_buffer.area + SPRD_COMPR_IRAM_BUF_SIZE;
	stream->dma[0].phys = stream->iram_buffer.addr + SPRD_COMPR_IRAM_BUF_SIZE;

	/* Used by the DSP to report the current data offset. */
	stream->info_phys = stream->iram_buffer.addr + SPRD_COMPR_IRAM_BUF_SIZE +
		SPRD_COMPR_IRAM_LINKLIST_SIZE;
	stream->info_area = stream->iram_buffer.area + SPRD_COMPR_IRAM_BUF_SIZE +
		SPRD_COMPR_IRAM_LINKLIST_SIZE;
	stream->info_size = SPRD_COMPR_IRAM_INFO_SIZE;

	/*
	 * Allocate the stage 1 DDR buffer, which also holds the DMA 1
	 * link-list configuration.
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, dev,
				  SPRD_COMPR_AREA_SIZE, &stream->compr_buffer);
	if (ret < 0)
		goto err_compr;

	/* Used to save the link-list configuration for DMA 1. */
	stream->dma[1].virt = stream->compr_buffer.area + SPRD_COMPR_AREA_BUF_SIZE;
	stream->dma[1].phys = stream->compr_buffer.addr + SPRD_COMPR_AREA_BUF_SIZE;

	cb.drain_notify = sprd_platform_compr_drain_notify;
	cb.drain_data = cstream;
	ret = stream->compr_ops->open(stream_id, &cb);
	if (ret) {
		dev_err(dev, "failed to open compress platform: %d\n", ret);
		goto err_open;
	}

	runtime->private_data = stream;
	return 0;

err_open:
	snd_dma_free_pages(&stream->compr_buffer);
err_compr:
	snd_dma_free_pages(&stream->iram_buffer);
err_iram:
	devm_kfree(dev, stream);

	return ret;
}

static int sprd_platform_compr_free(struct snd_compr_stream *cstream)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	int stream_id = cstream->direction, i;

	for (i = 0; i < stream->num_channels; i++) {
		struct sprd_compr_dma *dma = &stream->dma[i];

		if (dma->chan) {
			dma_release_channel(dma->chan);
			dma->chan = NULL;
		}
	}

	snd_dma_free_pages(&stream->compr_buffer);
	snd_dma_free_pages(&stream->iram_buffer);

	stream->compr_ops->close(stream_id);

	devm_kfree(dev, stream);
	return 0;
}

static int sprd_platform_compr_trigger(struct snd_compr_stream *cstream,
				       int cmd)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	int channels = stream->num_channels, ret = 0, i;
	int stream_id = cstream->direction;

	if (cstream->direction != SND_COMPRESS_PLAYBACK) {
		dev_err(dev, "unsupported compress direction\n");
		return -EINVAL;
	}

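	/*
	 * The DMA channels are walked in reverse order below, so the stage 1
	 * (DDR -> IRAM) channel is submitted and issued before the stage 0
	 * (IRAM -> DSP FIFO) channel, presumably so that the hardware trigger
	 * chain is armed before the source channel starts draining IRAM.
	 */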
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (!dma->desc)
				continue;

			dma->cookie = dmaengine_submit(dma->desc);
			ret = dma_submit_error(dma->cookie);
			if (ret) {
				dev_err(dev, "failed to submit request: %d\n",
					ret);
				return ret;
			}
		}

		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dma_async_issue_pending(dma->chan);
		}

		ret = stream->compr_ops->start(stream_id);
		break;

	case SNDRV_PCM_TRIGGER_STOP:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dmaengine_terminate_async(dma->chan);
		}

		stream->copied_total = 0;
		stream->stage1_pointer = 0;
		stream->received_total = 0;
		stream->received_stage0 = 0;
		stream->received_stage1 = 0;

		ret = stream->compr_ops->stop(stream_id);
		break;

	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dmaengine_pause(dma->chan);
		}

		ret = stream->compr_ops->pause(stream_id);
		break;

	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dmaengine_resume(dma->chan);
		}

		ret = stream->compr_ops->pause_release(stream_id);
		break;

	case SND_COMPR_TRIGGER_PARTIAL_DRAIN:
	case SND_COMPR_TRIGGER_DRAIN:
		ret = stream->compr_ops->drain(stream->received_total);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	return ret;
}

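/*
 * copied_total reports how much data has reached the stage 0 IRAM buffer
 * (advanced from the copy() op and from the stage 1 DMA completion callback),
 * while pcm_io_frames is read back from the play information the DSP keeps
 * in IRAM.
 */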
static int sprd_platform_compr_pointer(struct snd_compr_stream *cstream,
				       struct snd_compr_tstamp *tstamp)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct sprd_compr_playinfo *info =
		(struct sprd_compr_playinfo *)stream->info_area;

	tstamp->copied_total = stream->copied_total;
	tstamp->pcm_io_frames = info->current_data_offset;

	return 0;
}

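/*
 * Userspace data is written to the stage 0 IRAM buffer first and then, once
 * that buffer is full, into the stage 1 DDR ring buffer. received_total
 * counts everything accepted from userspace, while copied_total only
 * advances when data actually reaches the IRAM buffer.
 */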
static int sprd_platform_compr_copy(struct snd_compr_stream *cstream,
				    char __user *buf, size_t count)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	int avail_bytes, data_count = count;
	void *dst;

	/*
	 * The fragment size is usually set to 32K, which matches the stage 0
	 * IRAM buffer size. So if the amount of data received into the stage
	 * 0 IRAM buffer is still less than one fragment, there is space left
	 * in the stage 0 IRAM buffer.
	 */
	if (stream->received_stage0 < runtime->fragment_size) {
		avail_bytes = runtime->fragment_size - stream->received_stage0;
		dst = stream->iram_buffer.area + stream->received_stage0;

		if (avail_bytes >= data_count) {
			/*
			 * Copy the data directly to the stage 0 IRAM buffer
			 * if there is enough space.
			 */
			if (copy_from_user(dst, buf, data_count))
				return -EFAULT;

			stream->received_stage0 += data_count;
			stream->copied_total += data_count;
			goto copy_done;
		} else {
			/*
			 * If the data count is larger than the space left in
			 * the stage 0 IRAM buffer, copy as much as fits into
			 * the stage 0 IRAM buffer and copy the rest to the
			 * stage 1 DDR buffer.
			 */
			if (copy_from_user(dst, buf, avail_bytes))
				return -EFAULT;

			data_count -= avail_bytes;
			stream->received_stage0 += avail_bytes;
			stream->copied_total += avail_bytes;
			buf += avail_bytes;
		}
	}

	/*
	 * Copy the remaining data to the stage 1 DDR buffer once there is no
	 * space left in the stage 0 IRAM buffer, wrapping around when the end
	 * of the DDR buffer is reached.
	 */
	dst = stream->compr_buffer.area + stream->stage1_pointer;
	if (data_count < stream->compr_buffer.bytes - stream->stage1_pointer) {
		if (copy_from_user(dst, buf, data_count))
			return -EFAULT;

		stream->stage1_pointer += data_count;
	} else {
		avail_bytes = stream->compr_buffer.bytes - stream->stage1_pointer;

		if (copy_from_user(dst, buf, avail_bytes))
			return -EFAULT;

		if (copy_from_user(stream->compr_buffer.area, buf + avail_bytes,
				   data_count - avail_bytes))
			return -EFAULT;

		stream->stage1_pointer = data_count - avail_bytes;
	}

	stream->received_stage1 += data_count;

copy_done:
	/* Update the total data size received from userspace. */
	stream->received_total += count;
	return count;
}

static int sprd_platform_compr_get_caps(struct snd_compr_stream *cstream,
					struct snd_compr_caps *caps)
{
	caps->direction = cstream->direction;
	caps->min_fragment_size = SPRD_COMPR_MIN_FRAGMENT_SIZE;
	caps->max_fragment_size = SPRD_COMPR_MAX_FRAGMENT_SIZE;
	caps->min_fragments = SPRD_COMPR_MIN_NUM_FRAGMENTS;
	caps->max_fragments = SPRD_COMPR_MAX_NUM_FRAGMENTS;
	caps->num_codecs = 2;
	caps->codecs[0] = SND_AUDIOCODEC_MP3;
	caps->codecs[1] = SND_AUDIOCODEC_AAC;

	return 0;
}

static int
sprd_platform_compr_get_codec_caps(struct snd_compr_stream *cstream,
				   struct snd_compr_codec_caps *codec)
{
	switch (codec->codec) {
	case SND_AUDIOCODEC_MP3:
		codec->num_descriptors = 2;
		codec->descriptor[0].max_ch = 2;
		codec->descriptor[0].bit_rate[0] = 320;
		codec->descriptor[0].bit_rate[1] = 128;
		codec->descriptor[0].num_bitrates = 2;
		codec->descriptor[0].profiles = 0;
		codec->descriptor[0].modes = SND_AUDIOCHANMODE_MP3_STEREO;
		codec->descriptor[0].formats = 0;
		break;

	case SND_AUDIOCODEC_AAC:
		codec->num_descriptors = 2;
		codec->descriptor[1].max_ch = 2;
		codec->descriptor[1].bit_rate[0] = 320;
		codec->descriptor[1].bit_rate[1] = 128;
		codec->descriptor[1].num_bitrates = 2;
		codec->descriptor[1].profiles = 0;
		codec->descriptor[1].modes = 0;
		codec->descriptor[1].formats = 0;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

const struct snd_compr_ops sprd_platform_compr_ops = {
	.open = sprd_platform_compr_open,
	.free = sprd_platform_compr_free,
	.set_params = sprd_platform_compr_set_params,
	.trigger = sprd_platform_compr_trigger,
	.pointer = sprd_platform_compr_pointer,
	.copy = sprd_platform_compr_copy,
	.get_caps = sprd_platform_compr_get_caps,
	.get_codec_caps = sprd_platform_compr_get_codec_caps,
};

MODULE_DESCRIPTION("Spreadtrum ASoC Compress Platform Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:compress-platform");