Orange Pi 5 kernel

Deprecated Linux kernel 5.10.110 for OrangePi 5/5B/5+ boards

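The file below is the Spreadtrum ASoC compress offload platform driver (upstream path: sound/soc/sprd/sprd-pcm-compress.c). Per the blame information in this tree, every line was last touched by commit 8f3ce5b39 (kx, 2023-10-28).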
// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2019 Spreadtrum Communications Inc.

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dma/sprd-dma.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>
#include <sound/compress_driver.h>

#include "sprd-pcm-dma.h"

#define SPRD_COMPR_DMA_CHANS		2

/* Default values if userspace does not set them */
#define SPRD_COMPR_MIN_FRAGMENT_SIZE	SZ_8K
#define SPRD_COMPR_MAX_FRAGMENT_SIZE	SZ_128K
#define SPRD_COMPR_MIN_NUM_FRAGMENTS	4
#define SPRD_COMPR_MAX_NUM_FRAGMENTS	64

/* DSP FIFO size */
#define SPRD_COMPR_MCDT_EMPTY_WMK	0
#define SPRD_COMPR_MCDT_FIFO_SIZE	512
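/*
 * The MCDT (multi-channel data transfer) block provides the hardware FIFO
 * between the AP and the audio DSP; the stage 0 DMA channel feeds it.
 */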

/* Stage 0 IRAM buffer size definition */
#define SPRD_COMPR_IRAM_BUF_SIZE	SZ_32K
#define SPRD_COMPR_IRAM_INFO_SIZE	(sizeof(struct sprd_compr_playinfo))
#define SPRD_COMPR_IRAM_LINKLIST_SIZE	(1024 - SPRD_COMPR_IRAM_INFO_SIZE)
#define SPRD_COMPR_IRAM_SIZE		(SPRD_COMPR_IRAM_BUF_SIZE + \
					 SPRD_COMPR_IRAM_INFO_SIZE + \
					 SPRD_COMPR_IRAM_LINKLIST_SIZE)

/* Stage 1 DDR buffer size definition */
#define SPRD_COMPR_AREA_BUF_SIZE	SZ_2M
#define SPRD_COMPR_AREA_LINKLIST_SIZE	1024
#define SPRD_COMPR_AREA_SIZE		(SPRD_COMPR_AREA_BUF_SIZE + \
					 SPRD_COMPR_AREA_LINKLIST_SIZE)

struct sprd_compr_dma {
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	dma_addr_t phys;
	void *virt;
	int trans_len;
};

/*
 * The Spreadtrum audio compress offload mode uses a 2-stage DMA transfer to
 * save power. We request two DMA channels: one source channel and one
 * destination channel. Once the source channel's transaction is done, it
 * triggers the destination channel's transaction automatically by a hardware
 * signal.
 *
 * For the 2-stage DMA transfer we allocate two buffers: an IRAM buffer
 * (always powered on) and a DDR buffer. The source channel transfers data
 * from the IRAM buffer to the DSP FIFO for decoding/encoding; once the IRAM
 * buffer has been drained by that transfer, the destination channel starts
 * transferring data from the DDR buffer to the IRAM buffer.
 *
 * Since the DSP FIFO is only 512 bytes, the IRAM buffer is allocated as 32K,
 * while the DDR buffer is much larger at 2M. The AP system only needs to wake
 * up once the 32K of IRAM data has been consumed, to refill IRAM from DDR;
 * the rest of the time it can stay suspended to save power.
 */
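/*
 * A rough sketch of the playback data path described above:
 *
 *   userspace --copy()--> DDR buffer (2M) --DMA 1--> IRAM buffer (32K)
 *                                                         |
 *                                                       DMA 0
 *                                                         v
 *                                                    DSP FIFO (512B)
 *
 * (The first fragment's worth of data is written by copy() straight into the
 * IRAM buffer; see sprd_platform_compr_copy() below.)
 */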
struct sprd_compr_stream {
	struct snd_compr_stream *cstream;
	struct sprd_compr_ops *compr_ops;
	struct sprd_compr_dma dma[SPRD_COMPR_DMA_CHANS];

	/* DMA engine channel number */
	int num_channels;

	/* Stage 0 IRAM buffer */
	struct snd_dma_buffer iram_buffer;
	/* Stage 1 DDR buffer */
	struct snd_dma_buffer compr_buffer;

	/* DSP play information IRAM buffer */
	dma_addr_t info_phys;
	void *info_area;
	int info_size;

	/* Data size copied to IRAM buffer */
	int copied_total;
	/* Total received data size from userspace */
	int received_total;
	/* Stage 0 IRAM buffer received data size */
	int received_stage0;
	/* Stage 1 DDR buffer received data size */
	int received_stage1;
	/* Stage 1 DDR buffer pointer */
	int stage1_pointer;
};

static int sprd_platform_compr_trigger(struct snd_soc_component *component,
				       struct snd_compr_stream *cstream,
				       int cmd);

static void sprd_platform_compr_drain_notify(void *arg)
{
	struct snd_compr_stream *cstream = arg;
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;

	memset(stream->info_area, 0, sizeof(struct sprd_compr_playinfo));

	snd_compr_drain_notify(cstream);
}

static void sprd_platform_compr_dma_complete(void *data)
{
	struct snd_compr_stream *cstream = data;
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct sprd_compr_dma *dma = &stream->dma[1];

	/* Update data size copied to IRAM buffer */
	stream->copied_total += dma->trans_len;
	if (stream->copied_total > stream->received_total)
		stream->copied_total = stream->received_total;

	snd_compr_fragment_elapsed(cstream);
}

static int sprd_platform_compr_dma_config(struct snd_soc_component *component,
					  struct snd_compr_stream *cstream,
					  struct snd_compr_params *params,
					  int channel)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct device *dev = component->dev;
	struct sprd_compr_data *data = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0));
	struct sprd_pcm_dma_params *dma_params = data->dma_params;
	struct sprd_compr_dma *dma = &stream->dma[channel];
	struct dma_slave_config config = { };
	struct sprd_dma_linklist link = { };
	enum dma_transfer_direction dir;
	struct scatterlist *sg, *sgt;
	enum dma_slave_buswidth bus_width;
	int period, period_cnt, sg_num = 2;
	dma_addr_t src_addr, dst_addr;
	unsigned long flags;
	int ret, j;

	if (!dma_params) {
		dev_err(dev, "no dma parameters setting\n");
		return -EINVAL;
	}

	dma->chan = dma_request_slave_channel(dev,
					      dma_params->chan_name[channel]);
	if (!dma->chan) {
		dev_err(dev, "failed to request dma channel\n");
		return -ENODEV;
	}

	sgt = sg = devm_kcalloc(dev, sg_num, sizeof(*sg), GFP_KERNEL);
	if (!sg) {
		ret = -ENOMEM;
		goto sg_err;
	}

	switch (channel) {
	case 0:
		bus_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		period = (SPRD_COMPR_MCDT_FIFO_SIZE - SPRD_COMPR_MCDT_EMPTY_WMK) * 4;
		period_cnt = params->buffer.fragment_size / period;
		src_addr = stream->iram_buffer.addr;
		dst_addr = dma_params->dev_phys[channel];
		flags = SPRD_DMA_FLAGS(SPRD_DMA_SRC_CHN1,
				       SPRD_DMA_TRANS_DONE_TRG,
				       SPRD_DMA_FRAG_REQ,
				       SPRD_DMA_TRANS_INT);
		break;

	case 1:
		bus_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		period = params->buffer.fragment_size;
		period_cnt = params->buffer.fragments;
		src_addr = stream->compr_buffer.addr;
		dst_addr = stream->iram_buffer.addr;
		flags = SPRD_DMA_FLAGS(SPRD_DMA_DST_CHN1,
				       SPRD_DMA_TRANS_DONE_TRG,
				       SPRD_DMA_FRAG_REQ,
				       SPRD_DMA_TRANS_INT);
		break;

	default:
		ret = -EINVAL;
		goto config_err;
	}

	dma->trans_len = period * period_cnt;

	config.src_maxburst = period;
	config.src_addr_width = bus_width;
	config.dst_addr_width = bus_width;
	if (cstream->direction == SND_COMPRESS_PLAYBACK) {
		config.src_addr = src_addr;
		config.dst_addr = dst_addr;
		dir = DMA_MEM_TO_DEV;
	} else {
		config.src_addr = dst_addr;
		config.dst_addr = src_addr;
		dir = DMA_DEV_TO_MEM;
	}

	sg_init_table(sgt, sg_num);
	for (j = 0; j < sg_num; j++, sgt++) {
		sg_dma_len(sgt) = dma->trans_len;
		sg_dma_address(sgt) = dst_addr;
	}
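	/*
	 * Note: this scatterlist only carries the transfer length and
	 * destination address into the prep_slave_sg() call below, which is
	 * invoked directly (rather than through dmaengine_prep_slave_sg()) so
	 * the Spreadtrum link-list descriptor can be passed as context; the
	 * scatterlist is freed again once the descriptor has been prepared.
	 */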

	/*
	 * Configure the link-list address for the DMA engine link-list
	 * mode.
	 */
	link.virt_addr = (unsigned long)dma->virt;
	link.phy_addr = dma->phys;

	ret = dmaengine_slave_config(dma->chan, &config);
	if (ret) {
		dev_err(dev,
			"failed to set slave configuration: %d\n", ret);
		goto config_err;
	}

	/*
	 * The flags select the DMA request mode, interrupt mode, channel
	 * mode and channel trigger mode.
	 */
	dma->desc = dma->chan->device->device_prep_slave_sg(dma->chan, sg,
							    sg_num, dir,
							    flags, &link);
	if (!dma->desc) {
		dev_err(dev, "failed to prepare slave sg\n");
		ret = -ENOMEM;
		goto config_err;
	}

	/* Only channel 1 transfer can wake up the AP system. */
	if (!params->no_wake_mode && channel == 1) {
		dma->desc->callback = sprd_platform_compr_dma_complete;
		dma->desc->callback_param = cstream;
	}

	devm_kfree(dev, sg);

	return 0;

config_err:
	devm_kfree(dev, sg);
sg_err:
	dma_release_channel(dma->chan);
	return ret;
}

static int sprd_platform_compr_set_params(struct snd_soc_component *component,
					  struct snd_compr_stream *cstream,
					  struct snd_compr_params *params)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct device *dev = component->dev;
	struct sprd_compr_params compr_params = { };
	int ret;

	/*
	 * Configure the DMA engine 2-stage transfer mode: channel 1 is set as
	 * the destination channel and channel 0 as the source channel, so
	 * once the source channel's transaction is done, it triggers the
	 * destination channel's transaction automatically.
	 */
	ret = sprd_platform_compr_dma_config(component, cstream, params, 1);
	if (ret) {
		dev_err(dev, "failed to config stage 1 DMA: %d\n", ret);
		return ret;
	}

	ret = sprd_platform_compr_dma_config(component, cstream, params, 0);
	if (ret) {
		dev_err(dev, "failed to config stage 0 DMA: %d\n", ret);
		goto config_err;
	}

	compr_params.direction = cstream->direction;
	compr_params.sample_rate = params->codec.sample_rate;
	compr_params.channels = stream->num_channels;
	compr_params.info_phys = stream->info_phys;
	compr_params.info_size = stream->info_size;
	compr_params.rate = params->codec.bit_rate;
	compr_params.format = params->codec.id;

	ret = stream->compr_ops->set_params(cstream->direction, &compr_params);
	if (ret) {
		dev_err(dev, "failed to set parameters: %d\n", ret);
		goto params_err;
	}

	return 0;

params_err:
	dma_release_channel(stream->dma[0].chan);
config_err:
	dma_release_channel(stream->dma[1].chan);
	return ret;
}

static int sprd_platform_compr_open(struct snd_soc_component *component,
				    struct snd_compr_stream *cstream)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct snd_soc_pcm_runtime *rtd = cstream->private_data;
	struct device *dev = component->dev;
	struct sprd_compr_data *data = snd_soc_dai_get_drvdata(asoc_rtd_to_cpu(rtd, 0));
	struct sprd_compr_stream *stream;
	struct sprd_compr_callback cb;
	int stream_id = cstream->direction, ret;

	ret = dma_coerce_mask_and_coherent(dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	stream = devm_kzalloc(dev, sizeof(*stream), GFP_KERNEL);
	if (!stream)
		return -ENOMEM;

	stream->cstream = cstream;
	stream->num_channels = 2;
	stream->compr_ops = data->ops;

	/*
	 * Allocate the stage 0 IRAM buffer, including room for the DMA 0
	 * link-list configuration and the DSP play information.
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV_IRAM, dev,
				  SPRD_COMPR_IRAM_SIZE, &stream->iram_buffer);
	if (ret < 0)
		goto err_iram;

	/* Used to store the link-list configuration for DMA 0. */
	stream->dma[0].virt = stream->iram_buffer.area + SPRD_COMPR_IRAM_SIZE;
	stream->dma[0].phys = stream->iram_buffer.addr + SPRD_COMPR_IRAM_SIZE;

	/* Used to track the DSP's current data offset. */
	stream->info_phys = stream->iram_buffer.addr + SPRD_COMPR_IRAM_SIZE +
		SPRD_COMPR_IRAM_LINKLIST_SIZE;
	stream->info_area = stream->iram_buffer.area + SPRD_COMPR_IRAM_SIZE +
		SPRD_COMPR_IRAM_LINKLIST_SIZE;
	stream->info_size = SPRD_COMPR_IRAM_INFO_SIZE;
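	/*
	 * The play information area is shared with the DSP: it is cleared in
	 * the drain notification and read back in the pointer() callback to
	 * report the DSP's current data offset.
	 */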

	/*
	 * Allocate the stage 1 DDR buffer, including room for the DMA 1
	 * link-list configuration.
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, dev,
				  SPRD_COMPR_AREA_SIZE, &stream->compr_buffer);
	if (ret < 0)
		goto err_compr;

	/* Used to store the link-list configuration for DMA 1. */
	stream->dma[1].virt = stream->compr_buffer.area + SPRD_COMPR_AREA_SIZE;
	stream->dma[1].phys = stream->compr_buffer.addr + SPRD_COMPR_AREA_SIZE;

	cb.drain_notify = sprd_platform_compr_drain_notify;
	cb.drain_data = cstream;
	ret = stream->compr_ops->open(stream_id, &cb);
	if (ret) {
		dev_err(dev, "failed to open compress platform: %d\n", ret);
		goto err_open;
	}

	runtime->private_data = stream;
	return 0;

err_open:
	snd_dma_free_pages(&stream->compr_buffer);
err_compr:
	snd_dma_free_pages(&stream->iram_buffer);
err_iram:
	devm_kfree(dev, stream);

	return ret;
}

static int sprd_platform_compr_free(struct snd_soc_component *component,
				    struct snd_compr_stream *cstream)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct device *dev = component->dev;
	int stream_id = cstream->direction, i;

	for (i = 0; i < stream->num_channels; i++) {
		struct sprd_compr_dma *dma = &stream->dma[i];

		if (dma->chan) {
			dma_release_channel(dma->chan);
			dma->chan = NULL;
		}
	}

	snd_dma_free_pages(&stream->compr_buffer);
	snd_dma_free_pages(&stream->iram_buffer);

	stream->compr_ops->close(stream_id);

	devm_kfree(dev, stream);
	return 0;
}

static int sprd_platform_compr_trigger(struct snd_soc_component *component,
				       struct snd_compr_stream *cstream,
				       int cmd)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct device *dev = component->dev;
	int channels = stream->num_channels, ret = 0, i;
	int stream_id = cstream->direction;

	if (cstream->direction != SND_COMPRESS_PLAYBACK) {
		dev_err(dev, "unsupported compress direction\n");
		return -EINVAL;
	}

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (!dma->desc)
				continue;

			dma->cookie = dmaengine_submit(dma->desc);
			ret = dma_submit_error(dma->cookie);
			if (ret) {
				dev_err(dev, "failed to submit request: %d\n",
					ret);
				return ret;
			}
		}

		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dma_async_issue_pending(dma->chan);
		}

		ret = stream->compr_ops->start(stream_id);
		break;

	case SNDRV_PCM_TRIGGER_STOP:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dmaengine_terminate_async(dma->chan);
		}

		stream->copied_total = 0;
		stream->stage1_pointer = 0;
		stream->received_total = 0;
		stream->received_stage0 = 0;
		stream->received_stage1 = 0;

		ret = stream->compr_ops->stop(stream_id);
		break;

	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dmaengine_pause(dma->chan);
		}

		ret = stream->compr_ops->pause(stream_id);
		break;

	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		for (i = channels - 1; i >= 0; i--) {
			struct sprd_compr_dma *dma = &stream->dma[i];

			if (dma->chan)
				dmaengine_resume(dma->chan);
		}

		ret = stream->compr_ops->pause_release(stream_id);
		break;

	case SND_COMPR_TRIGGER_PARTIAL_DRAIN:
	case SND_COMPR_TRIGGER_DRAIN:
		ret = stream->compr_ops->drain(stream->received_total);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	return ret;
}

static int sprd_platform_compr_pointer(struct snd_soc_component *component,
				       struct snd_compr_stream *cstream,
				       struct snd_compr_tstamp *tstamp)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	struct sprd_compr_playinfo *info =
		(struct sprd_compr_playinfo *)stream->info_area;

	tstamp->copied_total = stream->copied_total;
	tstamp->pcm_io_frames = info->current_data_offset;

	return 0;
}

static int sprd_platform_compr_copy(struct snd_soc_component *component,
				    struct snd_compr_stream *cstream,
				    char __user *buf, size_t count)
{
	struct snd_compr_runtime *runtime = cstream->runtime;
	struct sprd_compr_stream *stream = runtime->private_data;
	int avail_bytes, data_count = count;
	void *dst;

	/*
	 * We usually set the fragment size to 32K, and the stage 0 IRAM
	 * buffer size is 32K too. So if the amount of data already received
	 * into the stage 0 IRAM buffer is less than 32K, there is still room
	 * left in the stage 0 IRAM buffer.
	 */
	if (stream->received_stage0 < runtime->fragment_size) {
		avail_bytes = runtime->fragment_size - stream->received_stage0;
		dst = stream->iram_buffer.area + stream->received_stage0;

		if (avail_bytes >= data_count) {
			/*
			 * Copy the data straight into the stage 0 IRAM buffer
			 * if there is enough room.
			 */
			if (copy_from_user(dst, buf, data_count))
				return -EFAULT;

			stream->received_stage0 += data_count;
			stream->copied_total += data_count;
			goto copy_done;
		} else {
			/*
			 * If the data count is larger than the room left in
			 * the stage 0 IRAM buffer, copy as much as fits into
			 * the stage 0 IRAM buffer and the rest into the
			 * stage 1 DDR buffer.
			 */
			if (copy_from_user(dst, buf, avail_bytes))
				return -EFAULT;

			data_count -= avail_bytes;
			stream->received_stage0 += avail_bytes;
			stream->copied_total += avail_bytes;
			buf += avail_bytes;
		}
	}

	/*
	 * Copy the data to the stage 1 DDR buffer if there is no room left in
	 * the stage 0 IRAM buffer.
	 */
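	/*
	 * The stage 1 DDR buffer is used as a ring buffer: when a write
	 * reaches the end of compr_buffer, the remaining bytes wrap around to
	 * the start of the buffer and stage1_pointer is updated accordingly.
	 */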
	dst = stream->compr_buffer.area + stream->stage1_pointer;
	if (data_count < stream->compr_buffer.bytes - stream->stage1_pointer) {
		if (copy_from_user(dst, buf, data_count))
			return -EFAULT;

		stream->stage1_pointer += data_count;
	} else {
		avail_bytes = stream->compr_buffer.bytes - stream->stage1_pointer;

		if (copy_from_user(dst, buf, avail_bytes))
			return -EFAULT;

		if (copy_from_user(stream->compr_buffer.area, buf + avail_bytes,
				   data_count - avail_bytes))
			return -EFAULT;

		stream->stage1_pointer = data_count - avail_bytes;
	}

	stream->received_stage1 += data_count;

copy_done:
	/* Update the total received data size. */
	stream->received_total += count;
	return count;
}

static int sprd_platform_compr_get_caps(struct snd_soc_component *component,
					struct snd_compr_stream *cstream,
					struct snd_compr_caps *caps)
{
	caps->direction = cstream->direction;
	caps->min_fragment_size = SPRD_COMPR_MIN_FRAGMENT_SIZE;
	caps->max_fragment_size = SPRD_COMPR_MAX_FRAGMENT_SIZE;
	caps->min_fragments = SPRD_COMPR_MIN_NUM_FRAGMENTS;
	caps->max_fragments = SPRD_COMPR_MAX_NUM_FRAGMENTS;
	caps->num_codecs = 2;
	caps->codecs[0] = SND_AUDIOCODEC_MP3;
	caps->codecs[1] = SND_AUDIOCODEC_AAC;

	return 0;
}

static int
sprd_platform_compr_get_codec_caps(struct snd_soc_component *component,
				   struct snd_compr_stream *cstream,
				   struct snd_compr_codec_caps *codec)
{
	switch (codec->codec) {
	case SND_AUDIOCODEC_MP3:
		codec->num_descriptors = 2;
		codec->descriptor[0].max_ch = 2;
		codec->descriptor[0].bit_rate[0] = 320;
		codec->descriptor[0].bit_rate[1] = 128;
		codec->descriptor[0].num_bitrates = 2;
		codec->descriptor[0].profiles = 0;
		codec->descriptor[0].modes = SND_AUDIOCHANMODE_MP3_STEREO;
		codec->descriptor[0].formats = 0;
		break;

	case SND_AUDIOCODEC_AAC:
		codec->num_descriptors = 2;
		codec->descriptor[1].max_ch = 2;
		codec->descriptor[1].bit_rate[0] = 320;
		codec->descriptor[1].bit_rate[1] = 128;
		codec->descriptor[1].num_bitrates = 2;
		codec->descriptor[1].profiles = 0;
		codec->descriptor[1].modes = 0;
		codec->descriptor[1].formats = 0;
		break;

	default:
		return -EINVAL;
	}

	return 0;
}

const struct snd_compress_ops sprd_platform_compress_ops = {
	.open = sprd_platform_compr_open,
	.free = sprd_platform_compr_free,
	.set_params = sprd_platform_compr_set_params,
	.trigger = sprd_platform_compr_trigger,
	.pointer = sprd_platform_compr_pointer,
	.copy = sprd_platform_compr_copy,
	.get_caps = sprd_platform_compr_get_caps,
	.get_codec_caps = sprd_platform_compr_get_codec_caps,
};

MODULE_DESCRIPTION("Spreadtrum ASoC Compress Platform Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:compress-platform");
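For reference: in the upstream 5.10 sources, sprd_platform_compress_ops is declared in sprd-pcm-dma.h and registered by the Spreadtrum platform component driver (sprd-pcm-dma.c), which points snd_soc_component_driver.compress_ops at it; the same wiring is expected in this tree.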