xref: /openbmc/linux/drivers/crypto/qce/dma.c (revision d6364b81)
197fb5e8dSThomas Gleixner // SPDX-License-Identifier: GPL-2.0-only
2ec8f5d8fSStanimir Varbanov /*
3ec8f5d8fSStanimir Varbanov  * Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
4ec8f5d8fSStanimir Varbanov  */
5ec8f5d8fSStanimir Varbanov 
6ec8f5d8fSStanimir Varbanov #include <linux/dmaengine.h>
7ec8f5d8fSStanimir Varbanov #include <crypto/scatterwalk.h>
8ec8f5d8fSStanimir Varbanov 
9ec8f5d8fSStanimir Varbanov #include "dma.h"
10ec8f5d8fSStanimir Varbanov 
qce_dma_request(struct device * dev,struct qce_dma_data * dma)11ec8f5d8fSStanimir Varbanov int qce_dma_request(struct device *dev, struct qce_dma_data *dma)
12ec8f5d8fSStanimir Varbanov {
13ec8f5d8fSStanimir Varbanov 	int ret;
14ec8f5d8fSStanimir Varbanov 
153e8b4ccdSPeter Ujfalusi 	dma->txchan = dma_request_chan(dev, "tx");
16ec8f5d8fSStanimir Varbanov 	if (IS_ERR(dma->txchan))
17ec8f5d8fSStanimir Varbanov 		return PTR_ERR(dma->txchan);
18ec8f5d8fSStanimir Varbanov 
193e8b4ccdSPeter Ujfalusi 	dma->rxchan = dma_request_chan(dev, "rx");
20ec8f5d8fSStanimir Varbanov 	if (IS_ERR(dma->rxchan)) {
21ec8f5d8fSStanimir Varbanov 		ret = PTR_ERR(dma->rxchan);
22ec8f5d8fSStanimir Varbanov 		goto error_rx;
23ec8f5d8fSStanimir Varbanov 	}
24ec8f5d8fSStanimir Varbanov 
25ec8f5d8fSStanimir Varbanov 	dma->result_buf = kmalloc(QCE_RESULT_BUF_SZ + QCE_IGNORE_BUF_SZ,
26ec8f5d8fSStanimir Varbanov 				  GFP_KERNEL);
27ec8f5d8fSStanimir Varbanov 	if (!dma->result_buf) {
28ec8f5d8fSStanimir Varbanov 		ret = -ENOMEM;
29ec8f5d8fSStanimir Varbanov 		goto error_nomem;
30ec8f5d8fSStanimir Varbanov 	}
31ec8f5d8fSStanimir Varbanov 
32ec8f5d8fSStanimir Varbanov 	dma->ignore_buf = dma->result_buf + QCE_RESULT_BUF_SZ;
33ec8f5d8fSStanimir Varbanov 
34ec8f5d8fSStanimir Varbanov 	return 0;
35ec8f5d8fSStanimir Varbanov error_nomem:
36ec8f5d8fSStanimir Varbanov 	dma_release_channel(dma->rxchan);
37ec8f5d8fSStanimir Varbanov error_rx:
38ec8f5d8fSStanimir Varbanov 	dma_release_channel(dma->txchan);
39ec8f5d8fSStanimir Varbanov 	return ret;
40ec8f5d8fSStanimir Varbanov }
41ec8f5d8fSStanimir Varbanov 
/*
 * Undo qce_dma_request(): release both DMA channels and free the
 * result buffer (ignore_buf points into the same allocation, so only
 * result_buf is freed).
 */
void qce_dma_release(struct qce_dma_data *dma)
{
	dma_release_channel(dma->txchan);
	dma_release_channel(dma->rxchan);
	kfree(dma->result_buf);
}
48ec8f5d8fSStanimir Varbanov 
49ec8f5d8fSStanimir Varbanov struct scatterlist *
qce_sgtable_add(struct sg_table * sgt,struct scatterlist * new_sgl,unsigned int max_len)503ee50c89SEneas U de Queiroz qce_sgtable_add(struct sg_table *sgt, struct scatterlist *new_sgl,
51d6364b81SEneas U de Queiroz 		unsigned int max_len)
52ec8f5d8fSStanimir Varbanov {
53ec8f5d8fSStanimir Varbanov 	struct scatterlist *sg = sgt->sgl, *sg_last = NULL;
54d6364b81SEneas U de Queiroz 	unsigned int new_len;
55ec8f5d8fSStanimir Varbanov 
56ec8f5d8fSStanimir Varbanov 	while (sg) {
57ec8f5d8fSStanimir Varbanov 		if (!sg_page(sg))
58ec8f5d8fSStanimir Varbanov 			break;
59ec8f5d8fSStanimir Varbanov 		sg = sg_next(sg);
60ec8f5d8fSStanimir Varbanov 	}
61ec8f5d8fSStanimir Varbanov 
62ec8f5d8fSStanimir Varbanov 	if (!sg)
63ec8f5d8fSStanimir Varbanov 		return ERR_PTR(-EINVAL);
64ec8f5d8fSStanimir Varbanov 
65d6364b81SEneas U de Queiroz 	while (new_sgl && sg && max_len) {
66d6364b81SEneas U de Queiroz 		new_len = new_sgl->length > max_len ? max_len : new_sgl->length;
67d6364b81SEneas U de Queiroz 		sg_set_page(sg, sg_page(new_sgl), new_len, new_sgl->offset);
68ec8f5d8fSStanimir Varbanov 		sg_last = sg;
69ec8f5d8fSStanimir Varbanov 		sg = sg_next(sg);
70ec8f5d8fSStanimir Varbanov 		new_sgl = sg_next(new_sgl);
71d6364b81SEneas U de Queiroz 		max_len -= new_len;
72ec8f5d8fSStanimir Varbanov 	}
73ec8f5d8fSStanimir Varbanov 
74ec8f5d8fSStanimir Varbanov 	return sg_last;
75ec8f5d8fSStanimir Varbanov }
76ec8f5d8fSStanimir Varbanov 
/*
 * Prepare and submit a slave-sg transfer on @chan.
 *
 * @cb/@cb_param: completion callback wired into the descriptor (may be
 * NULL). Returns 0 on successful submission, -EINVAL for an empty
 * scatterlist or a failed descriptor prep, or the dmaengine submit error.
 */
static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg,
			   int nents, unsigned long flags,
			   enum dma_transfer_direction dir,
			   dma_async_tx_callback cb, void *cb_param)
{
	struct dma_async_tx_descriptor *desc;

	if (!sg || !nents)
		return -EINVAL;

	desc = dmaengine_prep_slave_sg(chan, sg, nents, dir, flags);
	if (!desc)
		return -EINVAL;

	desc->callback = cb;
	desc->callback_param = cb_param;

	return dma_submit_error(dmaengine_submit(desc));
}
98ec8f5d8fSStanimir Varbanov 
/*
 * Queue both halves of a crypto transfer: the rx channel pushes input
 * data into the engine (DMA_MEM_TO_DEV) and the tx channel pulls results
 * back out (DMA_DEV_TO_MEM). The completion callback is attached only to
 * the tx (result) transfer.
 *
 * Returns 0 on success or a negative errno from qce_dma_prep_sg().
 */
int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *rx_sg,
		     int rx_nents, struct scatterlist *tx_sg, int tx_nents,
		     dma_async_tx_callback cb, void *cb_param)
{
	unsigned long flags = DMA_PREP_INTERRUPT | DMA_CTRL_ACK;
	int ret;

	ret = qce_dma_prep_sg(dma->rxchan, rx_sg, rx_nents, flags,
			      DMA_MEM_TO_DEV, NULL, NULL);
	if (ret)
		return ret;

	return qce_dma_prep_sg(dma->txchan, tx_sg, tx_nents, flags,
			       DMA_DEV_TO_MEM, cb, cb_param);
}
116ec8f5d8fSStanimir Varbanov 
/* Kick both channels so previously submitted descriptors start running. */
void qce_dma_issue_pending(struct qce_dma_data *dma)
{
	dma_async_issue_pending(dma->rxchan);
	dma_async_issue_pending(dma->txchan);
}
122ec8f5d8fSStanimir Varbanov 
qce_dma_terminate_all(struct qce_dma_data * dma)123ec8f5d8fSStanimir Varbanov int qce_dma_terminate_all(struct qce_dma_data *dma)
124ec8f5d8fSStanimir Varbanov {
125ec8f5d8fSStanimir Varbanov 	int ret;
126ec8f5d8fSStanimir Varbanov 
127ec8f5d8fSStanimir Varbanov 	ret = dmaengine_terminate_all(dma->rxchan);
128ec8f5d8fSStanimir Varbanov 	return ret ?: dmaengine_terminate_all(dma->txchan);
129ec8f5d8fSStanimir Varbanov }
130