// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/dma-mapping.h>
#include <linux/spinlock.h>

#include "safexcel.h"

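/*
 * Allocate DMA-coherent memory for the command (CDR) and result (RDR)
 * descriptor rings of one ring pair and reset their read/write pointers.
 * The per-descriptor stride is taken from the probed configuration, which
 * expresses it in 32-bit words.  Both allocations are device-managed, so
 * no explicit free is needed on teardown.
 */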
int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv,
				   struct safexcel_desc_ring *cdr,
				   struct safexcel_desc_ring *rdr)
{
	cdr->offset = sizeof(u32) * priv->config.cd_offset;
	cdr->base = dmam_alloc_coherent(priv->dev,
					cdr->offset * EIP197_DEFAULT_RING_SIZE,
					&cdr->base_dma, GFP_KERNEL);
	if (!cdr->base)
		return -ENOMEM;
	cdr->write = cdr->base;
	cdr->base_end = cdr->base + cdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
	cdr->read = cdr->base;

	rdr->offset = sizeof(u32) * priv->config.rd_offset;
	rdr->base = dmam_alloc_coherent(priv->dev,
					rdr->offset * EIP197_DEFAULT_RING_SIZE,
					&rdr->base_dma, GFP_KERNEL);
	if (!rdr->base)
		return -ENOMEM;
	rdr->write = rdr->base;
	rdr->base_end = rdr->base + rdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
	rdr->read = rdr->base;

	return 0;
}

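/* Distribute requests over the available rings in round-robin fashion. */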
inline int safexcel_select_ring(struct safexcel_crypto_priv *priv)
{
	return (atomic_inc_return(&priv->ring_used) % priv->config.rings);
}

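/*
 * Reserve the slot under the write pointer and advance it, wrapping at the
 * end of the ring.  One slot is always kept free so that a full ring (write
 * pointer one slot behind the read pointer) can be distinguished from an
 * empty one (write == read); -ENOMEM is returned when the ring is full.
 */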
static void *safexcel_ring_next_wptr(struct safexcel_crypto_priv *priv,
				     struct safexcel_desc_ring *ring)
{
	void *ptr = ring->write;

	if ((ring->write == ring->read - ring->offset) ||
	    (ring->read == ring->base && ring->write == ring->base_end))
		return ERR_PTR(-ENOMEM);

	if (ring->write == ring->base_end)
		ring->write = ring->base;
	else
		ring->write += ring->offset;

	return ptr;
}

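/*
 * Return the descriptor under the read pointer and advance it, wrapping at
 * the end of the ring.  -ENOENT is returned when the ring is empty.
 */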
void *safexcel_ring_next_rptr(struct safexcel_crypto_priv *priv,
			      struct safexcel_desc_ring *ring)
{
	void *ptr = ring->read;

	if (ring->write == ring->read)
		return ERR_PTR(-ENOENT);

	if (ring->read == ring->base_end)
		ring->read = ring->base;
	else
		ring->read += ring->offset;

	return ptr;
}

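/* Peek at the result descriptor under the read pointer, without advancing. */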
inline void *safexcel_ring_curr_rptr(struct safexcel_crypto_priv *priv,
				     int ring)
{
	struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;

	return rdr->read;
}

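/* Index of the result descriptor currently under the read pointer. */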
inline int safexcel_ring_first_rdr_index(struct safexcel_crypto_priv *priv,
					 int ring)
{
	struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;

	return (rdr->read - rdr->base) / rdr->offset;
}

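/* Index of the given result descriptor within its ring. */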
inline int safexcel_ring_rdr_rdesc_index(struct safexcel_crypto_priv *priv,
					 int ring,
					 struct safexcel_result_desc *rdesc)
{
	struct safexcel_desc_ring *rdr = &priv->ring[ring].rdr;

	return ((void *)rdesc - rdr->base) / rdr->offset;
}

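/*
 * Undo the last write pointer advance, wrapping back from the ring base to
 * its last slot; no-op when the ring is empty.  Typically used by callers to
 * back out descriptors queued for a request that cannot be completed.
 */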
void safexcel_ring_rollback_wptr(struct safexcel_crypto_priv *priv,
				 struct safexcel_desc_ring *ring)
{
	if (ring->write == ring->read)
		return;

	if (ring->write == ring->base)
		ring->write = ring->base_end;
	else
		ring->write -= ring->offset;
}

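/*
 * Queue a command descriptor on the given ring.  For the first segment of a
 * request with a valid context, the control data is also filled in: overall
 * packet length, the 64-bit context record pointer and a token area that is
 * initialised to no-op tokens.
 */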
struct safexcel_command_desc *safexcel_add_cdesc(struct safexcel_crypto_priv *priv,
						 int ring_id,
						 bool first, bool last,
						 dma_addr_t data, u32 data_len,
						 u32 full_data_len,
						 dma_addr_t context)
{
	struct safexcel_command_desc *cdesc;
	int i;

	cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr);
	if (IS_ERR(cdesc))
		return cdesc;

	memset(cdesc, 0, sizeof(struct safexcel_command_desc));

	cdesc->first_seg = first;
	cdesc->last_seg = last;
	cdesc->particle_size = data_len;
	cdesc->data_lo = lower_32_bits(data);
	cdesc->data_hi = upper_32_bits(data);

	if (first && context) {
		struct safexcel_token *token =
			(struct safexcel_token *)cdesc->control_data.token;

		cdesc->control_data.packet_length = full_data_len;
		cdesc->control_data.options = EIP197_OPTION_MAGIC_VALUE |
					      EIP197_OPTION_64BIT_CTX |
					      EIP197_OPTION_CTX_CTRL_IN_CMD;
		cdesc->control_data.context_lo =
			(lower_32_bits(context) & GENMASK(31, 2)) >> 2;
		cdesc->control_data.context_hi = upper_32_bits(context);

		/* TODO: large xform HMAC with SHA-384/512 uses refresh = 3 */
		cdesc->control_data.refresh = 2;

		for (i = 0; i < EIP197_MAX_TOKENS; i++)
			eip197_noop_token(&token[i]);
	}

	return cdesc;
}

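/* Queue a result descriptor for the destination buffer on the given ring. */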
struct safexcel_result_desc *safexcel_add_rdesc(struct safexcel_crypto_priv *priv,
						int ring_id,
						bool first, bool last,
						dma_addr_t data, u32 len)
{
	struct safexcel_result_desc *rdesc;

	rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr);
	if (IS_ERR(rdesc))
		return rdesc;

	memset(rdesc, 0, sizeof(struct safexcel_result_desc));

	rdesc->first_seg = first;
	rdesc->last_seg = last;
	rdesc->particle_size = len;
	rdesc->data_lo = lower_32_bits(data);
	rdesc->data_hi = upper_32_bits(data);

	return rdesc;
}