/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			/*
			 * Pad so that "base" overlays the base member of
			 * the skcipher_alg below: the same memory can then
			 * be viewed either as a crypto_instance or as a
			 * skcipher_alg.
			 */
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;		/* bytes mapped in the current step */

	struct scatter_walk out;
	unsigned int total;		/* bytes remaining in the request */

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;			/* original IV */
	void *iv;			/* IV used during the walk; may be a copy */

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline void crypto_set_skcipher_spawn(
	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
			 u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}
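
/*
 * Illustrative sketch (not part of this header) of the usual spawn life
 * cycle in a template's ->create() callback: bind the underlying
 * algorithm with crypto_grab_skcipher() and release it with kfree()
 * before the grab succeeded, or crypto_drop_skcipher() afterwards.
 * example_create() and the error label are hypothetical names.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_skcipher_spawn *spawn;
 *		struct skcipher_instance *inst;
 *		struct crypto_attr_type *algt;
 *		const char *cipher_name;
 *		int err;
 *
 *		algt = crypto_get_attr_type(tb);
 *		if (IS_ERR(algt))
 *			return PTR_ERR(algt);
 *
 *		cipher_name = crypto_attr_alg_name(tb[1]);
 *		if (IS_ERR(cipher_name))
 *			return PTR_ERR(cipher_name);
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = skcipher_instance_ctx(inst);
 *
 *		crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
 *		err = crypto_grab_skcipher(spawn, cipher_name, 0,
 *					   crypto_requires_sync(algt->type,
 *								algt->mask));
 *		if (err)
 *			goto err_free_inst;
 *
 *		// ... fill in inst->alg from crypto_spawn_skcipher_alg(spawn),
 *		// ... then register with skcipher_register_instance(tmpl, inst)
 *
 *	err_free_inst:
 *		kfree(inst);
 *		return err;
 *	}
 */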

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}
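
/*
 * Illustrative sketch: an instance's ->init() typically grabs the child
 * transform from its spawn and sizes the request context so that
 * skcipher_request_ctx() can later hold a subrequest for it.  The names
 * example_init() and struct example_tfm_ctx are hypothetical.
 *
 *	struct example_tfm_ctx {
 *		struct crypto_skcipher *child;
 *	};
 *
 *	static int example_init(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child;
 *
 *		child = crypto_spawn_skcipher(skcipher_instance_ctx(inst));
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm,
 *					    sizeof(struct skcipher_request) +
 *					    crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */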

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

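/*
 * Illustrative sketch: a driver normally registers an array of
 * algorithms from module init and unregisters it on exit.
 * example_algs is a hypothetical array.
 *
 *	static struct skcipher_alg example_algs[] = { ... };
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_skciphers(example_algs,
 *						 ARRAY_SIZE(example_algs));
 *	}
 *
 *	static void __exit example_module_exit(void)
 *	{
 *		crypto_unregister_skciphers(example_algs,
 *					    ARRAY_SIZE(example_algs));
 *	}
 */
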
int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);
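
/*
 * Illustrative sketch of the walk pattern used by software skcipher
 * implementations: map the request in chunks, process the full blocks
 * of each chunk and hand the remainder back to skcipher_walk_done().
 * Assumes a block cipher mode, i.e. req->cryptlen is a multiple of the
 * block size; example_crypt_blocks() is hypothetical.
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		unsigned int bsize = crypto_skcipher_blocksize(tfm);
 *		struct skcipher_walk walk;
 *		unsigned int blocks;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *
 *		while ((blocks = walk.nbytes / bsize)) {
 *			example_crypt_blocks(walk.dst.virt.addr,
 *					     walk.src.virt.addr, blocks,
 *					     walk.iv);
 *			err = skcipher_walk_done(&walk, walk.nbytes % bsize);
 *		}
 *		return err;
 *	}
 */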

/* Request helpers for the legacy ablkcipher interface */
static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
					       int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
{
	return req->base.flags;
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

/*
 * The helpers below report an algorithm's key, chunk and walk sizes.
 * During the transition away from the legacy interfaces an skcipher
 * may still wrap a blkcipher or ablkcipher algorithm, so each helper
 * checks for the legacy types first and falls back to the legacy
 * fields; a set cra_ablkcipher.encrypt identifies an ablkcipher.
 */
static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.min_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.min_keysize;

	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.max_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.max_keysize;

	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_chunksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->chunksize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->walksize;
}

/**
 * crypto_skcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_skcipher_chunksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_chunksize(crypto_skcipher_alg(tfm));
}
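
/*
 * Worked example: ctr(aes) advertises cra_blocksize == 1 because CTR
 * turns AES into a stream cipher, but its chunksize is the AES block
 * size (16 bytes), so every incremental update except the last must be
 * a multiple of 16 bytes for the counter to stay consistent.
 */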

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal to it if the concern does
 * not apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *
skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
			       struct crypto_alg **cipher_alg_ret);
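
/*
 * Illustrative sketch, modelled on how single-block-cipher modes use
 * this helper: skcipher_alloc_instance_simple() parses the template
 * parameters and sets up the instance around skcipher_ctx_simple, so
 * ->create() only fills in the mode's callbacks.  example_create(),
 * example_encrypt() and example_decrypt() are hypothetical; note the
 * assumption that the caller owns a reference on the returned cipher
 * algorithm and must drop it with crypto_mod_put().
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		struct crypto_alg *alg;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *
 *		crypto_mod_put(alg);
 *		return err;
 *	}
 */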

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */