/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct module;
struct rtattr;
struct seq_file;

/*
 * Per-type frontend hooks: how a transform type (cipher, hash, AEAD,
 * ...) sizes its contexts, constructs tfms and reports itself via
 * procfs and netlink.
 */
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};
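
/*
 * A hedged sketch of how a frontend might fill one of these in; all
 * "example" names below are hypothetical and only illustrate the
 * shape (real instances live in crypto/shash.c, crypto/ahash.c, ...):
 *
 *	static const struct crypto_type crypto_example_type = {
 *		.extsize	= crypto_example_extsize,
 *		.init_tfm	= crypto_example_init_tfm,
 *		.show		= crypto_example_show,
 *		.report		= crypto_example_report,
 *		.maskclear	= ~CRYPTO_ALG_TYPE_MASK,
 *		.maskset	= CRYPTO_ALG_TYPE_MASK,
 *		.type		= CRYPTO_ALG_TYPE_EXAMPLE,
 *		.tfmsize	= offsetof(struct crypto_example, base),
 *	};
 */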

/*
 * An instance is an algorithm object constructed from a template
 * (e.g. "cbc(aes)"); its private context follows in __ctx.
 */
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/*
 * A template ("cbc", "hmac", ...) constructs instances from other
 * algorithms; newer templates implement ->create(), older ones the
 * ->alloc()/->free() pair.
 */
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
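
/*
 * A minimal template skeleton (hedged sketch; the "example" names are
 * hypothetical). ->create() parses the rtattr arguments, builds an
 * instance around the inner algorithm and registers it:
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		... look up the inner algorithm from tb, allocate an
 *		... instance, then call crypto_register_instance()
 *	}
 *
 *	static struct crypto_template example_tmpl = {
 *		.name	= "example",
 *		.create	= example_create,
 *		.module	= THIS_MODULE,
 *	};
 */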

/*
 * A spawn is an instance's reference to the underlying algorithm it
 * was built on; it pins that algorithm for the instance's lifetime
 * and is later used to allocate the inner tfm.
 */
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};
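
/*
 * Typical spawn lifecycle inside a template (hedged sketch; the
 * context layout and names are hypothetical):
 *
 *	struct example_instance_ctx {
 *		struct crypto_spawn spawn;
 *	};
 *
 *	when building the instance:
 *	err = crypto_init_spawn(&ictx->spawn, alg, inst,
 *				CRYPTO_ALG_TYPE_MASK);
 *
 *	in the tfm's ->cra_init() handler:
 *	tfm = crypto_spawn_tfm(&ictx->spawn, CRYPTO_ALG_TYPE_CIPHER,
 *			       CRYPTO_ALG_TYPE_MASK);
 *
 *	in the template's ->free() handler:
 *	crypto_drop_spawn(&ictx->spawn);
 */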

/*
 * A simple FIFO of asynchronous requests with backlog support: qlen
 * tracks the current depth, max_qlen the point beyond which requests
 * are backlogged (or rejected).
 */
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};
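
/*
 * Enqueue sketch for an async driver (locking and names are
 * illustrative, not prescriptive). crypto_enqueue_request() returns
 * -EINPROGRESS when the request was queued, or -EBUSY when the queue
 * was full; with CRYPTO_TFM_REQ_MAY_BACKLOG set, an -EBUSY request is
 * still accepted onto the backlog.
 *
 *	spin_lock_bh(&driver->queue_lock);
 *	err = crypto_enqueue_request(&driver->queue, &req->base);
 *	spin_unlock_bh(&driver->queue_lock);
 *	... kick the driver's processing thread ...
 *	return err;
 */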

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

/*
 * State for walking a blkcipher request's scatterlists in block-sized
 * chunks, with bounce-buffer and alignment handling done internally.
 */
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};
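
/*
 * The canonical walk loop in a blkcipher ->encrypt() handler (hedged
 * sketch; ctx, bsize and encrypt_blocks() are hypothetical stand-ins):
 *
 *	static int example_encrypt(struct blkcipher_desc *desc,
 *				   struct scatterlist *dst,
 *				   struct scatterlist *src,
 *				   unsigned int nbytes)
 *	{
 *		struct blkcipher_walk walk;
 *		int err;
 *
 *		blkcipher_walk_init(&walk, dst, src, nbytes);
 *		err = blkcipher_walk_virt(desc, &walk);
 *
 *		while ((nbytes = walk.nbytes)) {
 *			unsigned int n = nbytes - (nbytes % bsize);
 *
 *			encrypt_blocks(ctx, walk.dst.virt.addr,
 *				       walk.src.virt.addr, n);
 *			err = blkcipher_walk_done(desc, &walk, nbytes - n);
 *		}
 *		return err;
 *	}
 */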

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk	in;
	unsigned int		nbytes;
	struct scatter_walk	out;
	unsigned int		total;
	struct list_head	buffers;
	u8			*iv_buffer;
	u8			*iv;
	int			flags;
	unsigned int		blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);
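
/*
 * Templates are normally registered and unregistered from module
 * init/exit (sketch; example_tmpl as in the template skeleton above):
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 *
 *	static void __exit example_module_exit(void)
 *	{
 *		crypto_unregister_template(&example_tmpl);
 *	}
 */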

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
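
/*
 * Sketch of a CTR-style step using these helpers (hypothetical
 * context; both buffers must honour the u32 alignment noted above):
 *
 *	u8 keystream[16] __aligned(4);
 *
 *	encrypt_block(ctx, keystream, counter);	generate keystream
 *	crypto_xor(data, keystream, 16);	data ^= keystream
 *	crypto_inc(counter, 16);		big-endian counter++
 */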

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
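
/*
 * Typical dequeue step in a driver's processing thread (hedged
 * sketch; locking and names are illustrative): a request coming off
 * the backlog must first be notified with -EINPROGRESS before it is
 * processed.
 *
 *	spin_lock_bh(&driver->queue_lock);
 *	backlog = crypto_get_backlog(&driver->queue);
 *	async_req = crypto_dequeue_request(&driver->queue);
 *	spin_unlock_bh(&driver->queue_lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (async_req)
 *		... process async_req ...
 */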

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
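
/*
 * Typical use in a template's create/alloc path (sketch, error
 * handling elided): fold the caller's synchronicity requirement into
 * the mask used to look up the inner algorithm.
 *
 *	struct crypto_attr_type *algt = crypto_get_attr_type(tb);
 *	struct crypto_alg *alg;
 *
 *	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *				  CRYPTO_ALG_TYPE_MASK |
 *				  crypto_requires_sync(algt->type,
 *						       algt->mask));
 */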

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
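
/*
 * Example: constant-time authentication tag comparison in an AEAD
 * decrypt path (sketch; buffer names are illustrative):
 *
 *	if (crypto_memneq(computed_tag, received_tag, authsize))
 *		return -EBADMSG;
 */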

static inline void crypto_yield(u32 flags)
{
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
}

#endif	/* _CRYPTO_ALGAPI_H */