/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

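/*
 * A crypto_type describes the "frontend" glue for one class of transforms
 * (cipher, blkcipher, ablkcipher, ...).  Roughly: ctxsize()/extsize()
 * compute the size of the per-tfm context for the legacy and new-style
 * interfaces, init()/init_tfm() set up a freshly allocated tfm, show()
 * prints the algorithm in /proc/crypto, report() emits the netlink
 * (crypto_user) description, lookup() lets a type override algorithm
 * lookup, and type/maskclear/maskset/tfmsize adjust lookups and the size
 * of the wrapping tfm object.
 */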
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

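/*
 * An instance is an algorithm object produced by a template (e.g.
 * "cbc(aes)").  The trailing __ctx area holds the instance's private
 * context, typically one or more crypto_spawn structures.
 */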
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

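/*
 * A template ("cbc", "hmac", ...) builds instances around other
 * algorithms.  create() is the newer construction hook, which registers
 * the instance itself; alloc()/free() are the legacy pair it supersedes.
 */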
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

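/*
 * A spawn is an instance's reference to the algorithm it is built on.
 * It keeps the underlying algorithm pinned while the instance exists and
 * is what crypto_spawn_tfm()/crypto_spawn_tfm2() allocate tfms from.
 */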
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

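/*
 * A simple request queue for drivers and async wrappers.  Requests past
 * max_qlen are accepted only when they carry CRYPTO_TFM_REQ_MAY_BACKLOG;
 * backlog then points at the first such overflow request in the list.
 */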
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk	in;
	unsigned int		nbytes;
	struct scatter_walk	out;
	unsigned int		total;
	struct list_head	buffers;
	u8			*iv_buffer;
	u8			*iv;
	int			flags;
	unsigned int		blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);
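/*
 * A minimal sketch of template registration from module init/exit; the
 * "mytmpl" name and my_create() hook are hypothetical:
 *
 *	static struct crypto_template my_tmpl = {
 *		.name = "mytmpl",
 *		.create = my_create,
 *		.module = THIS_MODULE,
 *	};
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_template(&my_tmpl);
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_template(&my_tmpl);
 *	}
 */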

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
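/*
 * Sketch of how a template's create() might bind to an underlying
 * algorithm via a spawn (context layout and error handling elided;
 * crypto_set_spawn() is defined just below):
 *
 *	crypto_set_spawn(spawn, inst);
 *	err = crypto_grab_spawn(spawn, crypto_attr_alg_name(tb[1]),
 *				type, mask);
 *
 * and on error, or in the instance's free path:
 *
 *	crypto_drop_spawn(spawn);
 */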

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s = (unsigned long *)src;

		while (size > 0) {
			*d++ ^= *s++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, dst, src, size);
	}
}
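/*
 * The word-at-a-time fast path above is used only when size is a
 * compile-time constant multiple of sizeof(unsigned long) on machines
 * with efficient unaligned access; otherwise the out-of-line
 * __crypto_xor() runs.  Example: CBC-style chaining, XOR-ing the IV (or
 * previous ciphertext block) into the next plaintext block in place:
 *
 *	crypto_xor(block, iv, bsize);
 */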

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
				  unsigned int size)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s1 = (unsigned long *)src1;
		unsigned long *s2 = (unsigned long *)src2;

		while (size > 0) {
			*d++ = *s1++ ^ *s2++;
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, src1, src2, size);
	}
}
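/*
 * crypto_xor_cpy() fuses the copy and the XOR, e.g. writing
 * dst = src ^ keystream without first memcpy()ing src into dst:
 *
 *	crypto_xor_cpy(dst, src, keystream, bsize);
 */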

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);
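/*
 * Typical use of the walk API in a synchronous blkcipher handler
 * (a sketch: bsize is the cipher block size, and the per-block work on
 * walk.src.virt.addr / walk.dst.virt.addr is elided):
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while (walk.nbytes) {
 *		unsigned int n = walk.nbytes;
 *
 *		... process full blocks, decrementing n by bsize
 *		    for each block handled ...
 *
 *		err = blkcipher_walk_done(desc, &walk, n);
 *	}
 *	return err;
 *
 * blkcipher_walk_done() takes the number of bytes left unprocessed and
 * advances the walk to the next chunk of the scatterlists.
 */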

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

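/*
 * crypto_tfm_ctx_aligned() returns the tfm context pointer rounded up
 * to the algorithm's alignment mask, for code whose context needs
 * stricter alignment than CRYPTO_MINALIGN provides.
 */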
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
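/*
 * Sketch of the dequeue pattern commonly seen in drivers: pull the next
 * request and, if a request was waiting on the backlog, notify its
 * submitter with -EINPROGRESS that it has entered the queue proper.
 * Locking around the queue is the driver's responsibility and is
 * elided here:
 *
 *	backlog = crypto_get_backlog(&queue);
 *	async_req = crypto_dequeue_request(&queue);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *
 *	if (async_req)
 *		... process async_req ...
 */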

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
{
	return (type ^ off) & mask & off;
}

/*
 * Returns CRYPTO_ALG_ASYNC if the type/mask pair demands a synchronous
 * algorithm (CRYPTO_ALG_ASYNC set in mask but clear in type), so the
 * result can be OR-ed into a subsequent lookup mask.  Otherwise
 * returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}
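/*
 * Typical use in a template's create(), building the lookup mask for
 * the inner algorithm from the user's type/mask attributes (sketch):
 *
 *	struct crypto_attr_type *algt = crypto_get_attr_type(tb);
 *	...
 *	mask = CRYPTO_ALG_TYPE_MASK |
 *	       crypto_requires_sync(algt->type, algt->mask);
 */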

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
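/*
 * Example: constant-time authentication tag check in a decrypt path,
 * where a plain memcmp() could leak how many leading bytes matched:
 *
 *	if (crypto_memneq(computed_tag, received_tag, authsize))
 *		return -EBADMSG;
 */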

static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
#endif
}

#endif	/* _CRYPTO_ALGAPI_H */