xref: /openbmc/linux/include/crypto/algapi.h (revision 643d1f7f)
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

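/*
 * Type-specific operations for a class of transforms: ctxsize() reports
 * how much context memory a tfm of this type needs, init()/exit() set up
 * and tear down a tfm, and show() prints the algorithm's details to
 * /proc/crypto.
 */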
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	void (*exit)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
};

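/*
 * An algorithm instantiated from a template (e.g. "cbc(aes)").  The
 * embedded alg must stay first so crypto_tfm_alg_instance() can recover
 * the instance via container_of(); tmpl and list tie the instance back
 * to the template that created it, and __ctx holds per-instance context.
 */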
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

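/*
 * A template such as "cbc" or "hmac": alloc() builds a crypto_instance
 * from the rtattr parameter list, free() destroys it, and instances
 * collects everything the template has created.  The name is what
 * crypto_lookup_template() matches against.
 */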
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);

	char name[CRYPTO_MAX_ALG_NAME];
};

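/*
 * A spawn is an instance's reference to the underlying algorithm it is
 * built on.  It is set up with crypto_init_spawn() and released with
 * crypto_drop_spawn(); mask records the algorithm type bits the
 * instance requires of it.
 */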
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	u32 mask;
};

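/*
 * A request queue for asynchronous (typically hardware-backed)
 * implementations.  Up to max_qlen requests are held on list; backlog
 * points at the first request accepted beyond that limit because the
 * caller set CRYPTO_TFM_REQ_MAY_BACKLOG, or back at list itself when
 * there is none.
 */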
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

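/* Current position of a walk: the scatterlist entry plus an offset into it. */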
struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

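/*
 * State for walking the source and destination scatterlists of a block
 * cipher request one chunk at a time.  src/dst describe the current
 * chunk as either a page/offset pair (physical walks) or mapped
 * addresses (virtual walks); nbytes is what remains of the current
 * chunk and total what remains of the whole request.  page and buffer
 * provide scratch space for misaligned or split blocks, and iv points
 * at the (possibly realigned) IV.
 */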
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;
extern const struct crypto_type crypto_hash_type;

void crypto_mod_put(struct crypto_alg *alg);

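/*
 * Template housekeeping: templates register and unregister themselves
 * with the core, and crypto_lookup_template() finds one by name,
 * requesting its module if it is not already loaded.
 */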
int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

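/*
 * crypto_init_spawn() makes alg the underlying algorithm of inst;
 * crypto_spawn_tfm() later allocates a transform backed by that
 * algorithm, and crypto_drop_spawn() releases the reference.
 */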
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

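/*
 * crypto_enqueue_request() returns -EINPROGRESS once the request is
 * queued, or -EBUSY when the queue is full; with
 * CRYPTO_TFM_REQ_MAY_BACKLOG set the request is still accepted (onto
 * the backlog) despite the -EBUSY return.
 */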
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

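/*
 * A minimal usage sketch for the queue helpers, assuming a hypothetical
 * driver private struct with its own queue and lock fields; the backlog
 * handling mirrors the pattern asynchronous drivers conventionally use:
 *
 *	spin_lock_irqsave(&priv->lock, flags);
 *	err = crypto_enqueue_request(&priv->queue, &req->base);
 *	spin_unlock_irqrestore(&priv->lock, flags);
 *
 *	(later, once the hardware can take more work)
 *	backlog = crypto_get_backlog(&priv->queue);
 *	async_req = crypto_dequeue_request(&priv->queue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */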
/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);

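/*
 * Return the tfm context pointer rounded up to the algorithm's alignment
 * mask, for algorithms that need their context aligned more strictly
 * than the default crypto_tfm_ctx_alignment().
 */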
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm);

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (void *)ALIGN(addr, align);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

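/*
 * Prime a walk over nbytes from src to dst; the caller then drives it
 * with blkcipher_walk_virt() or blkcipher_walk_phys() and finishes each
 * step with blkcipher_walk_done().
 */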
static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

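/*
 * Return the oldest backlogged request, or NULL if nothing has been
 * pushed onto the backlog.
 */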
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync
 * algorithms, i.e. the caller cleared CRYPTO_ALG_ASYNC in type while
 * including it in mask.  Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}

#endif	/* _CRYPTO_ALGAPI_H */