/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE	160
#define MAX_ALGAPI_ALIGNMASK	127
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15
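
/*
 * For illustration (hypothetical driver code, not part of this header), an
 * on-stack buffer guaranteed to fit any cipher block at any supported
 * alignment could be declared as:
 *
 *	u8 buf[MAX_CIPHER_BLOCKSIZE] __aligned(MAX_CIPHER_ALIGNMASK + 1);
 */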

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
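
/*
 * As a worked example (values assumed here for illustration): with
 * ARCH_DMA_MINALIGN == 128 and CRYPTO_MINALIGN == 8, this yields
 * CRYPTO_DMA_PADDING == 127 & ~7 == 120, i.e. enough spare bytes to
 * realign a CRYPTO_MINALIGN-aligned context buffer up to a DMA-safe
 * boundary.
 */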

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this
 * macro expands twice on the same line. Instead, use a separate base name
 * for the alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)
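
/*
 * For example, a module providing "sha256" would typically declare (driver
 * code shown for illustration):
 *
 *	MODULE_ALIAS_CRYPTO("sha256");
 *
 * creating both the unprefixed "sha256" alias for userspace requests and
 * the "crypto-sha256" alias used for autoloading.
 */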

struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);
#ifdef CONFIG_CRYPTO_STATS
	int (*report_stat)(struct sk_buff *skb, struct crypto_alg *alg);
#endif

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	struct work_struct free_work;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);

static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}
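
/*
 * A sketch of typical enqueue usage in an offload driver (my_dev and its
 * lock are hypothetical; see crypto_get_backlog() below for the dequeue
 * side):
 *
 *	spin_lock_bh(&dev->lock);
 *	err = crypto_enqueue_request(&dev->queue, req);
 *	spin_unlock_bh(&dev->lock);
 *
 * where err is -EINPROGRESS once queued, or -EBUSY/-ENOSPC when the queue
 * is full, depending on whether CRYPTO_TFM_REQ_MAY_BACKLOG is set.
 */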

void crypto_inc(u8 *a, unsigned int size);

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}
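
/*
 * A driver whose context is mapped for DMA would reserve the padding in its
 * algorithm declaration and fetch the realigned pointer at runtime, roughly
 * (struct my_ctx and the surrounding declarations are hypothetical):
 *
 *	.cra_ctxsize = sizeof(struct my_ctx) + CRYPTO_DMA_PADDING,
 *	...
 *	struct my_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 */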

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
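
/*
 * The customary dequeue pattern notifies any backlogged request that it has
 * moved off the backlog before the next request is started (a sketch;
 * locking and hardware submission are driver-specific):
 *
 *	backlog = crypto_get_backlog(&dev->queue);
 *	req = crypto_dequeue_request(&dev->queue);
 *	if (backlog)
 *		crypto_request_complete(backlog, -EINPROGRESS);
 *	if (req)
 *		... submit req to the hardware ...
 */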

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}
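
/*
 * In a template's ->create() callback this mask is typically obtained via
 * crypto_check_attr_type() and then passed to crypto_grab_*(), roughly
 * (spawn/inst and the error handling are sketched, not prescriptive):
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
 *				0, mask);
 */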

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

#endif	/* _CRYPTO_ALGAPI_H */