/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/kconfig.h>
#include <linux/list.h>
#include <linux/types.h>

#include <asm/unaligned.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE	160
#define MAX_ALGAPI_ALIGNMASK	127
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
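/*
 * Illustrative sketch only (not part of this header's API): one way a caller
 * might use the worst-case cipher constants above to reserve an on-stack
 * scratch block and align it for a given tfm. The function name is
 * hypothetical; the block is kept under "#if 0" so it is never compiled.
 */
#if 0
static u8 *example_cipher_scratch(struct crypto_tfm *tfm,
				  u8 buf[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK])
{
	/* Round the buffer up to the algorithm's required alignment. */
	return PTR_ALIGN(&buf[0], crypto_tfm_alg_alignmask(tfm) + 1);
}
#endif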
struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct seq_file;
struct sk_buff;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	/*
	 * XOR word-at-a-time when the size is a compile-time constant
	 * multiple of the word size and unaligned accesses are cheap;
	 * otherwise fall back to the out-of-line __crypto_xor() helper.
	 */
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s = (unsigned long *)src;
		unsigned long l;

		while (size > 0) {
			l = get_unaligned(d) ^ get_unaligned(s++);
			put_unaligned(l, d++);
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, dst, src, size);
	}
}

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
				  unsigned int size)
{
	/* Same word-at-a-time fast path as crypto_xor(), but non-destructive. */
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
	    __builtin_constant_p(size) &&
	    (size % sizeof(unsigned long)) == 0) {
		unsigned long *d = (unsigned long *)dst;
		unsigned long *s1 = (unsigned long *)src1;
		unsigned long *s2 = (unsigned long *)src2;
		unsigned long l;

		while (size > 0) {
			l = get_unaligned(s1++) ^ get_unaligned(s2++);
			put_unaligned(l, d++);
			size -= sizeof(unsigned long);
		}
	} else {
		__crypto_xor(dst, src1, src2, size);
	}
}
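/*
 * Illustrative sketch only: applying a keystream with the XOR helpers above,
 * as a stream-cipher style template might do. crypto_xor() works in place,
 * crypto_xor_cpy() writes the result to a separate buffer. The function name
 * is hypothetical; the block is kept under "#if 0" so it is never compiled.
 */
#if 0
static void example_apply_keystream(u8 *dst, const u8 *src,
				    const u8 *keystream, unsigned int nbytes)
{
	if (dst == src)
		crypto_xor(dst, keystream, nbytes);
	else
		crypto_xor_cpy(dst, src, keystream, nbytes);
}
#endif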
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

#endif	/* _CRYPTO_ALGAPI_H */
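/*
 * Illustrative sketch only: checking an authentication tag with
 * crypto_memneq() so that the comparison time does not depend on where the
 * first mismatching byte is. The function name and the -EBADMSG return value
 * are assumptions; the block is kept under "#if 0" so it is never compiled.
 */
#if 0
static int example_verify_tag(const u8 *computed, const u8 *received,
			      unsigned int taglen)
{
	return crypto_memneq(computed, received, taglen) ? -EBADMSG : 0;
}
#endif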