/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

struct aead_instance {
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};

struct crypto_aead_spawn {
	struct crypto_spawn base;
};

struct aead_queue {
	struct crypto_queue base;
};

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *aead_crypto_instance(
	struct aead_instance *inst)
{
	return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
	return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
	return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

int crypto_grab_aead(struct crypto_aead_spawn *spawn,
		     struct crypto_instance *inst,
		     const char *name, u32 type, u32 mask);

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct aead_alg *crypto_spawn_aead_alg(
	struct crypto_aead_spawn *spawn)
{
	return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
	struct crypto_aead_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}

static inline void aead_init_queue(struct aead_queue *queue,
				   unsigned int max_qlen)
{
	crypto_init_queue(&queue->base, max_qlen);
}

static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM. However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity. This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}

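/*
 * Usage sketch (illustrative, not part of the upstream header): the
 * spawn and instance helpers above are what an AEAD template's
 * ->create() callback typically strings together. The template name
 * "example" and the abbreviated error handling are assumptions for
 * illustration; real templates such as crypto/seqiv.c also fill in
 * cra_name, ivsize, maxauthsize and the aead_alg callbacks before
 * registering.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_aead_spawn *spawn;
 *		struct aead_instance *inst;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
 *		if (err)
 *			return err;
 *
 *		// Allocate the instance with the spawn as its context.
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *
 *		// Bind the underlying AEAD named by the first attribute.
 *		spawn = aead_instance_ctx(inst);
 *		err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
 *				       crypto_attr_alg_name(tb[1]), 0, mask);
 *		if (err)
 *			goto err_free_inst;
 *
 *		// ... fill in inst->alg and inst->free here ...
 *
 *		err = aead_register_instance(tmpl, inst);
 *		if (err) {
 *			crypto_drop_aead(spawn);
 *			goto err_free_inst;
 *		}
 *		return 0;
 *
 *	err_free_inst:
 *		kfree(inst);
 *		return err;
 *	}
 */
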
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);

#endif /* _CRYPTO_INTERNAL_AEAD_H */
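
/*
 * Registration sketch (illustrative, not part of the upstream header):
 * a standalone AEAD implementation fills in a struct aead_alg and
 * registers it from module init via crypto_register_aead(). All names,
 * sizes and the example_ctx/callback symbols below are placeholders,
 * not a real algorithm; crypto_register_aead() takes care of marking
 * the base algorithm as an AEAD.
 *
 *	static struct aead_alg example_alg = {
 *		.setkey		= example_setkey,
 *		.encrypt	= example_encrypt,
 *		.decrypt	= example_decrypt,
 *		.ivsize		= 12,
 *		.maxauthsize	= 16,
 *		.base = {
 *			.cra_name		= "gcm(example)",
 *			.cra_driver_name	= "gcm-example-generic",
 *			.cra_priority		= 100,
 *			.cra_blocksize		= 1,
 *			.cra_ctxsize		= sizeof(struct example_ctx),
 *			.cra_module		= THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_aead(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_aead(&example_alg);
 *	}
 */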