/*
 * Symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks. In user context,
 * the kernel is given a chance to schedule us once per page.
 *
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/bug.h>
#include <linux/module.h>

#include "internal.h"

static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_blkcipher_type)
		return sizeof(struct crypto_blkcipher *);

	BUG_ON(alg->cra_type != &crypto_ablkcipher_type &&
	       alg->cra_type != &crypto_givcipher_type);

	return sizeof(struct crypto_ablkcipher *);
}

static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
				     const u8 *key, unsigned int keylen)
{
	struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct crypto_blkcipher *blkcipher = *ctx;
	int err;

	crypto_blkcipher_clear_flags(blkcipher, ~0);
	crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) &
					      CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(blkcipher, key, keylen);
	crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) &
				       CRYPTO_TFM_RES_MASK);

	return err;
}

static int skcipher_crypt_blkcipher(struct skcipher_request *req,
				    int (*crypt)(struct blkcipher_desc *,
						 struct scatterlist *,
						 struct scatterlist *,
						 unsigned int))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct blkcipher_desc desc = {
		.tfm = *ctx,
		.info = req->iv,
		.flags = req->base.flags,
	};

	return crypt(&desc, req->dst, req->src, req->cryptlen);
}

static int skcipher_encrypt_blkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;

	return skcipher_crypt_blkcipher(req, alg->encrypt);
}

static int skcipher_decrypt_blkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;

	return skcipher_crypt_blkcipher(req, alg->decrypt);
}

static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
{
	struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(*ctx);
}

static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *blkcipher;
	struct crypto_tfm *btfm;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(btfm)) {
		crypto_mod_put(calg);
		return PTR_ERR(btfm);
	}

	blkcipher = __crypto_blkcipher_cast(btfm);
	*ctx = blkcipher;
	tfm->exit = crypto_exit_skcipher_ops_blkcipher;

	skcipher->setkey = skcipher_setkey_blkcipher;
	skcipher->encrypt = skcipher_encrypt_blkcipher;
	skcipher->decrypt = skcipher_decrypt_blkcipher;

	skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher);

	return 0;
}
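/*
 * Context layout for both compat paths (illustrative):
 *
 *	crypto_skcipher_ctx(tfm) ---> struct crypto_blkcipher *
 *	                              (or struct crypto_ablkcipher *)
 *
 * The skcipher context holds nothing but a pointer to a separately
 * allocated legacy tfm, which is why crypto_skcipher_extsize() above
 * returns the size of a single pointer.
 */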
static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm,
				      const u8 *key, unsigned int keylen)
{
	struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct crypto_ablkcipher *ablkcipher = *ctx;
	int err;

	crypto_ablkcipher_clear_flags(ablkcipher, ~0);
	crypto_ablkcipher_set_flags(ablkcipher,
				    crypto_skcipher_get_flags(tfm) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(ablkcipher, key, keylen);
	crypto_skcipher_set_flags(tfm,
				  crypto_ablkcipher_get_flags(ablkcipher) &
				  CRYPTO_TFM_RES_MASK);

	return err;
}

static int skcipher_crypt_ablkcipher(struct skcipher_request *req,
				     int (*crypt)(struct ablkcipher_request *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct ablkcipher_request *subreq = skcipher_request_ctx(req);

	ablkcipher_request_set_tfm(subreq, *ctx);
	ablkcipher_request_set_callback(subreq, skcipher_request_flags(req),
					req->base.complete, req->base.data);
	ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				     req->iv);

	return crypt(subreq);
}

static int skcipher_encrypt_ablkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;

	return skcipher_crypt_ablkcipher(req, alg->encrypt);
}

static int skcipher_decrypt_ablkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;

	return skcipher_crypt_ablkcipher(req, alg->decrypt);
}

static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_ablkcipher(*ctx);
}

static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ablkcipher;
	struct crypto_tfm *abtfm;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	abtfm = __crypto_alloc_tfm(calg, 0, 0);
	if (IS_ERR(abtfm)) {
		crypto_mod_put(calg);
		return PTR_ERR(abtfm);
	}

	ablkcipher = __crypto_ablkcipher_cast(abtfm);
	*ctx = ablkcipher;
	tfm->exit = crypto_exit_skcipher_ops_ablkcipher;

	skcipher->setkey = skcipher_setkey_ablkcipher;
	skcipher->encrypt = skcipher_encrypt_ablkcipher;
	skcipher->decrypt = skcipher_decrypt_ablkcipher;

	skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) +
			    sizeof(struct ablkcipher_request);

	return 0;
}
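/*
 * Request layout for the ablkcipher compat path (illustrative):
 *
 *	+------------------------------+ <- skcipher_request
 *	| struct skcipher_request      |
 *	+------------------------------+ <- skcipher_request_ctx(req)
 *	| struct ablkcipher_request    |    (the subreq built in
 *	+------------------------------+     skcipher_crypt_ablkcipher())
 *	| inner tfm's request context  |    crypto_ablkcipher_reqsize()
 *	+------------------------------+     bytes
 *
 * This is what the reqsize computation above accounts for.
 */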
static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
{
	if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type)
		return crypto_init_skcipher_ops_blkcipher(tfm);

	BUG_ON(tfm->__crt_alg->cra_type != &crypto_ablkcipher_type &&
	       tfm->__crt_alg->cra_type != &crypto_givcipher_type);

	return crypto_init_skcipher_ops_ablkcipher(tfm);
}

static const struct crypto_type crypto_skcipher_type2 = {
	.extsize = crypto_skcipher_extsize,
	.init_tfm = crypto_skcipher_init_tfm,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK,
	.type = CRYPTO_ALG_TYPE_BLKCIPHER,
	.tfmsize = offsetof(struct crypto_skcipher, base),
};

struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");
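/*
 * Minimal caller-side sketch of the skcipher API this file backs
 * (illustrative only, error handling trimmed; the algorithm name
 * "cbc(aes)" and the key/buf/iv setup are assumptions, not part of
 * this file):
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 16);
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, len);
 *	skcipher_request_set_callback(req, 0, NULL, NULL);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *
 *	err = crypto_skcipher_encrypt(req);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */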