// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) AES GCM crypto API support
 *
 * Copyright (C) 2016,2017 Advanced Micro Devices, Inc.
 *
 * Author: Gary R Hook <gary.hook@amd.com>
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/delay.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <crypto/internal/aead.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>

#include "ccp-crypto.h"

static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)
{
	return ret;
}

static int ccp_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			      unsigned int key_len)
{
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->u.aes.type = CCP_AES_TYPE_128;
		break;
	case AES_KEYSIZE_192:
		ctx->u.aes.type = CCP_AES_TYPE_192;
		break;
	case AES_KEYSIZE_256:
		ctx->u.aes.type = CCP_AES_TYPE_256;
		break;
	default:
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx->u.aes.mode = CCP_AES_MODE_GCM;
	ctx->u.aes.key_len = key_len;

	memcpy(ctx->u.aes.key, key, key_len);
	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);

	return 0;
}

static int ccp_aes_gcm_setauthsize(struct crypto_aead *tfm,
				   unsigned int authsize)
{
	return 0;
}

static int ccp_aes_gcm_crypt(struct aead_request *req,
			     enum ccp_aes_action action)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);
	struct ccp_aes_req_ctx *rctx = aead_request_ctx(req);
	struct scatterlist *iv_sg = NULL;
	unsigned int iv_len = 0;
	int i;
	int ret = 0;

	if (!ctx->u.aes.key_len)
		return -EINVAL;

	if (ctx->u.aes.mode != CCP_AES_MODE_GCM)
		return -EINVAL;

	if (!req->iv)
		return -EINVAL;

	/* 5 parts:
	 *   plaintext/ciphertext input
	 *   AAD
	 *   key
	 *   IV
	 *   Destination+tag buffer
	 */

	/* Prepare the IV: 12 bytes + an integer (counter) */
	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	for (i = 0; i < 3; i++)
		rctx->iv[i + GCM_AES_IV_SIZE] = 0;
	rctx->iv[AES_BLOCK_SIZE - 1] = 1;

	/* Set up a scatterlist for the IV */
	iv_sg = &rctx->iv_sg;
	iv_len = AES_BLOCK_SIZE;
	sg_init_one(iv_sg, rctx->iv, iv_len);

	/* The AAD + plaintext are concatenated in the src buffer */
	memset(&rctx->cmd, 0, sizeof(rctx->cmd));
	INIT_LIST_HEAD(&rctx->cmd.entry);
	rctx->cmd.engine = CCP_ENGINE_AES;
	rctx->cmd.u.aes.type = ctx->u.aes.type;
	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
	rctx->cmd.u.aes.action = action;
	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
	rctx->cmd.u.aes.iv = iv_sg;
	rctx->cmd.u.aes.iv_len = iv_len;
	rctx->cmd.u.aes.src = req->src;
	rctx->cmd.u.aes.src_len = req->cryptlen;
	rctx->cmd.u.aes.aad_len = req->assoclen;

	/* The cipher text + the tag are in the dst buffer */
	rctx->cmd.u.aes.dst = req->dst;

	ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

	return ret;
}

static int ccp_aes_gcm_encrypt(struct aead_request *req)
{
	return ccp_aes_gcm_crypt(req, CCP_AES_ACTION_ENCRYPT);
}

static int ccp_aes_gcm_decrypt(struct aead_request *req)
{
	return ccp_aes_gcm_crypt(req, CCP_AES_ACTION_DECRYPT);
}
static int ccp_aes_gcm_cra_init(struct crypto_aead *tfm)
{
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->complete = ccp_aes_gcm_complete;
	ctx->u.aes.key_len = 0;

	crypto_aead_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));

	return 0;
}

static void ccp_aes_gcm_cra_exit(struct crypto_tfm *tfm)
{
}

static struct aead_alg ccp_aes_gcm_defaults = {
	.setkey = ccp_aes_gcm_setkey,
	.setauthsize = ccp_aes_gcm_setauthsize,
	.encrypt = ccp_aes_gcm_encrypt,
	.decrypt = ccp_aes_gcm_decrypt,
	.init = ccp_aes_gcm_cra_init,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.base = {
		.cra_flags	= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ccp_ctx),
		.cra_priority	= CCP_CRA_PRIORITY,
		.cra_type	= &crypto_ablkcipher_type,
		.cra_exit	= ccp_aes_gcm_cra_exit,
		.cra_module	= THIS_MODULE,
	},
};

struct ccp_aes_aead_def {
	enum ccp_aes_mode mode;
	unsigned int version;
	const char *name;
	const char *driver_name;
	unsigned int blocksize;
	unsigned int ivsize;
	struct aead_alg *alg_defaults;
};

static struct ccp_aes_aead_def aes_aead_algs[] = {
	{
		.mode = CCP_AES_MODE_GHASH,
		.version = CCP_VERSION(5, 0),
		.name = "gcm(aes)",
		.driver_name = "gcm-aes-ccp",
		.blocksize = 1,
		.ivsize = AES_BLOCK_SIZE,
		.alg_defaults = &ccp_aes_gcm_defaults,
	},
};

static int ccp_register_aes_aead(struct list_head *head,
				 const struct ccp_aes_aead_def *def)
{
	struct ccp_crypto_aead *ccp_aead;
	struct aead_alg *alg;
	int ret;

	ccp_aead = kzalloc(sizeof(*ccp_aead), GFP_KERNEL);
	if (!ccp_aead)
		return -ENOMEM;

	INIT_LIST_HEAD(&ccp_aead->entry);

	ccp_aead->mode = def->mode;

	/* Copy the defaults and override as necessary */
	alg = &ccp_aead->alg;
	*alg = *def->alg_defaults;
	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 def->driver_name);
	alg->base.cra_blocksize = def->blocksize;
	alg->base.cra_ablkcipher.ivsize = def->ivsize;

	ret = crypto_register_aead(alg);
	if (ret) {
		pr_err("%s aead algorithm registration error (%d)\n",
		       alg->base.cra_name, ret);
		kfree(ccp_aead);
		return ret;
	}

	list_add(&ccp_aead->entry, head);

	return 0;
}

int ccp_register_aes_aeads(struct list_head *head)
{
	int i, ret;
	unsigned int ccpversion = ccp_version();

	for (i = 0; i < ARRAY_SIZE(aes_aead_algs); i++) {
		if (aes_aead_algs[i].version > ccpversion)
			continue;
		ret = ccp_register_aes_aead(head, &aes_aead_algs[i]);
		if (ret)
			return ret;
	}

	return 0;
}
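
/*
 * Illustrative sketch (not part of the driver): how a kernel consumer
 * might exercise the "gcm(aes)" AEAD registered above through the
 * generic crypto API.  The buffer layout (AAD followed by plaintext,
 * with AES_BLOCK_SIZE bytes reserved for the tag) and the names "key",
 * "iv", "buf", "assoclen" and "ptlen" are assumptions made for this
 * example only.
 *
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	int ret;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	ret = crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	ret = ret ?: crypto_aead_setauthsize(tfm, AES_BLOCK_SIZE);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	// buf holds AAD || plaintext, with room for the 16-byte tag
 *	sg_init_one(&sg, buf, assoclen + ptlen + AES_BLOCK_SIZE);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				       CRYPTO_TFM_REQ_MAY_SLEEP,
 *				  crypto_req_done, &wait);
 *	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);  // iv: 12 bytes
 *	aead_request_set_ad(req, assoclen);
 *
 *	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */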