// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ce-cipher.c - hardware cryptographic offloader for
 * Allwinner H3/A64/H5/H2+/H6/R40 SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keysizes in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */

#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ce.h"

/*
 * The hardware can only handle requests that fit its descriptor limits:
 * at most MAX_SG scatterlist entries on each side, a non-zero length that
 * is a multiple of 16 bytes and at least one IV in size, and scatterlist
 * entries that are 4-byte aligned with lengths that are multiples of 4.
 * Anything else must go to the software fallback.
 */
static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct scatterlist *sg;

	if (sg_nents(areq->src) > MAX_SG || sg_nents(areq->dst) > MAX_SG)
		return true;

	if (areq->cryptlen < crypto_skcipher_ivsize(tfm))
		return true;

	if (areq->cryptlen == 0 || areq->cryptlen % 16)
		return true;

	sg = areq->src;
	while (sg) {
		if (sg->length % 4 || !IS_ALIGNED(sg->offset, sizeof(u32)))
			return true;
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if (sg->length % 4 || !IS_ALIGNED(sg->offset, sizeof(u32)))
			return true;
		sg = sg_next(sg);
	}
	return false;
}

static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ce_alg_template *algt;

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & CE_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

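/*
 * sun8i_ce_cipher() builds one CE task descriptor for the selected flow:
 * it programs the algorithm/mode/key-size control words, DMA-maps the key,
 * the (bounced) IV and the source/destination scatterlists, runs the task
 * and then unwinds all mappings in reverse order.
 */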
static int sun8i_ce_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ce_alg_template *algt;
	struct sun8i_ce_flow *chan;
	struct ce_task *cet;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	dma_addr_t addr_iv = 0, addr_key = 0;
	void *backup_iv = NULL;
	u32 common, sym;
	int flow, i;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher);

	dev_dbg(ce->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	algt->stat_req++;
#endif

	flow = rctx->flow;

	chan = &ce->chanlist[flow];

	cet = chan->tl;
	memset(cet, 0, sizeof(struct ce_task));

	cet->t_id = cpu_to_le32(flow);
	common = ce->variant->alg_cipher[algt->ce_algo_id];
	common |= rctx->op_dir | CE_COMM_INT;
	cet->t_common_ctl = cpu_to_le32(common);
	/* CTS and recent CE (H6) need the length in bytes, in words otherwise */
	if (ce->variant->has_t_dlen_in_bytes)
		cet->t_dlen = cpu_to_le32(areq->cryptlen);
	else
		cet->t_dlen = cpu_to_le32(areq->cryptlen / 4);

	sym = ce->variant->op_mode[algt->ce_blockmode];
	len = op->keylen;
	switch (len) {
	case 128 / 8:
		sym |= CE_AES_128BITS;
		break;
	case 192 / 8:
		sym |= CE_AES_192BITS;
		break;
	case 256 / 8:
		sym |= CE_AES_256BITS;
		break;
	}

	cet->t_sym_ctl = cpu_to_le32(sym);
	cet->t_asym_ctl = 0;

	addr_key = dma_map_single(ce->dev, op->key, op->keylen, DMA_TO_DEVICE);
	cet->t_key = cpu_to_le32(addr_key);
	if (dma_mapping_error(ce->dev, addr_key)) {
		dev_err(ce->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

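	/*
	 * The IV is passed to the hardware through a DMA-able bounce buffer.
	 * For decryption, the last ciphertext block is saved first so it can
	 * be copied back into areq->iv as the next IV after the operation,
	 * since in-place (src == dst) decryption overwrites the ciphertext.
	 */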
	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		chan->ivlen = ivsize;
		chan->bounce_iv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
		if (!chan->bounce_iv) {
			err = -ENOMEM;
			goto theend_key;
		}
		if (rctx->op_dir & CE_DECRYPTION) {
			backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!backup_iv) {
				err = -ENOMEM;
				goto theend_key;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(backup_iv, areq->src, offset,
						 ivsize, 0);
		}
		memcpy(chan->bounce_iv, areq->iv, ivsize);
		addr_iv = dma_map_single(ce->dev, chan->bounce_iv, chan->ivlen,
					 DMA_TO_DEVICE);
		cet->t_iv = cpu_to_le32(addr_iv);
		if (dma_mapping_error(ce->dev, addr_iv)) {
			dev_err(ce->dev, "Cannot DMA MAP IV\n");
			err = -ENOMEM;
			goto theend_iv;
		}
	}

	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	for_each_sg(areq->src, sg, nr_sgs, i) {
		cet->t_src[i].addr = cpu_to_le32(sg_dma_address(sg));
		todo = min(len, sg_dma_len(sg));
		cet->t_src[i].len = cpu_to_le32(todo / 4);
		dev_dbg(ce->dev, "%s total=%u SG(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo);
		len -= todo;
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	for_each_sg(areq->dst, sg, nr_sgd, i) {
		cet->t_dst[i].addr = cpu_to_le32(sg_dma_address(sg));
		todo = min(len, sg_dma_len(sg));
		cet->t_dst[i].len = cpu_to_le32(todo / 4);
		dev_dbg(ce->dev, "%s total=%u SG(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, cet->t_dst[i].len, sg->offset, todo);
		len -= todo;
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	chan->timeout = areq->cryptlen;
	err = sun8i_ce_run_task(ce, flow, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		if (nr_sgs > 0)
			dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		if (addr_iv)
			dma_unmap_single(ce->dev, addr_iv, chan->ivlen,
					 DMA_TO_DEVICE);
		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & CE_DECRYPTION) {
			memcpy(areq->iv, backup_iv, ivsize);
			kfree_sensitive(backup_iv);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
		kfree(chan->bounce_iv);
	}

theend_key:
	dma_unmap_single(ce->dev, addr_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

static int sun8i_ce_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ce_cipher(breq);
	crypto_finalize_skcipher_request(engine, breq, err);

	return 0;
}

int sun8i_ce_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = CE_DECRYPTION;
	if (sun8i_ce_cipher_need_fallback(areq))
		return sun8i_ce_cipher_fallback(areq);

	e = sun8i_ce_get_engine_number(op->ce);
	rctx->flow = e;
	engine = op->ce->chanlist[e].engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ce_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = CE_ENCRYPTION;
	if (sun8i_ce_cipher_need_fallback(areq))
		return sun8i_ce_cipher_fallback(areq);

	e = sun8i_ce_get_engine_number(op->ce);
	rctx->flow = e;
	engine = op->ce->chanlist[e].engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

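/*
 * sun8i_ce_cipher_init() allocates the software fallback transform, sizes
 * the request context to hold the fallback request, registers the crypto
 * engine callbacks and takes a runtime PM reference on the device.
 */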
int sun8i_ce_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ce_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher);
	op->ce = algt->ce;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ce->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	dev_info(op->ce->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));

	op->enginectx.op.do_one_request = sun8i_ce_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_get_sync(op->ce->dev);
	if (err < 0)
		goto error_pm;

	return 0;
error_pm:
	pm_runtime_put_noidle(op->ce->dev);
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

void sun8i_ce_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync_suspend(op->ce->dev);
}

int sun8i_ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ce->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

int sun8i_ce_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	int err;

	err = verify_skcipher_des3_key(tfm, key);
	if (err)
		return err;

	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}