Lines Matching +full:rk3288 +full:- +full:crypto

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */

#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
In rk_cipher_need_fallback():

        if (!req->cryptlen)
                ...
        len = req->cryptlen;
        sgs = req->src;
        sgd = req->dst;
        ...
        if (!IS_ALIGNED(sgs->offset, sizeof(u32))) {
                algt->stat_fb_align++;
                ...
        if (!IS_ALIGNED(sgd->offset, sizeof(u32))) {
                algt->stat_fb_align++;
                ...
        stodo = min(len, sgs->length);
        ...
                algt->stat_fb_len++;
        ...
        dtodo = min(len, sgd->length);
        ...
                algt->stat_fb_len++;
        ...
                algt->stat_fb_sgdiff++;
        ...
        len -= stodo;
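The excerpt above elides the loop and return statements around these checks. A minimal sketch of the overall shape, assuming a walk over both scatterlists with bs holding the cipher block size (the helper name, variable names and the exact loop condition are assumptions, not the driver's verbatim code):

        #include <crypto/skcipher.h>
        #include <linux/scatterlist.h>

        static bool need_fallback_sketch(struct skcipher_request *req, unsigned int bs)
        {
                struct scatterlist *sgs = req->src;
                struct scatterlist *sgd = req->dst;
                unsigned int len = req->cryptlen;
                unsigned int stodo, dtodo;

                if (!len)
                        return true;    /* nothing the engine can usefully do */

                while (sgs && sgd) {
                        /* the engine wants word-aligned buffers */
                        if (!IS_ALIGNED(sgs->offset, sizeof(u32)) ||
                            !IS_ALIGNED(sgd->offset, sizeof(u32)))
                                return true;

                        stodo = min(len, sgs->length);
                        dtodo = min(len, sgd->length);

                        /* each entry must carry whole blocks, and src/dst must
                         * be segmented identically, or the hardware cannot
                         * process them as one pass */
                        if (stodo % bs || dtodo % bs || stodo != dtodo)
                                return true;

                        len -= stodo;
                        sgs = sg_next(sgs);
                        sgd = sg_next(sgd);
                }
                return false;
        }

Whenever a check fails, the driver bumps the matching stat_fb_* counter and routes the request to the software fallback instead.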
In rk_cipher_fallback():

        algt->stat_fb++;

        skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
        skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
                                      areq->base.complete, areq->base.data);
        skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
                                   areq->cryptlen, areq->iv);
        if (rctx->mode & RK_CRYPTO_DEC)
                err = crypto_skcipher_decrypt(&rctx->fallback_req);
        else
                err = crypto_skcipher_encrypt(&rctx->fallback_req);
In rk_cipher_handle_req():

        engine = rkc->engine;
        rctx->dev = rkc;
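Only these two assignments from rk_cipher_handle_req() match the search, so the rest of the hand-off is not shown. A plausible shape, assuming the crypto_engine API already included at the top of the file, with the fallback decision taken before the request reaches the engine (names other than those in the excerpt are assumptions):

        static int handle_req_sketch(struct rk_crypto_info *rkc,
                                     struct skcipher_request *req)
        {
                struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
                struct crypto_engine *engine = rkc->engine;

                rctx->dev = rkc;
                /* queue the request; the engine later calls back into the
                 * driver's do_one_request handler to actually run it */
                return crypto_transfer_skcipher_request_to_engine(engine, req);
        }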
In rk_aes_setkey():

        ...
                return -EINVAL;
        ctx->keylen = keylen;
        memcpy(ctx->key, key, keylen);

        return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);

In rk_des_setkey():

        ...
        ctx->keylen = keylen;
        memcpy(ctx->key, key, keylen);

        return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);

In rk_tdes_setkey():

        ...
        ctx->keylen = keylen;
        memcpy(ctx->key, key, keylen);

        return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
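The key validation that precedes each memcpy() is elided by the search (only the AES path's return -EINVAL survives). A hedged sketch of the DES variant, using the generic helper the crypto API provides for weak-key checks; whether this driver uses exactly this helper is an assumption, and the AES and 3DES paths would use aes_check_keylen() and verify_skcipher_des3_key() analogously:

        #include <crypto/internal/des.h>

        static int des_setkey_sketch(struct crypto_skcipher *cipher,
                                     const u8 *key, unsigned int keylen)
        {
                struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
                int err;

                /* reject weak/degenerate DES keys per crypto API policy */
                err = verify_skcipher_des_key(cipher, key);
                if (err)
                        return err;

                ctx->keylen = keylen;
                memcpy(ctx->key, key, keylen);

                /* keep the software fallback's key in sync with the hardware */
                return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
        }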
Each algorithm entry point only sets the hardware mode bits in the request context:

        In rk_aes_ecb_encrypt():        rctx->mode = RK_CRYPTO_AES_ECB_MODE;
        In rk_aes_ecb_decrypt():        rctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        In rk_aes_cbc_encrypt():        rctx->mode = RK_CRYPTO_AES_CBC_MODE;
        In rk_aes_cbc_decrypt():        rctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        In rk_des_ecb_encrypt():        rctx->mode = 0;
        In rk_des_ecb_decrypt():        rctx->mode = RK_CRYPTO_DEC;
        In rk_des_cbc_encrypt():        rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        In rk_des_cbc_decrypt():        rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        In rk_des3_ede_ecb_encrypt():   rctx->mode = RK_CRYPTO_TDES_SELECT;
        In rk_des3_ede_ecb_decrypt():   rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        In rk_des3_ede_cbc_encrypt():   rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        In rk_des3_ede_cbc_decrypt():   rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
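Apart from the mode word, these handlers are identical. A sketch of one complete entry point, assuming the request context sits inside the skcipher request (as the reqsize set in rk_cipher_tfm_init() below suggests) and that rk_cipher_handle_req() takes just the request:

        static int aes_ecb_encrypt_sketch(struct skcipher_request *req)
        {
                struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

                rctx->mode = RK_CRYPTO_AES_ECB_MODE;    /* no RK_CRYPTO_DEC: encrypt */
                return rk_cipher_handle_req(req);
        }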
In rk_cipher_hw_init(), the TDES register block:

        rctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                      ...
        CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, rctx->mode);
        memcpy_toio(dev->reg + RK_CRYPTO_TDES_KEY1_0, ctx->key, ctx->keylen);

and the AES register block:

        rctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
                      ...
        if (ctx->keylen == AES_KEYSIZE_192)
                rctx->mode |= RK_CRYPTO_AES_192BIT_key;
        else if (ctx->keylen == AES_KEYSIZE_256)
                rctx->mode |= RK_CRYPTO_AES_256BIT_key;
        CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, rctx->mode);
        memcpy_toio(dev->reg + RK_CRYPTO_AES_KEY_0, ctx->key, ctx->keylen);
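CRYPTO_WRITE() is not defined in this excerpt. A hypothetical reading, assuming it is a thin wrapper over the kernel's MMIO write helpers (the real macro body may differ):

        /* hypothetical: write a 32-bit value to a register at the given offset */
        #define CRYPTO_WRITE(dev, offset, val) \
                writel_relaxed((val), (dev)->reg + (offset))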
In rk_cipher_run():

        u8 *ivtouse = areq->iv;
        unsigned int len = areq->cryptlen;
        ...
        struct rk_crypto_info *rkc = rctx->dev;

        err = pm_runtime_resume_and_get(rkc->dev);
        ...
        algt->stat_req++;
        rkc->nreq++;

        /* For decryption, stash the last ciphertext block: it becomes the
         * IV that has to be handed back to the caller at the end. */
        if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
                if (rctx->mode & RK_CRYPTO_DEC) {
                        offset = areq->cryptlen - ivsize;
                        scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
                                                 ...

        /* Walk the source/destination scatterlists one entry at a time. */
        sgs = areq->src;
        sgd = areq->dst;
        ...
        if (!sgs->length) {
                ...
        if (rctx->mode & RK_CRYPTO_DEC) {
                ...
                offset = sgs->length - ivsize;
                ...

        /* Map the current entries for DMA (bidirectionally when the request
         * is in-place, i.e. source and destination are the same list). */
        err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
        ...
                err = -EINVAL;
        ...
        err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
        ...
                err = -EINVAL;
        ...
        err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
        ...
                err = -EINVAL;
        ...

        /* Program the IV into whichever register block (TDES or AES) is in use. */
        memcpy_toio(rkc->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
        ...
        memcpy_toio(rkc->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
        ...

        /* Kick off the transfer and wait for the interrupt handler to signal
         * completion, or give up on timeout. */
        reinit_completion(&rkc->complete);
        rkc->status = 0;
        ...
        len -= todo;
        ...
        wait_for_completion_interruptible_timeout(&rkc->complete,
                                                  ...
        if (!rkc->status) {
                dev_err(rkc->dev, "DMA timeout\n");
                err = -EFAULT;
                ...
        dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
        ...
        dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
        dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
        ...
        if (rctx->mode & RK_CRYPTO_DEC) {
                ...
                offset = sgd->length - ivsize;
                ...

        /* Hand the final IV back to the caller, as the skcipher API requires. */
        if (areq->iv && ivsize > 0) {
                offset = areq->cryptlen - ivsize;
                if (rctx->mode & RK_CRYPTO_DEC) {
                        memcpy(areq->iv, rctx->backup_iv, ivsize);
                        memzero_explicit(rctx->backup_iv, ivsize);
                ...
                scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
                                         ...

        pm_runtime_put_autosuspend(rkc->dev);
        ...

        /* Error path: undo whatever DMA mappings were set up above. */
        dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
        ...
        dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
        dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
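The wait_for_completion_interruptible_timeout()/rkc->status pairing implies that the device's interrupt handler, which lives outside this file, flags success and completes rkc->complete. A hedged sketch of that contract, with every name beyond rkc->status and rkc->complete assumed:

        /* hypothetical IRQ-side counterpart, as implied by the wait above */
        static irqreturn_t rk_crypto_irq_sketch(int irq, void *data)
        {
                struct rk_crypto_info *rkc = data;

                /* acknowledge/clear the hardware interrupt (register writes elided) */
                rkc->status = 1;                /* 0 would mean the transfer failed */
                complete(&rkc->complete);       /* wake the waiter in rk_cipher_run() */
                return IRQ_HANDLED;
        }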
In rk_cipher_tfm_init():

        const char *name = crypto_tfm_alg_name(&tfm->base);
        ...
        ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->fallback_tfm)) {
                dev_err(algt->dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
                        name, PTR_ERR(ctx->fallback_tfm));
                return PTR_ERR(ctx->fallback_tfm);
        }

        tfm->reqsize = sizeof(struct rk_cipher_rctx) +
                       crypto_skcipher_reqsize(ctx->fallback_tfm);
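Setting tfm->reqsize to the driver's per-request context plus the fallback's own reqsize reserves room for both in every skcipher request; that only works if the fallback request is the last member of the context. A plausible layout, consistent with the fields used above (the real definition lives in the driver's header and may differ):

        struct rk_cipher_rctx {
                struct rk_crypto_info *dev;             /* set in rk_cipher_handle_req() */
                u8 backup_iv[AES_BLOCK_SIZE];           /* saved IV for CBC decryption */
                u32 mode;                               /* RK_CRYPTO_* control bits */
                struct skcipher_request fallback_req;   /* must stay last */
        };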
In rk_cipher_tfm_exit():

        memzero_explicit(ctx->key, ctx->keylen);
        crypto_free_skcipher(ctx->fallback_tfm);
Registered driver names (one per algorithm):

        .base.cra_driver_name = "ecb-aes-rk",
        .base.cra_driver_name = "cbc-aes-rk",
        .base.cra_driver_name = "ecb-des-rk",
        .base.cra_driver_name = "cbc-des-rk",
        .base.cra_driver_name = "ecb-des3-ede-rk",
        .base.cra_driver_name = "cbc-des3-ede-rk",
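Only the driver names match the search; each pairs with a generic algorithm name, and the fallback handling above implies CRYPTO_ALG_NEED_FALLBACK is set at registration. A hedged sketch of the ECB-AES entry (every field except the two names and the handlers already seen above is an assumption):

        .base.cra_name          = "ecb(aes)",
        .base.cra_driver_name   = "ecb-aes-rk",
        .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .min_keysize            = AES_MIN_KEY_SIZE,
        .max_keysize            = AES_MAX_KEY_SIZE,
        .setkey                 = rk_aes_setkey,
        .encrypt                = rk_aes_ecb_encrypt,
        .decrypt                = rk_aes_ecb_decrypt,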