Lines matching +full:inline +full:- +full:crypto +full:- +full:engine
(the matching excerpts below are from the Marvell CESA skcipher driver, drivers/crypto/marvell/cesa/cipher.c)

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Cipher algorithms supported by the CESA: DES, 3DES and AES.
 *
 * Author: Boris Brezillon <boris.brezillon@free-electrons.com>
 * ...
 */

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <linux/dma-mapping.h>
static inline void
mv_cesa_skcipher_req_iter_init(struct mv_cesa_skcipher_dma_iter *iter,
                               struct skcipher_request *req)
{
        mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen);
        mv_cesa_sg_dma_iter_init(&iter->src, req->src, DMA_TO_DEVICE);
        mv_cesa_sg_dma_iter_init(&iter->dst, req->dst, DMA_FROM_DEVICE);
}
static inline bool
mv_cesa_skcipher_req_iter_next_op(struct mv_cesa_skcipher_dma_iter *iter)
{
        iter->src.op_offset = 0;
        iter->dst.op_offset = 0;

        return mv_cesa_req_dma_iter_next_op(&iter->base);
}
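These two helpers feed the chunking loop in mv_cesa_skcipher_dma_req_init() further
down: the iterator splits req->cryptlen into SRAM-sized operations and rewinds the
per-op source/destination offsets between chunks. A minimal sketch of the intended
usage pattern (loop body elided):

        struct mv_cesa_skcipher_dma_iter iter;

        mv_cesa_skcipher_req_iter_init(&iter, req);
        do {
                /* build one op covering iter.base.op_len bytes */
        } while (mv_cesa_skcipher_req_iter_next_op(&iter));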
static inline void
mv_cesa_skcipher_dma_cleanup(struct skcipher_request *req)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);

        if (req->dst != req->src) {
                dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
                             DMA_FROM_DEVICE);
                dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
                             DMA_TO_DEVICE);
        } else {
                dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
                             DMA_BIDIRECTIONAL);
        }
        mv_cesa_dma_cleanup(&creq->base);
}
static inline void mv_cesa_skcipher_cleanup(struct skcipher_request *req)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);

        if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
                mv_cesa_skcipher_dma_cleanup(req);
}
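Only the TDMA path has anything to undo here; the standard path below stages data
through the engine's SRAM and never maps the scatterlists. In short:

/*
 * std path: memcpy()/memcpy_toio() via SRAM, nothing allocated -> no cleanup
 * DMA path: dma_map_sg() + TDMA descriptor chain -> mv_cesa_skcipher_dma_cleanup()
 */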
static void mv_cesa_skcipher_std_step(struct skcipher_request *req)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct mv_cesa_skcipher_std_req *sreq = &creq->std;
        struct mv_cesa_engine *engine = creq->base.engine;
        size_t len = min_t(size_t, req->cryptlen - sreq->offset,
                           CESA_SA_SRAM_PAYLOAD_SIZE);

        mv_cesa_adjust_op(engine, &sreq->op);
        if (engine->pool)
                memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op));
        else
                memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));

        len = mv_cesa_sg_copy_to_sram(engine, req->src, creq->src_nents,
                                      CESA_SA_DATA_SRAM_OFFSET, len,
                                      sreq->offset);

        sreq->size = len;
        mv_cesa_set_crypt_op_len(&sreq->op, len);

        /* FIXME: only update enc_len field */
        if (!sreq->skip_ctx) {
                if (engine->pool)
                        memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op));
                else
                        memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
                sreq->skip_ctx = true;
        } else if (engine->pool)
                memcpy(engine->sram_pool, &sreq->op, sizeof(sreq->op.desc));
        else
                memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op.desc));

        mv_cesa_set_int_mask(engine, CESA_SA_INT_ACCEL0_DONE);
        writel_relaxed(CESA_SA_CFG_PARA_DIS, engine->regs + CESA_SA_CFG);
        WARN_ON(readl(engine->regs + CESA_SA_CMD) &
                CESA_SA_CMD_EN_CESA_SA_ACCL0);
        writel(CESA_SA_CMD_EN_CESA_SA_ACCL0, engine->regs + CESA_SA_CMD);
}
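A note on the skip_ctx dance above (a reading of the code, not a comment from the
source):

/*
 * Chunk 0 copies the whole operation, sizeof(sreq->op): descriptor plus
 * skcipher context (key and IV). Every later chunk copies only
 * sizeof(sreq->op.desc), so the IV words the engine updated in SRAM at
 * the end of the previous chunk survive and CBC chaining stays correct.
 */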
static int mv_cesa_skcipher_std_process(struct skcipher_request *req,
                                        u32 status)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct mv_cesa_skcipher_std_req *sreq = &creq->std;
        struct mv_cesa_engine *engine = creq->base.engine;
        size_t len;

        len = mv_cesa_sg_copy_from_sram(engine, req->dst, creq->dst_nents,
                                        CESA_SA_DATA_SRAM_OFFSET, sreq->size,
                                        sreq->offset);

        sreq->offset += len;
        if (sreq->offset < req->cryptlen)
                return -EINPROGRESS;

        return 0;
}
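The -EINPROGRESS return keeps the request on the engine, so the core invokes
->step() again for the next chunk. A hypothetical sketch of that outer loop (the
real one lives in the cesa core's queue/interrupt handling; these names are only
illustrative):

        do {
                mv_cesa_skcipher_std_step(req); /* kick one SRAM-sized chunk */
                /* ... wait for CESA_SA_INT_ACCEL0_DONE ... */
                ret = mv_cesa_skcipher_std_process(req, status);
        } while (ret == -EINPROGRESS);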
static int mv_cesa_skcipher_process(struct crypto_async_request *req,
                                    u32 status)
{
        struct skcipher_request *skreq = skcipher_request_cast(req);
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
        struct mv_cesa_req *basereq = &creq->base;

        if (mv_cesa_req_get_type(basereq) == CESA_DMA_REQ)
                return mv_cesa_dma_process(basereq, status);

        return mv_cesa_skcipher_std_process(skreq, status);
}

static void mv_cesa_skcipher_step(struct crypto_async_request *req)
{
        struct skcipher_request *skreq = skcipher_request_cast(req);
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);

        if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
                mv_cesa_dma_step(&creq->base);
        else
                mv_cesa_skcipher_std_step(skreq);
}
static inline void
mv_cesa_skcipher_dma_prepare(struct skcipher_request *req)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct mv_cesa_req *basereq = &creq->base;

        mv_cesa_dma_prepare(basereq, basereq->engine);
}
static inline void
mv_cesa_skcipher_std_prepare(struct skcipher_request *req)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct mv_cesa_skcipher_std_req *sreq = &creq->std;

        sreq->size = 0;
        sreq->offset = 0;
}
static inline void mv_cesa_skcipher_prepare(struct crypto_async_request *req,
                                            struct mv_cesa_engine *engine)
{
        struct skcipher_request *skreq = skcipher_request_cast(req);
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);

        creq->base.engine = engine;

        if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
                mv_cesa_skcipher_dma_prepare(skreq);
        else
                mv_cesa_skcipher_std_prepare(skreq);
}
static inline void
mv_cesa_skcipher_req_cleanup(struct crypto_async_request *req)
{
        struct skcipher_request *skreq = skcipher_request_cast(req);

        mv_cesa_skcipher_cleanup(skreq);
}

static void
mv_cesa_skcipher_complete(struct crypto_async_request *req)
{
        struct skcipher_request *skreq = skcipher_request_cast(req);
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
        struct mv_cesa_engine *engine = creq->base.engine;
        unsigned int ivsize;

        atomic_sub(skreq->cryptlen, &engine->load);
        ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(skreq));

        /* Copy the output IV back into req->iv, as the skcipher API expects. */
        if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) {
                struct mv_cesa_req *basereq;

                basereq = &creq->base;
                memcpy(skreq->iv, basereq->chain.last->op->ctx.skcipher.iv,
                       ivsize);
        } else if (engine->pool)
                memcpy(skreq->iv,
                       engine->sram_pool + CESA_SA_CRYPT_IV_SRAM_OFFSET,
                       ivsize);
        else
                memcpy_fromio(skreq->iv,
                              engine->sram + CESA_SA_CRYPT_IV_SRAM_OFFSET,
                              ivsize);
}
static void mv_cesa_skcipher_cra_exit(struct crypto_tfm *tfm)
{
        void *ctx = crypto_tfm_ctx(tfm);

        memzero_explicit(ctx, tfm->__crt_alg->cra_ctxsize);
}
static int mv_cesa_skcipher_cra_init(struct crypto_tfm *tfm)
{
        struct mv_cesa_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->ops = &mv_cesa_skcipher_req_ops;

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct mv_cesa_skcipher_req));

        return 0;
}
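For context, the ops table installed above wires the four callbacks together; in
the full file it reads roughly as follows (reconstructed, so treat as a sketch):

static const struct mv_cesa_req_ops mv_cesa_skcipher_req_ops = {
        .step = mv_cesa_skcipher_step,
        .process = mv_cesa_skcipher_process,
        .cleanup = mv_cesa_skcipher_req_cleanup,
        .complete = mv_cesa_skcipher_complete,
};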
static int mv_cesa_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
                              unsigned int len)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
        struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        int remaining;
        int offset;
        int ret;
        int i;

        ret = aes_expandkey(&ctx->aes, key, len);
        if (ret)
                return ret;

        remaining = (ctx->aes.key_length - 16) / 4;
        offset = ctx->aes.key_length + 24 - remaining;
        for (i = 0; i < remaining; i++)
                ctx->aes.key_dec[4 + i] = ctx->aes.key_enc[offset + i];

        return 0;
}
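A quick check of the index arithmetic above (key_length is in bytes; key_enc and
key_dec are u32 words; the values below are just the formulas evaluated, not taken
from the source):

/*
 * remaining = (key_length - 16) / 4, offset = key_length + 24 - remaining:
 *   AES-128 (16): remaining = 0, offset = 40 -> nothing copied
 *   AES-192 (24): remaining = 2, offset = 46 -> key_dec[4..5] = key_enc[46..47]
 *   AES-256 (32): remaining = 4, offset = 52 -> key_dec[4..7] = key_enc[52..55]
 */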
static int mv_cesa_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
                              unsigned int len)
{
        struct mv_cesa_des_ctx *ctx = crypto_skcipher_ctx(cipher);
        int err;

        err = verify_skcipher_des_key(cipher, key);
        if (err)
                return err;

        memcpy(ctx->key, key, DES_KEY_SIZE);

        return 0;
}
static int mv_cesa_des3_ede_setkey(struct crypto_skcipher *cipher,
                                   const u8 *key, unsigned int len)
{
        struct mv_cesa_des3_ctx *ctx = crypto_skcipher_ctx(cipher);
        int err;

        err = verify_skcipher_des3_key(cipher, key);
        if (err)
                return err;

        memcpy(ctx->key, key, DES3_EDE_KEY_SIZE);

        return 0;
}
static int mv_cesa_skcipher_dma_req_init(struct skcipher_request *req,
                                         const struct mv_cesa_op_ctx *op_templ)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                      GFP_KERNEL : GFP_ATOMIC;
        struct mv_cesa_req *basereq = &creq->base;
        struct mv_cesa_skcipher_dma_iter iter;
        bool skip_ctx = false;
        int ret;

        basereq->chain.first = NULL;
        basereq->chain.last = NULL;

        if (req->src != req->dst) {
                ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
                                 DMA_TO_DEVICE);
                if (!ret)
                        return -ENOMEM;

                ret = dma_map_sg(cesa_dev->dev, req->dst, creq->dst_nents,
                                 DMA_FROM_DEVICE);
                if (!ret) {
                        ret = -ENOMEM;
                        goto err_unmap_src;
                }
        } else {
                ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
                                 DMA_BIDIRECTIONAL);
                if (!ret)
                        return -ENOMEM;
        }

        mv_cesa_tdma_desc_iter_init(&basereq->chain);
        mv_cesa_skcipher_req_iter_init(&iter, req);

        do {
                struct mv_cesa_op_ctx *op;

                op = mv_cesa_dma_add_op(&basereq->chain, op_templ, skip_ctx,
                                        flags);
                if (IS_ERR(op)) {
                        ret = PTR_ERR(op);
                        goto err_free_tdma;
                }
                skip_ctx = true;

                mv_cesa_set_crypt_op_len(op, iter.base.op_len);

                /* Add input transfers */
                ret = mv_cesa_dma_add_op_transfers(&basereq->chain, &iter.base,
                                                   &iter.src, flags);
                if (ret)
                        goto err_free_tdma;

                /* Add dummy desc to launch the crypto operation */
                ret = mv_cesa_dma_add_dummy_launch(&basereq->chain, flags);
                if (ret)
                        goto err_free_tdma;

                /* Add output transfers */
                ret = mv_cesa_dma_add_op_transfers(&basereq->chain, &iter.base,
                                                   &iter.dst, flags);
                if (ret)
                        goto err_free_tdma;

        } while (mv_cesa_skcipher_req_iter_next_op(&iter));

        /* Add output data for IV */
        ret = mv_cesa_dma_add_result_op(&basereq->chain,
                                        CESA_SA_CFG_SRAM_OFFSET,
                                        CESA_SA_DATA_SRAM_OFFSET,
                                        CESA_TDMA_SRC_IN_SRAM, flags);
        if (ret)
                goto err_free_tdma;

        basereq->chain.last->flags |= CESA_TDMA_END_OF_REQ;

        return 0;

err_free_tdma:
        mv_cesa_dma_cleanup(basereq);
        if (req->dst != req->src)
                dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
                             DMA_FROM_DEVICE);

err_unmap_src:
        dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
                     req->dst != req->src ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL);

        return ret;
}
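The descriptor chain the loop above builds, per chunk, as a sketch (derived from
the calls, not an authoritative layout):

/*
 * [op ctx] -> [src sg -> SRAM transfers] -> [dummy launch] -> [SRAM -> dst sg]
 *   ... repeated for each SRAM-sized chunk ...
 * -> [result op: fetch the updated IV from SRAM]
 * The last descriptor is flagged CESA_TDMA_END_OF_REQ.
 */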
static inline int
mv_cesa_skcipher_std_req_init(struct skcipher_request *req,
                              const struct mv_cesa_op_ctx *op_templ)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct mv_cesa_skcipher_std_req *sreq = &creq->std;
        struct mv_cesa_req *basereq = &creq->base;

        sreq->op = *op_templ;
        sreq->skip_ctx = false;
        basereq->chain.first = NULL;
        basereq->chain.last = NULL;

        return 0;
}
static int mv_cesa_skcipher_req_init(struct skcipher_request *req,
                                     struct mv_cesa_op_ctx *tmpl)
{
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        unsigned int blksize = crypto_skcipher_blocksize(tfm);
        int ret;

        if (!IS_ALIGNED(req->cryptlen, blksize))
                return -EINVAL;

        creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);
        if (creq->src_nents < 0) {
                dev_err(cesa_dev->dev, "Invalid number of src SG");
                return creq->src_nents;
        }
        creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
        if (creq->dst_nents < 0) {
                dev_err(cesa_dev->dev, "Invalid number of dst SG");
                return creq->dst_nents;
        }

        mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_OP_CRYPT_ONLY,
                              CESA_SA_DESC_CFG_OP_MSK);

        if (cesa_dev->caps->has_tdma)
                ret = mv_cesa_skcipher_dma_req_init(req, tmpl);
        else
                ret = mv_cesa_skcipher_std_req_init(req, tmpl);

        return ret;
}
static int mv_cesa_skcipher_queue_req(struct skcipher_request *req,
                                      struct mv_cesa_op_ctx *tmpl)
{
        int ret;
        struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
        struct mv_cesa_engine *engine;

        ret = mv_cesa_skcipher_req_init(req, tmpl);
        if (ret)
                return ret;

        engine = mv_cesa_select_engine(req->cryptlen);
        mv_cesa_skcipher_prepare(&req->base, engine);

        ret = mv_cesa_queue_req(&req->base, &creq->base);

        if (mv_cesa_req_needs_cleanup(&req->base, ret))
                mv_cesa_skcipher_cleanup(req);

        return ret;
}
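mv_cesa_queue_req() may hand the request to the engine or to the backlog, and
mv_cesa_req_needs_cleanup() (a helper in cesa.h) decides whether the state built by
req_init must be torn down immediately. Its logic, roughly:

/*
 * -EINPROGRESS: queued normally                    -> keep resources
 * -EBUSY: backlogged (CRYPTO_TFM_REQ_MAY_BACKLOG)  -> keep resources
 * any other error: never queued                    -> clean up now
 */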
static int mv_cesa_des_op(struct skcipher_request *req,
                          struct mv_cesa_op_ctx *tmpl)
{
        struct mv_cesa_des_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

        mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTM_DES,
                              CESA_SA_DESC_CFG_CRYPTM_MSK);

        memcpy(tmpl->ctx.skcipher.key, ctx->key, DES_KEY_SIZE);

        return mv_cesa_skcipher_queue_req(req, tmpl);
}

struct skcipher_alg mv_cesa_ecb_des_alg = {
        /* ... */
        .base = {
                .cra_name = "ecb(des)",
                .cra_driver_name = "mv-ecb-des",
                /* ... */
        },
};
static int mv_cesa_cbc_des_op(struct skcipher_request *req,
                              struct mv_cesa_op_ctx *tmpl)
{
        mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTCM_CBC,
                              CESA_SA_DESC_CFG_CRYPTCM_MSK);

        memcpy(tmpl->ctx.skcipher.iv, req->iv, DES_BLOCK_SIZE);

        return mv_cesa_des_op(req, tmpl);
}

struct skcipher_alg mv_cesa_cbc_des_alg = {
        /* ... */
        .base = {
                .cra_name = "cbc(des)",
                .cra_driver_name = "mv-cbc-des",
                /* ... */
        },
};
static int mv_cesa_des3_op(struct skcipher_request *req,
                           struct mv_cesa_op_ctx *tmpl)
{
        struct mv_cesa_des3_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

        mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTM_3DES,
                              CESA_SA_DESC_CFG_CRYPTM_MSK);

        memcpy(tmpl->ctx.skcipher.key, ctx->key, DES3_EDE_KEY_SIZE);

        return mv_cesa_skcipher_queue_req(req, tmpl);
}

struct skcipher_alg mv_cesa_ecb_des3_ede_alg = {
        /* ... */
        .base = {
                .cra_name = "ecb(des3_ede)",
                .cra_driver_name = "mv-ecb-des3-ede",
                /* ... */
        },
};
static int mv_cesa_cbc_des3_op(struct skcipher_request *req,
                               struct mv_cesa_op_ctx *tmpl)
{
        memcpy(tmpl->ctx.skcipher.iv, req->iv, DES3_EDE_BLOCK_SIZE);

        return mv_cesa_des3_op(req, tmpl);
}

struct skcipher_alg mv_cesa_cbc_des3_ede_alg = {
        /* ... */
        .base = {
                .cra_name = "cbc(des3_ede)",
                .cra_driver_name = "mv-cbc-des3-ede",
                /* ... */
        },
};
static int mv_cesa_aes_op(struct skcipher_request *req,
                          struct mv_cesa_op_ctx *tmpl)
{
        struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        int i;
        u32 *key;
        u32 cfg;

        cfg = CESA_SA_DESC_CFG_CRYPTM_AES;

        if (mv_cesa_get_op_cfg(tmpl) & CESA_SA_DESC_CFG_DIR_DEC)
                key = ctx->aes.key_dec;
        else
                key = ctx->aes.key_enc;

        for (i = 0; i < ctx->aes.key_length / sizeof(u32); i++)
                tmpl->ctx.skcipher.key[i] = cpu_to_le32(key[i]);

        if (ctx->aes.key_length == 24)
                cfg |= CESA_SA_DESC_CFG_AES_LEN_192;
        else if (ctx->aes.key_length == 32)
                cfg |= CESA_SA_DESC_CFG_AES_LEN_256;

        mv_cesa_update_op_cfg(tmpl, cfg,
                              CESA_SA_DESC_CFG_CRYPTM_MSK |
                              CESA_SA_DESC_CFG_AES_LEN_MSK);

        return mv_cesa_skcipher_queue_req(req, tmpl);
}

struct skcipher_alg mv_cesa_ecb_aes_alg = {
        /* ... */
        .base = {
                .cra_name = "ecb(aes)",
                .cra_driver_name = "mv-ecb-aes",
                /* ... */
        },
};
static int mv_cesa_cbc_aes_op(struct skcipher_request *req,
                              struct mv_cesa_op_ctx *tmpl)
{
        mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTCM_CBC,
                              CESA_SA_DESC_CFG_CRYPTCM_MSK);

        memcpy(tmpl->ctx.skcipher.iv, req->iv, AES_BLOCK_SIZE);

        return mv_cesa_aes_op(req, tmpl);
}

struct skcipher_alg mv_cesa_cbc_aes_alg = {
        /* ... */
        .base = {
                .cra_name = "cbc(aes)",
                .cra_driver_name = "mv-cbc-aes",
                /* ... */
        },
};