Lines Matching +full:p +full:- +full:384

1 // SPDX-License-Identifier: GPL-2.0
14 #include <linux/dma-mapping.h>
33 #define HPRE_INVLD_REQ_ID (-1)
63 /* low address: e--->n */
67 /* low address: d--->n */
71 /* low address: dq->dp->q->p->qinv */
81 * ya = g^xa mod p; [RFC2631 sec 2.1.1]
84 * ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]
85 * low address: d--->n, please refer to Hisilicon HPRE UM
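Note: the two relations quoted above are the textbook Diffie-Hellman exchange from RFC 2631; written out with both parties (the yb step is implied by the comments rather than shown in this excerpt):

    ya = g^xa mod p                          (A's public value, RFC 2631 sec 2.1.1)
    yb = g^xb mod p                          (B's public value)
    ZZ = yb^xa mod p = ya^xb mod p           (shared secret, identical on both sides)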
95 /* low address: p->a->k->b */
96 unsigned char *p; member
99 /* low address: x->y */
105 /* low address: p->a->k */
106 unsigned char *p; member
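Note: the "low address:" comments above describe how key material is packed into single DMA-coherent buffers. A sketch of the layouts they imply, assuming one key_sz-sized (or hlf_ksz-sized, for CRT) slot per field, which is consistent with the sz << 3 / sz << 2 allocations and the hlf_ksz stepping visible further down; the diagram itself is illustrative, not taken from the driver:

    /* ecdh.p buffer, sz << 3 bytes:        p | a | k | b | g.x | g.y | ...  */
    /* curve25519.p buffer, sz << 2 bytes:  p | a | k | g.x                  */
    /* rsa.crt_prikey, 5 * hlf_ksz bytes:   dq | dp | q | p | qinv           */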
152 return ((crypto_dma_align() - 1) | (HPRE_ALIGN_SZ - 1)) + 1; in hpre_align_sz()
157 return (hpre_align_sz() - 1) & ~(crypto_tfm_ctx_alignment() - 1); in hpre_align_pd()
165 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_alloc_req_id()
166 id = idr_alloc(&ctx->req_idr, NULL, 0, ctx->qp->sq_depth, GFP_ATOMIC); in hpre_alloc_req_id()
167 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_alloc_req_id()
176 spin_lock_irqsave(&ctx->req_lock, flags); in hpre_free_req_id()
177 idr_remove(&ctx->req_idr, req_id); in hpre_free_req_id()
178 spin_unlock_irqrestore(&ctx->req_lock, flags); in hpre_free_req_id()
187 ctx = hpre_req->ctx; in hpre_add_req_to_ctx()
190 return -EINVAL; in hpre_add_req_to_ctx()
192 ctx->req_list[id] = hpre_req; in hpre_add_req_to_ctx()
193 hpre_req->req_id = id; in hpre_add_req_to_ctx()
195 dfx = ctx->hpre->debug.dfx; in hpre_add_req_to_ctx()
197 ktime_get_ts64(&hpre_req->req_time); in hpre_add_req_to_ctx()
204 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_rm_req_from_ctx()
205 int id = hpre_req->req_id; in hpre_rm_req_from_ctx()
207 if (hpre_req->req_id >= 0) { in hpre_rm_req_from_ctx()
208 hpre_req->req_id = HPRE_INVLD_REQ_ID; in hpre_rm_req_from_ctx()
209 ctx->req_list[id] = NULL; in hpre_rm_req_from_ctx()
222 return ERR_PTR(-ENODEV); in hpre_get_qp_and_start()
228 pci_err(qp->qm->pdev, "Can not start qp!\n"); in hpre_get_qp_and_start()
229 return ERR_PTR(-EINVAL); in hpre_get_qp_and_start()
239 struct device *dev = hpre_req->ctx->dev; in hpre_get_data_dma_addr()
243 hpre_req->src = NULL; in hpre_get_data_dma_addr()
246 hpre_req->dst = NULL; in hpre_get_data_dma_addr()
252 return -ENOMEM; in hpre_get_data_dma_addr()
262 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_prepare_dma_buf()
263 struct device *dev = ctx->dev; in hpre_prepare_dma_buf()
267 shift = ctx->key_sz - len; in hpre_prepare_dma_buf()
269 return -EINVAL; in hpre_prepare_dma_buf()
271 ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC); in hpre_prepare_dma_buf()
273 return -ENOMEM; in hpre_prepare_dma_buf()
277 hpre_req->src = ptr; in hpre_prepare_dma_buf()
279 hpre_req->dst = ptr; in hpre_prepare_dma_buf()
289 struct hpre_sqe *msg = &hpre_req->req; in hpre_hw_data_init()
290 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_hw_data_init()
295 if ((sg_is_last(data) && len == ctx->key_sz) && in hpre_hw_data_init()
305 msg->in = cpu_to_le64(tmp); in hpre_hw_data_init()
307 msg->out = cpu_to_le64(tmp); in hpre_hw_data_init()
317 struct device *dev = ctx->dev; in hpre_hw_data_clr_all()
318 struct hpre_sqe *sqe = &req->req; in hpre_hw_data_clr_all()
321 tmp = le64_to_cpu(sqe->in); in hpre_hw_data_clr_all()
326 if (req->src) in hpre_hw_data_clr_all()
327 dma_free_coherent(dev, ctx->key_sz, req->src, tmp); in hpre_hw_data_clr_all()
329 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE); in hpre_hw_data_clr_all()
332 tmp = le64_to_cpu(sqe->out); in hpre_hw_data_clr_all()
336 if (req->dst) { in hpre_hw_data_clr_all()
338 scatterwalk_map_and_copy(req->dst, dst, 0, in hpre_hw_data_clr_all()
339 ctx->key_sz, 1); in hpre_hw_data_clr_all()
340 dma_free_coherent(dev, ctx->key_sz, req->dst, tmp); in hpre_hw_data_clr_all()
342 dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE); in hpre_hw_data_clr_all()
358 id = (int)le16_to_cpu(sqe->tag); in hpre_alg_res_post_hf()
359 req = ctx->req_list[id]; in hpre_alg_res_post_hf()
363 err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) & in hpre_alg_res_post_hf()
366 done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) & in hpre_alg_res_post_hf()
372 alg = le32_to_cpu(sqe->dw0) & HREE_ALG_TYPE_MASK; in hpre_alg_res_post_hf()
373 dev_err_ratelimited(ctx->dev, "alg[0x%x] error: done[0x%x], etype[0x%x]\n", in hpre_alg_res_post_hf()
376 return -EINVAL; in hpre_alg_res_post_hf()
384 return -EINVAL; in hpre_ctx_set()
386 spin_lock_init(&ctx->req_lock); in hpre_ctx_set()
387 ctx->qp = qp; in hpre_ctx_set()
388 ctx->dev = &qp->qm->pdev->dev; in hpre_ctx_set()
390 hpre = container_of(ctx->qp->qm, struct hpre, qm); in hpre_ctx_set()
391 ctx->hpre = hpre; in hpre_ctx_set()
392 ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL); in hpre_ctx_set()
393 if (!ctx->req_list) in hpre_ctx_set()
394 return -ENOMEM; in hpre_ctx_set()
395 ctx->key_sz = 0; in hpre_ctx_set()
396 ctx->crt_g2_mode = false; in hpre_ctx_set()
397 idr_init(&ctx->req_idr); in hpre_ctx_set()
405 idr_destroy(&ctx->req_idr); in hpre_ctx_clear()
406 kfree(ctx->req_list); in hpre_ctx_clear()
407 hisi_qm_free_qps(&ctx->qp, 1); in hpre_ctx_clear()
410 ctx->crt_g2_mode = false; in hpre_ctx_clear()
411 ctx->key_sz = 0; in hpre_ctx_clear()
421 time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) * in hpre_is_bd_timeout()
423 (reply_time.tv_nsec - req->req_time.tv_nsec) / in hpre_is_bd_timeout()
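Note: hpre_is_bd_timeout() turns the two timespec64 samples (the request time recorded in hpre_add_req_to_ctx() and the reply time) into elapsed microseconds. A minimal standalone sketch of that arithmetic, using the generic USEC_PER_SEC/NSEC_PER_USEC constants in place of the driver's own macros; the helper name is illustrative:

    #include <linux/time64.h>

    /* Illustrative: microseconds elapsed between two timespec64 samples. */
    static u64 hpre_elapsed_us_example(const struct timespec64 *start,
                                       const struct timespec64 *end)
    {
            return (end->tv_sec - start->tv_sec) * USEC_PER_SEC +
                   (end->tv_nsec - start->tv_nsec) / NSEC_PER_USEC;
    }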
434 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_dh_cb()
441 areq = req->areq.dh; in hpre_dh_cb()
442 areq->dst_len = ctx->key_sz; in hpre_dh_cb()
448 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_dh_cb()
455 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_rsa_cb()
467 areq = req->areq.rsa; in hpre_rsa_cb()
468 areq->dst_len = ctx->key_sz; in hpre_rsa_cb()
469 hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_rsa_cb()
476 struct hpre_ctx *ctx = qp->qp_ctx; in hpre_alg_cb()
477 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_alg_cb()
479 struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)]; in hpre_alg_cb()
486 req->cb(ctx, resp); in hpre_alg_cb()
504 qp->qp_ctx = ctx; in hpre_ctx_init()
505 qp->req_cb = hpre_alg_cb; in hpre_ctx_init()
507 ret = hpre_ctx_set(ctx, qp, qp->sq_depth); in hpre_ctx_init()
524 if (akreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
525 akreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
526 return -EOVERFLOW; in hpre_msg_request_set()
531 h_req->cb = hpre_rsa_cb; in hpre_msg_request_set()
532 h_req->areq.rsa = akreq; in hpre_msg_request_set()
533 msg = &h_req->req; in hpre_msg_request_set()
538 if (kreq->dst_len < ctx->key_sz) { in hpre_msg_request_set()
539 kreq->dst_len = ctx->key_sz; in hpre_msg_request_set()
540 return -EOVERFLOW; in hpre_msg_request_set()
545 h_req->cb = hpre_dh_cb; in hpre_msg_request_set()
546 h_req->areq.dh = kreq; in hpre_msg_request_set()
547 msg = &h_req->req; in hpre_msg_request_set()
549 msg->key = cpu_to_le64(ctx->dh.dma_xa_p); in hpre_msg_request_set()
552 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
553 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_msg_request_set()
554 msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT); in hpre_msg_request_set()
555 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_msg_request_set()
556 h_req->ctx = ctx; in hpre_msg_request_set()
560 return -EBUSY; in hpre_msg_request_set()
562 msg->tag = cpu_to_le16((u16)req_id); in hpre_msg_request_set()
569 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_send()
575 ret = hisi_qp_send(ctx->qp, msg); in hpre_send()
576 if (ret != -EBUSY) in hpre_send()
584 if (ret != -EBUSY) in hpre_send()
596 struct hpre_sqe *msg = &hpre_req->req; in hpre_dh_compute_value()
603 if (req->src) { in hpre_dh_compute_value()
604 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1); in hpre_dh_compute_value()
608 msg->in = cpu_to_le64(ctx->dh.dma_g); in hpre_dh_compute_value()
611 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1); in hpre_dh_compute_value()
615 if (ctx->crt_g2_mode && !req->src) in hpre_dh_compute_value()
616 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2); in hpre_dh_compute_value()
618 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH); in hpre_dh_compute_value()
623 return -EINPROGRESS; in hpre_dh_compute_value()
627 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_dh_compute_value()
649 return -EINVAL; in hpre_is_dh_params_length_valid()
655 struct device *dev = ctx->dev; in hpre_dh_set_params()
658 if (params->p_size > HPRE_DH_MAX_P_SZ) in hpre_dh_set_params()
659 return -EINVAL; in hpre_dh_set_params()
661 if (hpre_is_dh_params_length_valid(params->p_size << in hpre_dh_set_params()
663 return -EINVAL; in hpre_dh_set_params()
665 sz = ctx->key_sz = params->p_size; in hpre_dh_set_params()
666 ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1, in hpre_dh_set_params()
667 &ctx->dh.dma_xa_p, GFP_KERNEL); in hpre_dh_set_params()
668 if (!ctx->dh.xa_p) in hpre_dh_set_params()
669 return -ENOMEM; in hpre_dh_set_params()
671 memcpy(ctx->dh.xa_p + sz, params->p, sz); in hpre_dh_set_params()
674 if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) { in hpre_dh_set_params()
675 ctx->crt_g2_mode = true; in hpre_dh_set_params()
679 ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL); in hpre_dh_set_params()
680 if (!ctx->dh.g) { in hpre_dh_set_params()
681 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_set_params()
682 ctx->dh.dma_xa_p); in hpre_dh_set_params()
683 ctx->dh.xa_p = NULL; in hpre_dh_set_params()
684 return -ENOMEM; in hpre_dh_set_params()
687 memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size); in hpre_dh_set_params()
694 struct device *dev = ctx->dev; in hpre_dh_clear_ctx()
695 unsigned int sz = ctx->key_sz; in hpre_dh_clear_ctx()
698 hisi_qm_stop_qp(ctx->qp); in hpre_dh_clear_ctx()
700 if (ctx->dh.g) { in hpre_dh_clear_ctx()
701 dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g); in hpre_dh_clear_ctx()
702 ctx->dh.g = NULL; in hpre_dh_clear_ctx()
705 if (ctx->dh.xa_p) { in hpre_dh_clear_ctx()
706 memzero_explicit(ctx->dh.xa_p, sz); in hpre_dh_clear_ctx()
707 dma_free_coherent(dev, sz << 1, ctx->dh.xa_p, in hpre_dh_clear_ctx()
708 ctx->dh.dma_xa_p); in hpre_dh_clear_ctx()
709 ctx->dh.xa_p = NULL; in hpre_dh_clear_ctx()
723 return -EINVAL; in hpre_dh_set_secret()
732 memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key, in hpre_dh_set_secret()
746 return ctx->key_sz; in hpre_dh_max_size()
769 (*len)--; in hpre_rsa_drop_leading_zeros()
799 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_enc()
803 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_enc()
804 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_enc()
805 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_enc()
811 if (unlikely(!ctx->rsa.pubkey)) in hpre_rsa_enc()
812 return -EINVAL; in hpre_rsa_enc()
818 msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT); in hpre_rsa_enc()
819 msg->key = cpu_to_le64(ctx->rsa.dma_pubkey); in hpre_rsa_enc()
821 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_enc()
825 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_enc()
832 return -EINPROGRESS; in hpre_rsa_enc()
836 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_enc()
847 struct hpre_sqe *msg = &hpre_req->req; in hpre_rsa_dec()
851 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_dec()
852 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) { in hpre_rsa_dec()
853 akcipher_request_set_tfm(req, ctx->rsa.soft_tfm); in hpre_rsa_dec()
859 if (unlikely(!ctx->rsa.prikey)) in hpre_rsa_dec()
860 return -EINVAL; in hpre_rsa_dec()
866 if (ctx->crt_g2_mode) { in hpre_rsa_dec()
867 msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey); in hpre_rsa_dec()
868 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
871 msg->key = cpu_to_le64(ctx->rsa.dma_prikey); in hpre_rsa_dec()
872 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | in hpre_rsa_dec()
876 ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0); in hpre_rsa_dec()
880 ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0); in hpre_rsa_dec()
887 return -EINPROGRESS; in hpre_rsa_dec()
891 hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_rsa_dec()
903 ctx->key_sz = vlen; in hpre_rsa_set_n()
906 if (!hpre_rsa_key_size_is_support(ctx->key_sz)) in hpre_rsa_set_n()
909 ctx->rsa.pubkey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
910 &ctx->rsa.dma_pubkey, in hpre_rsa_set_n()
912 if (!ctx->rsa.pubkey) in hpre_rsa_set_n()
913 return -ENOMEM; in hpre_rsa_set_n()
916 ctx->rsa.prikey = dma_alloc_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
917 &ctx->rsa.dma_prikey, in hpre_rsa_set_n()
919 if (!ctx->rsa.prikey) { in hpre_rsa_set_n()
920 dma_free_coherent(ctx->dev, vlen << 1, in hpre_rsa_set_n()
921 ctx->rsa.pubkey, in hpre_rsa_set_n()
922 ctx->rsa.dma_pubkey); in hpre_rsa_set_n()
923 ctx->rsa.pubkey = NULL; in hpre_rsa_set_n()
924 return -ENOMEM; in hpre_rsa_set_n()
926 memcpy(ctx->rsa.prikey + vlen, ptr, vlen); in hpre_rsa_set_n()
928 memcpy(ctx->rsa.pubkey + vlen, ptr, vlen); in hpre_rsa_set_n()
941 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_e()
942 return -EINVAL; in hpre_rsa_set_e()
944 memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_e()
956 if (!ctx->key_sz || !vlen || vlen > ctx->key_sz) in hpre_rsa_set_d()
957 return -EINVAL; in hpre_rsa_set_d()
959 memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen); in hpre_rsa_set_d()
972 return -EINVAL; in hpre_crt_para_get()
974 memcpy(para + para_sz - len, ptr, len); in hpre_crt_para_get()
981 unsigned int hlf_ksz = ctx->key_sz >> 1; in hpre_rsa_setkey_crt()
982 struct device *dev = ctx->dev; in hpre_rsa_setkey_crt()
986 ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, in hpre_rsa_setkey_crt()
987 &ctx->rsa.dma_crt_prikey, in hpre_rsa_setkey_crt()
989 if (!ctx->rsa.crt_prikey) in hpre_rsa_setkey_crt()
990 return -ENOMEM; in hpre_rsa_setkey_crt()
992 ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz, in hpre_rsa_setkey_crt()
993 rsa_key->dq, rsa_key->dq_sz); in hpre_rsa_setkey_crt()
998 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
999 rsa_key->dp, rsa_key->dp_sz); in hpre_rsa_setkey_crt()
1004 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1005 rsa_key->q, rsa_key->q_sz); in hpre_rsa_setkey_crt()
1010 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1011 rsa_key->p, rsa_key->p_sz); in hpre_rsa_setkey_crt()
1016 ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz, in hpre_rsa_setkey_crt()
1017 rsa_key->qinv, rsa_key->qinv_sz); in hpre_rsa_setkey_crt()
1021 ctx->crt_g2_mode = true; in hpre_rsa_setkey_crt()
1027 memzero_explicit(ctx->rsa.crt_prikey, offset); in hpre_rsa_setkey_crt()
1028 dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey, in hpre_rsa_setkey_crt()
1029 ctx->rsa.dma_crt_prikey); in hpre_rsa_setkey_crt()
1030 ctx->rsa.crt_prikey = NULL; in hpre_rsa_setkey_crt()
1031 ctx->crt_g2_mode = false; in hpre_rsa_setkey_crt()
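Note: hpre_rsa_setkey_crt() above fills the CRT key blob in five hlf_ksz-sized steps, advancing the offset by hlf_ksz after each hpre_crt_para_get() call, which matches the dq->dp->q->p->qinv layout comment near the top of the file. Each parameter lands right-aligned in its slot. A minimal sketch of that copy; the helper name and the explicit memset are illustrative (the driver's coherent buffer is already zero-filled when allocated):

    #include <linux/errno.h>
    #include <linux/string.h>
    #include <linux/types.h>

    /* Illustrative: place a parameter right-aligned in a zero-padded slot
     * of slot_sz bytes, as each field of the CRT blob expects.
     */
    static int hpre_crt_slot_fill_example(u8 *slot, unsigned int slot_sz,
                                          const u8 *val, unsigned int len)
    {
            if (!len || len > slot_sz)
                    return -EINVAL;

            memset(slot, 0, slot_sz - len);         /* leading zero padding */
            memcpy(slot + slot_sz - len, val, len);
            return 0;
    }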
1039 unsigned int half_key_sz = ctx->key_sz >> 1; in hpre_rsa_clear_ctx()
1040 struct device *dev = ctx->dev; in hpre_rsa_clear_ctx()
1043 hisi_qm_stop_qp(ctx->qp); in hpre_rsa_clear_ctx()
1045 if (ctx->rsa.pubkey) { in hpre_rsa_clear_ctx()
1046 dma_free_coherent(dev, ctx->key_sz << 1, in hpre_rsa_clear_ctx()
1047 ctx->rsa.pubkey, ctx->rsa.dma_pubkey); in hpre_rsa_clear_ctx()
1048 ctx->rsa.pubkey = NULL; in hpre_rsa_clear_ctx()
1051 if (ctx->rsa.crt_prikey) { in hpre_rsa_clear_ctx()
1052 memzero_explicit(ctx->rsa.crt_prikey, in hpre_rsa_clear_ctx()
1055 ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey); in hpre_rsa_clear_ctx()
1056 ctx->rsa.crt_prikey = NULL; in hpre_rsa_clear_ctx()
1059 if (ctx->rsa.prikey) { in hpre_rsa_clear_ctx()
1060 memzero_explicit(ctx->rsa.prikey, ctx->key_sz); in hpre_rsa_clear_ctx()
1061 dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey, in hpre_rsa_clear_ctx()
1062 ctx->rsa.dma_prikey); in hpre_rsa_clear_ctx()
1063 ctx->rsa.prikey = NULL; in hpre_rsa_clear_ctx()
1071 * CRT: return true, N-CRT: return false.

1075 u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz + in hpre_is_crt_key()
1076 key->qinv_sz; in hpre_is_crt_key()
1080 /* N-CRT less than 5 parameters */ in hpre_is_crt_key()
1119 if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) { in hpre_rsa_setkey()
1120 ret = -EINVAL; in hpre_rsa_setkey()
1137 ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setpubkey()
1150 ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen); in hpre_rsa_setprivkey()
1162 if (ctx->key_sz == HPRE_RSA_512BITS_KSZ || in hpre_rsa_max_size()
1163 ctx->key_sz == HPRE_RSA_1536BITS_KSZ) in hpre_rsa_max_size()
1164 return crypto_akcipher_maxsize(ctx->rsa.soft_tfm); in hpre_rsa_max_size()
1166 return ctx->key_sz; in hpre_rsa_max_size()
1174 ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0); in hpre_rsa_init_tfm()
1175 if (IS_ERR(ctx->rsa.soft_tfm)) { in hpre_rsa_init_tfm()
1177 return PTR_ERR(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1185 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_init_tfm()
1195 crypto_free_akcipher(ctx->rsa.soft_tfm); in hpre_rsa_exit_tfm()
1203 j = len - i - 1; in hpre_key_to_big_end()
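Note: only the index computation from hpre_key_to_big_end() matches the search, but the surrounding loop is a plain in-place byte reversal (little-endian key material into the big-endian form the hardware expects). A plausible standalone reconstruction, not necessarily the driver's exact body:

    #include <linux/types.h>

    /* Illustrative: reverse 'len' bytes in place. */
    static void key_to_big_end_example(u8 *data, int len)
    {
            int i, j;
            u8 tmp;

            for (i = 0; i < len / 2; i++) {
                    j = len - i - 1;
                    tmp = data[i];
                    data[i] = data[j];
                    data[j] = tmp;
            }
    }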
1211 struct device *dev = ctx->dev; in hpre_ecc_clear_ctx()
1212 unsigned int sz = ctx->key_sz; in hpre_ecc_clear_ctx()
1216 hisi_qm_stop_qp(ctx->qp); in hpre_ecc_clear_ctx()
1218 if (is_ecdh && ctx->ecdh.p) { in hpre_ecc_clear_ctx()
1219 /* ecdh: p->a->k->b */ in hpre_ecc_clear_ctx()
1220 memzero_explicit(ctx->ecdh.p + shift, sz); in hpre_ecc_clear_ctx()
1221 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecc_clear_ctx()
1222 ctx->ecdh.p = NULL; in hpre_ecc_clear_ctx()
1223 } else if (!is_ecdh && ctx->curve25519.p) { in hpre_ecc_clear_ctx()
1224 /* curve25519: p->a->k */ in hpre_ecc_clear_ctx()
1225 memzero_explicit(ctx->curve25519.p + shift, sz); in hpre_ecc_clear_ctx()
1226 dma_free_coherent(dev, sz << 2, ctx->curve25519.p, in hpre_ecc_clear_ctx()
1227 ctx->curve25519.dma_p); in hpre_ecc_clear_ctx()
1228 ctx->curve25519.p = NULL; in hpre_ecc_clear_ctx()
1235 * The bits of 192/224/256/384/521 are supported by HPRE,
1237 * bits<=256, bits=256; 256<bits<=384, bits=384; 384<bits<=576, bits=576;
1239 * high-order zeros by soft, so TASK_LENGTH1 is 0x3/0x5/0x8;
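Note: the comment above gives the rounding rule and the resulting TASK_LENGTH1 values. A minimal sketch of the mapping, assuming HPRE_BITS_2_BYTES_SHIFT is 3 (so the field counts 64-bit words minus one), which is what reproduces the quoted 0x3/0x5/0x8; unsupported widths are rejected elsewhere (hpre_ecdh_supported_curve() returning 0 leads to -EINVAL). The helper name is illustrative:

    /* Illustrative: round a curve width up to the HPRE operand size. */
    static unsigned int hpre_key_bytes_example(unsigned int curve_bits)
    {
            if (curve_bits <= 256)
                    curve_bits = 256;       /* P-192 / P-224 / P-256 */
            else if (curve_bits <= 384)
                    curve_bits = 384;       /* P-384 */
            else
                    curve_bits = 576;       /* P-521, zero-extended */

            return curve_bits >> 3;         /* key_sz: 32, 48 or 72 bytes */
    }

    /* task_len1 = (key_sz >> 3) - 1  ->  0x3, 0x5 or 0x8 */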
1258 unsigned int sz = cur_sz - (ndigits - 1) * sizeof(u64); in fill_curve_param()
1261 while (i < ndigits - 1) { in fill_curve_param()
1266 memcpy(addr + sizeof(u64) * i, &param[ndigits - 1], sz); in fill_curve_param()
1273 unsigned int shifta = ctx->key_sz << 1; in hpre_ecdh_fill_curve()
1274 unsigned int shiftb = ctx->key_sz << 2; in hpre_ecdh_fill_curve()
1275 void *p = ctx->ecdh.p + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve() local
1276 void *a = ctx->ecdh.p + shifta - cur_sz; in hpre_ecdh_fill_curve()
1277 void *b = ctx->ecdh.p + shiftb - cur_sz; in hpre_ecdh_fill_curve()
1278 void *x = ctx->ecdh.g + ctx->key_sz - cur_sz; in hpre_ecdh_fill_curve()
1279 void *y = ctx->ecdh.g + shifta - cur_sz; in hpre_ecdh_fill_curve()
1280 const struct ecc_curve *curve = ecc_get_curve(ctx->curve_id); in hpre_ecdh_fill_curve()
1284 return -EINVAL; in hpre_ecdh_fill_curve()
1286 n = kzalloc(ctx->key_sz, GFP_KERNEL); in hpre_ecdh_fill_curve()
1288 return -ENOMEM; in hpre_ecdh_fill_curve()
1290 fill_curve_param(p, curve->p, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1291 fill_curve_param(a, curve->a, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1292 fill_curve_param(b, curve->b, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1293 fill_curve_param(x, curve->g.x, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1294 fill_curve_param(y, curve->g.y, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1295 fill_curve_param(n, curve->n, cur_sz, curve->g.ndigits); in hpre_ecdh_fill_curve()
1297 if (params->key_size == cur_sz && memcmp(params->key, n, cur_sz) >= 0) { in hpre_ecdh_fill_curve()
1299 return -EINVAL; in hpre_ecdh_fill_curve()
1324 struct device *dev = ctx->dev; in hpre_ecdh_set_param()
1328 ctx->key_sz = hpre_ecdh_supported_curve(ctx->curve_id); in hpre_ecdh_set_param()
1329 if (!ctx->key_sz) in hpre_ecdh_set_param()
1330 return -EINVAL; in hpre_ecdh_set_param()
1332 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_param()
1333 if (!curve_sz || params->key_size > curve_sz) in hpre_ecdh_set_param()
1334 return -EINVAL; in hpre_ecdh_set_param()
1336 sz = ctx->key_sz; in hpre_ecdh_set_param()
1338 if (!ctx->ecdh.p) { in hpre_ecdh_set_param()
1339 ctx->ecdh.p = dma_alloc_coherent(dev, sz << 3, &ctx->ecdh.dma_p, in hpre_ecdh_set_param()
1341 if (!ctx->ecdh.p) in hpre_ecdh_set_param()
1342 return -ENOMEM; in hpre_ecdh_set_param()
1346 ctx->ecdh.g = ctx->ecdh.p + shift; in hpre_ecdh_set_param()
1347 ctx->ecdh.dma_g = ctx->ecdh.dma_p + shift; in hpre_ecdh_set_param()
1352 dma_free_coherent(dev, sz << 3, ctx->ecdh.p, ctx->ecdh.dma_p); in hpre_ecdh_set_param()
1353 ctx->ecdh.p = NULL; in hpre_ecdh_set_param()
1373 struct device *dev = ctx->dev; in ecdh_gen_privkey()
1382 ret = crypto_rng_get_bytes(crypto_default_rng, (u8 *)params->key, in ecdh_gen_privkey()
1383 params->key_size); in ecdh_gen_privkey()
1396 struct device *dev = ctx->dev; in hpre_ecdh_set_secret()
1403 return -EINVAL; in hpre_ecdh_set_secret()
1409 curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_set_secret()
1412 return -EINVAL; in hpre_ecdh_set_secret()
1415 params.key_size = curve_sz - 1; in hpre_ecdh_set_secret()
1423 return -EINVAL; in hpre_ecdh_set_secret()
1434 sz = ctx->key_sz; in hpre_ecdh_set_secret()
1435 sz_shift = (sz << 1) + sz - params.key_size; in hpre_ecdh_set_secret()
1436 memcpy(ctx->ecdh.p + sz_shift, params.key, params.key_size); in hpre_ecdh_set_secret()
1446 struct device *dev = ctx->dev; in hpre_ecdh_hw_data_clr_all()
1447 struct hpre_sqe *sqe = &req->req; in hpre_ecdh_hw_data_clr_all()
1450 dma = le64_to_cpu(sqe->in); in hpre_ecdh_hw_data_clr_all()
1454 if (src && req->src) in hpre_ecdh_hw_data_clr_all()
1455 dma_free_coherent(dev, ctx->key_sz << 2, req->src, dma); in hpre_ecdh_hw_data_clr_all()
1457 dma = le64_to_cpu(sqe->out); in hpre_ecdh_hw_data_clr_all()
1461 if (req->dst) in hpre_ecdh_hw_data_clr_all()
1462 dma_free_coherent(dev, ctx->key_sz << 1, req->dst, dma); in hpre_ecdh_hw_data_clr_all()
1464 dma_unmap_single(dev, dma, ctx->key_sz << 1, DMA_FROM_DEVICE); in hpre_ecdh_hw_data_clr_all()
1469 unsigned int curve_sz = hpre_ecdh_get_curvesz(ctx->curve_id); in hpre_ecdh_cb()
1470 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_ecdh_cb()
1474 char *p; in hpre_ecdh_cb() local
1478 areq = req->areq.ecdh; in hpre_ecdh_cb()
1479 areq->dst_len = ctx->key_sz << 1; in hpre_ecdh_cb()
1485 p = sg_virt(areq->dst); in hpre_ecdh_cb()
1486 memmove(p, p + ctx->key_sz - curve_sz, curve_sz); in hpre_ecdh_cb()
1487 memmove(p + curve_sz, p + areq->dst_len - curve_sz, curve_sz); in hpre_ecdh_cb()
1489 hpre_ecdh_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_ecdh_cb()
1503 if (req->dst_len < ctx->key_sz << 1) { in hpre_ecdh_msg_request_set()
1504 req->dst_len = ctx->key_sz << 1; in hpre_ecdh_msg_request_set()
1505 return -EINVAL; in hpre_ecdh_msg_request_set()
1510 h_req->cb = hpre_ecdh_cb; in hpre_ecdh_msg_request_set()
1511 h_req->areq.ecdh = req; in hpre_ecdh_msg_request_set()
1512 msg = &h_req->req; in hpre_ecdh_msg_request_set()
1514 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1515 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_ecdh_msg_request_set()
1516 msg->key = cpu_to_le64(ctx->ecdh.dma_p); in hpre_ecdh_msg_request_set()
1518 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_ecdh_msg_request_set()
1519 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_ecdh_msg_request_set()
1520 h_req->ctx = ctx; in hpre_ecdh_msg_request_set()
1524 return -EBUSY; in hpre_ecdh_msg_request_set()
1526 msg->tag = cpu_to_le16((u16)req_id); in hpre_ecdh_msg_request_set()
1533 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_src_data_init()
1534 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_src_data_init()
1535 struct device *dev = ctx->dev; in hpre_ecdh_src_data_init()
1542 shift = ctx->key_sz - (len >> 1); in hpre_ecdh_src_data_init()
1544 return -EINVAL; in hpre_ecdh_src_data_init()
1546 ptr = dma_alloc_coherent(dev, ctx->key_sz << 2, &dma, GFP_KERNEL); in hpre_ecdh_src_data_init()
1548 return -ENOMEM; in hpre_ecdh_src_data_init()
1550 tmpshift = ctx->key_sz << 1; in hpre_ecdh_src_data_init()
1553 memcpy(ptr + ctx->key_sz + shift, ptr + tmpshift + (len >> 1), len >> 1); in hpre_ecdh_src_data_init()
1555 hpre_req->src = ptr; in hpre_ecdh_src_data_init()
1556 msg->in = cpu_to_le64(dma); in hpre_ecdh_src_data_init()
1563 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_dst_data_init()
1564 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_ecdh_dst_data_init()
1565 struct device *dev = ctx->dev; in hpre_ecdh_dst_data_init()
1568 if (unlikely(!data || !sg_is_last(data) || len != ctx->key_sz << 1)) { in hpre_ecdh_dst_data_init()
1570 return -EINVAL; in hpre_ecdh_dst_data_init()
1573 hpre_req->dst = NULL; in hpre_ecdh_dst_data_init()
1577 return -ENOMEM; in hpre_ecdh_dst_data_init()
1580 msg->out = cpu_to_le64(dma); in hpre_ecdh_dst_data_init()
1588 struct device *dev = ctx->dev; in hpre_ecdh_compute_value()
1591 struct hpre_sqe *msg = &hpre_req->req; in hpre_ecdh_compute_value()
1600 if (req->src) { in hpre_ecdh_compute_value()
1601 ret = hpre_ecdh_src_data_init(hpre_req, req->src, req->src_len); in hpre_ecdh_compute_value()
1607 msg->in = cpu_to_le64(ctx->ecdh.dma_g); in hpre_ecdh_compute_value()
1610 ret = hpre_ecdh_dst_data_init(hpre_req, req->dst, req->dst_len); in hpre_ecdh_compute_value()
1616 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_ECC_MUL); in hpre_ecdh_compute_value()
1619 return -EINPROGRESS; in hpre_ecdh_compute_value()
1623 hpre_ecdh_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_ecdh_compute_value()
1632 return ctx->key_sz << 1; in hpre_ecdh_max_size()
1639 ctx->curve_id = ECC_CURVE_NIST_P192; in hpre_ecdh_nist_p192_init_tfm()
1650 ctx->curve_id = ECC_CURVE_NIST_P256; in hpre_ecdh_nist_p256_init_tfm()
1661 ctx->curve_id = ECC_CURVE_NIST_P384; in hpre_ecdh_nist_p384_init_tfm()
1679 unsigned int sz = ctx->key_sz; in hpre_curve25519_fill_curve()
1682 void *p; in hpre_curve25519_fill_curve() local
1685 * The key from 'buf' is in little-endian, we should preprocess it as in hpre_curve25519_fill_curve()
1688 * the same as the software curve-25519 that exists in crypto. in hpre_curve25519_fill_curve()
1694 p = ctx->curve25519.p + sz - len; in hpre_curve25519_fill_curve()
1699 fill_curve_param(p, curve->p, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1700 fill_curve_param(p + sz, curve->a, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1701 memcpy(p + shift, secret, len); in hpre_curve25519_fill_curve()
1702 fill_curve_param(p + shift + sz, curve->g.x, len, curve->g.ndigits); in hpre_curve25519_fill_curve()
1709 struct device *dev = ctx->dev; in hpre_curve25519_set_param()
1710 unsigned int sz = ctx->key_sz; in hpre_curve25519_set_param()
1713 /* p->a->k->gx */ in hpre_curve25519_set_param()
1714 if (!ctx->curve25519.p) { in hpre_curve25519_set_param()
1715 ctx->curve25519.p = dma_alloc_coherent(dev, sz << 2, in hpre_curve25519_set_param()
1716 &ctx->curve25519.dma_p, in hpre_curve25519_set_param()
1718 if (!ctx->curve25519.p) in hpre_curve25519_set_param()
1719 return -ENOMEM; in hpre_curve25519_set_param()
1722 ctx->curve25519.g = ctx->curve25519.p + shift + sz; in hpre_curve25519_set_param()
1723 ctx->curve25519.dma_g = ctx->curve25519.dma_p + shift + sz; in hpre_curve25519_set_param()
1734 struct device *dev = ctx->dev; in hpre_curve25519_set_secret()
1735 int ret = -EINVAL; in hpre_curve25519_set_secret()
1746 ctx->key_sz = CURVE25519_KEY_SIZE; in hpre_curve25519_set_secret()
1762 struct device *dev = ctx->dev; in hpre_curve25519_hw_data_clr_all()
1763 struct hpre_sqe *sqe = &req->req; in hpre_curve25519_hw_data_clr_all()
1766 dma = le64_to_cpu(sqe->in); in hpre_curve25519_hw_data_clr_all()
1770 if (src && req->src) in hpre_curve25519_hw_data_clr_all()
1771 dma_free_coherent(dev, ctx->key_sz, req->src, dma); in hpre_curve25519_hw_data_clr_all()
1773 dma = le64_to_cpu(sqe->out); in hpre_curve25519_hw_data_clr_all()
1777 if (req->dst) in hpre_curve25519_hw_data_clr_all()
1778 dma_free_coherent(dev, ctx->key_sz, req->dst, dma); in hpre_curve25519_hw_data_clr_all()
1780 dma_unmap_single(dev, dma, ctx->key_sz, DMA_FROM_DEVICE); in hpre_curve25519_hw_data_clr_all()
1785 struct hpre_dfx *dfx = ctx->hpre->debug.dfx; in hpre_curve25519_cb()
1792 areq = req->areq.curve25519; in hpre_curve25519_cb()
1793 areq->dst_len = ctx->key_sz; in hpre_curve25519_cb()
1799 hpre_key_to_big_end(sg_virt(areq->dst), CURVE25519_KEY_SIZE); in hpre_curve25519_cb()
1801 hpre_curve25519_hw_data_clr_all(ctx, req, areq->dst, areq->src); in hpre_curve25519_cb()
1815 if (unlikely(req->dst_len < ctx->key_sz)) { in hpre_curve25519_msg_request_set()
1816 req->dst_len = ctx->key_sz; in hpre_curve25519_msg_request_set()
1817 return -EINVAL; in hpre_curve25519_msg_request_set()
1822 h_req->cb = hpre_curve25519_cb; in hpre_curve25519_msg_request_set()
1823 h_req->areq.curve25519 = req; in hpre_curve25519_msg_request_set()
1824 msg = &h_req->req; in hpre_curve25519_msg_request_set()
1826 msg->in = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_curve25519_msg_request_set()
1827 msg->out = cpu_to_le64(DMA_MAPPING_ERROR); in hpre_curve25519_msg_request_set()
1828 msg->key = cpu_to_le64(ctx->curve25519.dma_p); in hpre_curve25519_msg_request_set()
1830 msg->dw0 |= cpu_to_le32(0x1U << HPRE_SQE_DONE_SHIFT); in hpre_curve25519_msg_request_set()
1831 msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1; in hpre_curve25519_msg_request_set()
1832 h_req->ctx = ctx; in hpre_curve25519_msg_request_set()
1836 return -EBUSY; in hpre_curve25519_msg_request_set()
1838 msg->tag = cpu_to_le16((u16)req_id); in hpre_curve25519_msg_request_set()
1846 for (i = 0; i < CURVE25519_KEY_SIZE - 1; i++) in hpre_curve25519_src_modulo_p()
1849 /* The modulus is ptr's last byte minus '0xed'(last byte of p) */ in hpre_curve25519_src_modulo_p()
1850 ptr[i] -= 0xed; in hpre_curve25519_src_modulo_p()
1856 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_src_init()
1857 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_curve25519_src_init()
1858 struct device *dev = ctx->dev; in hpre_curve25519_src_init()
1859 u8 p[CURVE25519_KEY_SIZE] = { 0 }; in hpre_curve25519_src_init() local
1866 return -EINVAL; in hpre_curve25519_src_init()
1869 ptr = dma_alloc_coherent(dev, ctx->key_sz, &dma, GFP_KERNEL); in hpre_curve25519_src_init()
1871 return -ENOMEM; in hpre_curve25519_src_init()
1881 * Src_data(gx) is in little-endian order, MSB in the final byte should in hpre_curve25519_src_init()
1882 * be masked as described in RFC7748, then transform it to big-endian in hpre_curve25519_src_init()
1890 fill_curve_param(p, curve->p, CURVE25519_KEY_SIZE, curve->g.ndigits); in hpre_curve25519_src_init()
1893 * When src_data equals (2^255 - 19) ~ (2^255 - 1), it is out of p, in hpre_curve25519_src_init()
1894 * we get its modulus to p, and then use it. in hpre_curve25519_src_init()
1896 if (memcmp(ptr, p, ctx->key_sz) == 0) { in hpre_curve25519_src_init()
1897 dev_err(dev, "gx is p!\n"); in hpre_curve25519_src_init()
1899 } else if (memcmp(ptr, p, ctx->key_sz) > 0) { in hpre_curve25519_src_init()
1903 hpre_req->src = ptr; in hpre_curve25519_src_init()
1904 msg->in = cpu_to_le64(dma); in hpre_curve25519_src_init()
1908 dma_free_coherent(dev, ctx->key_sz, ptr, dma); in hpre_curve25519_src_init()
1909 return -EINVAL; in hpre_curve25519_src_init()
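Note: the "out of p" case handled above is narrow. With p = 2^255 - 19 and the RFC 7748 mask already clearing bit 255, the only inputs that can exceed p lie in [p, 2^255 - 1], and for those

    x mod p = x - p,  with x - p <= 18  (the x == p case is rejected separately)

In big-endian form p is 7f ff .. ff ed and any such x is 7f ff .. ff YY with 0xed <= YY <= 0xff, so the reduction leaves every byte zero except the last, which simply drops by 0xed; that is what the "last byte minus '0xed'" comment in hpre_curve25519_src_modulo_p() refers to.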
1915 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_dst_init()
1916 struct hpre_ctx *ctx = hpre_req->ctx; in hpre_curve25519_dst_init()
1917 struct device *dev = ctx->dev; in hpre_curve25519_dst_init()
1920 if (!data || !sg_is_last(data) || len != ctx->key_sz) { in hpre_curve25519_dst_init()
1922 return -EINVAL; in hpre_curve25519_dst_init()
1925 hpre_req->dst = NULL; in hpre_curve25519_dst_init()
1929 return -ENOMEM; in hpre_curve25519_dst_init()
1932 msg->out = cpu_to_le64(dma); in hpre_curve25519_dst_init()
1940 struct device *dev = ctx->dev; in hpre_curve25519_compute_value()
1943 struct hpre_sqe *msg = &hpre_req->req; in hpre_curve25519_compute_value()
1952 if (req->src) { in hpre_curve25519_compute_value()
1953 ret = hpre_curve25519_src_init(hpre_req, req->src, req->src_len); in hpre_curve25519_compute_value()
1960 msg->in = cpu_to_le64(ctx->curve25519.dma_g); in hpre_curve25519_compute_value()
1963 ret = hpre_curve25519_dst_init(hpre_req, req->dst, req->dst_len); in hpre_curve25519_compute_value()
1969 msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_CURVE25519_MUL); in hpre_curve25519_compute_value()
1972 return -EINPROGRESS; in hpre_curve25519_compute_value()
1976 hpre_curve25519_hw_data_clr_all(ctx, hpre_req, req->dst, req->src); in hpre_curve25519_compute_value()
1984 return ctx->key_sz; in hpre_curve25519_max_size()
2017 .cra_driver_name = "hpre-rsa",
2033 .cra_driver_name = "hpre-dh",
2049 .cra_name = "ecdh-nist-p192",
2050 .cra_driver_name = "hpre-ecdh-nist-p192",
2063 .cra_name = "ecdh-nist-p256",
2064 .cra_driver_name = "hpre-ecdh-nist-p256",
2077 .cra_name = "ecdh-nist-p384",
2078 .cra_driver_name = "hpre-ecdh-nist-p384",
2095 .cra_driver_name = "hpre-curve25519",
2110 dev_err(&qm->pdev->dev, "failed to register rsa (%d)!\n", ret); in hpre_register_rsa()
2132 dev_err(&qm->pdev->dev, "failed to register dh (%d)!\n", ret); in hpre_register_dh()
2155 dev_err(&qm->pdev->dev, "failed to register %s (%d)!\n", in hpre_register_ecdh()
2164 for (--i; i >= 0; --i) in hpre_register_ecdh()
2177 for (i = ARRAY_SIZE(ecdh_curves) - 1; i >= 0; --i) in hpre_unregister_ecdh()
2190 dev_err(&qm->pdev->dev, "failed to register x25519 (%d)!\n", ret); in hpre_register_x25519()