Lines matching refs: dd (struct atmel_sha_dev references in the Atmel SHA driver, drivers/crypto/atmel-sha.c)

90  struct atmel_sha_dev *dd; member
114 struct atmel_sha_dev *dd; member
253 static inline u32 atmel_sha_read(struct atmel_sha_dev *dd, u32 offset) in atmel_sha_read() argument
255 u32 value = readl_relaxed(dd->io_base + offset); in atmel_sha_read()
258 if (dd->flags & SHA_FLAGS_DUMP_REG) { in atmel_sha_read()
261 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_sha_read()
269 static inline void atmel_sha_write(struct atmel_sha_dev *dd, in atmel_sha_write() argument
273 if (dd->flags & SHA_FLAGS_DUMP_REG) { in atmel_sha_write()
276 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_sha_write()
281 writel_relaxed(value, dd->io_base + offset); in atmel_sha_write()
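The two accessors above wrap relaxed MMIO with an optional register trace gated by SHA_FLAGS_DUMP_REG. A minimal sketch of the pattern: the real driver resolves a register name for the %s in its dev_vdbg() format; this sketch prints the raw offset instead, and only the fields visible in the listing (io_base, flags, dev) are used.

    static inline u32 atmel_sha_read(struct atmel_sha_dev *dd, u32 offset)
    {
            u32 value = readl_relaxed(dd->io_base + offset);

            /* Optional MMIO trace, enabled at runtime via SHA_FLAGS_DUMP_REG. */
            if (dd->flags & SHA_FLAGS_DUMP_REG)
                    dev_vdbg(dd->dev, "read 0x%08x from 0x%08x\n", value, offset);

            return value;
    }

    static inline void atmel_sha_write(struct atmel_sha_dev *dd,
                                       u32 offset, u32 value)
    {
            if (dd->flags & SHA_FLAGS_DUMP_REG)
                    dev_vdbg(dd->dev, "write 0x%08x into 0x%08x\n", value, offset);

            writel_relaxed(value, dd->io_base + offset);
    }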
284 static inline int atmel_sha_complete(struct atmel_sha_dev *dd, int err) in atmel_sha_complete() argument
286 struct ahash_request *req = dd->req; in atmel_sha_complete()
288 dd->flags &= ~(SHA_FLAGS_BUSY | SHA_FLAGS_FINAL | SHA_FLAGS_CPU | in atmel_sha_complete()
292 clk_disable(dd->iclk); in atmel_sha_complete()
294 if ((dd->is_async || dd->force_complete) && req->base.complete) in atmel_sha_complete()
298 tasklet_schedule(&dd->queue_task); in atmel_sha_complete()
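atmel_sha_complete() is the single exit path for a request: clear the state flags, gate the clock, complete the crypto request when running asynchronously, then poke the queue tasklet so the next queued request starts. A sketch; the flag mask is abbreviated (the listing truncates the full set being cleared), and ahash_request_complete() assumes a recent kernel.

    static inline int atmel_sha_complete(struct atmel_sha_dev *dd, int err)
    {
            struct ahash_request *req = dd->req;

            /* Abbreviated mask: the driver clears more state bits here. */
            dd->flags &= ~(SHA_FLAGS_BUSY | SHA_FLAGS_FINAL | SHA_FLAGS_CPU);

            clk_disable(dd->iclk);

            /* Complete in-line only on the asynchronous (or forced) path;
             * synchronous callers consume the return value instead. */
            if ((dd->is_async || dd->force_complete) && req->base.complete)
                    ahash_request_complete(req, err);

            /* Let the tasklet dispatch whatever is queued next. */
            tasklet_schedule(&dd->queue_task);

            return err;
    }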
407 struct atmel_sha_dev *dd = NULL; in atmel_sha_find_dev() local
411 if (!tctx->dd) { in atmel_sha_find_dev()
413 dd = tmp; in atmel_sha_find_dev()
416 tctx->dd = dd; in atmel_sha_find_dev()
418 dd = tctx->dd; in atmel_sha_find_dev()
423 return dd; in atmel_sha_find_dev()
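atmel_sha_find_dev() binds a transform context to a device exactly once and reuses the cached pointer afterwards. A sketch of the list-walk-under-lock shape; atmel_sha.lock and atmel_sha.dev_list name the driver-global device registry and are inferred from the loop the listing truncates.

    static struct atmel_sha_dev *atmel_sha_find_dev(struct atmel_sha_ctx *tctx)
    {
            struct atmel_sha_dev *dd = NULL;
            struct atmel_sha_dev *tmp;

            spin_lock_bh(&atmel_sha.lock);
            if (!tctx->dd) {
                    /* First use: grab any registered device and cache it. */
                    list_for_each_entry(tmp, &atmel_sha.dev_list, list) {
                            dd = tmp;
                            break;
                    }
                    tctx->dd = dd;
            } else {
                    /* Subsequent calls stick to the cached binding. */
                    dd = tctx->dd;
            }
            spin_unlock_bh(&atmel_sha.lock);

            return dd;
    }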
431 struct atmel_sha_dev *dd = atmel_sha_find_dev(tctx); in atmel_sha_init() local
433 ctx->dd = dd; in atmel_sha_init()
437 dev_dbg(dd->dev, "init: digest size: %u\n", in atmel_sha_init()
473 static void atmel_sha_write_ctrl(struct atmel_sha_dev *dd, int dma) in atmel_sha_write_ctrl() argument
475 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_write_ctrl()
480 if (!dd->caps.has_dma) in atmel_sha_write_ctrl()
481 atmel_sha_write(dd, SHA_IER, SHA_INT_TXBUFE); in atmel_sha_write_ctrl()
483 if (dd->caps.has_dualbuff) in atmel_sha_write_ctrl()
486 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_write_ctrl()
521 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_write_ctrl()
522 } else if (dd->caps.has_uihv && (ctx->flags & SHA_FLAGS_RESTORE)) { in atmel_sha_write_ctrl()
532 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_write_ctrl()
534 atmel_sha_write(dd, SHA_REG_DIN(i), hash[i]); in atmel_sha_write_ctrl()
535 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_write_ctrl()
545 atmel_sha_write(dd, SHA_MR, valmr); in atmel_sha_write_ctrl()
548 static inline int atmel_sha_wait_for_data_ready(struct atmel_sha_dev *dd, in atmel_sha_wait_for_data_ready() argument
551 u32 isr = atmel_sha_read(dd, SHA_ISR); in atmel_sha_wait_for_data_ready()
554 return resume(dd); in atmel_sha_wait_for_data_ready()
556 dd->resume = resume; in atmel_sha_wait_for_data_ready()
557 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_wait_for_data_ready()
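The wait helper encodes the driver's continuation convention: if DATRDY is already set in the interrupt status register, run the resume callback immediately; otherwise park it in dd->resume and unmask the interrupt so the IRQ path invokes it later. Sketch (atmel_sha_fn_t is the driver's int (*)(struct atmel_sha_dev *) callback type):

    static inline int atmel_sha_wait_for_data_ready(struct atmel_sha_dev *dd,
                                                    atmel_sha_fn_t resume)
    {
            u32 isr = atmel_sha_read(dd, SHA_ISR);

            /* Data already ready: continue synchronously. */
            if (isr & SHA_INT_DATARDY)
                    return resume(dd);

            /* Otherwise store the continuation and arm the interrupt. */
            dd->resume = resume;
            atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY);
            return -EINPROGRESS;
    }

The -EINPROGRESS return propagates up to the crypto API as "operation in flight"; the stored resume callback finishes the work from interrupt context.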
561 static int atmel_sha_xmit_cpu(struct atmel_sha_dev *dd, const u8 *buf, in atmel_sha_xmit_cpu() argument
564 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_cpu()
568 dev_dbg(dd->dev, "xmit_cpu: digcnt: 0x%llx 0x%llx, length: %zd, final: %d\n", in atmel_sha_xmit_cpu()
571 atmel_sha_write_ctrl(dd, 0); in atmel_sha_xmit_cpu()
579 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_cpu()
583 dd->flags |= SHA_FLAGS_CPU; in atmel_sha_xmit_cpu()
586 atmel_sha_write(dd, SHA_REG_DIN(count), buffer[count]); in atmel_sha_xmit_cpu()
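xmit_cpu is the PIO path: program the mode register, mark the final block if needed, then feed the buffered data into the DIN FIFO one 32-bit word at a time. A sketch with the digest-count bookkeeping elided; the buffer is assumed word-aligned.

    static int atmel_sha_xmit_cpu(struct atmel_sha_dev *dd, const u8 *buf,
                                  size_t length, int final)
    {
            const u32 *buffer = (const u32 *)buf;
            size_t count, len32 = DIV_ROUND_UP(length, sizeof(u32));

            atmel_sha_write_ctrl(dd, 0);            /* program MR, no DMA */

            if (final)
                    dd->flags |= SHA_FLAGS_FINAL;   /* catch last interrupt */
            dd->flags |= SHA_FLAGS_CPU;

            /* Push the block word by word through the DIN registers. */
            for (count = 0; count < len32; count++)
                    atmel_sha_write(dd, SHA_REG_DIN(count), buffer[count]);

            return -EINPROGRESS;
    }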
591 static int atmel_sha_xmit_pdc(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_pdc() argument
594 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_pdc()
597 dev_dbg(dd->dev, "xmit_pdc: digcnt: 0x%llx 0x%llx, length: %zd, final: %d\n", in atmel_sha_xmit_pdc()
601 atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTDIS); in atmel_sha_xmit_pdc()
602 atmel_sha_write(dd, SHA_TPR, dma_addr1); in atmel_sha_xmit_pdc()
603 atmel_sha_write(dd, SHA_TCR, len32); in atmel_sha_xmit_pdc()
606 atmel_sha_write(dd, SHA_TNPR, dma_addr2); in atmel_sha_xmit_pdc()
607 atmel_sha_write(dd, SHA_TNCR, len32); in atmel_sha_xmit_pdc()
609 atmel_sha_write_ctrl(dd, 1); in atmel_sha_xmit_pdc()
617 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_pdc()
619 dd->flags |= SHA_FLAGS_DMA_ACTIVE; in atmel_sha_xmit_pdc()
622 atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTEN); in atmel_sha_xmit_pdc()
629 struct atmel_sha_dev *dd = data; in atmel_sha_dma_callback() local
631 dd->is_async = true; in atmel_sha_dma_callback()
634 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_dma_callback()
637 static int atmel_sha_xmit_dma(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_dma() argument
640 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_dma()
644 dev_dbg(dd->dev, "xmit_dma: digcnt: 0x%llx 0x%llx, length: %zd, final: %d\n", in atmel_sha_xmit_dma()
647 dd->dma_lch_in.dma_conf.src_maxburst = 16; in atmel_sha_xmit_dma()
648 dd->dma_lch_in.dma_conf.dst_maxburst = 16; in atmel_sha_xmit_dma()
650 dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf); in atmel_sha_xmit_dma()
658 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 2, in atmel_sha_xmit_dma()
664 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 1, in atmel_sha_xmit_dma()
668 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_xmit_dma()
671 in_desc->callback_param = dd; in atmel_sha_xmit_dma()
673 atmel_sha_write_ctrl(dd, 1); in atmel_sha_xmit_dma()
681 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_dma()
683 dd->flags |= SHA_FLAGS_DMA_ACTIVE; in atmel_sha_xmit_dma()
687 dma_async_issue_pending(dd->dma_lch_in.chan); in atmel_sha_xmit_dma()
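The dmaengine path widens the burst size, reprograms the slave config, preps a memory-to-device slave-sg descriptor, wires the completion callback back to the driver, and only then starts the channel. A sketch of the single-scatterlist case; the two-sg variant in the listing differs only in the sg count.

    /* Core of atmel_sha_xmit_dma(), single-sg case; sketch only. */
    struct dma_async_tx_descriptor *in_desc;

    dd->dma_lch_in.dma_conf.src_maxburst = 16;
    dd->dma_lch_in.dma_conf.dst_maxburst = 16;
    dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf);

    in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 1,
                                      DMA_MEM_TO_DEV,
                                      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
    if (!in_desc)
            return atmel_sha_complete(dd, -EINVAL);

    in_desc->callback = atmel_sha_dma_callback;
    in_desc->callback_param = dd;

    atmel_sha_write_ctrl(dd, 1);            /* program MR for DMA */

    if (final)
            dd->flags |= SHA_FLAGS_FINAL;   /* catch last interrupt */
    dd->flags |= SHA_FLAGS_DMA_ACTIVE;

    dmaengine_submit(in_desc);
    dma_async_issue_pending(dd->dma_lch_in.chan);

    return -EINPROGRESS;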
692 static int atmel_sha_xmit_start(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_start() argument
695 if (dd->caps.has_dma) in atmel_sha_xmit_start()
696 return atmel_sha_xmit_dma(dd, dma_addr1, length1, in atmel_sha_xmit_start()
699 return atmel_sha_xmit_pdc(dd, dma_addr1, length1, in atmel_sha_xmit_start()
703 static int atmel_sha_update_cpu(struct atmel_sha_dev *dd) in atmel_sha_update_cpu() argument
705 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_cpu()
713 return atmel_sha_xmit_cpu(dd, ctx->buffer, bufcnt, 1); in atmel_sha_update_cpu()
716 static int atmel_sha_xmit_dma_map(struct atmel_sha_dev *dd, in atmel_sha_xmit_dma_map() argument
720 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_xmit_dma_map()
722 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_xmit_dma_map()
723 dev_err(dd->dev, "dma %zu bytes error\n", ctx->buflen + in atmel_sha_xmit_dma_map()
725 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_xmit_dma_map()
731 return atmel_sha_xmit_start(dd, ctx->dma_addr, length, 0, 0, final); in atmel_sha_xmit_dma_map()
734 static int atmel_sha_update_dma_slow(struct atmel_sha_dev *dd) in atmel_sha_update_dma_slow() argument
736 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_slow()
744 dev_dbg(dd->dev, "slow: bufcnt: %zu, digcnt: 0x%llx 0x%llx, final: %d\n", in atmel_sha_update_dma_slow()
753 return atmel_sha_xmit_dma_map(dd, ctx, count, final); in atmel_sha_update_dma_slow()
759 static int atmel_sha_update_dma_start(struct atmel_sha_dev *dd) in atmel_sha_update_dma_start() argument
761 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_start()
770 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
772 dev_dbg(dd->dev, "fast: digcnt: 0x%llx 0x%llx, bufcnt: %zd, total: %u\n", in atmel_sha_update_dma_start()
778 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
782 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
811 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_update_dma_start()
813 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_update_dma_start()
814 dev_err(dd->dev, "dma %zu bytes error\n", in atmel_sha_update_dma_start()
816 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_update_dma_start()
823 return atmel_sha_xmit_start(dd, ctx->dma_addr, count, 0, in atmel_sha_update_dma_start()
827 if (!dma_map_sg(dd->dev, ctx->sg, 1, in atmel_sha_update_dma_start()
829 dev_err(dd->dev, "dma_map_sg error\n"); in atmel_sha_update_dma_start()
830 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_update_dma_start()
837 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), in atmel_sha_update_dma_start()
842 if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) { in atmel_sha_update_dma_start()
843 dev_err(dd->dev, "dma_map_sg error\n"); in atmel_sha_update_dma_start()
844 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_update_dma_start()
850 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), length, 0, in atmel_sha_update_dma_start()
854 static void atmel_sha_update_dma_stop(struct atmel_sha_dev *dd) in atmel_sha_update_dma_stop() argument
856 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_stop()
859 dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE); in atmel_sha_update_dma_stop()
866 dma_unmap_single(dd->dev, ctx->dma_addr, in atmel_sha_update_dma_stop()
870 dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen + in atmel_sha_update_dma_stop()
875 static int atmel_sha_update_req(struct atmel_sha_dev *dd) in atmel_sha_update_req() argument
877 struct ahash_request *req = dd->req; in atmel_sha_update_req()
881 dev_dbg(dd->dev, "update_req: total: %u, digcnt: 0x%llx 0x%llx\n", in atmel_sha_update_req()
885 err = atmel_sha_update_cpu(dd); in atmel_sha_update_req()
887 err = atmel_sha_update_dma_start(dd); in atmel_sha_update_req()
890 dev_dbg(dd->dev, "update: err: %d, digcnt: 0x%llx 0x%llx\n", in atmel_sha_update_req()
896 static int atmel_sha_final_req(struct atmel_sha_dev *dd) in atmel_sha_final_req() argument
898 struct ahash_request *req = dd->req; in atmel_sha_final_req()
907 err = atmel_sha_xmit_dma_map(dd, ctx, count, 1); in atmel_sha_final_req()
914 err = atmel_sha_xmit_cpu(dd, ctx->buffer, count, 1); in atmel_sha_final_req()
917 dev_dbg(dd->dev, "final_req: err: %d\n", err); in atmel_sha_final_req()
949 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
987 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish() local
992 dev_dbg(dd->dev, "digcnt: 0x%llx 0x%llx, bufcnt: %zd\n", ctx->digcnt[1], in atmel_sha_finish()
1001 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish_req() local
1005 if (SHA_FLAGS_FINAL & dd->flags) in atmel_sha_finish_req()
1012 (void)atmel_sha_complete(dd, err); in atmel_sha_finish_req()
1015 static int atmel_sha_hw_init(struct atmel_sha_dev *dd) in atmel_sha_hw_init() argument
1019 err = clk_enable(dd->iclk); in atmel_sha_hw_init()
1023 if (!(SHA_FLAGS_INIT & dd->flags)) { in atmel_sha_hw_init()
1024 atmel_sha_write(dd, SHA_CR, SHA_CR_SWRST); in atmel_sha_hw_init()
1025 dd->flags |= SHA_FLAGS_INIT; in atmel_sha_hw_init()
1031 static inline unsigned int atmel_sha_get_version(struct atmel_sha_dev *dd) in atmel_sha_get_version() argument
1033 return atmel_sha_read(dd, SHA_HW_VERSION) & 0x00000fff; in atmel_sha_get_version()
1036 static int atmel_sha_hw_version_init(struct atmel_sha_dev *dd) in atmel_sha_hw_version_init() argument
1040 err = atmel_sha_hw_init(dd); in atmel_sha_hw_version_init()
1044 dd->hw_version = atmel_sha_get_version(dd); in atmel_sha_hw_version_init()
1046 dev_info(dd->dev, in atmel_sha_hw_version_init()
1047 "version: 0x%x\n", dd->hw_version); in atmel_sha_hw_version_init()
1049 clk_disable(dd->iclk); in atmel_sha_hw_version_init()
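hw_init enables the peripheral clock and performs a one-time software reset; version_init piggybacks on it to latch the IP revision (the low 12 bits of SHA_HW_VERSION) and then gates the clock again. Reconstructed from the lines above:

    static int atmel_sha_hw_version_init(struct atmel_sha_dev *dd)
    {
            int err;

            err = atmel_sha_hw_init(dd);    /* clk_enable + one-time SWRST */
            if (err)
                    return err;

            /* Low 12 bits of SHA_HW_VERSION identify the IP revision. */
            dd->hw_version = atmel_sha_read(dd, SHA_HW_VERSION) & 0x00000fff;

            dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

            clk_disable(dd->iclk);
            return 0;
    }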
1054 static int atmel_sha_handle_queue(struct atmel_sha_dev *dd, in atmel_sha_handle_queue() argument
1063 spin_lock_irqsave(&dd->lock, flags); in atmel_sha_handle_queue()
1065 ret = ahash_enqueue_request(&dd->queue, req); in atmel_sha_handle_queue()
1067 if (SHA_FLAGS_BUSY & dd->flags) { in atmel_sha_handle_queue()
1068 spin_unlock_irqrestore(&dd->lock, flags); in atmel_sha_handle_queue()
1072 backlog = crypto_get_backlog(&dd->queue); in atmel_sha_handle_queue()
1073 async_req = crypto_dequeue_request(&dd->queue); in atmel_sha_handle_queue()
1075 dd->flags |= SHA_FLAGS_BUSY; in atmel_sha_handle_queue()
1077 spin_unlock_irqrestore(&dd->lock, flags); in atmel_sha_handle_queue()
1087 dd->req = ahash_request_cast(async_req); in atmel_sha_handle_queue()
1088 start_async = (dd->req != req); in atmel_sha_handle_queue()
1089 dd->is_async = start_async; in atmel_sha_handle_queue()
1090 dd->force_complete = false; in atmel_sha_handle_queue()
1093 err = ctx->start(dd); in atmel_sha_handle_queue()
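handle_queue is the standard crypto_queue dispatcher: enqueue under the device lock, bail out if the engine is already busy, otherwise dequeue, mark busy, drop the lock, notify any backlogged request, and kick the dequeued request through its ctx->start() hook. A sketch; crypto_request_complete() assumes a recent kernel (older code invoked backlog->complete() directly), and the return-value plumbing is simplified.

    /* Body of atmel_sha_handle_queue(dd, req); sketch only. */
    struct crypto_async_request *async_req, *backlog;
    struct atmel_sha_ctx *ctx;
    unsigned long flags;
    int err, ret = 0;

    spin_lock_irqsave(&dd->lock, flags);
    if (req)
            ret = ahash_enqueue_request(&dd->queue, req);

    if (SHA_FLAGS_BUSY & dd->flags) {
            spin_unlock_irqrestore(&dd->lock, flags);
            return ret;
    }

    backlog = crypto_get_backlog(&dd->queue);
    async_req = crypto_dequeue_request(&dd->queue);
    if (async_req)
            dd->flags |= SHA_FLAGS_BUSY;
    spin_unlock_irqrestore(&dd->lock, flags);

    if (!async_req)
            return ret;

    if (backlog)
            crypto_request_complete(backlog, -EINPROGRESS);

    dd->req = ahash_request_cast(async_req);
    dd->is_async = (dd->req != req);
    dd->force_complete = false;

    ctx = crypto_tfm_ctx(async_req->tfm);
    err = ctx->start(dd);
    return dd->is_async ? ret : err;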
1097 static int atmel_sha_done(struct atmel_sha_dev *dd);
1099 static int atmel_sha_start(struct atmel_sha_dev *dd) in atmel_sha_start() argument
1101 struct ahash_request *req = dd->req; in atmel_sha_start()
1105 dev_dbg(dd->dev, "handling new req, op: %lu, nbytes: %u\n", in atmel_sha_start()
1108 err = atmel_sha_hw_init(dd); in atmel_sha_start()
1110 return atmel_sha_complete(dd, err); in atmel_sha_start()
1131 dd->resume = atmel_sha_done; in atmel_sha_start()
1133 err = atmel_sha_update_req(dd); in atmel_sha_start()
1136 err = atmel_sha_final_req(dd); in atmel_sha_start()
1138 err = atmel_sha_final_req(dd); in atmel_sha_start()
1145 dev_dbg(dd->dev, "exit, err: %d\n", err); in atmel_sha_start()
1154 struct atmel_sha_dev *dd = tctx->dd; in atmel_sha_enqueue() local
1158 return atmel_sha_handle_queue(dd, req); in atmel_sha_enqueue()
1319 struct atmel_sha_dev *dd = (struct atmel_sha_dev *)data; in atmel_sha_queue_task() local
1321 atmel_sha_handle_queue(dd, NULL); in atmel_sha_queue_task()
1324 static int atmel_sha_done(struct atmel_sha_dev *dd) in atmel_sha_done() argument
1328 if (SHA_FLAGS_CPU & dd->flags) { in atmel_sha_done()
1329 if (SHA_FLAGS_OUTPUT_READY & dd->flags) { in atmel_sha_done()
1330 dd->flags &= ~SHA_FLAGS_OUTPUT_READY; in atmel_sha_done()
1333 } else if (SHA_FLAGS_DMA_READY & dd->flags) { in atmel_sha_done()
1334 if (SHA_FLAGS_DMA_ACTIVE & dd->flags) { in atmel_sha_done()
1335 dd->flags &= ~SHA_FLAGS_DMA_ACTIVE; in atmel_sha_done()
1336 atmel_sha_update_dma_stop(dd); in atmel_sha_done()
1338 if (SHA_FLAGS_OUTPUT_READY & dd->flags) { in atmel_sha_done()
1340 dd->flags &= ~(SHA_FLAGS_DMA_READY | in atmel_sha_done()
1342 err = atmel_sha_update_dma_start(dd); in atmel_sha_done()
1351 atmel_sha_finish_req(dd->req, err); in atmel_sha_done()
1358 struct atmel_sha_dev *dd = (struct atmel_sha_dev *)data; in atmel_sha_done_task() local
1360 dd->is_async = true; in atmel_sha_done_task()
1361 (void)dd->resume(dd); in atmel_sha_done_task()
1389 static bool atmel_sha_dma_check_aligned(struct atmel_sha_dev *dd, in atmel_sha_dma_check_aligned() argument
1393 struct atmel_sha_dma *dma = &dd->dma_lch_in; in atmel_sha_dma_check_aligned()
1394 struct ahash_request *req = dd->req; in atmel_sha_dma_check_aligned()
1426 struct atmel_sha_dev *dd = data; in atmel_sha_dma_callback2() local
1427 struct atmel_sha_dma *dma = &dd->dma_lch_in; in atmel_sha_dma_callback2()
1431 dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); in atmel_sha_dma_callback2()
1438 dd->is_async = true; in atmel_sha_dma_callback2()
1439 (void)atmel_sha_wait_for_data_ready(dd, dd->resume); in atmel_sha_dma_callback2()
1442 static int atmel_sha_dma_start(struct atmel_sha_dev *dd, in atmel_sha_dma_start() argument
1447 struct atmel_sha_dma *dma = &dd->dma_lch_in; in atmel_sha_dma_start()
1455 dd->resume = resume; in atmel_sha_dma_start()
1462 sg_len = dma_map_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); in atmel_sha_dma_start()
1482 desc->callback_param = dd; in atmel_sha_dma_start()
1493 dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); in atmel_sha_dma_start()
1495 return atmel_sha_complete(dd, err); in atmel_sha_dma_start()
1501 static int atmel_sha_cpu_transfer(struct atmel_sha_dev *dd) in atmel_sha_cpu_transfer() argument
1503 struct ahash_request *req = dd->req; in atmel_sha_cpu_transfer()
1514 atmel_sha_write(dd, SHA_REG_DIN(din), words[i]); in atmel_sha_cpu_transfer()
1535 isr = atmel_sha_read(dd, SHA_ISR); in atmel_sha_cpu_transfer()
1538 dd->resume = atmel_sha_cpu_transfer; in atmel_sha_cpu_transfer()
1539 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_cpu_transfer()
1545 return dd->cpu_transfer_complete(dd); in atmel_sha_cpu_transfer()
1547 return atmel_sha_wait_for_data_ready(dd, dd->cpu_transfer_complete); in atmel_sha_cpu_transfer()
1550 static int atmel_sha_cpu_start(struct atmel_sha_dev *dd, in atmel_sha_cpu_start() argument
1557 struct ahash_request *req = dd->req; in atmel_sha_cpu_start()
1561 return resume(dd); in atmel_sha_cpu_start()
1580 dd->cpu_transfer_complete = resume; in atmel_sha_cpu_start()
1581 return atmel_sha_cpu_transfer(dd); in atmel_sha_cpu_start()
1584 static int atmel_sha_cpu_hash(struct atmel_sha_dev *dd, in atmel_sha_cpu_hash() argument
1589 struct ahash_request *req = dd->req; in atmel_sha_cpu_hash()
1595 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_cpu_hash()
1598 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_cpu_hash()
1599 atmel_sha_write(dd, SHA_MSR, msglen); in atmel_sha_cpu_hash()
1600 atmel_sha_write(dd, SHA_BCR, msglen); in atmel_sha_cpu_hash()
1601 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_cpu_hash()
1603 sg_init_one(&dd->tmp, data, datalen); in atmel_sha_cpu_hash()
1604 return atmel_sha_cpu_start(dd, &dd->tmp, datalen, false, true, resume); in atmel_sha_cpu_hash()
1670 static int atmel_sha_hmac_setup(struct atmel_sha_dev *dd,
1672 static int atmel_sha_hmac_prehash_key(struct atmel_sha_dev *dd,
1674 static int atmel_sha_hmac_prehash_key_done(struct atmel_sha_dev *dd);
1675 static int atmel_sha_hmac_compute_ipad_hash(struct atmel_sha_dev *dd);
1676 static int atmel_sha_hmac_compute_opad_hash(struct atmel_sha_dev *dd);
1677 static int atmel_sha_hmac_setup_done(struct atmel_sha_dev *dd);
1679 static int atmel_sha_hmac_init_done(struct atmel_sha_dev *dd);
1680 static int atmel_sha_hmac_final(struct atmel_sha_dev *dd);
1681 static int atmel_sha_hmac_final_done(struct atmel_sha_dev *dd);
1682 static int atmel_sha_hmac_digest2(struct atmel_sha_dev *dd);
1684 static int atmel_sha_hmac_setup(struct atmel_sha_dev *dd, in atmel_sha_hmac_setup() argument
1687 struct ahash_request *req = dd->req; in atmel_sha_hmac_setup()
1723 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_hmac_setup()
1728 return resume(dd); in atmel_sha_hmac_setup()
1732 return atmel_sha_hmac_prehash_key(dd, key, keylen); in atmel_sha_hmac_setup()
1737 return atmel_sha_hmac_compute_ipad_hash(dd); in atmel_sha_hmac_setup()
1740 static int atmel_sha_hmac_prehash_key(struct atmel_sha_dev *dd, in atmel_sha_hmac_prehash_key() argument
1743 return atmel_sha_cpu_hash(dd, key, keylen, true, in atmel_sha_hmac_prehash_key()
1747 static int atmel_sha_hmac_prehash_key_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_prehash_key_done() argument
1749 struct ahash_request *req = dd->req; in atmel_sha_hmac_prehash_key_done()
1759 hmac->ipad[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_prehash_key_done()
1761 return atmel_sha_hmac_compute_ipad_hash(dd); in atmel_sha_hmac_prehash_key_done()
1764 static int atmel_sha_hmac_compute_ipad_hash(struct atmel_sha_dev *dd) in atmel_sha_hmac_compute_ipad_hash() argument
1766 struct ahash_request *req = dd->req; in atmel_sha_hmac_compute_ipad_hash()
1780 return atmel_sha_cpu_hash(dd, hmac->ipad, bs, false, in atmel_sha_hmac_compute_ipad_hash()
1784 static int atmel_sha_hmac_compute_opad_hash(struct atmel_sha_dev *dd) in atmel_sha_hmac_compute_opad_hash() argument
1786 struct ahash_request *req = dd->req; in atmel_sha_hmac_compute_opad_hash()
1795 hmac->ipad[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_compute_opad_hash()
1796 return atmel_sha_cpu_hash(dd, hmac->opad, bs, false, in atmel_sha_hmac_compute_opad_hash()
1800 static int atmel_sha_hmac_setup_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_setup_done() argument
1802 struct ahash_request *req = dd->req; in atmel_sha_hmac_setup_done()
1810 hmac->opad[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_setup_done()
1812 return hmac->resume(dd); in atmel_sha_hmac_setup_done()
1815 static int atmel_sha_hmac_start(struct atmel_sha_dev *dd) in atmel_sha_hmac_start() argument
1817 struct ahash_request *req = dd->req; in atmel_sha_hmac_start()
1821 err = atmel_sha_hw_init(dd); in atmel_sha_hmac_start()
1823 return atmel_sha_complete(dd, err); in atmel_sha_hmac_start()
1827 err = atmel_sha_hmac_setup(dd, atmel_sha_hmac_init_done); in atmel_sha_hmac_start()
1831 dd->resume = atmel_sha_done; in atmel_sha_hmac_start()
1832 err = atmel_sha_update_req(dd); in atmel_sha_hmac_start()
1836 dd->resume = atmel_sha_hmac_final; in atmel_sha_hmac_start()
1837 err = atmel_sha_final_req(dd); in atmel_sha_hmac_start()
1841 err = atmel_sha_hmac_setup(dd, atmel_sha_hmac_digest2); in atmel_sha_hmac_start()
1845 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_hmac_start()
1870 static int atmel_sha_hmac_init_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_init_done() argument
1872 struct ahash_request *req = dd->req; in atmel_sha_hmac_init_done()
1884 return atmel_sha_complete(dd, 0); in atmel_sha_hmac_init_done()
1887 static int atmel_sha_hmac_final(struct atmel_sha_dev *dd) in atmel_sha_hmac_final() argument
1889 struct ahash_request *req = dd->req; in atmel_sha_hmac_final()
1903 digest[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_hmac_final()
1906 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_hmac_final()
1909 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]); in atmel_sha_hmac_final()
1913 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_hmac_final()
1914 atmel_sha_write(dd, SHA_MSR, bs + ds); in atmel_sha_hmac_final()
1915 atmel_sha_write(dd, SHA_BCR, ds); in atmel_sha_hmac_final()
1916 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_hmac_final()
1918 sg_init_one(&dd->tmp, digest, ds); in atmel_sha_hmac_final()
1919 return atmel_sha_cpu_start(dd, &dd->tmp, ds, false, true, in atmel_sha_hmac_final()
1923 static int atmel_sha_hmac_final_done(struct atmel_sha_dev *dd) in atmel_sha_hmac_final_done() argument
1930 atmel_sha_copy_hash(dd->req); in atmel_sha_hmac_final_done()
1931 atmel_sha_copy_ready_hash(dd->req); in atmel_sha_hmac_final_done()
1932 return atmel_sha_complete(dd, 0); in atmel_sha_hmac_final_done()
1946 static int atmel_sha_hmac_digest2(struct atmel_sha_dev *dd) in atmel_sha_hmac_digest2() argument
1948 struct ahash_request *req = dd->req; in atmel_sha_hmac_digest2()
1976 sg_init_one(&dd->tmp, ctx->buffer, ctx->bufcnt); in atmel_sha_hmac_digest2()
1981 atmel_sha_dma_check_aligned(dd, req->src, req->nbytes)) in atmel_sha_hmac_digest2()
1985 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_hmac_digest2()
1987 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->ipad[i]); in atmel_sha_hmac_digest2()
1989 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIEHV); in atmel_sha_hmac_digest2()
1991 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]); in atmel_sha_hmac_digest2()
2000 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_hmac_digest2()
2002 atmel_sha_write(dd, SHA_MSR, req->nbytes); in atmel_sha_hmac_digest2()
2003 atmel_sha_write(dd, SHA_BCR, req->nbytes); in atmel_sha_hmac_digest2()
2005 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_hmac_digest2()
2009 sgbuf = &dd->tmp; in atmel_sha_hmac_digest2()
2017 return atmel_sha_dma_start(dd, sgbuf, req->nbytes, in atmel_sha_hmac_digest2()
2020 return atmel_sha_cpu_start(dd, sgbuf, req->nbytes, false, true, in atmel_sha_hmac_digest2()
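The one-shot HMAC path never re-hashes the key pads. It loads the precomputed ipad digest as the user initial hash value (SHA_CR_WUIHV) and the opad digest as the user initial/expected hash value (SHA_CR_WUIEHV), then programs mode and message length and starts the block. Sketch of that register sequence; num_words standing for the per-algorithm digest word count is an assumption.

    /* Load the precomputed inner/outer pads into the IP ... */
    atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV);
    for (i = 0; i < num_words; ++i)
            atmel_sha_write(dd, SHA_REG_DIN(i), hmac->ipad[i]);

    atmel_sha_write(dd, SHA_CR, SHA_CR_WUIEHV);
    for (i = 0; i < num_words; ++i)
            atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]);

    /* ... then program mode + lengths and start the first block. */
    atmel_sha_write(dd, SHA_MR, mr);
    atmel_sha_write(dd, SHA_MSR, req->nbytes);
    atmel_sha_write(dd, SHA_BCR, req->nbytes);
    atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST);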
2104 static int atmel_sha_authenc_init2(struct atmel_sha_dev *dd);
2105 static int atmel_sha_authenc_init_done(struct atmel_sha_dev *dd);
2106 static int atmel_sha_authenc_final_done(struct atmel_sha_dev *dd);
2134 authctx->cb(authctx->aes_dev, err, authctx->base.dd->is_async); in atmel_sha_authenc_complete()
2137 static int atmel_sha_authenc_start(struct atmel_sha_dev *dd) in atmel_sha_authenc_start() argument
2139 struct ahash_request *req = dd->req; in atmel_sha_authenc_start()
2147 dd->force_complete = true; in atmel_sha_authenc_start()
2149 err = atmel_sha_hw_init(dd); in atmel_sha_authenc_start()
2150 return authctx->cb(authctx->aes_dev, err, dd->is_async); in atmel_sha_authenc_start()
2157 dummy.dd = NULL; in atmel_sha_authenc_is_ready()
2254 struct atmel_sha_dev *dd; in atmel_sha_authenc_schedule() local
2260 dd = atmel_sha_find_dev(tctx); in atmel_sha_authenc_schedule()
2261 if (!dd) in atmel_sha_authenc_schedule()
2265 ctx->dd = dd; in atmel_sha_authenc_schedule()
2272 return atmel_sha_handle_queue(dd, req); in atmel_sha_authenc_schedule()
2286 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_init() local
2289 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_authenc_init()
2298 return atmel_sha_hmac_setup(dd, atmel_sha_authenc_init2); in atmel_sha_authenc_init()
2302 static int atmel_sha_authenc_init2(struct atmel_sha_dev *dd) in atmel_sha_authenc_init2() argument
2304 struct ahash_request *req = dd->req; in atmel_sha_authenc_init2()
2313 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIHV); in atmel_sha_authenc_init2()
2315 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->ipad[i]); in atmel_sha_authenc_init2()
2317 atmel_sha_write(dd, SHA_CR, SHA_CR_WUIEHV); in atmel_sha_authenc_init2()
2319 atmel_sha_write(dd, SHA_REG_DIN(i), hmac->opad[i]); in atmel_sha_authenc_init2()
2325 atmel_sha_write(dd, SHA_MR, mr); in atmel_sha_authenc_init2()
2328 atmel_sha_write(dd, SHA_MSR, msg_size); in atmel_sha_authenc_init2()
2329 atmel_sha_write(dd, SHA_BCR, msg_size); in atmel_sha_authenc_init2()
2331 atmel_sha_write(dd, SHA_CR, SHA_CR_FIRST); in atmel_sha_authenc_init2()
2334 return atmel_sha_cpu_start(dd, authctx->assoc, authctx->assoclen, in atmel_sha_authenc_init2()
2339 static int atmel_sha_authenc_init_done(struct atmel_sha_dev *dd) in atmel_sha_authenc_init_done() argument
2341 struct ahash_request *req = dd->req; in atmel_sha_authenc_init_done()
2344 return authctx->cb(authctx->aes_dev, 0, dd->is_async); in atmel_sha_authenc_init_done()
2354 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_final() local
2378 return atmel_sha_complete(dd, -EINVAL); in atmel_sha_authenc_final()
2386 return atmel_sha_wait_for_data_ready(dd, in atmel_sha_authenc_final()
2391 static int atmel_sha_authenc_final_done(struct atmel_sha_dev *dd) in atmel_sha_authenc_final_done() argument
2393 struct ahash_request *req = dd->req; in atmel_sha_authenc_final_done()
2398 authctx->digest[i] = atmel_sha_read(dd, SHA_REG_DIGEST(i)); in atmel_sha_authenc_final_done()
2400 return atmel_sha_complete(dd, 0); in atmel_sha_authenc_final_done()
2407 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_abort() local
2410 dd->is_async = false; in atmel_sha_authenc_abort()
2411 dd->force_complete = false; in atmel_sha_authenc_abort()
2412 (void)atmel_sha_complete(dd, 0); in atmel_sha_authenc_abort()
2419 static void atmel_sha_unregister_algs(struct atmel_sha_dev *dd) in atmel_sha_unregister_algs() argument
2423 if (dd->caps.has_hmac) in atmel_sha_unregister_algs()
2430 if (dd->caps.has_sha224) in atmel_sha_unregister_algs()
2433 if (dd->caps.has_sha_384_512) { in atmel_sha_unregister_algs()
2439 static int atmel_sha_register_algs(struct atmel_sha_dev *dd) in atmel_sha_register_algs() argument
2451 if (dd->caps.has_sha224) { in atmel_sha_register_algs()
2459 if (dd->caps.has_sha_384_512) { in atmel_sha_register_algs()
2469 if (dd->caps.has_hmac) { in atmel_sha_register_algs()
2499 static int atmel_sha_dma_init(struct atmel_sha_dev *dd) in atmel_sha_dma_init() argument
2501 dd->dma_lch_in.chan = dma_request_chan(dd->dev, "tx"); in atmel_sha_dma_init()
2502 if (IS_ERR(dd->dma_lch_in.chan)) { in atmel_sha_dma_init()
2503 return dev_err_probe(dd->dev, PTR_ERR(dd->dma_lch_in.chan), in atmel_sha_dma_init()
2507 dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base + in atmel_sha_dma_init()
2509 dd->dma_lch_in.dma_conf.src_maxburst = 1; in atmel_sha_dma_init()
2510 dd->dma_lch_in.dma_conf.src_addr_width = in atmel_sha_dma_init()
2512 dd->dma_lch_in.dma_conf.dst_maxburst = 1; in atmel_sha_dma_init()
2513 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_sha_dma_init()
2515 dd->dma_lch_in.dma_conf.device_fc = false; in atmel_sha_dma_init()
2520 static void atmel_sha_dma_cleanup(struct atmel_sha_dev *dd) in atmel_sha_dma_cleanup() argument
2522 dma_release_channel(dd->dma_lch_in.chan); in atmel_sha_dma_cleanup()
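dma_init requests the "tx" slave channel and preconfigures it: destination fixed at the DIN FIFO (phys_base plus the register offset), 32-bit bus widths, single-beat bursts by default (the transfer path raises maxburst to 16), no device flow control; cleanup just releases the channel. Sketch reconstructed from the lines above; SHA_REG_DIN(0) as the destination offset and the exact error string are inferred, not quoted.

    static int atmel_sha_dma_init(struct atmel_sha_dev *dd)
    {
            dd->dma_lch_in.chan = dma_request_chan(dd->dev, "tx");
            if (IS_ERR(dd->dma_lch_in.chan))
                    return dev_err_probe(dd->dev, PTR_ERR(dd->dma_lch_in.chan),
                                         "DMA channel is not available\n");

            dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base +
                    SHA_REG_DIN(0);
            dd->dma_lch_in.dma_conf.src_maxburst = 1;
            dd->dma_lch_in.dma_conf.src_addr_width =
                    DMA_SLAVE_BUSWIDTH_4_BYTES;
            dd->dma_lch_in.dma_conf.dst_maxburst = 1;
            dd->dma_lch_in.dma_conf.dst_addr_width =
                    DMA_SLAVE_BUSWIDTH_4_BYTES;
            dd->dma_lch_in.dma_conf.device_fc = false;

            return 0;
    }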
2525 static void atmel_sha_get_cap(struct atmel_sha_dev *dd) in atmel_sha_get_cap() argument
2528 dd->caps.has_dma = 0; in atmel_sha_get_cap()
2529 dd->caps.has_dualbuff = 0; in atmel_sha_get_cap()
2530 dd->caps.has_sha224 = 0; in atmel_sha_get_cap()
2531 dd->caps.has_sha_384_512 = 0; in atmel_sha_get_cap()
2532 dd->caps.has_uihv = 0; in atmel_sha_get_cap()
2533 dd->caps.has_hmac = 0; in atmel_sha_get_cap()
2536 switch (dd->hw_version & 0xff0) { in atmel_sha_get_cap()
2540 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2541 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2542 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2543 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
2544 dd->caps.has_uihv = 1; in atmel_sha_get_cap()
2545 dd->caps.has_hmac = 1; in atmel_sha_get_cap()
2548 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2549 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2550 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2551 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
2552 dd->caps.has_uihv = 1; in atmel_sha_get_cap()
2555 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2556 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2557 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2558 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
2561 dd->caps.has_dma = 1; in atmel_sha_get_cap()
2562 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
2563 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
2568 dev_warn(dd->dev, in atmel_sha_get_cap()
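Capability discovery keys off the IP revision: every cap defaults to off, then a switch on (hw_version & 0xff0) enables features per revision, richest first; unknown revisions fall through to the dev_warn(). Sketch of the shape with example revision values: the case labels here are assumptions, and the intermediate revision tiers visible in the listing are elided.

    dd->caps.has_dma = 0;
    dd->caps.has_dualbuff = 0;
    dd->caps.has_sha224 = 0;
    dd->caps.has_sha_384_512 = 0;
    dd->caps.has_uihv = 0;
    dd->caps.has_hmac = 0;

    switch (dd->hw_version & 0xff0) {
    case 0x510:                     /* example: full-featured revision */
            dd->caps.has_dma = 1;
            dd->caps.has_dualbuff = 1;
            dd->caps.has_sha224 = 1;
            dd->caps.has_sha_384_512 = 1;
            dd->caps.has_uihv = 1;
            dd->caps.has_hmac = 1;
            break;
    /* ... intermediate revisions enable progressively fewer caps ... */
    case 0x320:                     /* example: baseline, defaults only */
            break;
    default:
            dev_warn(dd->dev, "Unmanaged sha version\n");
            break;
    }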