Lines matching refs:dd (references to the struct atmel_aes_dev *dd device handle in drivers/crypto/atmel-aes.c, the Linux Atmel AES driver). Each entry shows the source line number, the matching code, and the enclosing function; the trailing "argument"/"member"/"local" tags give the identifier's role at that line.
104 struct atmel_aes_dev *dd; member
347 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
349 u32 value = readl_relaxed(dd->io_base + offset); in atmel_aes_read()
352 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_read()
355 dev_vdbg(dd->dev, "read 0x%08x from %s\n", value, in atmel_aes_read()
363 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
367 if (dd->flags & AES_FLAGS_DUMP_REG) { in atmel_aes_write()
370 dev_vdbg(dd->dev, "write 0x%08x into %s\n", value, in atmel_aes_write()
375 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
378 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
382 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
385 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
389 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
392 static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_block() argument
395 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
398 static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_block() argument
401 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
404 static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd, in atmel_aes_wait_for_data_ready() argument
407 u32 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_wait_for_data_ready()
410 return resume(dd); in atmel_aes_wait_for_data_ready()
412 dd->resume = resume; in atmel_aes_wait_for_data_ready()
413 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_wait_for_data_ready()
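
Note: the two lines above are the driver's core poll-then-park idiom: read AES_ISR once, run the continuation immediately if DATARDY is already set, otherwise stash it in dd->resume and enable the DATARDY interrupt so the done tasklet runs it later. A minimal userspace sketch of that pattern (all names here are illustrative, not the driver's own):

    #include <stdio.h>

    #define INT_DATARDY 0x1
    #define INPROGRESS  1                      /* stands in for -EINPROGRESS */

    struct dev_model {
            unsigned int isr;                  /* models AES_ISR */
            int (*resume)(struct dev_model *); /* parked continuation */
    };

    static int on_ready(struct dev_model *dd)
    {
            printf("DATARDY: continuing\n");
            return 0;
    }

    static int wait_for_data_ready(struct dev_model *dd,
                                   int (*resume)(struct dev_model *))
    {
            if (dd->isr & INT_DATARDY)
                    return resume(dd);  /* fast path: already ready */
            dd->resume = resume;        /* slow path: wait for the IRQ */
            return INPROGRESS;          /* real code enables AES_IER here */
    }

    int main(void)
    {
            struct dev_model dd = { .isr = 0 };

            if (wait_for_data_ready(&dd, on_ready) == INPROGRESS) {
                    dd.isr |= INT_DATARDY;  /* pretend the IRQ fired */
                    dd.resume(&dd);         /* what the done tasklet does */
            }
            return 0;
    }
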
435 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
439 err = clk_enable(dd->iclk); in atmel_aes_hw_init()
443 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
444 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
449 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
451 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
454 static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
458 err = atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
462 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
464 dev_info(dd->dev, "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
466 clk_disable(dd->iclk); in atmel_aes_hw_version_init()
470 static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd, in atmel_aes_set_mode() argument
474 dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode; in atmel_aes_set_mode()
477 static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd) in atmel_aes_is_encrypt() argument
479 return (dd->flags & AES_FLAGS_ENCRYPT); in atmel_aes_is_encrypt()
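
Note: atmel_aes_set_mode() keeps only the persistent bits of dd->flags and ORs in the request's mode, so stale per-request bits (such as ENCRYPT) can never leak into the next request. A tiny self-checking sketch with made-up bit values (the real constants live in the driver):

    #include <assert.h>

    #define FLAGS_ENCRYPT    0x001
    #define FLAGS_CBC        0x010
    #define FLAGS_BUSY       0x100            /* survives across requests */
    #define FLAGS_PERSISTENT FLAGS_BUSY

    static unsigned long set_mode(unsigned long flags, unsigned long mode)
    {
            /* drop all per-request bits, keep device-lifetime state */
            return (flags & FLAGS_PERSISTENT) | mode;
    }

    int main(void)
    {
            unsigned long f = FLAGS_BUSY | FLAGS_ENCRYPT; /* stale mode */

            f = set_mode(f, FLAGS_CBC);       /* new request: CBC decrypt */
            assert(f == (FLAGS_BUSY | FLAGS_CBC));
            assert(!(f & FLAGS_ENCRYPT));     /* is_encrypt() now false */
            return 0;
    }
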
483 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
486 static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd) in atmel_aes_set_iv_as_last_ciphertext_block() argument
488 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_set_iv_as_last_ciphertext_block()
509 static void atmel_aes_ctr_update_req_iv(struct atmel_aes_dev *dd) in atmel_aes_ctr_update_req_iv() argument
511 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_update_req_iv()
512 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_update_req_iv()
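
Note: after a CTR request completes, the counter block in req->iv has to be advanced by the number of blocks just processed so a chained request continues the keystream where this one stopped. A sketch of the big-endian increment (the hardware counter itself is only 32 bits wide, which is why the driver must also split transfers at the wrap; see atmel_aes_ctr_transfer() below):

    #include <stdint.h>
    #include <stdio.h>

    /* Advance a 16-byte big-endian counter block by 'blocks'. */
    static void ctr_add(uint8_t iv[16], uint64_t blocks)
    {
            for (int i = 15; i >= 0 && blocks; i--) {
                    blocks += iv[i];
                    iv[i] = (uint8_t)blocks;
                    blocks >>= 8;             /* propagate the carry */
            }
    }

    int main(void)
    {
            uint8_t iv[16] = { [15] = 0xff }; /* low byte about to carry */

            ctr_add(iv, 1);                   /* one 16-byte block done */
            printf("%02x %02x\n", iv[14], iv[15]); /* prints "01 00" */
            return 0;
    }
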
529 static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_complete() argument
531 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_complete()
535 if (dd->ctx->is_aead) in atmel_aes_complete()
536 atmel_aes_authenc_complete(dd, err); in atmel_aes_complete()
539 clk_disable(dd->iclk); in atmel_aes_complete()
540 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_complete()
542 if (!err && !dd->ctx->is_aead && in atmel_aes_complete()
545 atmel_aes_set_iv_as_last_ciphertext_block(dd); in atmel_aes_complete()
547 atmel_aes_ctr_update_req_iv(dd); in atmel_aes_complete()
550 if (dd->is_async) in atmel_aes_complete()
551 crypto_request_complete(dd->areq, err); in atmel_aes_complete()
553 tasklet_schedule(&dd->queue_task); in atmel_aes_complete()
558 static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl_key() argument
571 valmr |= dd->flags & AES_FLAGS_MODE_MASK; in atmel_aes_write_ctrl_key()
575 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl_key()
581 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl_key()
583 atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen)); in atmel_aes_write_ctrl_key()
586 atmel_aes_write_block(dd, AES_IVR(0), iv); in atmel_aes_write_ctrl_key()
589 static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, in atmel_aes_write_ctrl() argument
593 atmel_aes_write_ctrl_key(dd, use_dma, iv, in atmel_aes_write_ctrl()
594 dd->ctx->key, dd->ctx->keylen); in atmel_aes_write_ctrl()
599 static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd) in atmel_aes_cpu_transfer() argument
605 atmel_aes_read_block(dd, AES_ODATAR(0), dd->data); in atmel_aes_cpu_transfer()
606 dd->data += 4; in atmel_aes_cpu_transfer()
607 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
609 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
612 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_transfer()
614 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_cpu_transfer()
616 dd->resume = atmel_aes_cpu_transfer; in atmel_aes_cpu_transfer()
617 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_cpu_transfer()
622 if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_cpu_transfer()
623 dd->buf, dd->total)) in atmel_aes_cpu_transfer()
627 return atmel_aes_complete(dd, err); in atmel_aes_cpu_transfer()
629 return dd->cpu_transfer_complete(dd); in atmel_aes_cpu_transfer()
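
Note: atmel_aes_cpu_transfer() is strict PIO, one 16-byte block per DATARDY event: read four 32-bit output words, advance dd->data by four u32s, write the next four input words, then wait again (or park itself as its own resume handler). A userspace model of that block-at-a-time loop, with the MMIO FIFO stubbed out (hw_fifo and the fake "cipher" are illustrations only):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define BLOCK_WORDS 4    /* AES_BLOCK_SIZE / sizeof(u32) */

    static uint32_t hw_fifo[BLOCK_WORDS];     /* stub for IDATAR/ODATAR */

    static void hw_write_block(const uint32_t *in)
    {
            memcpy(hw_fifo, in, 16);
    }

    static void hw_read_block(uint32_t *out)
    {
            for (int i = 0; i < BLOCK_WORDS; i++)
                    out[i] = hw_fifo[i] ^ 0xdeadbeef;  /* fake "cipher" */
    }

    int main(void)
    {
            uint32_t buf[8] = { 1, 2, 3, 4, 5, 6, 7, 8 }; /* two blocks */
            uint32_t *data = buf;
            size_t datalen = sizeof(buf);

            hw_write_block(data);             /* prime the first block */
            while (datalen > 0) {
                    hw_read_block(data);      /* result replaces input */
                    data += BLOCK_WORDS;      /* dd->data += 4 */
                    datalen -= 16;            /* dd->datalen -= 16 */
                    if (datalen >= 16)
                            hw_write_block(data); /* feed next block */
            }
            printf("first word: %08x\n", (unsigned)buf[0]);
            return 0;
    }
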
632 static int atmel_aes_cpu_start(struct atmel_aes_dev *dd, in atmel_aes_cpu_start() argument
643 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_cpu_start()
645 dd->total = len; in atmel_aes_cpu_start()
646 dd->real_dst = dst; in atmel_aes_cpu_start()
647 dd->cpu_transfer_complete = resume; in atmel_aes_cpu_start()
648 dd->datalen = len + padlen; in atmel_aes_cpu_start()
649 dd->data = (u32 *)dd->buf; in atmel_aes_cpu_start()
650 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_cpu_start()
651 return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer); in atmel_aes_cpu_start()
659 static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd, in atmel_aes_check_aligned() argument
666 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
674 if (!IS_ALIGNED(len, dd->ctx->block_size)) in atmel_aes_check_aligned()
683 if (!IS_ALIGNED(sg->length, dd->ctx->block_size)) in atmel_aes_check_aligned()
709 static int atmel_aes_map(struct atmel_aes_dev *dd, in atmel_aes_map() argument
717 dd->total = len; in atmel_aes_map()
718 dd->src.sg = src; in atmel_aes_map()
719 dd->dst.sg = dst; in atmel_aes_map()
720 dd->real_dst = dst; in atmel_aes_map()
722 src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src); in atmel_aes_map()
726 dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst); in atmel_aes_map()
728 padlen = atmel_aes_padlen(len, dd->ctx->block_size); in atmel_aes_map()
730 if (dd->buflen < len + padlen) in atmel_aes_map()
734 sg_copy_to_buffer(src, sg_nents(src), dd->buf, len); in atmel_aes_map()
735 dd->src.sg = &dd->aligned_sg; in atmel_aes_map()
736 dd->src.nents = 1; in atmel_aes_map()
737 dd->src.remainder = 0; in atmel_aes_map()
741 dd->dst.sg = &dd->aligned_sg; in atmel_aes_map()
742 dd->dst.nents = 1; in atmel_aes_map()
743 dd->dst.remainder = 0; in atmel_aes_map()
746 sg_init_table(&dd->aligned_sg, 1); in atmel_aes_map()
747 sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen); in atmel_aes_map()
750 if (dd->src.sg == dd->dst.sg) { in atmel_aes_map()
751 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
753 dd->dst.sg_len = dd->src.sg_len; in atmel_aes_map()
754 if (!dd->src.sg_len) in atmel_aes_map()
757 dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
759 if (!dd->src.sg_len) in atmel_aes_map()
762 dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_map()
764 if (!dd->dst.sg_len) { in atmel_aes_map()
765 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_map()
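
Note: atmel_aes_map() only maps the caller's scatterlists in place when every entry is word-aligned and a block-size multiple; otherwise it copies the whole request into the driver's bounce buffer (dd->buf), pads it to a block multiple, and maps the single-entry dd->aligned_sg instead. A sketch of that decision and the padding math (names illustrative):

    #include <stdint.h>
    #include <stdio.h>

    #define BLOCK 16

    /* Bytes of zero padding needed to reach a block multiple.
     * The mask trick works because BLOCK is a power of two. */
    static size_t padlen(size_t len)
    {
            return ((len + BLOCK - 1) & ~(size_t)(BLOCK - 1)) - len;
    }

    static int dma_friendly(const void *p, size_t len)
    {
            return (uintptr_t)p % sizeof(uint32_t) == 0 &&
                   len % BLOCK == 0;
    }

    int main(void)
    {
            char raw[37];

            if (!dma_friendly(raw + 1, sizeof(raw) - 1))
                    printf("bounce: copy %zu bytes, pad %zu\n",
                           sizeof(raw) - 1, padlen(sizeof(raw) - 1));
            return 0;
    }
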
774 static void atmel_aes_unmap(struct atmel_aes_dev *dd) in atmel_aes_unmap() argument
776 if (dd->src.sg == dd->dst.sg) { in atmel_aes_unmap()
777 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
780 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
781 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
783 dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, in atmel_aes_unmap()
786 if (dd->dst.sg != &dd->aligned_sg) in atmel_aes_unmap()
787 atmel_aes_restore_sg(&dd->dst); in atmel_aes_unmap()
789 dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, in atmel_aes_unmap()
792 if (dd->src.sg != &dd->aligned_sg) in atmel_aes_unmap()
793 atmel_aes_restore_sg(&dd->src); in atmel_aes_unmap()
796 if (dd->dst.sg == &dd->aligned_sg) in atmel_aes_unmap()
797 sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst), in atmel_aes_unmap()
798 dd->buf, dd->total); in atmel_aes_unmap()
801 static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd, in atmel_aes_dma_transfer_start() argument
820 dma = &dd->src; in atmel_aes_dma_transfer_start()
822 config.dst_addr = dd->phys_base + AES_IDATAR(0); in atmel_aes_dma_transfer_start()
826 dma = &dd->dst; in atmel_aes_dma_transfer_start()
828 config.src_addr = dd->phys_base + AES_ODATAR(0); in atmel_aes_dma_transfer_start()
845 desc->callback_param = dd; in atmel_aes_dma_transfer_start()
852 static int atmel_aes_dma_start(struct atmel_aes_dev *dd, in atmel_aes_dma_start() argument
862 switch (dd->ctx->block_size) { in atmel_aes_dma_start()
881 maxburst = dd->caps.max_burst_size; in atmel_aes_dma_start()
889 err = atmel_aes_map(dd, src, dst, len); in atmel_aes_dma_start()
893 dd->resume = resume; in atmel_aes_dma_start()
896 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM, in atmel_aes_dma_start()
902 err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV, in atmel_aes_dma_start()
910 dmaengine_terminate_sync(dd->dst.chan); in atmel_aes_dma_start()
912 atmel_aes_unmap(dd); in atmel_aes_dma_start()
914 return atmel_aes_complete(dd, err); in atmel_aes_dma_start()
919 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
921 atmel_aes_unmap(dd); in atmel_aes_dma_callback()
922 dd->is_async = true; in atmel_aes_dma_callback()
923 (void)dd->resume(dd); in atmel_aes_dma_callback()
926 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
935 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
937 ret = crypto_enqueue_request(&dd->queue, new_areq); in atmel_aes_handle_queue()
938 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
939 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
942 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
943 areq = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
945 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
946 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
956 dd->areq = areq; in atmel_aes_handle_queue()
957 dd->ctx = ctx; in atmel_aes_handle_queue()
959 dd->is_async = start_async; in atmel_aes_handle_queue()
962 err = ctx->start(dd); in atmel_aes_handle_queue()
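
Note: atmel_aes_handle_queue() is the standard crypto-driver queue pump: under the spinlock it enqueues any new request, bails out if the engine is already busy, otherwise dequeues one request and sets AES_FLAGS_BUSY; ctx->start() then runs outside the lock. A minimal pthread model of that locking discipline (all names illustrative):

    #include <pthread.h>
    #include <stdio.h>

    struct engine {
            pthread_mutex_t lock;
            int busy;                  /* models AES_FLAGS_BUSY */
            int queued;                /* stands in for the crypto_queue */
    };

    /* Returns 1 if this call started the engine, 0 if the request
     * was merely queued behind a busy engine. */
    static int handle_queue(struct engine *e, int new_req)
    {
            int start = 0;

            pthread_mutex_lock(&e->lock);
            if (new_req)
                    e->queued++;       /* crypto_enqueue_request() */
            if (!e->busy && e->queued) {
                    e->queued--;       /* crypto_dequeue_request() */
                    e->busy = 1;
                    start = 1;
            }
            pthread_mutex_unlock(&e->lock);

            if (start)
                    printf("ctx->start(dd)\n"); /* outside the lock */
            return start;
    }

    int main(void)
    {
            struct engine e = { PTHREAD_MUTEX_INITIALIZER, 0, 0 };

            handle_queue(&e, 1);       /* idle engine: starts at once */
            handle_queue(&e, 1);       /* busy engine: stays queued */
            return 0;
    }
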
969 static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd) in atmel_aes_transfer_complete() argument
971 return atmel_aes_complete(dd, 0); in atmel_aes_transfer_complete()
974 static int atmel_aes_start(struct atmel_aes_dev *dd) in atmel_aes_start() argument
976 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_start()
979 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
982 atmel_aes_set_mode(dd, rctx); in atmel_aes_start()
984 err = atmel_aes_hw_init(dd); in atmel_aes_start()
986 return atmel_aes_complete(dd, err); in atmel_aes_start()
988 atmel_aes_write_ctrl(dd, use_dma, (void *)req->iv); in atmel_aes_start()
990 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_start()
994 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_start()
998 static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd) in atmel_aes_ctr_transfer() argument
1000 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_transfer()
1001 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_transfer()
1009 ctx->offset += dd->total; in atmel_aes_ctr_transfer()
1011 return atmel_aes_transfer_complete(dd); in atmel_aes_ctr_transfer()
1036 atmel_aes_write_ctrl(dd, use_dma, ctx->iv); in atmel_aes_ctr_transfer()
1047 return atmel_aes_dma_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
1050 return atmel_aes_cpu_start(dd, src, dst, datalen, in atmel_aes_ctr_transfer()
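
Note: atmel_aes_ctr_transfer() advances ctx->offset by whatever the previous chunk moved, then sizes the next chunk so the IP's 32-bit counter cannot wrap in mid-transfer; when the limit is hit, the IV is reloaded and the remainder becomes a new chunk. A sketch of that clamp (the driver's exact arithmetic differs in detail):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t ctr = 0xfffffffeu;  /* low word of the counter */
            uint64_t want = 10;          /* blocks left in the request */

            /* blocks available before the 32-bit counter wraps */
            uint64_t room = 0x100000000ULL - ctr;
            uint64_t chunk = want < room ? want : room;

            printf("do %llu of %llu blocks, then reload the IV\n",
                   (unsigned long long)chunk, (unsigned long long)want);
            return 0;
    }
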
1054 static int atmel_aes_ctr_start(struct atmel_aes_dev *dd) in atmel_aes_ctr_start() argument
1056 struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx); in atmel_aes_ctr_start()
1057 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_ctr_start()
1061 atmel_aes_set_mode(dd, rctx); in atmel_aes_ctr_start()
1063 err = atmel_aes_hw_init(dd); in atmel_aes_ctr_start()
1065 return atmel_aes_complete(dd, err); in atmel_aes_ctr_start()
1069 dd->total = 0; in atmel_aes_ctr_start()
1070 return atmel_aes_ctr_transfer(dd); in atmel_aes_ctr_start()
1152 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_crypt()
1264 struct atmel_aes_dev *dd; in atmel_aes_init_tfm() local
1266 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_init_tfm()
1267 if (!dd) in atmel_aes_init_tfm()
1271 ctx->base.dd = dd; in atmel_aes_init_tfm()
1280 struct atmel_aes_dev *dd; in atmel_aes_ctr_init_tfm() local
1282 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_ctr_init_tfm()
1283 if (!dd) in atmel_aes_ctr_init_tfm()
1287 ctx->base.dd = dd; in atmel_aes_ctr_init_tfm()
1425 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
1429 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
1430 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);
1432 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
1433 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
1434 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
1435 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
1436 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
1437 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
1438 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
1446 static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd, in atmel_aes_gcm_ghash() argument
1451 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash()
1453 dd->data = (u32 *)data; in atmel_aes_gcm_ghash()
1454 dd->datalen = datalen; in atmel_aes_gcm_ghash()
1459 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_ghash()
1460 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init); in atmel_aes_gcm_ghash()
1463 static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_init() argument
1465 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_init()
1468 atmel_aes_write(dd, AES_AADLENR, dd->total); in atmel_aes_gcm_ghash_init()
1469 atmel_aes_write(dd, AES_CLENR, 0); in atmel_aes_gcm_ghash_init()
1473 atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in); in atmel_aes_gcm_ghash_init()
1475 return atmel_aes_gcm_ghash_finalize(dd); in atmel_aes_gcm_ghash_init()
1478 static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_ghash_finalize() argument
1480 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_ghash_finalize()
1484 while (dd->datalen > 0) { in atmel_aes_gcm_ghash_finalize()
1485 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_ghash_finalize()
1486 dd->data += 4; in atmel_aes_gcm_ghash_finalize()
1487 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1489 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_ghash_finalize()
1491 dd->resume = atmel_aes_gcm_ghash_finalize; in atmel_aes_gcm_ghash_finalize()
1492 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_ghash_finalize()
1498 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out); in atmel_aes_gcm_ghash_finalize()
1500 return ctx->ghash_resume(dd); in atmel_aes_gcm_ghash_finalize()
1504 static int atmel_aes_gcm_start(struct atmel_aes_dev *dd) in atmel_aes_gcm_start() argument
1506 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_start()
1507 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_start()
1513 u8 *data = dd->buf; in atmel_aes_gcm_start()
1516 atmel_aes_set_mode(dd, rctx); in atmel_aes_gcm_start()
1518 err = atmel_aes_hw_init(dd); in atmel_aes_gcm_start()
1520 return atmel_aes_complete(dd, err); in atmel_aes_gcm_start()
1525 return atmel_aes_gcm_process(dd); in atmel_aes_gcm_start()
1530 if (datalen > dd->buflen) in atmel_aes_gcm_start()
1531 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_start()
1537 return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen, in atmel_aes_gcm_start()
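
Note: atmel_aes_gcm_start() only takes the GHASH detour for IVs longer than 96 bits; for the common 12-byte IV, NIST SP 800-38D defines the initial counter block J0 directly, with no hashing:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* J0 for a 96-bit IV: IV || 0x00000001 (SP 800-38D). Longer IVs
     * are run through GHASH instead, which is the path that calls
     * atmel_aes_gcm_ghash() above. */
    static void gcm_j0(const uint8_t iv[12], uint8_t j0[16])
    {
            memcpy(j0, iv, 12);
            memset(j0 + 12, 0, 3);
            j0[15] = 1;
    }

    int main(void)
    {
            uint8_t iv[12] = { 0 }, j0[16];

            gcm_j0(iv, j0);
            printf("%02x\n", j0[15]);    /* prints "01" */
            return 0;
    }
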
1541 static int atmel_aes_gcm_process(struct atmel_aes_dev *dd) in atmel_aes_gcm_process() argument
1543 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_process()
1544 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_process()
1546 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_process()
1558 dd->flags |= AES_FLAGS_GTAGEN; in atmel_aes_gcm_process()
1560 atmel_aes_write_ctrl(dd, false, NULL); in atmel_aes_gcm_process()
1561 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length); in atmel_aes_gcm_process()
1564 static int atmel_aes_gcm_length(struct atmel_aes_dev *dd) in atmel_aes_gcm_length() argument
1566 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_length()
1567 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_length()
1574 atmel_aes_write_block(dd, AES_IVR(0), j0); in atmel_aes_gcm_length()
1578 atmel_aes_write(dd, AES_AADLENR, req->assoclen); in atmel_aes_gcm_length()
1579 atmel_aes_write(dd, AES_CLENR, ctx->textlen); in atmel_aes_gcm_length()
1583 dd->datalen = 0; in atmel_aes_gcm_length()
1584 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1589 if (unlikely(req->assoclen + padlen > dd->buflen)) in atmel_aes_gcm_length()
1590 return atmel_aes_complete(dd, -EINVAL); in atmel_aes_gcm_length()
1591 sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen); in atmel_aes_gcm_length()
1594 dd->data = (u32 *)dd->buf; in atmel_aes_gcm_length()
1595 dd->datalen = req->assoclen + padlen; in atmel_aes_gcm_length()
1596 return atmel_aes_gcm_data(dd); in atmel_aes_gcm_length()
1599 static int atmel_aes_gcm_data(struct atmel_aes_dev *dd) in atmel_aes_gcm_data() argument
1601 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_data()
1602 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_data()
1608 while (dd->datalen > 0) { in atmel_aes_gcm_data()
1609 atmel_aes_write_block(dd, AES_IDATAR(0), dd->data); in atmel_aes_gcm_data()
1610 dd->data += 4; in atmel_aes_gcm_data()
1611 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1613 isr = atmel_aes_read(dd, AES_ISR); in atmel_aes_gcm_data()
1615 dd->resume = atmel_aes_gcm_data; in atmel_aes_gcm_data()
1616 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_gcm_data()
1623 return atmel_aes_gcm_tag_init(dd); in atmel_aes_gcm_data()
1632 mr = atmel_aes_read(dd, AES_MR); in atmel_aes_gcm_data()
1635 if (dd->caps.has_dualbuff) in atmel_aes_gcm_data()
1637 atmel_aes_write(dd, AES_MR, mr); in atmel_aes_gcm_data()
1639 return atmel_aes_dma_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1643 return atmel_aes_cpu_start(dd, src, dst, ctx->textlen, in atmel_aes_gcm_data()
1647 static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag_init() argument
1649 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag_init()
1650 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_tag_init()
1651 __be64 *data = dd->buf; in atmel_aes_gcm_tag_init()
1653 if (likely(dd->flags & AES_FLAGS_GTAGEN)) { in atmel_aes_gcm_tag_init()
1654 if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) { in atmel_aes_gcm_tag_init()
1655 dd->resume = atmel_aes_gcm_tag_init; in atmel_aes_gcm_tag_init()
1656 atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY); in atmel_aes_gcm_tag_init()
1660 return atmel_aes_gcm_finalize(dd); in atmel_aes_gcm_tag_init()
1664 atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash); in atmel_aes_gcm_tag_init()
1669 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1673 static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd) in atmel_aes_gcm_tag() argument
1675 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_tag()
1682 flags = dd->flags; in atmel_aes_gcm_tag()
1683 dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN); in atmel_aes_gcm_tag()
1684 dd->flags |= AES_FLAGS_CTR; in atmel_aes_gcm_tag()
1685 atmel_aes_write_ctrl(dd, false, ctx->j0); in atmel_aes_gcm_tag()
1686 dd->flags = flags; in atmel_aes_gcm_tag()
1688 atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash); in atmel_aes_gcm_tag()
1689 return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize); in atmel_aes_gcm_tag()
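
Note: atmel_aes_gcm_tag() briefly reprograms the engine as plain CTR with the counter preloaded to J0 and pushes the final GHASH value through it; in GCM terms the tag is T = E_K(J0) XOR S. A sketch of just that last XOR, with the block cipher stubbed:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint8_t ekj0[16]  = { 0xaa };  /* stub for E_K(J0) */
            uint8_t ghash[16] = { 0x55 };  /* final GHASH value S */
            uint8_t tag[16];

            /* T = GCTR(J0, S): one CTR block over S, counter = J0 */
            for (int i = 0; i < 16; i++)
                    tag[i] = ekj0[i] ^ ghash[i];

            printf("tag[0] = %02x\n", tag[0]);  /* prints "ff" */
            return 0;
    }
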
1692 static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd) in atmel_aes_gcm_finalize() argument
1694 struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx); in atmel_aes_gcm_finalize()
1695 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_gcm_finalize()
1697 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_gcm_finalize()
1702 if (likely(dd->flags & AES_FLAGS_GTAGEN)) in atmel_aes_gcm_finalize()
1703 atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag); in atmel_aes_gcm_finalize()
1705 atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag); in atmel_aes_gcm_finalize()
1717 return atmel_aes_complete(dd, err); in atmel_aes_gcm_finalize()
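
Note: on decryption the computed tag must be compared with the transmitted one in constant time, touching every byte so timing cannot leak the position of the first mismatch; the kernel provides crypto_memneq() for exactly this. A userspace equivalent:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Nonzero iff the buffers differ; no early exit. */
    static int ct_memneq(const void *a, const void *b, size_t n)
    {
            const uint8_t *x = a, *y = b;
            uint8_t d = 0;

            while (n--)
                    d |= *x++ ^ *y++;
            return d;
    }

    int main(void)
    {
            uint8_t want[16] = { 1 }, got[16] = { 1 };

            printf("%s\n", ct_memneq(want, got, 16) ? "-EBADMSG" : "ok");
            return 0;
    }
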
1733 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_gcm_crypt()
1771 struct atmel_aes_dev *dd; in atmel_aes_gcm_init() local
1773 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_gcm_init()
1774 if (!dd) in atmel_aes_gcm_init()
1778 ctx->base.dd = dd; in atmel_aes_gcm_init()
1810 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);
1812 static int atmel_aes_xts_start(struct atmel_aes_dev *dd) in atmel_aes_xts_start() argument
1814 struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); in atmel_aes_xts_start()
1815 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_start()
1820 atmel_aes_set_mode(dd, rctx); in atmel_aes_xts_start()
1822 err = atmel_aes_hw_init(dd); in atmel_aes_xts_start()
1824 return atmel_aes_complete(dd, err); in atmel_aes_xts_start()
1827 flags = dd->flags; in atmel_aes_xts_start()
1828 dd->flags &= ~AES_FLAGS_MODE_MASK; in atmel_aes_xts_start()
1829 dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); in atmel_aes_xts_start()
1830 atmel_aes_write_ctrl_key(dd, false, NULL, in atmel_aes_xts_start()
1832 dd->flags = flags; in atmel_aes_xts_start()
1834 atmel_aes_write_block(dd, AES_IDATAR(0), req->iv); in atmel_aes_xts_start()
1835 return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data); in atmel_aes_xts_start()
1838 static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd) in atmel_aes_xts_process_data() argument
1840 struct skcipher_request *req = skcipher_request_cast(dd->areq); in atmel_aes_xts_process_data()
1848 atmel_aes_read_block(dd, AES_ODATAR(0), tweak); in atmel_aes_xts_process_data()
1858 atmel_aes_write_ctrl(dd, use_dma, NULL); in atmel_aes_xts_process_data()
1859 atmel_aes_write_block(dd, AES_TWR(0), tweak); in atmel_aes_xts_process_data()
1860 atmel_aes_write_block(dd, AES_ALPHAR(0), one); in atmel_aes_xts_process_data()
1862 return atmel_aes_dma_start(dd, req->src, req->dst, in atmel_aes_xts_process_data()
1866 return atmel_aes_cpu_start(dd, req->src, req->dst, req->cryptlen, in atmel_aes_xts_process_data()
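
Note: atmel_aes_xts_start() first ECB-encrypts the IV under the second half of the key to obtain the initial tweak, and the AES_ALPHAR write seeds the per-block tweak update: a multiply by x in GF(2^128) that the IP then performs on its own. The standard little-endian doubling, for reference (this is IEEE P1619 math, not driver code):

    #include <stdint.h>
    #include <stdio.h>

    /* One XTS tweak step: t *= x over x^128 + x^7 + x^2 + x + 1,
     * bytes in little-endian order. */
    static void xts_mul_alpha(uint8_t t[16])
    {
            uint8_t carry = 0;

            for (int i = 0; i < 16; i++) {
                    uint8_t c = t[i] >> 7;

                    t[i] = (uint8_t)((t[i] << 1) | carry);
                    carry = c;
            }
            if (carry)
                    t[0] ^= 0x87;   /* fold x^128 back into byte 0 */
    }

    int main(void)
    {
            uint8_t t[16] = { [15] = 0x80 };

            xts_mul_alpha(t);
            printf("%02x\n", t[0]);     /* prints "87" */
            return 0;
    }
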
1907 struct atmel_aes_dev *dd; in atmel_aes_xts_init_tfm() local
1910 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_xts_init_tfm()
1911 if (!dd) in atmel_aes_xts_init_tfm()
1921 ctx->base.dd = dd; in atmel_aes_xts_init_tfm()
1954 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
1955 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
1957 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
1959 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
1960 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
1963 static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err) in atmel_aes_authenc_complete() argument
1965 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_complete()
1968 if (err && (dd->flags & AES_FLAGS_OWN_SHA)) in atmel_aes_authenc_complete()
1970 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_complete()
1973 static int atmel_aes_authenc_start(struct atmel_aes_dev *dd) in atmel_aes_authenc_start() argument
1975 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_start()
1981 atmel_aes_set_mode(dd, &rctx->base); in atmel_aes_authenc_start()
1983 err = atmel_aes_hw_init(dd); in atmel_aes_authenc_start()
1985 return atmel_aes_complete(dd, err); in atmel_aes_authenc_start()
1988 atmel_aes_authenc_init, dd); in atmel_aes_authenc_start()
1991 static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_init() argument
1994 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_init()
1998 dd->is_async = true; in atmel_aes_authenc_init()
2000 return atmel_aes_complete(dd, err); in atmel_aes_authenc_init()
2003 dd->flags |= AES_FLAGS_OWN_SHA; in atmel_aes_authenc_init()
2009 atmel_aes_authenc_transfer, dd); in atmel_aes_authenc_init()
2012 static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_transfer() argument
2015 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_transfer()
2017 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_transfer()
2023 dd->is_async = true; in atmel_aes_authenc_transfer()
2025 return atmel_aes_complete(dd, err); in atmel_aes_authenc_transfer()
2044 atmel_aes_write_ctrl(dd, true, iv); in atmel_aes_authenc_transfer()
2048 atmel_aes_write(dd, AES_EMR, emr); in atmel_aes_authenc_transfer()
2051 return atmel_aes_dma_start(dd, src, dst, rctx->textlen, in atmel_aes_authenc_transfer()
2055 static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd) in atmel_aes_authenc_digest() argument
2057 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_digest()
2061 dd->flags &= ~AES_FLAGS_OWN_SHA; in atmel_aes_authenc_digest()
2064 atmel_aes_authenc_final, dd); in atmel_aes_authenc_digest()
2067 static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err, in atmel_aes_authenc_final() argument
2070 struct aead_request *req = aead_request_cast(dd->areq); in atmel_aes_authenc_final()
2073 bool enc = atmel_aes_is_encrypt(dd); in atmel_aes_authenc_final()
2078 dd->is_async = true; in atmel_aes_authenc_final()
2093 return atmel_aes_complete(dd, err); in atmel_aes_authenc_final()
2135 struct atmel_aes_dev *dd; in atmel_aes_authenc_init_tfm() local
2137 dd = atmel_aes_dev_alloc(&ctx->base); in atmel_aes_authenc_init_tfm()
2138 if (!dd) in atmel_aes_authenc_init_tfm()
2147 ctx->base.dd = dd; in atmel_aes_authenc_init_tfm()
2211 return atmel_aes_handle_queue(ctx->dd, &req->base); in atmel_aes_authenc_crypt()
2310 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
2312 dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER); in atmel_aes_buff_init()
2313 dd->buflen = ATMEL_AES_BUFFER_SIZE; in atmel_aes_buff_init()
2314 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
2316 if (!dd->buf) { in atmel_aes_buff_init()
2317 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
2324 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
2326 free_page((unsigned long)dd->buf); in atmel_aes_buff_cleanup()
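
Note: atmel_aes_buff_init() allocates whole pages and then rounds the usable length down to an AES block multiple with a mask, which is only valid because the block size is a power of two:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    int main(void)
    {
            size_t buflen = 4096 + 5;   /* pretend an odd buffer size */

            /* round DOWN to a block multiple (power-of-two sizes only) */
            buflen &= ~(size_t)(AES_BLOCK_SIZE - 1);
            printf("%zu\n", buflen);    /* prints "4096" */
            return 0;
    }
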
2329 static int atmel_aes_dma_init(struct atmel_aes_dev *dd) in atmel_aes_dma_init() argument
2334 dd->src.chan = dma_request_chan(dd->dev, "tx"); in atmel_aes_dma_init()
2335 if (IS_ERR(dd->src.chan)) { in atmel_aes_dma_init()
2336 ret = PTR_ERR(dd->src.chan); in atmel_aes_dma_init()
2340 dd->dst.chan = dma_request_chan(dd->dev, "rx"); in atmel_aes_dma_init()
2341 if (IS_ERR(dd->dst.chan)) { in atmel_aes_dma_init()
2342 ret = PTR_ERR(dd->dst.chan); in atmel_aes_dma_init()
2349 dma_release_channel(dd->src.chan); in atmel_aes_dma_init()
2351 dev_err(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
2355 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
2357 dma_release_channel(dd->dst.chan); in atmel_aes_dma_cleanup()
2358 dma_release_channel(dd->src.chan); in atmel_aes_dma_cleanup()
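
Note: atmel_aes_dma_init() follows the kernel's goto-unwind idiom: request the "tx" channel first, then "rx", and if "rx" fails release exactly what was already acquired before reporting the error. A generic userspace model of that unwind shape (grab/release stand in for dma_request_chan()/dma_release_channel()):

    #include <stdio.h>
    #include <stdlib.h>

    static void *grab(const char *name, int fail)
    {
            printf("requesting %s channel\n", name);
            return fail ? NULL : malloc(1);
    }

    struct dev_model { void *tx, *rx; };

    static int dma_init_model(struct dev_model *d, int fail_rx)
    {
            d->tx = grab("tx", 0);
            if (!d->tx)
                    goto err;
            d->rx = grab("rx", fail_rx);
            if (!d->rx)
                    goto err_release_tx;  /* unwind exactly one step */
            return 0;

    err_release_tx:
            free(d->tx);
    err:
            fprintf(stderr, "no DMA channel available\n");
            return -1;
    }

    int main(void)
    {
            struct dev_model d;

            return dma_init_model(&d, 1) ? 1 : 0;
    }
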
2363 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
2365 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
2370 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_done_task() local
2372 dd->is_async = true; in atmel_aes_done_task()
2373 (void)dd->resume(dd); in atmel_aes_done_task()
2394 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
2399 if (dd->caps.has_authenc) in atmel_aes_unregister_algs()
2404 if (dd->caps.has_xts) in atmel_aes_unregister_algs()
2407 if (dd->caps.has_gcm) in atmel_aes_unregister_algs()
2410 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
2425 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
2437 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
2445 if (dd->caps.has_gcm) { in atmel_aes_register_algs()
2453 if (dd->caps.has_xts) { in atmel_aes_register_algs()
2462 if (dd->caps.has_authenc) { in atmel_aes_register_algs()
2495 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
2497 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
2498 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
2499 dd->caps.has_gcm = 0; in atmel_aes_get_cap()
2500 dd->caps.has_xts = 0; in atmel_aes_get_cap()
2501 dd->caps.has_authenc = 0; in atmel_aes_get_cap()
2502 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
2505 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
2509 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2510 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2511 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2512 dd->caps.has_xts = 1; in atmel_aes_get_cap()
2513 dd->caps.has_authenc = 1; in atmel_aes_get_cap()
2514 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2517 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2518 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2519 dd->caps.has_gcm = 1; in atmel_aes_get_cap()
2520 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2523 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
2524 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
2525 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
2530 dev_warn(dd->dev, in atmel_aes_get_cap()
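
Note: atmel_aes_get_cap() starts from the most conservative feature set and widens it by IP revision; masking with 0xff0 drops the patch nibble so only the major/minor revision selects a tier, and unknown revisions keep the safe defaults. A sketch of the shape (the revision constants below are placeholders, since the real case labels are not visible in this listing):

    #include <stdio.h>

    struct caps { int has_gcm, has_xts, has_authenc, max_burst_size; };

    static struct caps caps_for(unsigned int hw_version)
    {
            struct caps c = { .max_burst_size = 1 };  /* safe defaults */

            switch (hw_version & 0xff0) {  /* mask off the patch level */
            case 0x500:                    /* placeholder revision */
                    c.has_gcm = c.has_xts = c.has_authenc = 1;
                    c.max_burst_size = 4;
                    break;
            case 0x200:                    /* placeholder revision */
                    c.has_gcm = 1;
                    c.max_burst_size = 4;
                    break;
            default:                       /* unknown IP: keep defaults */
                    break;
            }
            return c;
    }

    int main(void)
    {
            struct caps c = caps_for(0x503);  /* 0x503 & 0xff0 == 0x500 */

            printf("gcm=%d xts=%d burst=%d\n",
                   c.has_gcm, c.has_xts, c.max_burst_size);
            return 0;
    }
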