Lines matching references to ctx in drivers/crypto/atmel-sha.c (Linux Atmel SHA hardware driver)
303 static size_t atmel_sha_append_sg(struct atmel_sha_reqctx *ctx) in atmel_sha_append_sg() argument
307 while ((ctx->bufcnt < ctx->buflen) && ctx->total) { in atmel_sha_append_sg()
308 count = min(ctx->sg->length - ctx->offset, ctx->total); in atmel_sha_append_sg()
309 count = min(count, ctx->buflen - ctx->bufcnt); in atmel_sha_append_sg()
318 if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) { in atmel_sha_append_sg()
319 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
326 scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg, in atmel_sha_append_sg()
327 ctx->offset, count, 0); in atmel_sha_append_sg()
329 ctx->bufcnt += count; in atmel_sha_append_sg()
330 ctx->offset += count; in atmel_sha_append_sg()
331 ctx->total -= count; in atmel_sha_append_sg()
333 if (ctx->offset == ctx->sg->length) { in atmel_sha_append_sg()
334 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
335 if (ctx->sg) in atmel_sha_append_sg()
336 ctx->offset = 0; in atmel_sha_append_sg()
338 ctx->total = 0; in atmel_sha_append_sg()
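The loop above drains the request scatterlist into the driver's staging buffer, stopping when the buffer is full or the request data is exhausted; zero-length entries that are not the last node are skipped, and ctx->total is forced to zero once the list runs out. A minimal user-space model of the same bookkeeping, with a hypothetical sg_node type standing in for struct scatterlist:

    #include <stddef.h>
    #include <string.h>

    struct sg_node {                    /* hypothetical stand-in for scatterlist */
        const unsigned char *data;
        size_t length;
        struct sg_node *next;           /* NULL terminates the chain */
    };

    struct reqctx {
        struct sg_node *sg;             /* current chain node */
        size_t offset;                  /* bytes already consumed in *sg */
        size_t total;                   /* bytes left in the request */
        unsigned char buffer[256];
        size_t bufcnt, buflen;
    };

    static void append_sg(struct reqctx *ctx)
    {
        while (ctx->bufcnt < ctx->buflen && ctx->total) {
            size_t count = ctx->sg->length - ctx->offset;

            if (count > ctx->total)
                count = ctx->total;
            if (count > ctx->buflen - ctx->bufcnt)
                count = ctx->buflen - ctx->bufcnt;

            if (count == 0) {           /* empty non-terminal node: skip it */
                if (!ctx->sg->next)
                    break;
                ctx->sg = ctx->sg->next;
                continue;
            }

            memcpy(ctx->buffer + ctx->bufcnt,
                   ctx->sg->data + ctx->offset, count);
            ctx->bufcnt += count;
            ctx->offset += count;
            ctx->total  -= count;

            if (ctx->offset == ctx->sg->length) {
                ctx->sg = ctx->sg->next;
                if (ctx->sg)
                    ctx->offset = 0;    /* start of the next node */
                else
                    ctx->total = 0;     /* chain exhausted */
            }
        }
    }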
361 static void atmel_sha_fill_padding(struct atmel_sha_reqctx *ctx, int length) in atmel_sha_fill_padding() argument
367 size[0] = ctx->digcnt[0]; in atmel_sha_fill_padding()
368 size[1] = ctx->digcnt[1]; in atmel_sha_fill_padding()
370 size[0] += ctx->bufcnt; in atmel_sha_fill_padding()
371 if (size[0] < ctx->bufcnt) in atmel_sha_fill_padding()
381 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_fill_padding()
384 index = ctx->bufcnt & 0x7f; in atmel_sha_fill_padding()
386 *(ctx->buffer + ctx->bufcnt) = 0x80; in atmel_sha_fill_padding()
387 memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen-1); in atmel_sha_fill_padding()
388 memcpy(ctx->buffer + ctx->bufcnt + padlen, bits, 16); in atmel_sha_fill_padding()
389 ctx->bufcnt += padlen + 16; in atmel_sha_fill_padding()
390 ctx->flags |= SHA_FLAGS_PAD; in atmel_sha_fill_padding()
394 index = ctx->bufcnt & 0x3f; in atmel_sha_fill_padding()
396 *(ctx->buffer + ctx->bufcnt) = 0x80; in atmel_sha_fill_padding()
397 memset(ctx->buffer + ctx->bufcnt + 1, 0, padlen-1); in atmel_sha_fill_padding()
398 memcpy(ctx->buffer + ctx->bufcnt + padlen, &bits[1], 8); in atmel_sha_fill_padding()
399 ctx->bufcnt += padlen + 8; in atmel_sha_fill_padding()
400 ctx->flags |= SHA_FLAGS_PAD; in atmel_sha_fill_padding()
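Both branches implement standard Merkle-Damgard padding: a 0x80 byte, zeros up to 56 mod 64 bytes (112 mod 128 for the SHA-384/512 branch, which stores a 128-bit length), then the message length in bits, big-endian. A self-contained sketch of the 64-byte-block case:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Append SHA-1/SHA-256 style padding for a message of msg_bits bits.
     * buf must have room for up to 64 + 8 bytes past len. Returns the
     * padded length, always a multiple of 64. */
    static size_t sha_pad64(unsigned char *buf, size_t len, uint64_t msg_bits)
    {
        size_t index = len & 0x3f;      /* bytes into the current block */
        size_t padlen = (index < 56) ? (56 - index) : (64 + 56 - index);
        int i;

        buf[len] = 0x80;
        memset(buf + len + 1, 0, padlen - 1);
        for (i = 0; i < 8; i++)         /* length in bits, big-endian */
            buf[len + padlen + i] = (unsigned char)(msg_bits >> (56 - 8 * i));
        return len + padlen + 8;
    }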
430 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_init() local
433 ctx->dd = dd; in atmel_sha_init()
435 ctx->flags = 0; in atmel_sha_init()
442 ctx->flags |= SHA_FLAGS_SHA1; in atmel_sha_init()
443 ctx->block_size = SHA1_BLOCK_SIZE; in atmel_sha_init()
446 ctx->flags |= SHA_FLAGS_SHA224; in atmel_sha_init()
447 ctx->block_size = SHA224_BLOCK_SIZE; in atmel_sha_init()
450 ctx->flags |= SHA_FLAGS_SHA256; in atmel_sha_init()
451 ctx->block_size = SHA256_BLOCK_SIZE; in atmel_sha_init()
454 ctx->flags |= SHA_FLAGS_SHA384; in atmel_sha_init()
455 ctx->block_size = SHA384_BLOCK_SIZE; in atmel_sha_init()
458 ctx->flags |= SHA_FLAGS_SHA512; in atmel_sha_init()
459 ctx->block_size = SHA512_BLOCK_SIZE; in atmel_sha_init()
465 ctx->bufcnt = 0; in atmel_sha_init()
466 ctx->digcnt[0] = 0; in atmel_sha_init()
467 ctx->digcnt[1] = 0; in atmel_sha_init()
468 ctx->buflen = SHA_BUFFER_LEN; in atmel_sha_init()
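atmel_sha_init() selects the algorithm flag and block size from the digest size the tfm asks for. The size mapping itself is fixed by the SHA specifications; a hedged sketch of that mapping (helper name hypothetical):

    /* Hypothetical helper: map a SHA digest size in bytes to its block size. */
    static unsigned int sha_block_size(unsigned int digest_size)
    {
        switch (digest_size) {
        case 20:                /* SHA-1   */
        case 28:                /* SHA-224 */
        case 32:                /* SHA-256 */
            return 64;
        case 48:                /* SHA-384 */
        case 64:                /* SHA-512 */
            return 128;
        default:
            return 0;           /* unsupported */
        }
    }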
475 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_write_ctrl() local
489 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_write_ctrl()
520 if (!(ctx->digcnt[0] || ctx->digcnt[1])) { in atmel_sha_write_ctrl()
522 } else if (dd->caps.has_uihv && (ctx->flags & SHA_FLAGS_RESTORE)) { in atmel_sha_write_ctrl()
523 const u32 *hash = (const u32 *)ctx->digest; in atmel_sha_write_ctrl()
531 ctx->flags &= ~SHA_FLAGS_RESTORE; in atmel_sha_write_ctrl()
564 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_cpu() local
569 ctx->digcnt[1], ctx->digcnt[0], length, final); in atmel_sha_xmit_cpu()
574 ctx->digcnt[0] += length; in atmel_sha_xmit_cpu()
575 if (ctx->digcnt[0] < length) in atmel_sha_xmit_cpu()
576 ctx->digcnt[1]++; in atmel_sha_xmit_cpu()
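ctx->digcnt is two u64 words forming a 128-bit byte counter, and the test "if (ctx->digcnt[0] < length)" is the classic detection of unsigned wrap-around: if the sum came out smaller than the addend, the low word overflowed and the carry propagates into the high word. The same idiom appears again in atmel_sha_xmit_pdc() and atmel_sha_xmit_dma() below. In isolation:

    #include <stdint.h>

    /* Add len to a 128-bit counter held as {low, high}. Unsigned overflow
     * of the low word is detected by the sum being smaller than the addend. */
    static void digcnt_add(uint64_t digcnt[2], uint64_t len)
    {
        digcnt[0] += len;
        if (digcnt[0] < len)    /* low word wrapped */
            digcnt[1]++;
    }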
594 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_pdc() local
598 ctx->digcnt[1], ctx->digcnt[0], length1, final); in atmel_sha_xmit_pdc()
612 ctx->digcnt[0] += length1; in atmel_sha_xmit_pdc()
613 if (ctx->digcnt[0] < length1) in atmel_sha_xmit_pdc()
614 ctx->digcnt[1]++; in atmel_sha_xmit_pdc()
640 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_dma() local
645 ctx->digcnt[1], ctx->digcnt[0], length1, final); in atmel_sha_xmit_dma()
676 ctx->digcnt[0] += length1; in atmel_sha_xmit_dma()
677 if (ctx->digcnt[0] < length1) in atmel_sha_xmit_dma()
678 ctx->digcnt[1]++; in atmel_sha_xmit_dma()
705 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_cpu() local
708 atmel_sha_append_sg(ctx); in atmel_sha_update_cpu()
709 atmel_sha_fill_padding(ctx, 0); in atmel_sha_update_cpu()
710 bufcnt = ctx->bufcnt; in atmel_sha_update_cpu()
711 ctx->bufcnt = 0; in atmel_sha_update_cpu()
713 return atmel_sha_xmit_cpu(dd, ctx->buffer, bufcnt, 1); in atmel_sha_update_cpu()
717 struct atmel_sha_reqctx *ctx, in atmel_sha_xmit_dma_map() argument
720 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_xmit_dma_map()
721 ctx->buflen + ctx->block_size, DMA_TO_DEVICE); in atmel_sha_xmit_dma_map()
722 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_xmit_dma_map()
723 dev_err(dd->dev, "dma %zu bytes error\n", ctx->buflen + in atmel_sha_xmit_dma_map()
724 ctx->block_size); in atmel_sha_xmit_dma_map()
728 ctx->flags &= ~SHA_FLAGS_SG; in atmel_sha_xmit_dma_map()
731 return atmel_sha_xmit_start(dd, ctx->dma_addr, length, 0, 0, final); in atmel_sha_xmit_dma_map()
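atmel_sha_xmit_dma_map() maps the whole staging buffer (buflen plus one block of padding slack) for device reads and bails out if the mapping fails. The check-before-use pattern, as a kernel-style sketch (function name hypothetical; dma_map_single/dma_mapping_error are the standard DMA API calls):

    #include <linux/dma-mapping.h>

    /* Hypothetical helper: map a CPU buffer for a device-bound transfer. */
    static int map_for_device(struct device *dev, void *buf, size_t len,
                              dma_addr_t *handle)
    {
        dma_addr_t addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);

        if (dma_mapping_error(dev, addr)) {     /* never use an unchecked handle */
            dev_err(dev, "dma %zu bytes error\n", len);
            return -EINVAL;
        }
        *handle = addr;
        return 0;
    }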
736 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_slow() local
740 atmel_sha_append_sg(ctx); in atmel_sha_update_dma_slow()
742 final = (ctx->flags & SHA_FLAGS_FINUP) && !ctx->total; in atmel_sha_update_dma_slow()
745 ctx->bufcnt, ctx->digcnt[1], ctx->digcnt[0], final); in atmel_sha_update_dma_slow()
748 atmel_sha_fill_padding(ctx, 0); in atmel_sha_update_dma_slow()
750 if (final || (ctx->bufcnt == ctx->buflen)) { in atmel_sha_update_dma_slow()
751 count = ctx->bufcnt; in atmel_sha_update_dma_slow()
752 ctx->bufcnt = 0; in atmel_sha_update_dma_slow()
753 return atmel_sha_xmit_dma_map(dd, ctx, count, final); in atmel_sha_update_dma_slow()
761 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_start() local
766 if (!ctx->total) in atmel_sha_update_dma_start()
769 if (ctx->bufcnt || ctx->offset) in atmel_sha_update_dma_start()
773 ctx->digcnt[1], ctx->digcnt[0], ctx->bufcnt, ctx->total); in atmel_sha_update_dma_start()
775 sg = ctx->sg; in atmel_sha_update_dma_start()
780 if (!sg_is_last(sg) && !IS_ALIGNED(sg->length, ctx->block_size)) in atmel_sha_update_dma_start()
784 length = min(ctx->total, sg->length); in atmel_sha_update_dma_start()
787 if (!(ctx->flags & SHA_FLAGS_FINUP)) { in atmel_sha_update_dma_start()
789 tail = length & (ctx->block_size - 1); in atmel_sha_update_dma_start()
794 ctx->total -= length; in atmel_sha_update_dma_start()
795 ctx->offset = length; /* offset where to start slow */ in atmel_sha_update_dma_start()
797 final = (ctx->flags & SHA_FLAGS_FINUP) && !ctx->total; in atmel_sha_update_dma_start()
801 tail = length & (ctx->block_size - 1); in atmel_sha_update_dma_start()
803 ctx->total += tail; in atmel_sha_update_dma_start()
804 ctx->offset = length; /* offset where to start slow */ in atmel_sha_update_dma_start()
806 sg = ctx->sg; in atmel_sha_update_dma_start()
807 atmel_sha_append_sg(ctx); in atmel_sha_update_dma_start()
809 atmel_sha_fill_padding(ctx, length); in atmel_sha_update_dma_start()
811 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_update_dma_start()
812 ctx->buflen + ctx->block_size, DMA_TO_DEVICE); in atmel_sha_update_dma_start()
813 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_update_dma_start()
815 ctx->buflen + ctx->block_size); in atmel_sha_update_dma_start()
820 ctx->flags &= ~SHA_FLAGS_SG; in atmel_sha_update_dma_start()
821 count = ctx->bufcnt; in atmel_sha_update_dma_start()
822 ctx->bufcnt = 0; in atmel_sha_update_dma_start()
823 return atmel_sha_xmit_start(dd, ctx->dma_addr, count, 0, in atmel_sha_update_dma_start()
826 ctx->sg = sg; in atmel_sha_update_dma_start()
827 if (!dma_map_sg(dd->dev, ctx->sg, 1, in atmel_sha_update_dma_start()
833 ctx->flags |= SHA_FLAGS_SG; in atmel_sha_update_dma_start()
835 count = ctx->bufcnt; in atmel_sha_update_dma_start()
836 ctx->bufcnt = 0; in atmel_sha_update_dma_start()
837 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), in atmel_sha_update_dma_start()
838 length, ctx->dma_addr, count, final); in atmel_sha_update_dma_start()
842 if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) { in atmel_sha_update_dma_start()
847 ctx->flags |= SHA_FLAGS_SG; in atmel_sha_update_dma_start()
850 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), length, 0, in atmel_sha_update_dma_start()
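atmel_sha_update_dma_start() only DMAs whole hardware blocks straight from the scatterlist; the remainder (tail = length & (block_size - 1)) and any unaligned or final data are deferred to the staging buffer and the slow path, with ctx->offset recording where the slow path should resume. The split itself is plain power-of-two masking:

    #include <stddef.h>

    /* Split len into a DMA-able multiple of block_size (a power of two)
     * and a tail that must be buffered and handled by the slow path. */
    static void split_dma_tail(size_t len, size_t block_size,
                               size_t *dma_len, size_t *tail)
    {
        *tail = len & (block_size - 1);
        *dma_len = len - *tail;
    }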
856 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_stop() local
858 if (ctx->flags & SHA_FLAGS_SG) { in atmel_sha_update_dma_stop()
859 dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE); in atmel_sha_update_dma_stop()
860 if (ctx->sg->length == ctx->offset) { in atmel_sha_update_dma_stop()
861 ctx->sg = sg_next(ctx->sg); in atmel_sha_update_dma_stop()
862 if (ctx->sg) in atmel_sha_update_dma_stop()
863 ctx->offset = 0; in atmel_sha_update_dma_stop()
865 if (ctx->flags & SHA_FLAGS_PAD) { in atmel_sha_update_dma_stop()
866 dma_unmap_single(dd->dev, ctx->dma_addr, in atmel_sha_update_dma_stop()
867 ctx->buflen + ctx->block_size, DMA_TO_DEVICE); in atmel_sha_update_dma_stop()
870 dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen + in atmel_sha_update_dma_stop()
871 ctx->block_size, DMA_TO_DEVICE); in atmel_sha_update_dma_stop()
878 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_update_req() local
882 ctx->total, ctx->digcnt[1], ctx->digcnt[0]); in atmel_sha_update_req()
884 if (ctx->flags & SHA_FLAGS_CPU) in atmel_sha_update_req()
891 err, ctx->digcnt[1], ctx->digcnt[0]); in atmel_sha_update_req()
899 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_final_req() local
903 if (ctx->bufcnt >= ATMEL_SHA_DMA_THRESHOLD) { in atmel_sha_final_req()
904 atmel_sha_fill_padding(ctx, 0); in atmel_sha_final_req()
905 count = ctx->bufcnt; in atmel_sha_final_req()
906 ctx->bufcnt = 0; in atmel_sha_final_req()
907 err = atmel_sha_xmit_dma_map(dd, ctx, count, 1); in atmel_sha_final_req()
911 atmel_sha_fill_padding(ctx, 0); in atmel_sha_final_req()
912 count = ctx->bufcnt; in atmel_sha_final_req()
913 ctx->bufcnt = 0; in atmel_sha_final_req()
914 err = atmel_sha_xmit_cpu(dd, ctx->buffer, count, 1); in atmel_sha_final_req()
924 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_copy_hash() local
925 u32 *hash = (u32 *)ctx->digest; in atmel_sha_copy_hash()
928 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_copy_hash()
949 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
950 ctx->flags |= SHA_FLAGS_RESTORE; in atmel_sha_copy_hash()
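atmel_sha_copy_hash() pulls the (possibly intermediate) digest out of the SHA_REG_DIGEST register bank into ctx->digest and marks it restorable, so a later request can reload it through the UIHV path seen in atmel_sha_write_ctrl(). A hedged MMIO sketch; readl and a bank of consecutive 32-bit registers are the usual kernel idioms, and the base pointer here is hypothetical:

    #include <linux/io.h>

    /* Hypothetical: copy num_words digest words out of an MMIO register
     * bank laid out as consecutive 32-bit registers. */
    static void read_digest(void __iomem *digest_base, u32 *hash,
                            unsigned int num_words)
    {
        unsigned int i;

        for (i = 0; i < num_words; i++)
            hash[i] = readl(digest_base + i * sizeof(u32));
    }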
955 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_copy_ready_hash() local
960 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_copy_ready_hash()
963 memcpy(req->result, ctx->digest, SHA1_DIGEST_SIZE); in atmel_sha_copy_ready_hash()
967 memcpy(req->result, ctx->digest, SHA224_DIGEST_SIZE); in atmel_sha_copy_ready_hash()
971 memcpy(req->result, ctx->digest, SHA256_DIGEST_SIZE); in atmel_sha_copy_ready_hash()
975 memcpy(req->result, ctx->digest, SHA384_DIGEST_SIZE); in atmel_sha_copy_ready_hash()
979 memcpy(req->result, ctx->digest, SHA512_DIGEST_SIZE); in atmel_sha_copy_ready_hash()
986 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_finish() local
987 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish()
989 if (ctx->digcnt[0] || ctx->digcnt[1]) in atmel_sha_finish()
992 dev_dbg(dd->dev, "digcnt: 0x%llx 0x%llx, bufcnt: %zd\n", ctx->digcnt[1], in atmel_sha_finish()
993 ctx->digcnt[0], ctx->bufcnt); in atmel_sha_finish()
1000 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_finish_req() local
1001 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish_req()
1008 ctx->flags |= SHA_FLAGS_ERROR; in atmel_sha_finish_req()
1058 struct atmel_sha_ctx *ctx; in atmel_sha_handle_queue() local
1085 ctx = crypto_tfm_ctx(async_req->tfm); in atmel_sha_handle_queue()
1093 err = ctx->start(dd); in atmel_sha_handle_queue()
1102 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_start() local
1106 ctx->op, req->nbytes); in atmel_sha_start()
1132 if (ctx->op == SHA_OP_UPDATE) { in atmel_sha_start()
1134 if (!err && (ctx->flags & SHA_FLAGS_FINUP)) in atmel_sha_start()
1137 } else if (ctx->op == SHA_OP_FINAL) { in atmel_sha_start()
1152 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_enqueue() local
1156 ctx->op = op; in atmel_sha_enqueue()
1163 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_update() local
1168 ctx->total = req->nbytes; in atmel_sha_update()
1169 ctx->sg = req->src; in atmel_sha_update()
1170 ctx->offset = 0; in atmel_sha_update()
1172 if (ctx->flags & SHA_FLAGS_FINUP) { in atmel_sha_update()
1173 if (ctx->bufcnt + ctx->total < ATMEL_SHA_DMA_THRESHOLD) in atmel_sha_update()
1175 ctx->flags |= SHA_FLAGS_CPU; in atmel_sha_update()
1176 } else if (ctx->bufcnt + ctx->total < ctx->buflen) { in atmel_sha_update()
1177 atmel_sha_append_sg(ctx); in atmel_sha_update()
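atmel_sha_update() defers hardware work whenever it can: when the whole request still fits in the staging buffer it is only appended and no operation is queued, while small FINUP requests are flagged for the CPU path because programming DMA costs more than it saves. A compact model of that policy (names and threshold parameter hypothetical):

    #include <stddef.h>

    enum update_action { ENQUEUE_CPU, ENQUEUE_HW, BUFFER_ONLY };

    static enum update_action classify_update(size_t bufcnt, size_t total,
                                              size_t buflen,
                                              size_t cpu_threshold, int finup)
    {
        if (finup)              /* final data: always queue, just pick transport */
            return (bufcnt + total < cpu_threshold) ? ENQUEUE_CPU : ENQUEUE_HW;
        if (bufcnt + total < buflen)
            return BUFFER_ONLY; /* append to the staging buffer, no hw op yet */
        return ENQUEUE_HW;
    }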
1185 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_final() local
1187 ctx->flags |= SHA_FLAGS_FINUP; in atmel_sha_final()
1189 if (ctx->flags & SHA_FLAGS_ERROR) in atmel_sha_final()
1192 if (ctx->flags & SHA_FLAGS_PAD) in atmel_sha_final()
1201 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_finup() local
1204 ctx->flags |= SHA_FLAGS_FINUP; in atmel_sha_finup()
1229 const struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_export() local
1231 memcpy(out, ctx, sizeof(*ctx)); in atmel_sha_export()
1237 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_import() local
1239 memcpy(ctx, in, sizeof(*ctx)); in atmel_sha_import()
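Export and import are symmetric whole-structure copies of the request context, which is what lets a partially hashed request's state be saved and resumed later. Reusing the hypothetical struct reqctx from the append_sg sketch above:

    static int reqctx_export(const struct reqctx *ctx, void *out)
    {
        memcpy(out, ctx, sizeof(*ctx));     /* snapshot the whole state */
        return 0;
    }

    static int reqctx_import(struct reqctx *ctx, const void *in)
    {
        memcpy(ctx, in, sizeof(*ctx));      /* resume from the snapshot */
        return 0;
    }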
1245 struct atmel_sha_ctx *ctx = crypto_tfm_ctx(tfm); in atmel_sha_cra_init() local
1249 ctx->start = atmel_sha_start; in atmel_sha_cra_init()
1395 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_dma_check_aligned() local
1396 size_t bs = ctx->block_size; in atmel_sha_dma_check_aligned()
1504 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_cpu_transfer() local
1505 const u32 *words = (const u32 *)ctx->buffer; in atmel_sha_cpu_transfer()
1509 din_inc = (ctx->flags & SHA_FLAGS_IDATAR0) ? 0 : 1; in atmel_sha_cpu_transfer()
1512 num_words = DIV_ROUND_UP(ctx->bufcnt, sizeof(u32)); in atmel_sha_cpu_transfer()
1516 ctx->offset += ctx->bufcnt; in atmel_sha_cpu_transfer()
1517 ctx->total -= ctx->bufcnt; in atmel_sha_cpu_transfer()
1519 if (!ctx->total) in atmel_sha_cpu_transfer()
1530 ctx->bufcnt = min_t(size_t, ctx->block_size, ctx->total); in atmel_sha_cpu_transfer()
1531 scatterwalk_map_and_copy(ctx->buffer, ctx->sg, in atmel_sha_cpu_transfer()
1532 ctx->offset, ctx->bufcnt, 0); in atmel_sha_cpu_transfer()
1544 if (unlikely(!(ctx->flags & SHA_FLAGS_WAIT_DATARDY))) in atmel_sha_cpu_transfer()
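atmel_sha_cpu_transfer() pushes the staging buffer into the input data register(s) one 32-bit word at a time, rounding the byte count up to whole words, then refills the buffer block-by-block from the scatterlist. din_inc is 0 when every word goes to a single FIFO register (the SHA_FLAGS_IDATAR0 case) and 1 when words go to consecutive registers. The feed loop, modeled on a plain volatile pointer rather than the driver's register accessors:

    #include <stddef.h>
    #include <stdint.h>

    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))

    /* Hypothetical register writer: push bufcnt bytes as whole u32 words.
     * din_inc == 0 targets one fixed FIFO register, 1 consecutive registers. */
    static void push_words(volatile uint32_t *idatar, int din_inc,
                           const uint32_t *words, size_t bufcnt)
    {
        size_t num_words = DIV_ROUND_UP(bufcnt, sizeof(uint32_t));
        size_t i;

        for (i = 0; i < num_words; i++)
            idatar[i * din_inc] = words[i];
    }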
1558 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_cpu_start() local
1563 ctx->flags &= ~(SHA_FLAGS_IDATAR0 | SHA_FLAGS_WAIT_DATARDY); in atmel_sha_cpu_start()
1566 ctx->flags |= SHA_FLAGS_IDATAR0; in atmel_sha_cpu_start()
1569 ctx->flags |= SHA_FLAGS_WAIT_DATARDY; in atmel_sha_cpu_start()
1571 ctx->sg = sg; in atmel_sha_cpu_start()
1572 ctx->total = len; in atmel_sha_cpu_start()
1573 ctx->offset = 0; in atmel_sha_cpu_start()
1576 ctx->bufcnt = min_t(size_t, ctx->block_size, ctx->total); in atmel_sha_cpu_start()
1577 scatterwalk_map_and_copy(ctx->buffer, ctx->sg, in atmel_sha_cpu_start()
1578 ctx->offset, ctx->bufcnt, 0); in atmel_sha_cpu_start()
1590 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_cpu_hash() local
1594 if (!(IS_ALIGNED(datalen, ctx->block_size) || auto_padding)) in atmel_sha_cpu_hash()
1597 mr |= (ctx->flags & SHA_FLAGS_ALGO_MASK); in atmel_sha_cpu_hash()
1688 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_setup() local
1696 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_hmac_setup()
1698 ctx->block_size = SHA1_BLOCK_SIZE; in atmel_sha_hmac_setup()
1699 ctx->hash_size = SHA1_DIGEST_SIZE; in atmel_sha_hmac_setup()
1703 ctx->block_size = SHA224_BLOCK_SIZE; in atmel_sha_hmac_setup()
1704 ctx->hash_size = SHA256_DIGEST_SIZE; in atmel_sha_hmac_setup()
1708 ctx->block_size = SHA256_BLOCK_SIZE; in atmel_sha_hmac_setup()
1709 ctx->hash_size = SHA256_DIGEST_SIZE; in atmel_sha_hmac_setup()
1713 ctx->block_size = SHA384_BLOCK_SIZE; in atmel_sha_hmac_setup()
1714 ctx->hash_size = SHA512_DIGEST_SIZE; in atmel_sha_hmac_setup()
1718 ctx->block_size = SHA512_BLOCK_SIZE; in atmel_sha_hmac_setup()
1719 ctx->hash_size = SHA512_DIGEST_SIZE; in atmel_sha_hmac_setup()
1725 bs = ctx->block_size; in atmel_sha_hmac_setup()
1752 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_prehash_key_done() local
1754 size_t bs = ctx->block_size; in atmel_sha_hmac_prehash_key_done()
1769 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_compute_ipad_hash() local
1770 size_t bs = ctx->block_size; in atmel_sha_hmac_compute_ipad_hash()
1789 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_compute_opad_hash() local
1790 size_t bs = ctx->block_size; in atmel_sha_hmac_compute_opad_hash()
1791 size_t hs = ctx->hash_size; in atmel_sha_hmac_compute_opad_hash()
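This sequence is the standard HMAC key schedule: the key, pre-hashed first if longer than one block, is zero-padded to block size, then XORed with 0x36 and 0x5c to produce ipad and opad, whose one-block hashes are cached as initial states. The pad derivation itself:

    #include <stddef.h>
    #include <string.h>

    /* Derive HMAC inner/outer pads from a key already reduced to at most
     * one block (longer keys are hashed first, as in the prehash step). */
    static void hmac_make_pads(const unsigned char *key, size_t keylen,
                               size_t block_size,
                               unsigned char *ipad, unsigned char *opad)
    {
        size_t i;

        memset(ipad, 0, block_size);
        memcpy(ipad, key, keylen);          /* zero-padded key */
        for (i = 0; i < block_size; i++) {
            opad[i] = ipad[i] ^ 0x5c;
            ipad[i] ^= 0x36;
        }
    }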
1805 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_setup_done() local
1806 size_t hs = ctx->hash_size; in atmel_sha_hmac_setup_done()
1818 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_start() local
1825 switch (ctx->op) { in atmel_sha_hmac_start()
1873 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_init_done() local
1876 size_t bs = ctx->block_size; in atmel_sha_hmac_init_done()
1877 size_t hs = ctx->hash_size; in atmel_sha_hmac_init_done()
1879 ctx->bufcnt = 0; in atmel_sha_hmac_init_done()
1880 ctx->digcnt[0] = bs; in atmel_sha_hmac_init_done()
1881 ctx->digcnt[1] = 0; in atmel_sha_hmac_init_done()
1882 ctx->flags |= SHA_FLAGS_RESTORE; in atmel_sha_hmac_init_done()
1883 memcpy(ctx->digest, hmac->ipad, hs); in atmel_sha_hmac_init_done()
1890 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_final() local
1893 u32 *digest = (u32 *)ctx->digest; in atmel_sha_hmac_final()
1895 size_t bs = ctx->block_size; in atmel_sha_hmac_final()
1896 size_t hs = ctx->hash_size; in atmel_sha_hmac_final()
1912 mr |= (ctx->flags & SHA_FLAGS_ALGO_MASK); in atmel_sha_hmac_final()
1949 struct atmel_sha_reqctx *ctx = ahash_request_ctx(req); in atmel_sha_hmac_digest2() local
1953 size_t hs = ctx->hash_size; in atmel_sha_hmac_digest2()
1961 ctx->bufcnt = 0; in atmel_sha_hmac_digest2()
1962 ctx->digcnt[0] = 0; in atmel_sha_hmac_digest2()
1963 ctx->digcnt[1] = 0; in atmel_sha_hmac_digest2()
1964 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_hmac_digest2()
1968 atmel_sha_fill_padding(ctx, 64); in atmel_sha_hmac_digest2()
1973 atmel_sha_fill_padding(ctx, 128); in atmel_sha_hmac_digest2()
1976 sg_init_one(&dd->tmp, ctx->buffer, ctx->bufcnt); in atmel_sha_hmac_digest2()
1995 mr |= ctx->flags & SHA_FLAGS_ALGO_MASK; in atmel_sha_hmac_digest2()
2010 req->nbytes = ctx->bufcnt; in atmel_sha_hmac_digest2()
2251 struct atmel_sha_reqctx *ctx = &authctx->base; in atmel_sha_authenc_schedule() local
2265 ctx->dd = dd; in atmel_sha_authenc_schedule()
2266 ctx->buflen = SHA_BUFFER_LEN; in atmel_sha_authenc_schedule()
2283 struct atmel_sha_reqctx *ctx = &authctx->base; in atmel_sha_authenc_init() local
2286 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_init()
2297 ctx->flags = hmac->base.flags; in atmel_sha_authenc_init()
2306 struct atmel_sha_reqctx *ctx = &authctx->base; in atmel_sha_authenc_init2() local
2309 size_t hs = ctx->hash_size; in atmel_sha_authenc_init2()
2324 mr |= ctx->flags & SHA_FLAGS_ALGO_MASK; in atmel_sha_authenc_init2()
2353 struct atmel_sha_reqctx *ctx = &authctx->base; in atmel_sha_authenc_final() local
2354 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_final()
2356 switch (ctx->flags & SHA_FLAGS_ALGO_MASK) { in atmel_sha_authenc_final()
2406 struct atmel_sha_reqctx *ctx = &authctx->base; in atmel_sha_authenc_abort() local
2407 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_authenc_abort()