Lines Matching +full:inline-crypto-engine

1 // SPDX-License-Identifier: GPL-2.0-only
8 #include <crypto/aes.h>
9 #include <crypto/engine.h>
10 #include <crypto/internal/aead.h>
11 #include <crypto/internal/des.h>
12 #include <crypto/internal/skcipher.h>
13 #include <crypto/scatterwalk.h>
28 #define DRIVER_NAME "stm32-cryp"
182 struct crypto_engine *engine; member
213 static inline bool is_aes(struct stm32_cryp *cryp) in is_aes()
215 return cryp->flags & FLG_AES; in is_aes()
218 static inline bool is_des(struct stm32_cryp *cryp) in is_des()
220 return cryp->flags & FLG_DES; in is_des()
223 static inline bool is_tdes(struct stm32_cryp *cryp) in is_tdes()
225 return cryp->flags & FLG_TDES; in is_tdes()
228 static inline bool is_ecb(struct stm32_cryp *cryp) in is_ecb()
230 return cryp->flags & FLG_ECB; in is_ecb()
233 static inline bool is_cbc(struct stm32_cryp *cryp) in is_cbc()
235 return cryp->flags & FLG_CBC; in is_cbc()
238 static inline bool is_ctr(struct stm32_cryp *cryp) in is_ctr()
240 return cryp->flags & FLG_CTR; in is_ctr()
243 static inline bool is_gcm(struct stm32_cryp *cryp) in is_gcm()
245 return cryp->flags & FLG_GCM; in is_gcm()
248 static inline bool is_ccm(struct stm32_cryp *cryp) in is_ccm()
250 return cryp->flags & FLG_CCM; in is_ccm()
253 static inline bool is_encrypt(struct stm32_cryp *cryp) in is_encrypt()
255 return cryp->flags & FLG_ENCRYPT; in is_encrypt()
258 static inline bool is_decrypt(struct stm32_cryp *cryp) in is_decrypt()
263 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst) in stm32_cryp_read()
265 return readl_relaxed(cryp->regs + ofst); in stm32_cryp_read()
268 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val) in stm32_cryp_write()
270 writel_relaxed(val, cryp->regs + ofst); in stm32_cryp_write()
273 static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp) in stm32_cryp_wait_busy()
277 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status, in stm32_cryp_wait_busy()
281 static inline void stm32_cryp_enable(struct stm32_cryp *cryp) in stm32_cryp_enable()
283 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN, in stm32_cryp_enable()
284 cryp->regs + cryp->caps->cr); in stm32_cryp_enable()
287 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp) in stm32_cryp_wait_enable()
291 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status, in stm32_cryp_wait_enable()
295 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp) in stm32_cryp_wait_output()
299 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status, in stm32_cryp_wait_output()
303 static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp) in stm32_cryp_key_read_enable()
305 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN, in stm32_cryp_key_read_enable()
306 cryp->regs + cryp->caps->cr); in stm32_cryp_key_read_enable()
309 static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp) in stm32_cryp_key_read_disable()
311 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN, in stm32_cryp_key_read_disable()
312 cryp->regs + cryp->caps->cr); in stm32_cryp_key_read_disable()
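The helpers above funnel all register access through relaxed MMIO plus per-variant offsets from the caps table. A minimal sketch of the same pattern, assuming a hypothetical my_dev/my_caps pair and illustrative MY_CR_EN/MY_SR_BUSY bits (none of these names come from the driver):

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/iopoll.h>

struct my_caps { u32 cr; u32 sr; };
struct my_dev  { void __iomem *regs; const struct my_caps *caps; };

#define MY_CR_EN	BIT(0)
#define MY_SR_BUSY	BIT(4)

/* Read-modify-write the control register to set the enable bit. */
static inline void my_dev_enable(struct my_dev *d)
{
	writel_relaxed(readl_relaxed(d->regs + d->caps->cr) | MY_CR_EN,
		       d->regs + d->caps->cr);
}

/* Poll the status register until BUSY clears, or give up after 1 ms. */
static inline int my_dev_wait_idle(struct my_dev *d)
{
	u32 status;

	return readl_relaxed_poll_timeout(d->regs + d->caps->sr, status,
					  !(status & MY_SR_BUSY), 10, 1000);
}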
323 if (!ctx->cryp) { in stm32_cryp_find_dev()
328 ctx->cryp = cryp; in stm32_cryp_find_dev()
330 cryp = ctx->cryp; in stm32_cryp_find_dev()
343 stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
344 stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
347 stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
348 stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++)); in stm32_cryp_hw_write_iv()
354 struct skcipher_request *req = cryp->req; in stm32_cryp_get_iv()
355 __be32 *tmp = (void *)req->iv; in stm32_cryp_get_iv()
360 if (cryp->caps->iv_protection) in stm32_cryp_get_iv()
363 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l)); in stm32_cryp_get_iv()
364 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r)); in stm32_cryp_get_iv()
367 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l)); in stm32_cryp_get_iv()
368 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r)); in stm32_cryp_get_iv()
371 if (cryp->caps->iv_protection) in stm32_cryp_get_iv()
376 * ux500_swap_bits_in_byte() - mirror the bits in a byte
380 * Byte b includes bits 0-7; nibble 1 (n1) includes bits 0-3 and in ux500_swap_bits_in_byte()
381 * nibble 2 (n2) bits 4-7.
387 * 3. Move bit 1-4, 1 position to the left.
392 * 3. Move bit 3-6, 1 position to the right.
396 static inline u8 ux500_swap_bits_in_byte(u8 b) in ux500_swap_bits_in_byte()
401 #define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4, in ux500_swap_bits_in_byte()
406 #define L_SHIFT_1_MASK 0x78 /* (After left shift 2) Bits 3-6, in ux500_swap_bits_in_byte()
417 /* Right shift 1, bits 1-4 */ in ux500_swap_bits_in_byte()
425 /* Left shift 1, bits 3-6 */ in ux500_swap_bits_in_byte()
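The masks and shifts above implement the nibble-wise mirroring described in the comment. As a cross-check, a plain loop-based reference (a sketch, not the driver's mask-and-shift implementation) computes the same bit reversal:

#include <linux/bits.h>
#include <linux/types.h>

/* Reference mirror: bit i of the input becomes bit (7 - i) of the output. */
static inline u8 swap_bits_in_byte_ref(u8 b)
{
	unsigned int i;
	u8 out = 0;

	for (i = 0; i < 8; i++)
		if (b & BIT(i))
			out |= BIT(7 - i);
	return out;
}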
432 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
441 static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len) in ux500_swizzle_key()
448 j = len - bpw; in ux500_swizzle_key()
451 index = len - j - bpw + i; in ux500_swizzle_key()
455 j -= bpw; in ux500_swizzle_key()
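From the loop bounds shown, the swizzle walks the key one 32-bit word at a time from the end and mirrors the bits of every byte on the way. A self-contained sketch consistent with those bounds, reusing the reference mirror above and assuming len is a multiple of 4:

/* Reverse the 32-bit word order of the key and mirror the bits of each byte. */
static inline void swizzle_key_ref(const u8 *in, u8 *out, u32 len)
{
	const u32 bpw = sizeof(u32);	/* bytes per word */
	u32 w, i;

	for (w = 0; w < len / bpw; w++) {
		u32 src = len - (w + 1) * bpw;	/* word taken from the end   */
		u32 dst = w * bpw;		/* written towards the front */

		for (i = 0; i < bpw; i++)
			out[dst + i] = swap_bits_in_byte_ref(in[src + i]);
	}
}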
465 stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0])); in stm32_cryp_hw_write_key()
466 stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1])); in stm32_cryp_hw_write_key()
474 * a proper big-endian bit sequence. in stm32_cryp_hw_write_key()
476 if (is_aes(c) && c->caps->linear_aes_key) { in stm32_cryp_hw_write_key()
479 ux500_swizzle_key((u8 *)c->ctx->key, in stm32_cryp_hw_write_key()
480 (u8 *)tmpkey, c->ctx->keylen); in stm32_cryp_hw_write_key()
482 r_id = c->caps->k1l; in stm32_cryp_hw_write_key()
483 for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4) in stm32_cryp_hw_write_key()
489 r_id = c->caps->k3r; in stm32_cryp_hw_write_key()
490 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4) in stm32_cryp_hw_write_key()
491 stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1])); in stm32_cryp_hw_write_key()
523 dev_err(cryp->dev, "Unknown mode\n"); in stm32_cryp_get_hw_mode()
529 return is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_get_input_text_len()
530 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_get_input_text_len()
539 memcpy(iv, cryp->areq->iv, 12); in stm32_cryp_gcm_init()
541 cryp->gcm_ctr = GCM_CTR_INIT; in stm32_cryp_gcm_init()
544 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN); in stm32_cryp_gcm_init()
549 dev_err(cryp->dev, "Timeout (gcm init)\n"); in stm32_cryp_gcm_init()
554 if (cryp->areq->assoclen) { in stm32_cryp_gcm_init()
556 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_gcm_init()
559 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_gcm_init()
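The GCM init phase loads a counter block derived from the 96-bit nonce before moving on to the header phase. In standard GCM (NIST SP 800-38D) the pre-counter block J0 for a 96-bit IV is IV || 0^31 || 1; a sketch of that generic construction (independent of the driver's GCM_CTR_INIT convention) follows:

#include <crypto/aes.h>
#include <linux/string.h>

#define MY_GCM_NONCE_SIZE	12	/* the common 96-bit GCM IV */

/* Build J0 = IV || 0^31 || 1 for a 96-bit IV (SP 800-38D). */
static void my_gcm_build_j0(u8 j0[AES_BLOCK_SIZE], const u8 *iv)
{
	memcpy(j0, iv, MY_GCM_NONCE_SIZE);
	memset(j0 + MY_GCM_NONCE_SIZE, 0,
	       AES_BLOCK_SIZE - MY_GCM_NONCE_SIZE - 1);
	j0[AES_BLOCK_SIZE - 1] = 0x01;
}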
571 if (!cryp->header_in) { in stm32_crypt_gcmccm_end_header()
575 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n"); in stm32_crypt_gcmccm_end_header()
576 stm32_cryp_write(cryp, cryp->caps->imsc, 0); in stm32_crypt_gcmccm_end_header()
583 cfg = stm32_cryp_read(cryp, cryp->caps->cr); in stm32_crypt_gcmccm_end_header()
585 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_crypt_gcmccm_end_header()
589 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_crypt_gcmccm_end_header()
604 u32 alen = cryp->areq->assoclen; in stm32_cryp_write_ccm_first_header()
624 written = min_t(size_t, AES_BLOCK_SIZE - len, alen); in stm32_cryp_write_ccm_first_header()
626 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0); in stm32_cryp_write_ccm_first_header()
628 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32); in stm32_cryp_write_ccm_first_header()
630 cryp->header_in -= written; in stm32_cryp_write_ccm_first_header()
645 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_ccm_init()
646 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); in stm32_cryp_ccm_init()
647 iv[AES_BLOCK_SIZE - 1] = 1; in stm32_cryp_ccm_init()
653 b0[0] |= (8 * ((cryp->authsize - 2) / 2)); in stm32_cryp_ccm_init()
655 if (cryp->areq->assoclen) in stm32_cryp_ccm_init()
660 b0[AES_BLOCK_SIZE - 2] = textlen >> 8; in stm32_cryp_ccm_init()
661 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF; in stm32_cryp_ccm_init()
664 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN); in stm32_cryp_ccm_init()
673 if (!cryp->caps->padding_wa) in stm32_cryp_ccm_init()
675 stm32_cryp_write(cryp, cryp->caps->din, xd); in stm32_cryp_ccm_init()
681 dev_err(cryp->dev, "Timeout (ccm init)\n"); in stm32_cryp_ccm_init()
686 if (cryp->areq->assoclen) { in stm32_cryp_ccm_init()
688 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_ccm_init()
694 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_ccm_init()
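The CCM init fragments assemble the B0 block: the IV already carries the length-field flags, the tag length is folded into the flags byte, an AAD bit is set when associated data is present, and the payload length lands in the trailing bytes (RFC 3610, section 2.2). A sketch of that formatting, assuming the payload length fits in two bytes as the fragment above does:

#include <crypto/aes.h>
#include <linux/string.h>

/*
 * Build the CCM B0 block from the formatted IV (iv[0] already holds the
 * length-field flags), the tag length and the payload length (RFC 3610 2.2).
 * Assumes the payload length fits in the last two bytes of the block.
 */
static void my_ccm_build_b0(u8 b0[AES_BLOCK_SIZE], const u8 *iv,
			    unsigned int authsize, unsigned int assoclen,
			    u16 textlen)
{
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= 8 * ((authsize - 2) / 2);	/* M' field: encoded tag length */
	if (assoclen)
		b0[0] |= 0x40;			/* Adata: AAD is present        */

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xff;
}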
705 pm_runtime_get_sync(cryp->dev); in stm32_cryp_hw_init()
708 stm32_cryp_write(cryp, cryp->caps->imsc, 0); in stm32_cryp_hw_init()
713 switch (cryp->ctx->keylen) { in stm32_cryp_hw_init()
730 return -EINVAL; in stm32_cryp_hw_init()
736 if (cryp->caps->kp_mode) in stm32_cryp_hw_init()
737 stm32_cryp_write(cryp, cryp->caps->cr, in stm32_cryp_hw_init()
741 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE); in stm32_cryp_hw_init()
751 dev_err(cryp->dev, "Timeout (key preparation)\n"); in stm32_cryp_hw_init()
758 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_hw_init()
765 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_hw_init()
789 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv); in stm32_cryp_hw_init()
811 pm_runtime_mark_last_busy(cryp->dev); in stm32_cryp_finish_req()
812 pm_runtime_put_autosuspend(cryp->dev); in stm32_cryp_finish_req()
815 crypto_finalize_aead_request(cryp->engine, cryp->areq, err); in stm32_cryp_finish_req()
817 crypto_finalize_skcipher_request(cryp->engine, cryp->req, in stm32_cryp_finish_req()
824 stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT); in stm32_cryp_cpu_start()
829 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
838 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
842 tfm->reqsize = sizeof(struct stm32_cryp_reqctx); in stm32_cryp_aes_aead_init()
855 return -ENODEV; in stm32_cryp_crypt()
857 rctx->mode = mode; in stm32_cryp_crypt()
859 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req); in stm32_cryp_crypt()
869 return -ENODEV; in stm32_cryp_aead_crypt()
871 rctx->mode = mode; in stm32_cryp_aead_crypt()
873 return crypto_transfer_aead_request_to_engine(cryp->engine, req); in stm32_cryp_aead_crypt()
881 memcpy(ctx->key, key, keylen); in stm32_cryp_setkey()
882 ctx->keylen = keylen; in stm32_cryp_setkey()
892 return -EINVAL; in stm32_cryp_aes_setkey()
918 return -EINVAL; in stm32_cryp_aes_aead_setkey()
920 memcpy(ctx->key, key, keylen); in stm32_cryp_aes_aead_setkey()
921 ctx->keylen = keylen; in stm32_cryp_aes_aead_setkey()
939 return -EINVAL; in stm32_cryp_aes_gcm_setauthsize()
958 return -EINVAL; in stm32_cryp_aes_ccm_setauthsize()
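The setkey paths only validate and cache the key in the transform context; the key registers are programmed later, per request. A sketch of that validate-then-cache pattern with an illustrative context struct (aes_check_keylen() is the stock helper from <crypto/aes.h>):

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/errno.h>
#include <linux/string.h>

struct my_cryp_ctx {
	u32 key[AES_KEYSIZE_256 / sizeof(u32)];
	int keylen;
};

static int my_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct my_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* Reject anything but 128/192/256-bit keys before caching. */
	if (aes_check_keylen(keylen))
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}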
966 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_ecb_encrypt()
967 return -EINVAL; in stm32_cryp_aes_ecb_encrypt()
969 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_encrypt()
977 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_ecb_decrypt()
978 return -EINVAL; in stm32_cryp_aes_ecb_decrypt()
980 if (req->cryptlen == 0) in stm32_cryp_aes_ecb_decrypt()
988 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_cbc_encrypt()
989 return -EINVAL; in stm32_cryp_aes_cbc_encrypt()
991 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_encrypt()
999 if (req->cryptlen % AES_BLOCK_SIZE) in stm32_cryp_aes_cbc_decrypt()
1000 return -EINVAL; in stm32_cryp_aes_cbc_decrypt()
1002 if (req->cryptlen == 0) in stm32_cryp_aes_cbc_decrypt()
1010 if (req->cryptlen == 0) in stm32_cryp_aes_ctr_encrypt()
1018 if (req->cryptlen == 0) in stm32_cryp_aes_ctr_decrypt()
1034 static inline int crypto_ccm_check_iv(const u8 *iv) in crypto_ccm_check_iv()
1038 return -EINVAL; in crypto_ccm_check_iv()
1047 err = crypto_ccm_check_iv(req->iv); in stm32_cryp_aes_ccm_encrypt()
1058 err = crypto_ccm_check_iv(req->iv); in stm32_cryp_aes_ccm_decrypt()
1067 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_ecb_encrypt()
1068 return -EINVAL; in stm32_cryp_des_ecb_encrypt()
1070 if (req->cryptlen == 0) in stm32_cryp_des_ecb_encrypt()
1078 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_ecb_decrypt()
1079 return -EINVAL; in stm32_cryp_des_ecb_decrypt()
1081 if (req->cryptlen == 0) in stm32_cryp_des_ecb_decrypt()
1089 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_cbc_encrypt()
1090 return -EINVAL; in stm32_cryp_des_cbc_encrypt()
1092 if (req->cryptlen == 0) in stm32_cryp_des_cbc_encrypt()
1100 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_des_cbc_decrypt()
1101 return -EINVAL; in stm32_cryp_des_cbc_decrypt()
1103 if (req->cryptlen == 0) in stm32_cryp_des_cbc_decrypt()
1111 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_ecb_encrypt()
1112 return -EINVAL; in stm32_cryp_tdes_ecb_encrypt()
1114 if (req->cryptlen == 0) in stm32_cryp_tdes_ecb_encrypt()
1122 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_ecb_decrypt()
1123 return -EINVAL; in stm32_cryp_tdes_ecb_decrypt()
1125 if (req->cryptlen == 0) in stm32_cryp_tdes_ecb_decrypt()
1133 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_cbc_encrypt()
1134 return -EINVAL; in stm32_cryp_tdes_cbc_encrypt()
1136 if (req->cryptlen == 0) in stm32_cryp_tdes_cbc_encrypt()
1144 if (req->cryptlen % DES_BLOCK_SIZE) in stm32_cryp_tdes_cbc_decrypt()
1145 return -EINVAL; in stm32_cryp_tdes_cbc_decrypt()
1147 if (req->cryptlen == 0) in stm32_cryp_tdes_cbc_decrypt()
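Every ECB/CBC entry point above repeats the same two gates: a length that is not a multiple of the block size is rejected with -EINVAL, and a zero-length request completes immediately. A condensed sketch of that guard, with my_blk_crypt()/my_cryp_crypt() as hypothetical names:

#include <crypto/internal/skcipher.h>
#include <linux/errno.h>

/* Hypothetical back-end that queues the request to the crypto engine. */
static int my_cryp_crypt(struct skcipher_request *req, unsigned long mode);

/*
 * Common guard for block modes: reject misaligned lengths, complete empty
 * requests immediately, otherwise hand the request off for processing.
 */
static int my_blk_crypt(struct skcipher_request *req, unsigned int blocksize,
			unsigned long mode)
{
	if (req->cryptlen % blocksize)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return my_cryp_crypt(req, mode);
}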
1163 return -EINVAL; in stm32_cryp_prepare_req()
1168 cryp = ctx->cryp; in stm32_cryp_prepare_req()
1171 rctx->mode &= FLG_MODE_MASK; in stm32_cryp_prepare_req()
1173 ctx->cryp = cryp; in stm32_cryp_prepare_req()
1175 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode; in stm32_cryp_prepare_req()
1176 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE; in stm32_cryp_prepare_req()
1177 cryp->ctx = ctx; in stm32_cryp_prepare_req()
1180 cryp->req = req; in stm32_cryp_prepare_req()
1181 cryp->areq = NULL; in stm32_cryp_prepare_req()
1182 cryp->header_in = 0; in stm32_cryp_prepare_req()
1183 cryp->payload_in = req->cryptlen; in stm32_cryp_prepare_req()
1184 cryp->payload_out = req->cryptlen; in stm32_cryp_prepare_req()
1185 cryp->authsize = 0; in stm32_cryp_prepare_req()
1191 * Encrypt IN:  AssocData || PlainText            <- assoclen -> <- cryptlen -> in stm32_cryp_prepare_req()
1194 * Encrypt OUT: AssocData || CipherText || Tag    <- assoclen -> <-- cryptlen --> <- authsize -> in stm32_cryp_prepare_req()
1198 * Decrypt IN:  AssocData || CipherText || Tag    <- assoclen ---> <---------- cryptlen ----------> in stm32_cryp_prepare_req()
1201 * Decrypt OUT: AssocData || PlainText            <- assoclen -> <- cryptlen - authsize -> in stm32_cryp_prepare_req()
1203 cryp->areq = areq; in stm32_cryp_prepare_req()
1204 cryp->req = NULL; in stm32_cryp_prepare_req()
1205 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq)); in stm32_cryp_prepare_req()
1207 cryp->payload_in = areq->cryptlen; in stm32_cryp_prepare_req()
1208 cryp->header_in = areq->assoclen; in stm32_cryp_prepare_req()
1209 cryp->payload_out = areq->cryptlen; in stm32_cryp_prepare_req()
1211 cryp->payload_in = areq->cryptlen - cryp->authsize; in stm32_cryp_prepare_req()
1212 cryp->header_in = areq->assoclen; in stm32_cryp_prepare_req()
1213 cryp->payload_out = cryp->payload_in; in stm32_cryp_prepare_req()
1217 in_sg = req ? req->src : areq->src; in stm32_cryp_prepare_req()
1218 scatterwalk_start(&cryp->in_walk, in_sg); in stm32_cryp_prepare_req()
1220 cryp->out_sg = req ? req->dst : areq->dst; in stm32_cryp_prepare_req()
1221 scatterwalk_start(&cryp->out_walk, cryp->out_sg); in stm32_cryp_prepare_req()
1225 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2); in stm32_cryp_prepare_req()
1229 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr)); in stm32_cryp_prepare_req()
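The AEAD length bookkeeping follows the layout diagram above: on encryption all of cryptlen is payload, while on decryption the trailing authsize bytes are the tag and are excluded. A minimal sketch of that split (the struct is illustrative, the field names merely mirror the fragments):

#include <linux/types.h>

struct my_aead_lens {
	size_t header_in;	/* associated data still to feed      */
	size_t payload_in;	/* plaintext/ciphertext still to feed */
	size_t payload_out;	/* plaintext/ciphertext to collect    */
};

static void my_aead_setup_lens(struct my_aead_lens *l, size_t assoclen,
			       size_t cryptlen, size_t authsize, bool encrypt)
{
	l->header_in = assoclen;

	if (encrypt) {
		l->payload_in = cryptlen;
		l->payload_out = cryptlen;
	} else {
		/* On decrypt, cryptlen includes the tag appended at encrypt time. */
		l->payload_in = cryptlen - authsize;
		l->payload_out = l->payload_in;
	}
}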
1235 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq) in stm32_cryp_cipher_one_req() argument
1242 struct stm32_cryp *cryp = ctx->cryp; in stm32_cryp_cipher_one_req()
1245 return -ENODEV; in stm32_cryp_cipher_one_req()
1251 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq) in stm32_cryp_aead_one_req() argument
1256 struct stm32_cryp *cryp = ctx->cryp; in stm32_cryp_aead_one_req()
1260 return -ENODEV; in stm32_cryp_aead_one_req()
1266 if (unlikely(!cryp->payload_in && !cryp->header_in)) { in stm32_cryp_aead_one_req()
1282 cfg = stm32_cryp_read(cryp, cryp->caps->cr); in stm32_cryp_read_auth_tag()
1289 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_read_auth_tag()
1293 size_bit = cryp->areq->assoclen * 8; in stm32_cryp_read_auth_tag()
1294 if (cryp->caps->swap_final) in stm32_cryp_read_auth_tag()
1297 stm32_cryp_write(cryp, cryp->caps->din, 0); in stm32_cryp_read_auth_tag()
1298 stm32_cryp_write(cryp, cryp->caps->din, size_bit); in stm32_cryp_read_auth_tag()
1300 size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen : in stm32_cryp_read_auth_tag()
1301 cryp->areq->cryptlen - cryp->authsize; in stm32_cryp_read_auth_tag()
1303 if (cryp->caps->swap_final) in stm32_cryp_read_auth_tag()
1306 stm32_cryp_write(cryp, cryp->caps->din, 0); in stm32_cryp_read_auth_tag()
1307 stm32_cryp_write(cryp, cryp->caps->din, size_bit); in stm32_cryp_read_auth_tag()
1314 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); in stm32_cryp_read_auth_tag()
1315 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); in stm32_cryp_read_auth_tag()
1320 if (!cryp->caps->padding_wa) in stm32_cryp_read_auth_tag()
1322 stm32_cryp_write(cryp, cryp->caps->din, xiv); in stm32_cryp_read_auth_tag()
1329 dev_err(cryp->dev, "Timeout (read tag)\n"); in stm32_cryp_read_auth_tag()
1337 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32); in stm32_cryp_read_auth_tag()
1338 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1); in stm32_cryp_read_auth_tag()
1343 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0); in stm32_cryp_read_auth_tag()
1344 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32); in stm32_cryp_read_auth_tag()
1346 if (crypto_memneq(in_tag, out_tag, cryp->authsize)) in stm32_cryp_read_auth_tag()
1347 ret = -EBADMSG; in stm32_cryp_read_auth_tag()
1352 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_read_auth_tag()
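In the final phase the AAD and payload lengths are pushed in bits, then the computed tag is either copied to the destination (encrypt) or compared against the tag taken from the source (decrypt); crypto_memneq() keeps the comparison constant-time. A sketch of the verification step, with read_hw_tag() as a hypothetical stand-in for draining the output FIFO:

#include <crypto/aes.h>
#include <crypto/algapi.h>	/* crypto_memneq() */
#include <linux/errno.h>

/* Hypothetical: fills 'tag' with the AES_BLOCK_SIZE bytes computed by the engine. */
static void read_hw_tag(u8 tag[AES_BLOCK_SIZE]);

static int my_verify_tag(const u8 *expected, unsigned int authsize)
{
	u8 computed[AES_BLOCK_SIZE];

	read_hw_tag(computed);

	/* Constant-time compare: a mismatch means AAD or ciphertext was altered. */
	if (crypto_memneq(expected, computed, authsize))
		return -EBADMSG;

	return 0;
}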
1361 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) { in stm32_cryp_check_ctr_counter()
1366 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr)); in stm32_cryp_check_ctr_counter()
1368 cr = stm32_cryp_read(cryp, cryp->caps->cr); in stm32_cryp_check_ctr_counter()
1369 stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN); in stm32_cryp_check_ctr_counter()
1371 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr); in stm32_cryp_check_ctr_counter()
1373 stm32_cryp_write(cryp, cryp->caps->cr, cr); in stm32_cryp_check_ctr_counter()
1377 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l)); in stm32_cryp_check_ctr_counter()
1378 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r)); in stm32_cryp_check_ctr_counter()
1379 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l)); in stm32_cryp_check_ctr_counter()
1380 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r)); in stm32_cryp_check_ctr_counter()
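The CTR path mirrors the hardware counter in last_ctr so it can catch the 32-bit rollover the IP does not carry into the upper IV words: when the low word hits 0xFFFFFFFF the full 128-bit counter is bumped in software and written back. A sketch of that rollover check on a big-endian counter (crypto_inc() is the generic helper used above):

#include <asm/byteorder.h>
#include <crypto/algapi.h>	/* crypto_inc() */
#include <linux/types.h>

/*
 * Returns true when the software copy of the counter had to be bumped and
 * must be written back to the IV registers before the next block.
 */
static bool my_ctr_handle_rollover(__be32 last_ctr[4])
{
	if (last_ctr[3] != cpu_to_be32(0xFFFFFFFF))
		return false;

	/* Propagate the carry through the full 128-bit big-endian counter. */
	crypto_inc((u8 *)last_ctr, 4 * sizeof(__be32));
	return true;
}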
1387 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32)); in stm32_cryp_irq_read_data()
1388 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_read_data()
1389 cryp->payload_out), 1); in stm32_cryp_irq_read_data()
1390 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_read_data()
1391 cryp->payload_out); in stm32_cryp_irq_read_data()
1398 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_block()
1399 cryp->payload_in), 0); in stm32_cryp_irq_write_block()
1400 writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32)); in stm32_cryp_irq_write_block()
1401 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in); in stm32_cryp_irq_write_block()
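Data crosses the engine one hardware block at a time through a bounce buffer, and only min(blocksize, remaining) bytes are copied to or from the scatterlist walk so the final partial block is never over-read or over-written. A sketch of the output direction, assuming an AES-sized block as above:

#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <linux/io.h>
#include <linux/kernel.h>

#define MY_AES_BLOCK_32	(AES_BLOCK_SIZE / sizeof(u32))

/* Drain one block from the DOUT FIFO and copy only the valid bytes out. */
static size_t my_read_block(void __iomem *dout, struct scatter_walk *out_walk,
			    size_t payload_out)
{
	u32 block[MY_AES_BLOCK_32];
	size_t valid = min_t(size_t, AES_BLOCK_SIZE, payload_out);

	readsl(dout, block, MY_AES_BLOCK_32);
	scatterwalk_copychunks(block, out_walk, valid, 1);

	return payload_out - valid;
}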
1413 stm32_cryp_write(cryp, cryp->caps->imsc, 0); in stm32_cryp_irq_write_gcm_padded_data()
1414 cfg = stm32_cryp_read(cryp, cryp->caps->cr); in stm32_cryp_irq_write_gcm_padded_data()
1416 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1419 stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2); in stm32_cryp_irq_write_gcm_padded_data()
1424 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1428 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1435 dev_err(cryp->dev, "Timeout (write gcm last data)\n"); in stm32_cryp_irq_write_gcm_padded_data()
1444 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32)); in stm32_cryp_irq_write_gcm_padded_data()
1446 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_gcm_padded_data()
1447 cryp->payload_out), 1); in stm32_cryp_irq_write_gcm_padded_data()
1448 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_gcm_padded_data()
1449 cryp->payload_out); in stm32_cryp_irq_write_gcm_padded_data()
1454 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1459 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_gcm_padded_data()
1462 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32); in stm32_cryp_irq_write_gcm_padded_data()
1467 dev_err(cryp->dev, "Timeout (write gcm padded data)\n"); in stm32_cryp_irq_write_gcm_padded_data()
1472 stm32_cryp_read(cryp, cryp->caps->dout); in stm32_cryp_irq_write_gcm_padded_data()
1483 cfg = stm32_cryp_read(cryp, cryp->caps->cr); in stm32_cryp_irq_set_npblb()
1485 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_set_npblb()
1487 cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT; in stm32_cryp_irq_set_npblb()
1489 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_set_npblb()
1503 stm32_cryp_write(cryp, cryp->caps->imsc, 0); in stm32_cryp_irq_write_ccm_padded_data()
1505 cfg = stm32_cryp_read(cryp, cryp->caps->cr); in stm32_cryp_irq_write_ccm_padded_data()
1507 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1517 stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp); in stm32_cryp_irq_write_ccm_padded_data()
1522 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1526 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1533 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); in stm32_cryp_irq_write_ccm_padded_data()
1542 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32)); in stm32_cryp_irq_write_ccm_padded_data()
1544 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, in stm32_cryp_irq_write_ccm_padded_data()
1545 cryp->payload_out), 1); in stm32_cryp_irq_write_ccm_padded_data()
1546 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out); in stm32_cryp_irq_write_ccm_padded_data()
1555 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1560 stm32_cryp_write(cryp, cryp->caps->cr, cfg); in stm32_cryp_irq_write_ccm_padded_data()
1566 stm32_cryp_write(cryp, cryp->caps->din, block[i]); in stm32_cryp_irq_write_ccm_padded_data()
1572 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); in stm32_cryp_irq_write_ccm_padded_data()
1580 if (unlikely(!cryp->payload_in)) { in stm32_cryp_irq_write_data()
1581 dev_warn(cryp->dev, "No more data to process\n"); in stm32_cryp_irq_write_data()
1585 if (unlikely(cryp->payload_in < AES_BLOCK_SIZE && in stm32_cryp_irq_write_data()
1589 if (cryp->caps->padding_wa) { in stm32_cryp_irq_write_data()
1599 if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) && in stm32_cryp_irq_write_data()
1603 if (cryp->caps->padding_wa) { in stm32_cryp_irq_write_data()
1624 written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in); in stm32_cryp_irq_write_gcmccm_header()
1626 scatterwalk_copychunks(block, &cryp->in_walk, written, 0); in stm32_cryp_irq_write_gcmccm_header()
1628 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32); in stm32_cryp_irq_write_gcmccm_header()
1630 cryp->header_in -= written; in stm32_cryp_irq_write_gcmccm_header()
1639 u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc); in stm32_cryp_irq_thread()
1641 if (cryp->irq_status & MISR_OUT) in stm32_cryp_irq_thread()
1645 if (cryp->irq_status & MISR_IN) { in stm32_cryp_irq_thread()
1647 ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK; in stm32_cryp_irq_thread()
1655 cryp->gcm_ctr++; in stm32_cryp_irq_thread()
1663 if (!cryp->payload_in && !cryp->header_in) in stm32_cryp_irq_thread()
1665 if (!cryp->payload_out) in stm32_cryp_irq_thread()
1667 stm32_cryp_write(cryp, cryp->caps->imsc, it_mask); in stm32_cryp_irq_thread()
1669 if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) { in stm32_cryp_irq_thread()
1682 cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis); in stm32_cryp_irq()
1691 .base.cra_driver_name = "stm32-ecb-aes",
1713 .base.cra_driver_name = "stm32-cbc-aes",
1736 .base.cra_driver_name = "stm32-ctr-aes",
1759 .base.cra_driver_name = "stm32-ecb-des",
1781 .base.cra_driver_name = "stm32-cbc-des",
1804 .base.cra_driver_name = "stm32-ecb-des3",
1826 .base.cra_driver_name = "stm32-cbc-des3",
1860 .cra_driver_name = "stm32-gcm-aes",
1883 .cra_driver_name = "stm32-ccm-aes",
1964 { .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
1965 { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1966 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1973 struct device *dev = &pdev->dev; in stm32_cryp_probe()
1980 return -ENOMEM; in stm32_cryp_probe()
1982 cryp->caps = of_device_get_match_data(dev); in stm32_cryp_probe()
1983 if (!cryp->caps) in stm32_cryp_probe()
1984 return -ENODEV; in stm32_cryp_probe()
1986 cryp->dev = dev; in stm32_cryp_probe()
1988 cryp->regs = devm_platform_ioremap_resource(pdev, 0); in stm32_cryp_probe()
1989 if (IS_ERR(cryp->regs)) in stm32_cryp_probe()
1990 return PTR_ERR(cryp->regs); in stm32_cryp_probe()
2004 cryp->clk = devm_clk_get(dev, NULL); in stm32_cryp_probe()
2005 if (IS_ERR(cryp->clk)) { in stm32_cryp_probe()
2006 dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n"); in stm32_cryp_probe()
2008 return PTR_ERR(cryp->clk); in stm32_cryp_probe()
2011 ret = clk_prepare_enable(cryp->clk); in stm32_cryp_probe()
2013 dev_err(cryp->dev, "Failed to enable clock\n"); in stm32_cryp_probe()
2027 if (ret == -EPROBE_DEFER) in stm32_cryp_probe()
2038 list_add(&cryp->list, &cryp_list.dev_list); in stm32_cryp_probe()
2041 /* Initialize crypto engine */ in stm32_cryp_probe()
2042 cryp->engine = crypto_engine_alloc_init(dev, 1); in stm32_cryp_probe()
2043 if (!cryp->engine) { in stm32_cryp_probe()
2044 dev_err(dev, "Could not init crypto engine\n"); in stm32_cryp_probe()
2045 ret = -ENOMEM; in stm32_cryp_probe()
2049 ret = crypto_engine_start(cryp->engine); in stm32_cryp_probe()
2051 dev_err(dev, "Could not start crypto engine\n"); in stm32_cryp_probe()
2061 if (cryp->caps->aeads_support) { in stm32_cryp_probe()
2077 crypto_engine_exit(cryp->engine); in stm32_cryp_probe()
2080 list_del(&cryp->list); in stm32_cryp_probe()
2086 clk_disable_unprepare(cryp->clk); in stm32_cryp_probe()
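The probe wires the device into the framework in the usual crypto_engine order: allocate an engine bound to the device, start it, then register the algorithm arrays, unwinding in reverse on error. A condensed sketch of the engine setup step (the second argument to crypto_engine_alloc_init() is passed through as in the probe above):

#include <crypto/engine.h>
#include <linux/device.h>
#include <linux/errno.h>

static int my_cryp_engine_setup(struct device *dev, struct crypto_engine **out)
{
	struct crypto_engine *engine;
	int ret;

	/* Allocate the engine (request queue + kthread) bound to this device. */
	engine = crypto_engine_alloc_init(dev, 1);
	if (!engine)
		return -ENOMEM;

	/* Start pulling requests off the queue. */
	ret = crypto_engine_start(engine);
	if (ret) {
		crypto_engine_exit(engine);
		return ret;
	}

	*out = engine;
	return 0;
}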
2097 return -ENODEV; in stm32_cryp_remove()
2099 ret = pm_runtime_resume_and_get(cryp->dev); in stm32_cryp_remove()
2103 if (cryp->caps->aeads_support) in stm32_cryp_remove()
2107 crypto_engine_exit(cryp->engine); in stm32_cryp_remove()
2110 list_del(&cryp->list); in stm32_cryp_remove()
2113 pm_runtime_disable(cryp->dev); in stm32_cryp_remove()
2114 pm_runtime_put_noidle(cryp->dev); in stm32_cryp_remove()
2116 clk_disable_unprepare(cryp->clk); in stm32_cryp_remove()
2126 clk_disable_unprepare(cryp->clk); in stm32_cryp_runtime_suspend()
2136 ret = clk_prepare_enable(cryp->clk); in stm32_cryp_runtime_resume()
2138 dev_err(cryp->dev, "Failed to prepare_enable clock\n"); in stm32_cryp_runtime_resume()