/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>

#define DRIVER_NAME		"stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT		BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES			BIT(1)
#define FLG_DES			BIT(2)
#define FLG_TDES		BIT(3)
#define FLG_ECB			BIT(4)
#define FLG_CBC			BIT(5)
#define FLG_CTR			BIT(6)
#define FLG_GCM			BIT(7)
#define FLG_CCM			BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK		GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA	BIT(16)

/* Registers */
#define CRYP_CR			0x00000000
#define CRYP_SR			0x00000004
#define CRYP_DIN		0x00000008
#define CRYP_DOUT		0x0000000C
#define CRYP_DMACR		0x00000010
#define CRYP_IMSCR		0x00000014
#define CRYP_RISR		0x00000018
#define CRYP_MISR		0x0000001C
#define CRYP_K0LR		0x00000020
#define CRYP_K0RR		0x00000024
#define CRYP_K1LR		0x00000028
#define CRYP_K1RR		0x0000002C
#define CRYP_K2LR		0x00000030
#define CRYP_K2RR		0x00000034
#define CRYP_K3LR		0x00000038
#define CRYP_K3RR		0x0000003C
#define CRYP_IV0LR		0x00000040
#define CRYP_IV0RR		0x00000044
#define CRYP_IV1LR		0x00000048
#define CRYP_IV1RR		0x0000004C
#define CRYP_CSGCMCCM0R		0x00000050
#define CRYP_CSGCM0R		0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC		0x00000004
#define CR_TDES_ECB		0x00000000
#define CR_TDES_CBC		0x00000008
#define CR_DES_ECB		0x00000010
#define CR_DES_CBC		0x00000018
#define CR_AES_ECB		0x00000020
#define CR_AES_CBC		0x00000028
#define CR_AES_CTR		0x00000030
#define CR_AES_KP		0x00000038
#define CR_AES_GCM		0x00080000
#define CR_AES_CCM		0x00080008
#define CR_AES_UNKNOWN		0xFFFFFFFF
#define CR_ALGO_MASK		0x00080038
#define CR_DATA32		0x00000000
#define CR_DATA16		0x00000040
#define CR_DATA8		0x00000080
#define CR_DATA1		0x000000C0
#define CR_KEY128		0x00000000
#define CR_KEY192		0x00000100
#define CR_KEY256		0x00000200
#define CR_FFLUSH		0x00004000
#define CR_CRYPEN		0x00008000
#define CR_PH_INIT		0x00000000
#define CR_PH_HEADER		0x00010000
#define CR_PH_PAYLOAD		0x00020000
#define CR_PH_FINAL		0x00030000
#define CR_PH_MASK		0x00030000
#define CR_NBPBL_SHIFT		20

#define SR_BUSY			0x00000010
#define SR_OFNE			0x00000004

#define IMSCR_IN		BIT(0)
#define IMSCR_OUT		BIT(1)

#define MISR_IN			BIT(0)
#define MISR_OUT		BIT(1)

/* Misc */
#define AES_BLOCK_32		(AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT		2
#define _walked_in		(cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out		(cryp->out_walk.offset - cryp->out_sg->offset)

struct stm32_cryp_caps {
	bool			swap_final;
	bool			padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp	*cryp;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long		flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};
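
/*
 * Per-instance driver state: one of these exists per CRYP block found in the
 * device tree. Instances are added to cryp_list at probe time and looked up
 * by stm32_cryp_find_dev() when a tfm is first used.
 */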
struct stm32_cryp {
	struct list_head	list;
	struct device		*dev;
	void __iomem		*regs;
	struct clk		*clk;
	unsigned long		flags;
	u32			irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx	*ctx;

	struct crypto_engine	*engine;

	struct mutex		lock; /* protects req / areq */
	struct ablkcipher_request *req;
	struct aead_request	*areq;

	size_t			authsize;
	size_t			hw_blocksize;

	size_t			total_in;
	size_t			total_in_save;
	size_t			total_out;
	size_t			total_out_save;

	struct scatterlist	*in_sg;
	struct scatterlist	*out_sg;
	struct scatterlist	*out_sg_save;

	struct scatterlist	in_sgl;
	struct scatterlist	out_sgl;
	bool			sgs_copied;

	int			in_sg_len;
	int			out_sg_len;

	struct scatter_walk	in_walk;
	struct scatter_walk	out_walk;

	u32			last_ctr[4];
	u32			gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head	dev_list;
	spinlock_t		lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}
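
/*
 * The CRYP FIFOs are fed with 32-bit words and operate on whole hardware
 * blocks, so scatterlists that are not word aligned or not a multiple of the
 * block size are bounced through contiguous buffers, see
 * stm32_cryp_copy_sgs().
 */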
static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}

static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		cryp->sgs_copied = 0;
		return -EFAULT;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (gcm init)\n");

	return ret;
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		if (!cryp->caps->padding_wa)
			*d = cpu_to_be32(*d);
		stm32_cryp_write(cryp, CRYP_DIN, *d++);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (ccm init)\n");

	return ret;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);
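
	/*
	 * GCM and CCM run as a sequence of hardware phases: INIT
	 * (pre-computation), HEADER (associated data), PAYLOAD and then
	 * FINAL (tag). Pick the phase to start with depending on what the
	 * request actually carries.
	 */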
	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		/* Phase 2 : header (authenticated data) */
		if (cryp->areq->assoclen) {
			cfg |= CR_PH_HEADER;
		} else if (stm32_cryp_get_input_text_len(cryp)) {
			cfg |= CR_PH_PAYLOAD;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			cfg |= CR_PH_INIT;
		}

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cryp->flags &= ~FLG_CCM_PADDED_WA;

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	if (is_gcm(cryp) || is_ccm(cryp)) {
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
		cryp->areq = NULL;
	} else {
		crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
						   err);
		cryp->req = NULL;
	}

	memset(cryp->ctx->key, 0, cryp->ctx->keylen);

	mutex_unlock(&cryp->lock);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != DES_KEY_SIZE)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	if (keylen != (3 * DES_KEY_SIZE))
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	mutex_lock(&cryp->lock);

	rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->total_in = req->nbytes;
		cryp->total_out = cryp->total_in;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData  ||   PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *          <------- total_in ----------->
		 *
		 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
		 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
		 *          <---------------- total_out ----------------->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData  ||    CipherText    ||  AuthTag
		 *          <- assoclen ->  <--------- cryptlen --------->
		 *                                           <- authsize ->
		 *          <---------------- total_in ------------------>
		 *
		 *  OUTPUT =   AssocData  ||        PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 *          <---------- total_out ----------------->
		 */
		cryp->areq = areq;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		cryp->total_in = areq->assoclen + areq->cryptlen;
		if (is_encrypt(cryp))
			/* Append auth tag to output */
			cryp->total_out = cryp->total_in + cryp->authsize;
		else
			/* No auth tag in output */
			cryp->total_out = cryp->total_in - cryp->authsize;
	}

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req ? req->src : areq->src;
	cryp->out_sg = req ? req->dst : areq->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		goto out;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		goto out;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		goto out;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
		cryp->total_out -= cryp->areq->assoclen;
	}

	ret = stm32_cryp_hw_init(cryp);
out:
	if (ret)
		mutex_unlock(&cryp->lock);

	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}
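
/*
 * Zero-length AEAD requests (no assoc data and no payload) never touch the
 * FIFOs: the tag is produced or checked directly from the Final phase in
 * stm32_cryp_finish_req() / stm32_cryp_read_auth_tag().
 */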
static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->areq->assoclen &&
		     !stm32_cryp_get_input_text_len(cryp))) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit, *dst, d32;
	u8 *d8;
	unsigned int i, j;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - AES_BLOCK_SIZE;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u8 iv[AES_BLOCK_SIZE];
		u32 *iv32 = (u32 *)iv;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (!cryp->caps->padding_wa)
				*iv32 = cpu_to_be32(*iv32);
			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		/* Get and write tag */
		dst = sg_virt(cryp->out_sg) + _walked_out;

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (cryp->total_out >= sizeof(u32)) {
				/* Read a full u32 */
				*dst = stm32_cryp_read(cryp, CRYP_DOUT);

				dst = stm32_cryp_next_out(cryp, dst,
							  sizeof(u32));
				cryp->total_out -= sizeof(u32);
			} else if (!cryp->total_out) {
				/* Empty fifo out (data from input padding) */
				stm32_cryp_read(cryp, CRYP_DOUT);
			} else {
				/* Read less than an u32 */
				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
				d8 = (u8 *)&d32;

				for (j = 0; j < cryp->total_out; j++) {
					*((u8 *)dst) = *(d8++);
					dst = stm32_cryp_next_out(cryp, dst, 1);
				}
				cryp->total_out = 0;
			}
		}
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
					 cryp->total_in_save - cryp->authsize,
					 cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;
	size_t tag_size;

	/* Do not read the tag now (if any) */
	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (cryp->total_out == tag_size) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out - tag_size; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = tag_size;
		}
	}

	return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];
	size_t tag_size;

	/* Do not write the tag (if any) */
	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (cryp->total_in == tag_size) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in - tag_size; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = tag_size;
		}
	}
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, tmp[AES_BLOCK_32];
	size_t total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	stm32_cryp_irq_read_data(cryp);
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_in_save - total_in_ori,
				 total_in_ori, 0);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++) {
		if (cryp->total_in)
			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
		else
			stm32_cryp_write(cryp, CRYP_DIN, 0);

		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
	}

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg, payload_bytes;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
					   cryp->total_in;
	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
	size_t last_total_out, total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
	cryp->flags |= FLG_CCM_PADDED_WA;

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	last_total_out = cryp->total_out;
	stm32_cryp_irq_read_data(cryp);

	memset(tmp, 0, sizeof(tmp));
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_out_save - last_total_out,
				 last_total_out, 0);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
		tmp[i] ^= cstmp1[i];
		tmp[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}
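
/*
 * Feed the next block into the input FIFO. When the last block of a GCM
 * encryption or of a CCM decryption is not a full AES block, either run the
 * 'padded data' workaround (older IP revisions) or program NPBLB so the
 * hardware ignores the padding bytes itself.
 */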
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa)
			/* Special case 1 */
			return stm32_cryp_irq_write_gcm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa)
			/* Special case 2 */
			return stm32_cryp_irq_write_ccm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}

static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i = 0, j, k;
	u32 alen, cfg, *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;
	alen = cryp->areq->assoclen;

	if (!_walked_in) {
		if (cryp->areq->assoclen <= 65280) {
			/* Write first u32 of B1 */
			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		} else {
			/* Build the two first u32 of B1 */
			d8[0] = 0xFF;
			d8[1] = 0xFE;
			d8[2] = alen & 0xFF000000;
			d8[3] = alen & 0x00FF0000;

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			d8[0] = alen & 0x0000FF00;
			d8[1] = alen & 0x000000FF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		}
	}

	/* Write next u32 */
	for (; i < AES_BLOCK_32; i++) {
		/* Build an u32 */
		memset(d8, 0, sizeof(u32));
		for (k = 0; k < sizeof(u32); k++) {
			d8[k] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
			if ((cryp->total_in_save - cryp->total_in) == alen)
				break;
		}

		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);

		if ((cryp->total_in_save - cryp->total_in) == alen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (ccm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}
	}
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct crypto_alg crypto_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "stm32-ecb-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ecb_encrypt,
		.decrypt	= stm32_cryp_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "stm32-cbc-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_cbc_encrypt,
		.decrypt	= stm32_cryp_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "stm32-ctr-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ctr_encrypt,
		.decrypt	= stm32_cryp_aes_ctr_decrypt,
	}
},
{
	.cra_name		= "ecb(des)",
	.cra_driver_name	= "stm32-ecb-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_BLOCK_SIZE,
		.max_keysize	= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_ecb_encrypt,
		.decrypt	= stm32_cryp_des_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des)",
	.cra_driver_name	= "stm32-cbc-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_BLOCK_SIZE,
		.max_keysize	= DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_cbc_encrypt,
		.decrypt	= stm32_cryp_des_cbc_decrypt,
	}
},
{
	.cra_name		= "ecb(des3_ede)",
	.cra_driver_name	= "stm32-ecb-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_BLOCK_SIZE,
		.max_keysize	= 3 * DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_ecb_encrypt,
		.decrypt	= stm32_cryp_tdes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des3_ede)",
	.cra_driver_name	= "stm32-cbc-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_BLOCK_SIZE,
		.max_keysize	= 3 * DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_cbc_encrypt,
		.decrypt	= stm32_cryp_tdes_cbc_decrypt,
	}
},
};

static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};

static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct resource *res;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	mutex_init(&cryp->lock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	cryp->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "Cannot get IRQ resource\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}
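
	/* If a reset line is provided, pulse it so the IP starts clean */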
	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	return 0;

err_aead_algs:
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);

	if (!cryp)
		return -ENODEV;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");