1 // SPDX-License-Identifier: GPL-2.0-only 2 /* 3 * Copyright (C) STMicroelectronics SA 2017 4 * Author: Fabien Dessenne <fabien.dessenne@st.com> 5 * Ux500 support taken from snippets in the old Ux500 cryp driver 6 */ 7 8 #include <linux/clk.h> 9 #include <linux/delay.h> 10 #include <linux/interrupt.h> 11 #include <linux/iopoll.h> 12 #include <linux/module.h> 13 #include <linux/of_device.h> 14 #include <linux/platform_device.h> 15 #include <linux/pm_runtime.h> 16 #include <linux/reset.h> 17 18 #include <crypto/aes.h> 19 #include <crypto/internal/des.h> 20 #include <crypto/engine.h> 21 #include <crypto/scatterwalk.h> 22 #include <crypto/internal/aead.h> 23 #include <crypto/internal/skcipher.h> 24 25 #define DRIVER_NAME "stm32-cryp" 26 27 /* Bit [0] encrypt / decrypt */ 28 #define FLG_ENCRYPT BIT(0) 29 /* Bit [8..1] algo & operation mode */ 30 #define FLG_AES BIT(1) 31 #define FLG_DES BIT(2) 32 #define FLG_TDES BIT(3) 33 #define FLG_ECB BIT(4) 34 #define FLG_CBC BIT(5) 35 #define FLG_CTR BIT(6) 36 #define FLG_GCM BIT(7) 37 #define FLG_CCM BIT(8) 38 /* Mode mask = bits [15..0] */ 39 #define FLG_MODE_MASK GENMASK(15, 0) 40 /* Bit [31..16] status */ 41 42 /* Registers */ 43 #define CRYP_CR 0x00000000 44 #define CRYP_SR 0x00000004 45 #define CRYP_DIN 0x00000008 46 #define CRYP_DOUT 0x0000000C 47 #define CRYP_DMACR 0x00000010 48 #define CRYP_IMSCR 0x00000014 49 #define CRYP_RISR 0x00000018 50 #define CRYP_MISR 0x0000001C 51 #define CRYP_K0LR 0x00000020 52 #define CRYP_K0RR 0x00000024 53 #define CRYP_K1LR 0x00000028 54 #define CRYP_K1RR 0x0000002C 55 #define CRYP_K2LR 0x00000030 56 #define CRYP_K2RR 0x00000034 57 #define CRYP_K3LR 0x00000038 58 #define CRYP_K3RR 0x0000003C 59 #define CRYP_IV0LR 0x00000040 60 #define CRYP_IV0RR 0x00000044 61 #define CRYP_IV1LR 0x00000048 62 #define CRYP_IV1RR 0x0000004C 63 #define CRYP_CSGCMCCM0R 0x00000050 64 #define CRYP_CSGCM0R 0x00000070 65 66 #define UX500_CRYP_CR 0x00000000 67 #define UX500_CRYP_SR 0x00000004 68 #define UX500_CRYP_DIN 0x00000008 69 #define UX500_CRYP_DINSIZE 0x0000000C 70 #define UX500_CRYP_DOUT 0x00000010 71 #define UX500_CRYP_DOUSIZE 0x00000014 72 #define UX500_CRYP_DMACR 0x00000018 73 #define UX500_CRYP_IMSC 0x0000001C 74 #define UX500_CRYP_RIS 0x00000020 75 #define UX500_CRYP_MIS 0x00000024 76 #define UX500_CRYP_K1L 0x00000028 77 #define UX500_CRYP_K1R 0x0000002C 78 #define UX500_CRYP_K2L 0x00000030 79 #define UX500_CRYP_K2R 0x00000034 80 #define UX500_CRYP_K3L 0x00000038 81 #define UX500_CRYP_K3R 0x0000003C 82 #define UX500_CRYP_K4L 0x00000040 83 #define UX500_CRYP_K4R 0x00000044 84 #define UX500_CRYP_IV0L 0x00000048 85 #define UX500_CRYP_IV0R 0x0000004C 86 #define UX500_CRYP_IV1L 0x00000050 87 #define UX500_CRYP_IV1R 0x00000054 88 89 /* Registers values */ 90 #define CR_DEC_NOT_ENC 0x00000004 91 #define CR_TDES_ECB 0x00000000 92 #define CR_TDES_CBC 0x00000008 93 #define CR_DES_ECB 0x00000010 94 #define CR_DES_CBC 0x00000018 95 #define CR_AES_ECB 0x00000020 96 #define CR_AES_CBC 0x00000028 97 #define CR_AES_CTR 0x00000030 98 #define CR_AES_KP 0x00000038 /* Not on Ux500 */ 99 #define CR_AES_XTS 0x00000038 /* Only on Ux500 */ 100 #define CR_AES_GCM 0x00080000 101 #define CR_AES_CCM 0x00080008 102 #define CR_AES_UNKNOWN 0xFFFFFFFF 103 #define CR_ALGO_MASK 0x00080038 104 #define CR_DATA32 0x00000000 105 #define CR_DATA16 0x00000040 106 #define CR_DATA8 0x00000080 107 #define CR_DATA1 0x000000C0 108 #define CR_KEY128 0x00000000 109 #define CR_KEY192 0x00000100 110 #define CR_KEY256 0x00000200 111 #define CR_KEYRDEN 0x00000400 /* Only on Ux500 
*/ 112 #define CR_KSE 0x00000800 /* Only on Ux500 */ 113 #define CR_FFLUSH 0x00004000 114 #define CR_CRYPEN 0x00008000 115 #define CR_PH_INIT 0x00000000 116 #define CR_PH_HEADER 0x00010000 117 #define CR_PH_PAYLOAD 0x00020000 118 #define CR_PH_FINAL 0x00030000 119 #define CR_PH_MASK 0x00030000 120 #define CR_NBPBL_SHIFT 20 121 122 #define SR_BUSY 0x00000010 123 #define SR_OFNE 0x00000004 124 125 #define IMSCR_IN BIT(0) 126 #define IMSCR_OUT BIT(1) 127 128 #define MISR_IN BIT(0) 129 #define MISR_OUT BIT(1) 130 131 /* Misc */ 132 #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32)) 133 #define GCM_CTR_INIT 2 134 #define CRYP_AUTOSUSPEND_DELAY 50 135 136 struct stm32_cryp_caps { 137 bool aeads_support; 138 bool linear_aes_key; 139 bool kp_mode; 140 bool iv_protection; 141 bool swap_final; 142 bool padding_wa; 143 u32 cr; 144 u32 sr; 145 u32 din; 146 u32 dout; 147 u32 imsc; 148 u32 mis; 149 u32 k1l; 150 u32 k1r; 151 u32 k3r; 152 u32 iv0l; 153 u32 iv0r; 154 u32 iv1l; 155 u32 iv1r; 156 }; 157 158 struct stm32_cryp_ctx { 159 struct crypto_engine_ctx enginectx; 160 struct stm32_cryp *cryp; 161 int keylen; 162 __be32 key[AES_KEYSIZE_256 / sizeof(u32)]; 163 unsigned long flags; 164 }; 165 166 struct stm32_cryp_reqctx { 167 unsigned long mode; 168 }; 169 170 struct stm32_cryp { 171 struct list_head list; 172 struct device *dev; 173 void __iomem *regs; 174 struct clk *clk; 175 unsigned long flags; 176 u32 irq_status; 177 const struct stm32_cryp_caps *caps; 178 struct stm32_cryp_ctx *ctx; 179 180 struct crypto_engine *engine; 181 182 struct skcipher_request *req; 183 struct aead_request *areq; 184 185 size_t authsize; 186 size_t hw_blocksize; 187 188 size_t payload_in; 189 size_t header_in; 190 size_t payload_out; 191 192 struct scatterlist *out_sg; 193 194 struct scatter_walk in_walk; 195 struct scatter_walk out_walk; 196 197 __be32 last_ctr[4]; 198 u32 gcm_ctr; 199 }; 200 201 struct stm32_cryp_list { 202 struct list_head dev_list; 203 spinlock_t lock; /* protect dev_list */ 204 }; 205 206 static struct stm32_cryp_list cryp_list = { 207 .dev_list = LIST_HEAD_INIT(cryp_list.dev_list), 208 .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock), 209 }; 210 211 static inline bool is_aes(struct stm32_cryp *cryp) 212 { 213 return cryp->flags & FLG_AES; 214 } 215 216 static inline bool is_des(struct stm32_cryp *cryp) 217 { 218 return cryp->flags & FLG_DES; 219 } 220 221 static inline bool is_tdes(struct stm32_cryp *cryp) 222 { 223 return cryp->flags & FLG_TDES; 224 } 225 226 static inline bool is_ecb(struct stm32_cryp *cryp) 227 { 228 return cryp->flags & FLG_ECB; 229 } 230 231 static inline bool is_cbc(struct stm32_cryp *cryp) 232 { 233 return cryp->flags & FLG_CBC; 234 } 235 236 static inline bool is_ctr(struct stm32_cryp *cryp) 237 { 238 return cryp->flags & FLG_CTR; 239 } 240 241 static inline bool is_gcm(struct stm32_cryp *cryp) 242 { 243 return cryp->flags & FLG_GCM; 244 } 245 246 static inline bool is_ccm(struct stm32_cryp *cryp) 247 { 248 return cryp->flags & FLG_CCM; 249 } 250 251 static inline bool is_encrypt(struct stm32_cryp *cryp) 252 { 253 return cryp->flags & FLG_ENCRYPT; 254 } 255 256 static inline bool is_decrypt(struct stm32_cryp *cryp) 257 { 258 return !is_encrypt(cryp); 259 } 260 261 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst) 262 { 263 return readl_relaxed(cryp->regs + ofst); 264 } 265 266 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val) 267 { 268 writel_relaxed(val, cryp->regs + ofst); 269 } 270 271 static inline int 
stm32_cryp_wait_busy(struct stm32_cryp *cryp) 272 { 273 u32 status; 274 275 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status, 276 !(status & SR_BUSY), 10, 100000); 277 } 278 279 static inline void stm32_cryp_enable(struct stm32_cryp *cryp) 280 { 281 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN, 282 cryp->regs + cryp->caps->cr); 283 } 284 285 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp) 286 { 287 u32 status; 288 289 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status, 290 !(status & CR_CRYPEN), 10, 100000); 291 } 292 293 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp) 294 { 295 u32 status; 296 297 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status, 298 status & SR_OFNE, 10, 100000); 299 } 300 301 static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp) 302 { 303 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN, 304 cryp->regs + cryp->caps->cr); 305 } 306 307 static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp) 308 { 309 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN, 310 cryp->regs + cryp->caps->cr); 311 } 312 313 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp); 314 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err); 315 316 static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx) 317 { 318 struct stm32_cryp *tmp, *cryp = NULL; 319 320 spin_lock_bh(&cryp_list.lock); 321 if (!ctx->cryp) { 322 list_for_each_entry(tmp, &cryp_list.dev_list, list) { 323 cryp = tmp; 324 break; 325 } 326 ctx->cryp = cryp; 327 } else { 328 cryp = ctx->cryp; 329 } 330 331 spin_unlock_bh(&cryp_list.lock); 332 333 return cryp; 334 } 335 336 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv) 337 { 338 if (!iv) 339 return; 340 341 stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++)); 342 stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++)); 343 344 if (is_aes(cryp)) { 345 stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++)); 346 stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++)); 347 } 348 } 349 350 static void stm32_cryp_get_iv(struct stm32_cryp *cryp) 351 { 352 struct skcipher_request *req = cryp->req; 353 __be32 *tmp = (void *)req->iv; 354 355 if (!tmp) 356 return; 357 358 if (cryp->caps->iv_protection) 359 stm32_cryp_key_read_enable(cryp); 360 361 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l)); 362 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r)); 363 364 if (is_aes(cryp)) { 365 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l)); 366 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r)); 367 } 368 369 if (cryp->caps->iv_protection) 370 stm32_cryp_key_read_disable(cryp); 371 } 372 373 /** 374 * ux500_swap_bits_in_byte() - mirror the bits in a byte 375 * @b: the byte to be mirrored 376 * 377 * The bits are swapped the following way: 378 * Byte b include bits 0-7, nibble 1 (n1) include bits 0-3 and 379 * nibble 2 (n2) bits 4-7. 380 * 381 * Nibble 1 (n1): 382 * (The "old" (moved) bit is replaced with a zero) 383 * 1. Move bit 6 and 7, 4 positions to the left. 384 * 2. Move bit 3 and 5, 2 positions to the left. 385 * 3. Move bit 1-4, 1 position to the left. 386 * 387 * Nibble 2 (n2): 388 * 1. Move bit 0 and 1, 4 positions to the right. 389 * 2. Move bit 2 and 4, 2 positions to the right. 390 * 3. Move bit 3-6, 1 position to the right. 
 *
 * Combine the two nibbles to a complete and swapped byte.
 */
static inline u8 ux500_swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
				right shift 2 */
#define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
				right shift 1 */
#define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
				left shift 2 */
#define L_SHIFT_1_MASK  0x78 /* (After left shift 2) Bits 3-6,
				left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1 & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2 & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}

/**
 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
 * @in: key to swizzle
 * @out: swizzled key
 * @len: length of key, in bytes
 *
 * This "key swizzling procedure" is described in the examples in the
 * DB8500 design specification. There is no real description of why
 * the bits have been arranged like this in the hardware.
 */
static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
{
	int i = 0;
	int bpw = sizeof(u32);
	int j;
	int index = 0;

	j = len - bpw;
	while (j >= 0) {
		for (i = 0; i < bpw; i++) {
			index = len - j - bpw + i;
			out[j + i] =
				ux500_swap_bits_in_byte(in[index]);
		}
		j -= bpw;
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
		return;
	}

	/*
	 * On the Ux500 the AES key is considered as a single bit sequence
	 * of 128, 192 or 256 bits length. It is written linearly into the
	 * registers from K1L and down, and needs to be processed to become
	 * a proper big-endian bit sequence.
473 */ 474 if (is_aes(c) && c->caps->linear_aes_key) { 475 u32 tmpkey[8]; 476 477 ux500_swizzle_key((u8 *)c->ctx->key, 478 (u8 *)tmpkey, c->ctx->keylen); 479 480 r_id = c->caps->k1l; 481 for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4) 482 stm32_cryp_write(c, r_id, tmpkey[i]); 483 484 return; 485 } 486 487 r_id = c->caps->k3r; 488 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4) 489 stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1])); 490 } 491 492 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp) 493 { 494 if (is_aes(cryp) && is_ecb(cryp)) 495 return CR_AES_ECB; 496 497 if (is_aes(cryp) && is_cbc(cryp)) 498 return CR_AES_CBC; 499 500 if (is_aes(cryp) && is_ctr(cryp)) 501 return CR_AES_CTR; 502 503 if (is_aes(cryp) && is_gcm(cryp)) 504 return CR_AES_GCM; 505 506 if (is_aes(cryp) && is_ccm(cryp)) 507 return CR_AES_CCM; 508 509 if (is_des(cryp) && is_ecb(cryp)) 510 return CR_DES_ECB; 511 512 if (is_des(cryp) && is_cbc(cryp)) 513 return CR_DES_CBC; 514 515 if (is_tdes(cryp) && is_ecb(cryp)) 516 return CR_TDES_ECB; 517 518 if (is_tdes(cryp) && is_cbc(cryp)) 519 return CR_TDES_CBC; 520 521 dev_err(cryp->dev, "Unknown mode\n"); 522 return CR_AES_UNKNOWN; 523 } 524 525 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp) 526 { 527 return is_encrypt(cryp) ? cryp->areq->cryptlen : 528 cryp->areq->cryptlen - cryp->authsize; 529 } 530 531 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg) 532 { 533 int ret; 534 __be32 iv[4]; 535 536 /* Phase 1 : init */ 537 memcpy(iv, cryp->areq->iv, 12); 538 iv[3] = cpu_to_be32(GCM_CTR_INIT); 539 cryp->gcm_ctr = GCM_CTR_INIT; 540 stm32_cryp_hw_write_iv(cryp, iv); 541 542 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN); 543 544 /* Wait for end of processing */ 545 ret = stm32_cryp_wait_enable(cryp); 546 if (ret) { 547 dev_err(cryp->dev, "Timeout (gcm init)\n"); 548 return ret; 549 } 550 551 /* Prepare next phase */ 552 if (cryp->areq->assoclen) { 553 cfg |= CR_PH_HEADER; 554 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 555 } else if (stm32_cryp_get_input_text_len(cryp)) { 556 cfg |= CR_PH_PAYLOAD; 557 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 558 } 559 560 return 0; 561 } 562 563 static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp) 564 { 565 u32 cfg; 566 int err; 567 568 /* Check if whole header written */ 569 if (!cryp->header_in) { 570 /* Wait for completion */ 571 err = stm32_cryp_wait_busy(cryp); 572 if (err) { 573 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n"); 574 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 575 stm32_cryp_finish_req(cryp, err); 576 return; 577 } 578 579 if (stm32_cryp_get_input_text_len(cryp)) { 580 /* Phase 3 : payload */ 581 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 582 cfg &= ~CR_CRYPEN; 583 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 584 585 cfg &= ~CR_PH_MASK; 586 cfg |= CR_PH_PAYLOAD | CR_CRYPEN; 587 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 588 } else { 589 /* 590 * Phase 4 : tag. 
591 * Nothing to read, nothing to write, caller have to 592 * end request 593 */ 594 } 595 } 596 } 597 598 static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp) 599 { 600 size_t written; 601 size_t len; 602 u32 alen = cryp->areq->assoclen; 603 u32 block[AES_BLOCK_32] = {0}; 604 u8 *b8 = (u8 *)block; 605 606 if (alen <= 65280) { 607 /* Write first u32 of B1 */ 608 b8[0] = (alen >> 8) & 0xFF; 609 b8[1] = alen & 0xFF; 610 len = 2; 611 } else { 612 /* Build the two first u32 of B1 */ 613 b8[0] = 0xFF; 614 b8[1] = 0xFE; 615 b8[2] = (alen & 0xFF000000) >> 24; 616 b8[3] = (alen & 0x00FF0000) >> 16; 617 b8[4] = (alen & 0x0000FF00) >> 8; 618 b8[5] = alen & 0x000000FF; 619 len = 6; 620 } 621 622 written = min_t(size_t, AES_BLOCK_SIZE - len, alen); 623 624 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0); 625 626 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32); 627 628 cryp->header_in -= written; 629 630 stm32_crypt_gcmccm_end_header(cryp); 631 } 632 633 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg) 634 { 635 int ret; 636 u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32]; 637 u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32; 638 __be32 *bd; 639 u32 *d; 640 unsigned int i, textlen; 641 642 /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */ 643 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); 644 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); 645 iv[AES_BLOCK_SIZE - 1] = 1; 646 stm32_cryp_hw_write_iv(cryp, (__be32 *)iv); 647 648 /* Build B0 */ 649 memcpy(b0, iv, AES_BLOCK_SIZE); 650 651 b0[0] |= (8 * ((cryp->authsize - 2) / 2)); 652 653 if (cryp->areq->assoclen) 654 b0[0] |= 0x40; 655 656 textlen = stm32_cryp_get_input_text_len(cryp); 657 658 b0[AES_BLOCK_SIZE - 2] = textlen >> 8; 659 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF; 660 661 /* Enable HW */ 662 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN); 663 664 /* Write B0 */ 665 d = (u32 *)b0; 666 bd = (__be32 *)b0; 667 668 for (i = 0; i < AES_BLOCK_32; i++) { 669 u32 xd = d[i]; 670 671 if (!cryp->caps->padding_wa) 672 xd = be32_to_cpu(bd[i]); 673 stm32_cryp_write(cryp, cryp->caps->din, xd); 674 } 675 676 /* Wait for end of processing */ 677 ret = stm32_cryp_wait_enable(cryp); 678 if (ret) { 679 dev_err(cryp->dev, "Timeout (ccm init)\n"); 680 return ret; 681 } 682 683 /* Prepare next phase */ 684 if (cryp->areq->assoclen) { 685 cfg |= CR_PH_HEADER | CR_CRYPEN; 686 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 687 688 /* Write first (special) block (may move to next phase [payload]) */ 689 stm32_cryp_write_ccm_first_header(cryp); 690 } else if (stm32_cryp_get_input_text_len(cryp)) { 691 cfg |= CR_PH_PAYLOAD; 692 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 693 } 694 695 return 0; 696 } 697 698 static int stm32_cryp_hw_init(struct stm32_cryp *cryp) 699 { 700 int ret; 701 u32 cfg, hw_mode; 702 703 pm_runtime_get_sync(cryp->dev); 704 705 /* Disable interrupt */ 706 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 707 708 /* Set configuration */ 709 cfg = CR_DATA8 | CR_FFLUSH; 710 711 switch (cryp->ctx->keylen) { 712 case AES_KEYSIZE_128: 713 cfg |= CR_KEY128; 714 break; 715 716 case AES_KEYSIZE_192: 717 cfg |= CR_KEY192; 718 break; 719 720 default: 721 case AES_KEYSIZE_256: 722 cfg |= CR_KEY256; 723 break; 724 } 725 726 hw_mode = stm32_cryp_get_hw_mode(cryp); 727 if (hw_mode == CR_AES_UNKNOWN) 728 return -EINVAL; 729 730 /* AES ECB/CBC decrypt: run key preparation first */ 731 if (is_decrypt(cryp) && 732 ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) { 733 /* 
Configure in key preparation mode */ 734 if (cryp->caps->kp_mode) 735 stm32_cryp_write(cryp, cryp->caps->cr, 736 cfg | CR_AES_KP); 737 else 738 stm32_cryp_write(cryp, 739 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE); 740 741 /* Set key only after full configuration done */ 742 stm32_cryp_hw_write_key(cryp); 743 744 /* Start prepare key */ 745 stm32_cryp_enable(cryp); 746 /* Wait for end of processing */ 747 ret = stm32_cryp_wait_busy(cryp); 748 if (ret) { 749 dev_err(cryp->dev, "Timeout (key preparation)\n"); 750 return ret; 751 } 752 753 cfg |= hw_mode | CR_DEC_NOT_ENC; 754 755 /* Apply updated config (Decrypt + algo) and flush */ 756 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 757 } else { 758 cfg |= hw_mode; 759 if (is_decrypt(cryp)) 760 cfg |= CR_DEC_NOT_ENC; 761 762 /* Apply config and flush */ 763 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 764 765 /* Set key only after configuration done */ 766 stm32_cryp_hw_write_key(cryp); 767 } 768 769 switch (hw_mode) { 770 case CR_AES_GCM: 771 case CR_AES_CCM: 772 /* Phase 1 : init */ 773 if (hw_mode == CR_AES_CCM) 774 ret = stm32_cryp_ccm_init(cryp, cfg); 775 else 776 ret = stm32_cryp_gcm_init(cryp, cfg); 777 778 if (ret) 779 return ret; 780 781 break; 782 783 case CR_DES_CBC: 784 case CR_TDES_CBC: 785 case CR_AES_CBC: 786 case CR_AES_CTR: 787 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv); 788 break; 789 790 default: 791 break; 792 } 793 794 /* Enable now */ 795 stm32_cryp_enable(cryp); 796 797 return 0; 798 } 799 800 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err) 801 { 802 if (!err && (is_gcm(cryp) || is_ccm(cryp))) 803 /* Phase 4 : output tag */ 804 err = stm32_cryp_read_auth_tag(cryp); 805 806 if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp)))) 807 stm32_cryp_get_iv(cryp); 808 809 pm_runtime_mark_last_busy(cryp->dev); 810 pm_runtime_put_autosuspend(cryp->dev); 811 812 if (is_gcm(cryp) || is_ccm(cryp)) 813 crypto_finalize_aead_request(cryp->engine, cryp->areq, err); 814 else 815 crypto_finalize_skcipher_request(cryp->engine, cryp->req, 816 err); 817 } 818 819 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp) 820 { 821 /* Enable interrupt and let the IRQ handler do everything */ 822 stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT); 823 824 return 0; 825 } 826 827 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq); 828 static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine, 829 void *areq); 830 831 static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm) 832 { 833 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm); 834 835 crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx)); 836 837 ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req; 838 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req; 839 ctx->enginectx.op.unprepare_request = NULL; 840 return 0; 841 } 842 843 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq); 844 static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, 845 void *areq); 846 847 static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm) 848 { 849 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm); 850 851 tfm->reqsize = sizeof(struct stm32_cryp_reqctx); 852 853 ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req; 854 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req; 855 ctx->enginectx.op.unprepare_request = NULL; 856 857 return 0; 858 } 859 860 static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long 
mode) 861 { 862 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx( 863 crypto_skcipher_reqtfm(req)); 864 struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req); 865 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); 866 867 if (!cryp) 868 return -ENODEV; 869 870 rctx->mode = mode; 871 872 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req); 873 } 874 875 static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode) 876 { 877 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 878 struct stm32_cryp_reqctx *rctx = aead_request_ctx(req); 879 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); 880 881 if (!cryp) 882 return -ENODEV; 883 884 rctx->mode = mode; 885 886 return crypto_transfer_aead_request_to_engine(cryp->engine, req); 887 } 888 889 static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key, 890 unsigned int keylen) 891 { 892 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm); 893 894 memcpy(ctx->key, key, keylen); 895 ctx->keylen = keylen; 896 897 return 0; 898 } 899 900 static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, 901 unsigned int keylen) 902 { 903 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 904 keylen != AES_KEYSIZE_256) 905 return -EINVAL; 906 else 907 return stm32_cryp_setkey(tfm, key, keylen); 908 } 909 910 static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key, 911 unsigned int keylen) 912 { 913 return verify_skcipher_des_key(tfm, key) ?: 914 stm32_cryp_setkey(tfm, key, keylen); 915 } 916 917 static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key, 918 unsigned int keylen) 919 { 920 return verify_skcipher_des3_key(tfm, key) ?: 921 stm32_cryp_setkey(tfm, key, keylen); 922 } 923 924 static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key, 925 unsigned int keylen) 926 { 927 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm); 928 929 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 930 keylen != AES_KEYSIZE_256) 931 return -EINVAL; 932 933 memcpy(ctx->key, key, keylen); 934 ctx->keylen = keylen; 935 936 return 0; 937 } 938 939 static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm, 940 unsigned int authsize) 941 { 942 switch (authsize) { 943 case 4: 944 case 8: 945 case 12: 946 case 13: 947 case 14: 948 case 15: 949 case 16: 950 break; 951 default: 952 return -EINVAL; 953 } 954 955 return 0; 956 } 957 958 static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm, 959 unsigned int authsize) 960 { 961 switch (authsize) { 962 case 4: 963 case 6: 964 case 8: 965 case 10: 966 case 12: 967 case 14: 968 case 16: 969 break; 970 default: 971 return -EINVAL; 972 } 973 974 return 0; 975 } 976 977 static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req) 978 { 979 if (req->cryptlen % AES_BLOCK_SIZE) 980 return -EINVAL; 981 982 if (req->cryptlen == 0) 983 return 0; 984 985 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT); 986 } 987 988 static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req) 989 { 990 if (req->cryptlen % AES_BLOCK_SIZE) 991 return -EINVAL; 992 993 if (req->cryptlen == 0) 994 return 0; 995 996 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB); 997 } 998 999 static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req) 1000 { 1001 if (req->cryptlen % AES_BLOCK_SIZE) 1002 return -EINVAL; 1003 1004 if (req->cryptlen == 0) 1005 return 0; 1006 1007 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT); 1008 } 1009 1010 
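/*
 * Illustrative note (not part of the original driver code): callers typically
 * reach the handlers above through the generic skcipher API. A minimal sketch,
 * using placeholder names (key, src, dst, iv, nbytes, done_cb), might look
 * like:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      done_cb, NULL);
 *	skcipher_request_set_crypt(req, src, dst, nbytes, iv);
 *	int err = crypto_skcipher_encrypt(req);   (usually returns -EINPROGRESS)
 *
 * Because the algorithms are registered with CRYPTO_ALG_ASYNC, the request is
 * queued to the crypto engine and completes through the callback. The checks
 * in the ECB/CBC handlers require cryptlen to be a multiple of AES_BLOCK_SIZE,
 * and a zero-length request completes immediately without touching hardware.
 */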
static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req) 1011 { 1012 if (req->cryptlen % AES_BLOCK_SIZE) 1013 return -EINVAL; 1014 1015 if (req->cryptlen == 0) 1016 return 0; 1017 1018 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC); 1019 } 1020 1021 static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req) 1022 { 1023 if (req->cryptlen == 0) 1024 return 0; 1025 1026 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT); 1027 } 1028 1029 static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req) 1030 { 1031 if (req->cryptlen == 0) 1032 return 0; 1033 1034 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR); 1035 } 1036 1037 static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req) 1038 { 1039 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT); 1040 } 1041 1042 static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req) 1043 { 1044 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM); 1045 } 1046 1047 static inline int crypto_ccm_check_iv(const u8 *iv) 1048 { 1049 /* 2 <= L <= 8, so 1 <= L' <= 7. */ 1050 if (iv[0] < 1 || iv[0] > 7) 1051 return -EINVAL; 1052 1053 return 0; 1054 } 1055 1056 static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req) 1057 { 1058 int err; 1059 1060 err = crypto_ccm_check_iv(req->iv); 1061 if (err) 1062 return err; 1063 1064 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT); 1065 } 1066 1067 static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req) 1068 { 1069 int err; 1070 1071 err = crypto_ccm_check_iv(req->iv); 1072 if (err) 1073 return err; 1074 1075 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM); 1076 } 1077 1078 static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req) 1079 { 1080 if (req->cryptlen % DES_BLOCK_SIZE) 1081 return -EINVAL; 1082 1083 if (req->cryptlen == 0) 1084 return 0; 1085 1086 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT); 1087 } 1088 1089 static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req) 1090 { 1091 if (req->cryptlen % DES_BLOCK_SIZE) 1092 return -EINVAL; 1093 1094 if (req->cryptlen == 0) 1095 return 0; 1096 1097 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB); 1098 } 1099 1100 static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req) 1101 { 1102 if (req->cryptlen % DES_BLOCK_SIZE) 1103 return -EINVAL; 1104 1105 if (req->cryptlen == 0) 1106 return 0; 1107 1108 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT); 1109 } 1110 1111 static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req) 1112 { 1113 if (req->cryptlen % DES_BLOCK_SIZE) 1114 return -EINVAL; 1115 1116 if (req->cryptlen == 0) 1117 return 0; 1118 1119 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC); 1120 } 1121 1122 static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req) 1123 { 1124 if (req->cryptlen % DES_BLOCK_SIZE) 1125 return -EINVAL; 1126 1127 if (req->cryptlen == 0) 1128 return 0; 1129 1130 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT); 1131 } 1132 1133 static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req) 1134 { 1135 if (req->cryptlen % DES_BLOCK_SIZE) 1136 return -EINVAL; 1137 1138 if (req->cryptlen == 0) 1139 return 0; 1140 1141 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB); 1142 } 1143 1144 static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req) 1145 { 1146 if (req->cryptlen % DES_BLOCK_SIZE) 1147 return -EINVAL; 1148 1149 if (req->cryptlen == 0) 1150 return 0; 1151 1152 return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT); 1153 } 

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData   ||   PlainText
		 *          <- assoclen ->   <- cryptlen ->
		 *
		 *  OUTPUT =   AssocData   ||  CipherText   ||   AuthTag
		 *          <- assoclen ->   <-- cryptlen -->  <- authsize ->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData   ||  CipherText   ||  AuthTag
		 *          <- assoclen --->  <---------- cryptlen ---------->
		 *
		 *  OUTPUT =   AssocData   ||   PlainText
		 *          <- assoclen ->   <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}
	}

	in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, in_sg);

	cryp->out_sg = req ?
req->dst : areq->dst; 1237 scatterwalk_start(&cryp->out_walk, cryp->out_sg); 1238 1239 if (is_gcm(cryp) || is_ccm(cryp)) { 1240 /* In output, jump after assoc data */ 1241 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2); 1242 } 1243 1244 if (is_ctr(cryp)) 1245 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr)); 1246 1247 ret = stm32_cryp_hw_init(cryp); 1248 return ret; 1249 } 1250 1251 static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine, 1252 void *areq) 1253 { 1254 struct skcipher_request *req = container_of(areq, 1255 struct skcipher_request, 1256 base); 1257 1258 return stm32_cryp_prepare_req(req, NULL); 1259 } 1260 1261 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq) 1262 { 1263 struct skcipher_request *req = container_of(areq, 1264 struct skcipher_request, 1265 base); 1266 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx( 1267 crypto_skcipher_reqtfm(req)); 1268 struct stm32_cryp *cryp = ctx->cryp; 1269 1270 if (!cryp) 1271 return -ENODEV; 1272 1273 return stm32_cryp_cpu_start(cryp); 1274 } 1275 1276 static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq) 1277 { 1278 struct aead_request *req = container_of(areq, struct aead_request, 1279 base); 1280 1281 return stm32_cryp_prepare_req(NULL, req); 1282 } 1283 1284 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq) 1285 { 1286 struct aead_request *req = container_of(areq, struct aead_request, 1287 base); 1288 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 1289 struct stm32_cryp *cryp = ctx->cryp; 1290 1291 if (!cryp) 1292 return -ENODEV; 1293 1294 if (unlikely(!cryp->payload_in && !cryp->header_in)) { 1295 /* No input data to process: get tag and finish */ 1296 stm32_cryp_finish_req(cryp, 0); 1297 return 0; 1298 } 1299 1300 return stm32_cryp_cpu_start(cryp); 1301 } 1302 1303 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp) 1304 { 1305 u32 cfg, size_bit; 1306 unsigned int i; 1307 int ret = 0; 1308 1309 /* Update Config */ 1310 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 1311 1312 cfg &= ~CR_PH_MASK; 1313 cfg |= CR_PH_FINAL; 1314 cfg &= ~CR_DEC_NOT_ENC; 1315 cfg |= CR_CRYPEN; 1316 1317 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1318 1319 if (is_gcm(cryp)) { 1320 /* GCM: write aad and payload size (in bits) */ 1321 size_bit = cryp->areq->assoclen * 8; 1322 if (cryp->caps->swap_final) 1323 size_bit = (__force u32)cpu_to_be32(size_bit); 1324 1325 stm32_cryp_write(cryp, cryp->caps->din, 0); 1326 stm32_cryp_write(cryp, cryp->caps->din, size_bit); 1327 1328 size_bit = is_encrypt(cryp) ? 
cryp->areq->cryptlen : 1329 cryp->areq->cryptlen - cryp->authsize; 1330 size_bit *= 8; 1331 if (cryp->caps->swap_final) 1332 size_bit = (__force u32)cpu_to_be32(size_bit); 1333 1334 stm32_cryp_write(cryp, cryp->caps->din, 0); 1335 stm32_cryp_write(cryp, cryp->caps->din, size_bit); 1336 } else { 1337 /* CCM: write CTR0 */ 1338 u32 iv32[AES_BLOCK_32]; 1339 u8 *iv = (u8 *)iv32; 1340 __be32 *biv = (__be32 *)iv32; 1341 1342 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); 1343 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); 1344 1345 for (i = 0; i < AES_BLOCK_32; i++) { 1346 u32 xiv = iv32[i]; 1347 1348 if (!cryp->caps->padding_wa) 1349 xiv = be32_to_cpu(biv[i]); 1350 stm32_cryp_write(cryp, cryp->caps->din, xiv); 1351 } 1352 } 1353 1354 /* Wait for output data */ 1355 ret = stm32_cryp_wait_output(cryp); 1356 if (ret) { 1357 dev_err(cryp->dev, "Timeout (read tag)\n"); 1358 return ret; 1359 } 1360 1361 if (is_encrypt(cryp)) { 1362 u32 out_tag[AES_BLOCK_32]; 1363 1364 /* Get and write tag */ 1365 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32); 1366 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1); 1367 } else { 1368 /* Get and check tag */ 1369 u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32]; 1370 1371 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0); 1372 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32); 1373 1374 if (crypto_memneq(in_tag, out_tag, cryp->authsize)) 1375 ret = -EBADMSG; 1376 } 1377 1378 /* Disable cryp */ 1379 cfg &= ~CR_CRYPEN; 1380 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1381 1382 return ret; 1383 } 1384 1385 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp) 1386 { 1387 u32 cr; 1388 1389 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) { 1390 /* 1391 * In this case, we need to increment manually the ctr counter, 1392 * as HW doesn't handle the U32 carry. 
1393 */ 1394 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr)); 1395 1396 cr = stm32_cryp_read(cryp, cryp->caps->cr); 1397 stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN); 1398 1399 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr); 1400 1401 stm32_cryp_write(cryp, cryp->caps->cr, cr); 1402 } 1403 1404 /* The IV registers are BE */ 1405 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l)); 1406 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r)); 1407 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l)); 1408 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r)); 1409 } 1410 1411 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp) 1412 { 1413 u32 block[AES_BLOCK_32]; 1414 1415 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32)); 1416 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, 1417 cryp->payload_out), 1); 1418 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, 1419 cryp->payload_out); 1420 } 1421 1422 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp) 1423 { 1424 u32 block[AES_BLOCK_32] = {0}; 1425 1426 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize, 1427 cryp->payload_in), 0); 1428 writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32)); 1429 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in); 1430 } 1431 1432 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp) 1433 { 1434 int err; 1435 u32 cfg, block[AES_BLOCK_32] = {0}; 1436 unsigned int i; 1437 1438 /* 'Special workaround' procedure described in the datasheet */ 1439 1440 /* a) disable ip */ 1441 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 1442 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 1443 cfg &= ~CR_CRYPEN; 1444 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1445 1446 /* b) Update IV1R */ 1447 stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2); 1448 1449 /* c) change mode to CTR */ 1450 cfg &= ~CR_ALGO_MASK; 1451 cfg |= CR_AES_CTR; 1452 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1453 1454 /* a) enable IP */ 1455 cfg |= CR_CRYPEN; 1456 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1457 1458 /* b) pad and write the last block */ 1459 stm32_cryp_irq_write_block(cryp); 1460 /* wait end of process */ 1461 err = stm32_cryp_wait_output(cryp); 1462 if (err) { 1463 dev_err(cryp->dev, "Timeout (write gcm last data)\n"); 1464 return stm32_cryp_finish_req(cryp, err); 1465 } 1466 1467 /* c) get and store encrypted data */ 1468 /* 1469 * Same code as stm32_cryp_irq_read_data(), but we want to store 1470 * block value 1471 */ 1472 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32)); 1473 1474 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, 1475 cryp->payload_out), 1); 1476 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, 1477 cryp->payload_out); 1478 1479 /* d) change mode back to AES GCM */ 1480 cfg &= ~CR_ALGO_MASK; 1481 cfg |= CR_AES_GCM; 1482 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1483 1484 /* e) change phase to Final */ 1485 cfg &= ~CR_PH_MASK; 1486 cfg |= CR_PH_FINAL; 1487 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1488 1489 /* f) write padded data */ 1490 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32); 1491 1492 /* g) Empty fifo out */ 1493 err = stm32_cryp_wait_output(cryp); 1494 if (err) { 1495 dev_err(cryp->dev, "Timeout (write gcm padded data)\n"); 1496 return 
			stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, cryp->caps->dout);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more
data to process\n"); 1610 return; 1611 } 1612 1613 if (unlikely(cryp->payload_in < AES_BLOCK_SIZE && 1614 (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) && 1615 is_encrypt(cryp))) { 1616 /* Padding for AES GCM encryption */ 1617 if (cryp->caps->padding_wa) { 1618 /* Special case 1 */ 1619 stm32_cryp_irq_write_gcm_padded_data(cryp); 1620 return; 1621 } 1622 1623 /* Setting padding bytes (NBBLB) */ 1624 stm32_cryp_irq_set_npblb(cryp); 1625 } 1626 1627 if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) && 1628 (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) && 1629 is_decrypt(cryp))) { 1630 /* Padding for AES CCM decryption */ 1631 if (cryp->caps->padding_wa) { 1632 /* Special case 2 */ 1633 stm32_cryp_irq_write_ccm_padded_data(cryp); 1634 return; 1635 } 1636 1637 /* Setting padding bytes (NBBLB) */ 1638 stm32_cryp_irq_set_npblb(cryp); 1639 } 1640 1641 if (is_aes(cryp) && is_ctr(cryp)) 1642 stm32_cryp_check_ctr_counter(cryp); 1643 1644 stm32_cryp_irq_write_block(cryp); 1645 } 1646 1647 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp) 1648 { 1649 u32 block[AES_BLOCK_32] = {0}; 1650 size_t written; 1651 1652 written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in); 1653 1654 scatterwalk_copychunks(block, &cryp->in_walk, written, 0); 1655 1656 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32); 1657 1658 cryp->header_in -= written; 1659 1660 stm32_crypt_gcmccm_end_header(cryp); 1661 } 1662 1663 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg) 1664 { 1665 struct stm32_cryp *cryp = arg; 1666 u32 ph; 1667 u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc); 1668 1669 if (cryp->irq_status & MISR_OUT) 1670 /* Output FIFO IRQ: read data */ 1671 stm32_cryp_irq_read_data(cryp); 1672 1673 if (cryp->irq_status & MISR_IN) { 1674 if (is_gcm(cryp) || is_ccm(cryp)) { 1675 ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK; 1676 if (unlikely(ph == CR_PH_HEADER)) 1677 /* Write Header */ 1678 stm32_cryp_irq_write_gcmccm_header(cryp); 1679 else 1680 /* Input FIFO IRQ: write data */ 1681 stm32_cryp_irq_write_data(cryp); 1682 if (is_gcm(cryp)) 1683 cryp->gcm_ctr++; 1684 } else { 1685 /* Input FIFO IRQ: write data */ 1686 stm32_cryp_irq_write_data(cryp); 1687 } 1688 } 1689 1690 /* Mask useless interrupts */ 1691 if (!cryp->payload_in && !cryp->header_in) 1692 it_mask &= ~IMSCR_IN; 1693 if (!cryp->payload_out) 1694 it_mask &= ~IMSCR_OUT; 1695 stm32_cryp_write(cryp, cryp->caps->imsc, it_mask); 1696 1697 if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) 1698 stm32_cryp_finish_req(cryp, 0); 1699 1700 return IRQ_HANDLED; 1701 } 1702 1703 static irqreturn_t stm32_cryp_irq(int irq, void *arg) 1704 { 1705 struct stm32_cryp *cryp = arg; 1706 1707 cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis); 1708 1709 return IRQ_WAKE_THREAD; 1710 } 1711 1712 static struct skcipher_alg crypto_algs[] = { 1713 { 1714 .base.cra_name = "ecb(aes)", 1715 .base.cra_driver_name = "stm32-ecb-aes", 1716 .base.cra_priority = 200, 1717 .base.cra_flags = CRYPTO_ALG_ASYNC, 1718 .base.cra_blocksize = AES_BLOCK_SIZE, 1719 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1720 .base.cra_alignmask = 0, 1721 .base.cra_module = THIS_MODULE, 1722 1723 .init = stm32_cryp_init_tfm, 1724 .min_keysize = AES_MIN_KEY_SIZE, 1725 .max_keysize = AES_MAX_KEY_SIZE, 1726 .setkey = stm32_cryp_aes_setkey, 1727 .encrypt = stm32_cryp_aes_ecb_encrypt, 1728 .decrypt = stm32_cryp_aes_ecb_decrypt, 1729 }, 1730 { 1731 .base.cra_name = "cbc(aes)", 1732 .base.cra_driver_name = "stm32-cbc-aes", 1733 
.base.cra_priority = 200, 1734 .base.cra_flags = CRYPTO_ALG_ASYNC, 1735 .base.cra_blocksize = AES_BLOCK_SIZE, 1736 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1737 .base.cra_alignmask = 0, 1738 .base.cra_module = THIS_MODULE, 1739 1740 .init = stm32_cryp_init_tfm, 1741 .min_keysize = AES_MIN_KEY_SIZE, 1742 .max_keysize = AES_MAX_KEY_SIZE, 1743 .ivsize = AES_BLOCK_SIZE, 1744 .setkey = stm32_cryp_aes_setkey, 1745 .encrypt = stm32_cryp_aes_cbc_encrypt, 1746 .decrypt = stm32_cryp_aes_cbc_decrypt, 1747 }, 1748 { 1749 .base.cra_name = "ctr(aes)", 1750 .base.cra_driver_name = "stm32-ctr-aes", 1751 .base.cra_priority = 200, 1752 .base.cra_flags = CRYPTO_ALG_ASYNC, 1753 .base.cra_blocksize = 1, 1754 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1755 .base.cra_alignmask = 0, 1756 .base.cra_module = THIS_MODULE, 1757 1758 .init = stm32_cryp_init_tfm, 1759 .min_keysize = AES_MIN_KEY_SIZE, 1760 .max_keysize = AES_MAX_KEY_SIZE, 1761 .ivsize = AES_BLOCK_SIZE, 1762 .setkey = stm32_cryp_aes_setkey, 1763 .encrypt = stm32_cryp_aes_ctr_encrypt, 1764 .decrypt = stm32_cryp_aes_ctr_decrypt, 1765 }, 1766 { 1767 .base.cra_name = "ecb(des)", 1768 .base.cra_driver_name = "stm32-ecb-des", 1769 .base.cra_priority = 200, 1770 .base.cra_flags = CRYPTO_ALG_ASYNC, 1771 .base.cra_blocksize = DES_BLOCK_SIZE, 1772 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1773 .base.cra_alignmask = 0, 1774 .base.cra_module = THIS_MODULE, 1775 1776 .init = stm32_cryp_init_tfm, 1777 .min_keysize = DES_BLOCK_SIZE, 1778 .max_keysize = DES_BLOCK_SIZE, 1779 .setkey = stm32_cryp_des_setkey, 1780 .encrypt = stm32_cryp_des_ecb_encrypt, 1781 .decrypt = stm32_cryp_des_ecb_decrypt, 1782 }, 1783 { 1784 .base.cra_name = "cbc(des)", 1785 .base.cra_driver_name = "stm32-cbc-des", 1786 .base.cra_priority = 200, 1787 .base.cra_flags = CRYPTO_ALG_ASYNC, 1788 .base.cra_blocksize = DES_BLOCK_SIZE, 1789 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1790 .base.cra_alignmask = 0, 1791 .base.cra_module = THIS_MODULE, 1792 1793 .init = stm32_cryp_init_tfm, 1794 .min_keysize = DES_BLOCK_SIZE, 1795 .max_keysize = DES_BLOCK_SIZE, 1796 .ivsize = DES_BLOCK_SIZE, 1797 .setkey = stm32_cryp_des_setkey, 1798 .encrypt = stm32_cryp_des_cbc_encrypt, 1799 .decrypt = stm32_cryp_des_cbc_decrypt, 1800 }, 1801 { 1802 .base.cra_name = "ecb(des3_ede)", 1803 .base.cra_driver_name = "stm32-ecb-des3", 1804 .base.cra_priority = 200, 1805 .base.cra_flags = CRYPTO_ALG_ASYNC, 1806 .base.cra_blocksize = DES_BLOCK_SIZE, 1807 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1808 .base.cra_alignmask = 0, 1809 .base.cra_module = THIS_MODULE, 1810 1811 .init = stm32_cryp_init_tfm, 1812 .min_keysize = 3 * DES_BLOCK_SIZE, 1813 .max_keysize = 3 * DES_BLOCK_SIZE, 1814 .setkey = stm32_cryp_tdes_setkey, 1815 .encrypt = stm32_cryp_tdes_ecb_encrypt, 1816 .decrypt = stm32_cryp_tdes_ecb_decrypt, 1817 }, 1818 { 1819 .base.cra_name = "cbc(des3_ede)", 1820 .base.cra_driver_name = "stm32-cbc-des3", 1821 .base.cra_priority = 200, 1822 .base.cra_flags = CRYPTO_ALG_ASYNC, 1823 .base.cra_blocksize = DES_BLOCK_SIZE, 1824 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1825 .base.cra_alignmask = 0, 1826 .base.cra_module = THIS_MODULE, 1827 1828 .init = stm32_cryp_init_tfm, 1829 .min_keysize = 3 * DES_BLOCK_SIZE, 1830 .max_keysize = 3 * DES_BLOCK_SIZE, 1831 .ivsize = DES_BLOCK_SIZE, 1832 .setkey = stm32_cryp_tdes_setkey, 1833 .encrypt = stm32_cryp_tdes_cbc_encrypt, 1834 .decrypt = stm32_cryp_tdes_cbc_decrypt, 1835 }, 1836 }; 1837 1838 static struct aead_alg aead_algs[] = { 1839 { 1840 
.setkey = stm32_cryp_aes_aead_setkey, 1841 .setauthsize = stm32_cryp_aes_gcm_setauthsize, 1842 .encrypt = stm32_cryp_aes_gcm_encrypt, 1843 .decrypt = stm32_cryp_aes_gcm_decrypt, 1844 .init = stm32_cryp_aes_aead_init, 1845 .ivsize = 12, 1846 .maxauthsize = AES_BLOCK_SIZE, 1847 1848 .base = { 1849 .cra_name = "gcm(aes)", 1850 .cra_driver_name = "stm32-gcm-aes", 1851 .cra_priority = 200, 1852 .cra_flags = CRYPTO_ALG_ASYNC, 1853 .cra_blocksize = 1, 1854 .cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1855 .cra_alignmask = 0, 1856 .cra_module = THIS_MODULE, 1857 }, 1858 }, 1859 { 1860 .setkey = stm32_cryp_aes_aead_setkey, 1861 .setauthsize = stm32_cryp_aes_ccm_setauthsize, 1862 .encrypt = stm32_cryp_aes_ccm_encrypt, 1863 .decrypt = stm32_cryp_aes_ccm_decrypt, 1864 .init = stm32_cryp_aes_aead_init, 1865 .ivsize = AES_BLOCK_SIZE, 1866 .maxauthsize = AES_BLOCK_SIZE, 1867 1868 .base = { 1869 .cra_name = "ccm(aes)", 1870 .cra_driver_name = "stm32-ccm-aes", 1871 .cra_priority = 200, 1872 .cra_flags = CRYPTO_ALG_ASYNC, 1873 .cra_blocksize = 1, 1874 .cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1875 .cra_alignmask = 0, 1876 .cra_module = THIS_MODULE, 1877 }, 1878 }, 1879 }; 1880 1881 static const struct stm32_cryp_caps ux500_data = { 1882 .aeads_support = false, 1883 .linear_aes_key = true, 1884 .kp_mode = false, 1885 .iv_protection = true, 1886 .swap_final = true, 1887 .padding_wa = true, 1888 .cr = UX500_CRYP_CR, 1889 .sr = UX500_CRYP_SR, 1890 .din = UX500_CRYP_DIN, 1891 .dout = UX500_CRYP_DOUT, 1892 .imsc = UX500_CRYP_IMSC, 1893 .mis = UX500_CRYP_MIS, 1894 .k1l = UX500_CRYP_K1L, 1895 .k1r = UX500_CRYP_K1R, 1896 .k3r = UX500_CRYP_K3R, 1897 .iv0l = UX500_CRYP_IV0L, 1898 .iv0r = UX500_CRYP_IV0R, 1899 .iv1l = UX500_CRYP_IV1L, 1900 .iv1r = UX500_CRYP_IV1R, 1901 }; 1902 1903 static const struct stm32_cryp_caps f7_data = { 1904 .aeads_support = true, 1905 .linear_aes_key = false, 1906 .kp_mode = true, 1907 .iv_protection = false, 1908 .swap_final = true, 1909 .padding_wa = true, 1910 .cr = CRYP_CR, 1911 .sr = CRYP_SR, 1912 .din = CRYP_DIN, 1913 .dout = CRYP_DOUT, 1914 .imsc = CRYP_IMSCR, 1915 .mis = CRYP_MISR, 1916 .k1l = CRYP_K1LR, 1917 .k1r = CRYP_K1RR, 1918 .k3r = CRYP_K3RR, 1919 .iv0l = CRYP_IV0LR, 1920 .iv0r = CRYP_IV0RR, 1921 .iv1l = CRYP_IV1LR, 1922 .iv1r = CRYP_IV1RR, 1923 }; 1924 1925 static const struct stm32_cryp_caps mp1_data = { 1926 .aeads_support = true, 1927 .linear_aes_key = false, 1928 .kp_mode = true, 1929 .iv_protection = false, 1930 .swap_final = false, 1931 .padding_wa = false, 1932 .cr = CRYP_CR, 1933 .sr = CRYP_SR, 1934 .din = CRYP_DIN, 1935 .dout = CRYP_DOUT, 1936 .imsc = CRYP_IMSCR, 1937 .mis = CRYP_MISR, 1938 .k1l = CRYP_K1LR, 1939 .k1r = CRYP_K1RR, 1940 .k3r = CRYP_K3RR, 1941 .iv0l = CRYP_IV0LR, 1942 .iv0r = CRYP_IV0RR, 1943 .iv1l = CRYP_IV1LR, 1944 .iv1r = CRYP_IV1RR, 1945 }; 1946 1947 static const struct of_device_id stm32_dt_ids[] = { 1948 { .compatible = "stericsson,ux500-cryp", .data = &ux500_data}, 1949 { .compatible = "st,stm32f756-cryp", .data = &f7_data}, 1950 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data}, 1951 {}, 1952 }; 1953 MODULE_DEVICE_TABLE(of, stm32_dt_ids); 1954 1955 static int stm32_cryp_probe(struct platform_device *pdev) 1956 { 1957 struct device *dev = &pdev->dev; 1958 struct stm32_cryp *cryp; 1959 struct reset_control *rst; 1960 int irq, ret; 1961 1962 cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL); 1963 if (!cryp) 1964 return -ENOMEM; 1965 1966 cryp->caps = of_device_get_match_data(dev); 1967 if (!cryp->caps) 1968 return -ENODEV; 1969 1970 
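	/*
	 * cryp->caps now points at the variant description (register offsets
	 * plus quirks such as linear_aes_key, swap_final and padding_wa) that
	 * the rest of the driver relies on.
	 */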
cryp->dev = dev; 1971 1972 cryp->regs = devm_platform_ioremap_resource(pdev, 0); 1973 if (IS_ERR(cryp->regs)) 1974 return PTR_ERR(cryp->regs); 1975 1976 irq = platform_get_irq(pdev, 0); 1977 if (irq < 0) 1978 return irq; 1979 1980 ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq, 1981 stm32_cryp_irq_thread, IRQF_ONESHOT, 1982 dev_name(dev), cryp); 1983 if (ret) { 1984 dev_err(dev, "Cannot grab IRQ\n"); 1985 return ret; 1986 } 1987 1988 cryp->clk = devm_clk_get(dev, NULL); 1989 if (IS_ERR(cryp->clk)) { 1990 dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n"); 1991 1992 return PTR_ERR(cryp->clk); 1993 } 1994 1995 ret = clk_prepare_enable(cryp->clk); 1996 if (ret) { 1997 dev_err(cryp->dev, "Failed to enable clock\n"); 1998 return ret; 1999 } 2000 2001 pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY); 2002 pm_runtime_use_autosuspend(dev); 2003 2004 pm_runtime_get_noresume(dev); 2005 pm_runtime_set_active(dev); 2006 pm_runtime_enable(dev); 2007 2008 rst = devm_reset_control_get(dev, NULL); 2009 if (IS_ERR(rst)) { 2010 ret = PTR_ERR(rst); 2011 if (ret == -EPROBE_DEFER) 2012 goto err_rst; 2013 } else { 2014 reset_control_assert(rst); 2015 udelay(2); 2016 reset_control_deassert(rst); 2017 } 2018 2019 platform_set_drvdata(pdev, cryp); 2020 2021 spin_lock(&cryp_list.lock); 2022 list_add(&cryp->list, &cryp_list.dev_list); 2023 spin_unlock(&cryp_list.lock); 2024 2025 /* Initialize crypto engine */ 2026 cryp->engine = crypto_engine_alloc_init(dev, 1); 2027 if (!cryp->engine) { 2028 dev_err(dev, "Could not init crypto engine\n"); 2029 ret = -ENOMEM; 2030 goto err_engine1; 2031 } 2032 2033 ret = crypto_engine_start(cryp->engine); 2034 if (ret) { 2035 dev_err(dev, "Could not start crypto engine\n"); 2036 goto err_engine2; 2037 } 2038 2039 ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs)); 2040 if (ret) { 2041 dev_err(dev, "Could not register algs\n"); 2042 goto err_algs; 2043 } 2044 2045 if (cryp->caps->aeads_support) { 2046 ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs)); 2047 if (ret) 2048 goto err_aead_algs; 2049 } 2050 2051 dev_info(dev, "Initialized\n"); 2052 2053 pm_runtime_put_sync(dev); 2054 2055 return 0; 2056 2057 err_aead_algs: 2058 crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs)); 2059 err_algs: 2060 err_engine2: 2061 crypto_engine_exit(cryp->engine); 2062 err_engine1: 2063 spin_lock(&cryp_list.lock); 2064 list_del(&cryp->list); 2065 spin_unlock(&cryp_list.lock); 2066 err_rst: 2067 pm_runtime_disable(dev); 2068 pm_runtime_put_noidle(dev); 2069 2070 clk_disable_unprepare(cryp->clk); 2071 2072 return ret; 2073 } 2074 2075 static int stm32_cryp_remove(struct platform_device *pdev) 2076 { 2077 struct stm32_cryp *cryp = platform_get_drvdata(pdev); 2078 int ret; 2079 2080 if (!cryp) 2081 return -ENODEV; 2082 2083 ret = pm_runtime_resume_and_get(cryp->dev); 2084 if (ret < 0) 2085 return ret; 2086 2087 if (cryp->caps->aeads_support) 2088 crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs)); 2089 crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs)); 2090 2091 crypto_engine_exit(cryp->engine); 2092 2093 spin_lock(&cryp_list.lock); 2094 list_del(&cryp->list); 2095 spin_unlock(&cryp_list.lock); 2096 2097 pm_runtime_disable(cryp->dev); 2098 pm_runtime_put_noidle(cryp->dev); 2099 2100 clk_disable_unprepare(cryp->clk); 2101 2102 return 0; 2103 } 2104 2105 #ifdef CONFIG_PM 2106 static int stm32_cryp_runtime_suspend(struct device *dev) 2107 { 2108 struct stm32_cryp *cryp = dev_get_drvdata(dev); 2109 2110 
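	/*
	 * Only the clock is gated while the device is runtime-suspended; each
	 * request reprograms the hardware from scratch in stm32_cryp_hw_init(),
	 * so no register state needs to be saved here.
	 */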
	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm             = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");