1 // SPDX-License-Identifier: GPL-2.0-only 2 /* 3 * Copyright (C) STMicroelectronics SA 2017 4 * Author: Fabien Dessenne <fabien.dessenne@st.com> 5 * Ux500 support taken from snippets in the old Ux500 cryp driver 6 */ 7 8 #include <linux/clk.h> 9 #include <linux/delay.h> 10 #include <linux/interrupt.h> 11 #include <linux/iopoll.h> 12 #include <linux/module.h> 13 #include <linux/of_device.h> 14 #include <linux/platform_device.h> 15 #include <linux/pm_runtime.h> 16 #include <linux/reset.h> 17 18 #include <crypto/aes.h> 19 #include <crypto/internal/des.h> 20 #include <crypto/engine.h> 21 #include <crypto/scatterwalk.h> 22 #include <crypto/internal/aead.h> 23 #include <crypto/internal/skcipher.h> 24 25 #define DRIVER_NAME "stm32-cryp" 26 27 /* Bit [0] encrypt / decrypt */ 28 #define FLG_ENCRYPT BIT(0) 29 /* Bit [8..1] algo & operation mode */ 30 #define FLG_AES BIT(1) 31 #define FLG_DES BIT(2) 32 #define FLG_TDES BIT(3) 33 #define FLG_ECB BIT(4) 34 #define FLG_CBC BIT(5) 35 #define FLG_CTR BIT(6) 36 #define FLG_GCM BIT(7) 37 #define FLG_CCM BIT(8) 38 /* Mode mask = bits [15..0] */ 39 #define FLG_MODE_MASK GENMASK(15, 0) 40 /* Bit [31..16] status */ 41 42 /* Registers */ 43 #define CRYP_CR 0x00000000 44 #define CRYP_SR 0x00000004 45 #define CRYP_DIN 0x00000008 46 #define CRYP_DOUT 0x0000000C 47 #define CRYP_DMACR 0x00000010 48 #define CRYP_IMSCR 0x00000014 49 #define CRYP_RISR 0x00000018 50 #define CRYP_MISR 0x0000001C 51 #define CRYP_K0LR 0x00000020 52 #define CRYP_K0RR 0x00000024 53 #define CRYP_K1LR 0x00000028 54 #define CRYP_K1RR 0x0000002C 55 #define CRYP_K2LR 0x00000030 56 #define CRYP_K2RR 0x00000034 57 #define CRYP_K3LR 0x00000038 58 #define CRYP_K3RR 0x0000003C 59 #define CRYP_IV0LR 0x00000040 60 #define CRYP_IV0RR 0x00000044 61 #define CRYP_IV1LR 0x00000048 62 #define CRYP_IV1RR 0x0000004C 63 #define CRYP_CSGCMCCM0R 0x00000050 64 #define CRYP_CSGCM0R 0x00000070 65 66 #define UX500_CRYP_CR 0x00000000 67 #define UX500_CRYP_SR 0x00000004 68 #define UX500_CRYP_DIN 0x00000008 69 #define UX500_CRYP_DINSIZE 0x0000000C 70 #define UX500_CRYP_DOUT 0x00000010 71 #define UX500_CRYP_DOUSIZE 0x00000014 72 #define UX500_CRYP_DMACR 0x00000018 73 #define UX500_CRYP_IMSC 0x0000001C 74 #define UX500_CRYP_RIS 0x00000020 75 #define UX500_CRYP_MIS 0x00000024 76 #define UX500_CRYP_K1L 0x00000028 77 #define UX500_CRYP_K1R 0x0000002C 78 #define UX500_CRYP_K2L 0x00000030 79 #define UX500_CRYP_K2R 0x00000034 80 #define UX500_CRYP_K3L 0x00000038 81 #define UX500_CRYP_K3R 0x0000003C 82 #define UX500_CRYP_K4L 0x00000040 83 #define UX500_CRYP_K4R 0x00000044 84 #define UX500_CRYP_IV0L 0x00000048 85 #define UX500_CRYP_IV0R 0x0000004C 86 #define UX500_CRYP_IV1L 0x00000050 87 #define UX500_CRYP_IV1R 0x00000054 88 89 /* Registers values */ 90 #define CR_DEC_NOT_ENC 0x00000004 91 #define CR_TDES_ECB 0x00000000 92 #define CR_TDES_CBC 0x00000008 93 #define CR_DES_ECB 0x00000010 94 #define CR_DES_CBC 0x00000018 95 #define CR_AES_ECB 0x00000020 96 #define CR_AES_CBC 0x00000028 97 #define CR_AES_CTR 0x00000030 98 #define CR_AES_KP 0x00000038 /* Not on Ux500 */ 99 #define CR_AES_XTS 0x00000038 /* Only on Ux500 */ 100 #define CR_AES_GCM 0x00080000 101 #define CR_AES_CCM 0x00080008 102 #define CR_AES_UNKNOWN 0xFFFFFFFF 103 #define CR_ALGO_MASK 0x00080038 104 #define CR_DATA32 0x00000000 105 #define CR_DATA16 0x00000040 106 #define CR_DATA8 0x00000080 107 #define CR_DATA1 0x000000C0 108 #define CR_KEY128 0x00000000 109 #define CR_KEY192 0x00000100 110 #define CR_KEY256 0x00000200 111 #define CR_KEYRDEN 0x00000400 /* Only on Ux500 
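 * (set by stm32_cryp_key_read_enable() around the IV read-back
 * in stm32_cryp_get_iv() when caps->iv_protection is true)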
*/ 112 #define CR_KSE 0x00000800 /* Only on Ux500 */ 113 #define CR_FFLUSH 0x00004000 114 #define CR_CRYPEN 0x00008000 115 #define CR_PH_INIT 0x00000000 116 #define CR_PH_HEADER 0x00010000 117 #define CR_PH_PAYLOAD 0x00020000 118 #define CR_PH_FINAL 0x00030000 119 #define CR_PH_MASK 0x00030000 120 #define CR_NBPBL_SHIFT 20 121 122 #define SR_BUSY 0x00000010 123 #define SR_OFNE 0x00000004 124 125 #define IMSCR_IN BIT(0) 126 #define IMSCR_OUT BIT(1) 127 128 #define MISR_IN BIT(0) 129 #define MISR_OUT BIT(1) 130 131 /* Misc */ 132 #define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32)) 133 #define GCM_CTR_INIT 2 134 #define CRYP_AUTOSUSPEND_DELAY 50 135 136 struct stm32_cryp_caps { 137 bool aeads_support; 138 bool linear_aes_key; 139 bool kp_mode; 140 bool iv_protection; 141 bool swap_final; 142 bool padding_wa; 143 u32 cr; 144 u32 sr; 145 u32 din; 146 u32 dout; 147 u32 imsc; 148 u32 mis; 149 u32 k1l; 150 u32 k1r; 151 u32 k3r; 152 u32 iv0l; 153 u32 iv0r; 154 u32 iv1l; 155 u32 iv1r; 156 }; 157 158 struct stm32_cryp_ctx { 159 struct crypto_engine_ctx enginectx; 160 struct stm32_cryp *cryp; 161 int keylen; 162 __be32 key[AES_KEYSIZE_256 / sizeof(u32)]; 163 unsigned long flags; 164 }; 165 166 struct stm32_cryp_reqctx { 167 unsigned long mode; 168 }; 169 170 struct stm32_cryp { 171 struct list_head list; 172 struct device *dev; 173 void __iomem *regs; 174 struct clk *clk; 175 unsigned long flags; 176 u32 irq_status; 177 const struct stm32_cryp_caps *caps; 178 struct stm32_cryp_ctx *ctx; 179 180 struct crypto_engine *engine; 181 182 struct skcipher_request *req; 183 struct aead_request *areq; 184 185 size_t authsize; 186 size_t hw_blocksize; 187 188 size_t payload_in; 189 size_t header_in; 190 size_t payload_out; 191 192 struct scatterlist *out_sg; 193 194 struct scatter_walk in_walk; 195 struct scatter_walk out_walk; 196 197 __be32 last_ctr[4]; 198 u32 gcm_ctr; 199 }; 200 201 struct stm32_cryp_list { 202 struct list_head dev_list; 203 spinlock_t lock; /* protect dev_list */ 204 }; 205 206 static struct stm32_cryp_list cryp_list = { 207 .dev_list = LIST_HEAD_INIT(cryp_list.dev_list), 208 .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock), 209 }; 210 211 static inline bool is_aes(struct stm32_cryp *cryp) 212 { 213 return cryp->flags & FLG_AES; 214 } 215 216 static inline bool is_des(struct stm32_cryp *cryp) 217 { 218 return cryp->flags & FLG_DES; 219 } 220 221 static inline bool is_tdes(struct stm32_cryp *cryp) 222 { 223 return cryp->flags & FLG_TDES; 224 } 225 226 static inline bool is_ecb(struct stm32_cryp *cryp) 227 { 228 return cryp->flags & FLG_ECB; 229 } 230 231 static inline bool is_cbc(struct stm32_cryp *cryp) 232 { 233 return cryp->flags & FLG_CBC; 234 } 235 236 static inline bool is_ctr(struct stm32_cryp *cryp) 237 { 238 return cryp->flags & FLG_CTR; 239 } 240 241 static inline bool is_gcm(struct stm32_cryp *cryp) 242 { 243 return cryp->flags & FLG_GCM; 244 } 245 246 static inline bool is_ccm(struct stm32_cryp *cryp) 247 { 248 return cryp->flags & FLG_CCM; 249 } 250 251 static inline bool is_encrypt(struct stm32_cryp *cryp) 252 { 253 return cryp->flags & FLG_ENCRYPT; 254 } 255 256 static inline bool is_decrypt(struct stm32_cryp *cryp) 257 { 258 return !is_encrypt(cryp); 259 } 260 261 static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst) 262 { 263 return readl_relaxed(cryp->regs + ofst); 264 } 265 266 static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val) 267 { 268 writel_relaxed(val, cryp->regs + ofst); 269 } 270 271 static inline int 
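/*
 * Each of the wait helpers polls its register every 10 us and gives up
 * after 100 ms (SR_BUSY cleared, CR_CRYPEN cleared or SR_OFNE set).
 */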
stm32_cryp_wait_busy(struct stm32_cryp *cryp) 272 { 273 u32 status; 274 275 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status, 276 !(status & SR_BUSY), 10, 100000); 277 } 278 279 static inline void stm32_cryp_enable(struct stm32_cryp *cryp) 280 { 281 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN, 282 cryp->regs + cryp->caps->cr); 283 } 284 285 static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp) 286 { 287 u32 status; 288 289 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status, 290 !(status & CR_CRYPEN), 10, 100000); 291 } 292 293 static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp) 294 { 295 u32 status; 296 297 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status, 298 status & SR_OFNE, 10, 100000); 299 } 300 301 static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp) 302 { 303 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN, 304 cryp->regs + cryp->caps->cr); 305 } 306 307 static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp) 308 { 309 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN, 310 cryp->regs + cryp->caps->cr); 311 } 312 313 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp); 314 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err); 315 316 static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx) 317 { 318 struct stm32_cryp *tmp, *cryp = NULL; 319 320 spin_lock_bh(&cryp_list.lock); 321 if (!ctx->cryp) { 322 list_for_each_entry(tmp, &cryp_list.dev_list, list) { 323 cryp = tmp; 324 break; 325 } 326 ctx->cryp = cryp; 327 } else { 328 cryp = ctx->cryp; 329 } 330 331 spin_unlock_bh(&cryp_list.lock); 332 333 return cryp; 334 } 335 336 static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv) 337 { 338 if (!iv) 339 return; 340 341 stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++)); 342 stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++)); 343 344 if (is_aes(cryp)) { 345 stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++)); 346 stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++)); 347 } 348 } 349 350 static void stm32_cryp_get_iv(struct stm32_cryp *cryp) 351 { 352 struct skcipher_request *req = cryp->req; 353 __be32 *tmp = (void *)req->iv; 354 355 if (!tmp) 356 return; 357 358 if (cryp->caps->iv_protection) 359 stm32_cryp_key_read_enable(cryp); 360 361 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l)); 362 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r)); 363 364 if (is_aes(cryp)) { 365 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l)); 366 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r)); 367 } 368 369 if (cryp->caps->iv_protection) 370 stm32_cryp_key_read_disable(cryp); 371 } 372 373 /** 374 * ux500_swap_bits_in_byte() - mirror the bits in a byte 375 * @b: the byte to be mirrored 376 * 377 * The bits are swapped the following way: 378 * Byte b include bits 0-7, nibble 1 (n1) include bits 0-3 and 379 * nibble 2 (n2) bits 4-7. 380 * 381 * Nibble 1 (n1): 382 * (The "old" (moved) bit is replaced with a zero) 383 * 1. Move bit 6 and 7, 4 positions to the left. 384 * 2. Move bit 3 and 5, 2 positions to the left. 385 * 3. Move bit 1-4, 1 position to the left. 386 * 387 * Nibble 2 (n2): 388 * 1. Move bit 0 and 1, 4 positions to the right. 389 * 2. Move bit 2 and 4, 2 positions to the right. 390 * 3. Move bit 3-6, 1 position to the right. 
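 *
 * A worked example (not from the datasheet): b = 0xb2 (1011 0010) comes out
 * as 0x4d (0100 1101), i.e. the byte is mirrored end to end.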
391 * 392 * Combine the two nibbles to a complete and swapped byte. 393 */ 394 static inline u8 ux500_swap_bits_in_byte(u8 b) 395 { 396 #define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */ 397 #define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5, 398 right shift 2 */ 399 #define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4, 400 right shift 1 */ 401 #define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */ 402 #define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4, 403 left shift 2 */ 404 #define L_SHIFT_1_MASK 0x78 /* (After left shift 1) Bits 3-6, 405 left shift 1 */ 406 407 u8 n1; 408 u8 n2; 409 410 /* Swap most significant nibble */ 411 /* Right shift 4, bits 6 and 7 */ 412 n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4)); 413 /* Right shift 2, bits 3 and 5 */ 414 n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2)); 415 /* Right shift 1, bits 1-4 */ 416 n1 = (n1 & R_SHIFT_1_MASK) >> 1; 417 418 /* Swap least significant nibble */ 419 /* Left shift 4, bits 0 and 1 */ 420 n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4)); 421 /* Left shift 2, bits 2 and 4 */ 422 n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2)); 423 /* Left shift 1, bits 3-6 */ 424 n2 = (n2 & L_SHIFT_1_MASK) << 1; 425 426 return n1 | n2; 427 } 428 429 /** 430 * ux500_swizzle_key() - Shuffle around words and bits in the AES key 431 * @in: key to swizzle 432 * @out: swizzled key 433 * @len: length of key, in bytes 434 * 435 * This "key swizzling procedure" is described in the examples in the 436 * DB8500 design specification. There is no real description of why 437 * the bits have been arranged like this in the hardware. 438 */ 439 static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len) 440 { 441 int i = 0; 442 int bpw = sizeof(u32); 443 int j; 444 int index = 0; 445 446 j = len - bpw; 447 while (j >= 0) { 448 for (i = 0; i < bpw; i++) { 449 index = len - j - bpw + i; 450 out[j + i] = 451 ux500_swap_bits_in_byte(in[index]); 452 } 453 j -= bpw; 454 } 455 } 456 457 static void stm32_cryp_hw_write_key(struct stm32_cryp *c) 458 { 459 unsigned int i; 460 int r_id; 461 462 if (is_des(c)) { 463 stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0])); 464 stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1])); 465 return; 466 } 467 468 /* 469 * On the Ux500 the AES key is considered as a single bit sequence 470 * of 128, 192 or 256 bits length. It is written linearly into the 471 * registers from K1L and down, and need to be processed to become 472 * a proper big-endian bit sequence. 
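 *
 * Concretely, as ux500_swizzle_key() above implements it: the 32-bit words
 * of the key are written in reverse order, bytes keep their position within
 * each word, and every byte is bit-mirrored by ux500_swap_bits_in_byte().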
473 */ 474 if (is_aes(c) && c->caps->linear_aes_key) { 475 u32 tmpkey[8]; 476 477 ux500_swizzle_key((u8 *)c->ctx->key, 478 (u8 *)tmpkey, c->ctx->keylen); 479 480 r_id = c->caps->k1l; 481 for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4) 482 stm32_cryp_write(c, r_id, tmpkey[i]); 483 484 return; 485 } 486 487 r_id = c->caps->k3r; 488 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4) 489 stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1])); 490 } 491 492 static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp) 493 { 494 if (is_aes(cryp) && is_ecb(cryp)) 495 return CR_AES_ECB; 496 497 if (is_aes(cryp) && is_cbc(cryp)) 498 return CR_AES_CBC; 499 500 if (is_aes(cryp) && is_ctr(cryp)) 501 return CR_AES_CTR; 502 503 if (is_aes(cryp) && is_gcm(cryp)) 504 return CR_AES_GCM; 505 506 if (is_aes(cryp) && is_ccm(cryp)) 507 return CR_AES_CCM; 508 509 if (is_des(cryp) && is_ecb(cryp)) 510 return CR_DES_ECB; 511 512 if (is_des(cryp) && is_cbc(cryp)) 513 return CR_DES_CBC; 514 515 if (is_tdes(cryp) && is_ecb(cryp)) 516 return CR_TDES_ECB; 517 518 if (is_tdes(cryp) && is_cbc(cryp)) 519 return CR_TDES_CBC; 520 521 dev_err(cryp->dev, "Unknown mode\n"); 522 return CR_AES_UNKNOWN; 523 } 524 525 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp) 526 { 527 return is_encrypt(cryp) ? cryp->areq->cryptlen : 528 cryp->areq->cryptlen - cryp->authsize; 529 } 530 531 static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg) 532 { 533 int ret; 534 __be32 iv[4]; 535 536 /* Phase 1 : init */ 537 memcpy(iv, cryp->areq->iv, 12); 538 iv[3] = cpu_to_be32(GCM_CTR_INIT); 539 cryp->gcm_ctr = GCM_CTR_INIT; 540 stm32_cryp_hw_write_iv(cryp, iv); 541 542 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN); 543 544 /* Wait for end of processing */ 545 ret = stm32_cryp_wait_enable(cryp); 546 if (ret) { 547 dev_err(cryp->dev, "Timeout (gcm init)\n"); 548 return ret; 549 } 550 551 /* Prepare next phase */ 552 if (cryp->areq->assoclen) { 553 cfg |= CR_PH_HEADER; 554 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 555 } else if (stm32_cryp_get_input_text_len(cryp)) { 556 cfg |= CR_PH_PAYLOAD; 557 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 558 } 559 560 return 0; 561 } 562 563 static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp) 564 { 565 u32 cfg; 566 int err; 567 568 /* Check if whole header written */ 569 if (!cryp->header_in) { 570 /* Wait for completion */ 571 err = stm32_cryp_wait_busy(cryp); 572 if (err) { 573 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n"); 574 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 575 stm32_cryp_finish_req(cryp, err); 576 return; 577 } 578 579 if (stm32_cryp_get_input_text_len(cryp)) { 580 /* Phase 3 : payload */ 581 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 582 cfg &= ~CR_CRYPEN; 583 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 584 585 cfg &= ~CR_PH_MASK; 586 cfg |= CR_PH_PAYLOAD | CR_CRYPEN; 587 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 588 } else { 589 /* 590 * Phase 4 : tag. 
591 * Nothing to read, nothing to write, caller have to 592 * end request 593 */ 594 } 595 } 596 } 597 598 static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp) 599 { 600 unsigned int i; 601 size_t written; 602 size_t len; 603 u32 alen = cryp->areq->assoclen; 604 u32 block[AES_BLOCK_32] = {0}; 605 u8 *b8 = (u8 *)block; 606 607 if (alen <= 65280) { 608 /* Write first u32 of B1 */ 609 b8[0] = (alen >> 8) & 0xFF; 610 b8[1] = alen & 0xFF; 611 len = 2; 612 } else { 613 /* Build the two first u32 of B1 */ 614 b8[0] = 0xFF; 615 b8[1] = 0xFE; 616 b8[2] = (alen & 0xFF000000) >> 24; 617 b8[3] = (alen & 0x00FF0000) >> 16; 618 b8[4] = (alen & 0x0000FF00) >> 8; 619 b8[5] = alen & 0x000000FF; 620 len = 6; 621 } 622 623 written = min_t(size_t, AES_BLOCK_SIZE - len, alen); 624 625 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0); 626 for (i = 0; i < AES_BLOCK_32; i++) 627 stm32_cryp_write(cryp, cryp->caps->din, block[i]); 628 629 cryp->header_in -= written; 630 631 stm32_crypt_gcmccm_end_header(cryp); 632 } 633 634 static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg) 635 { 636 int ret; 637 u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32]; 638 u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32; 639 __be32 *bd; 640 u32 *d; 641 unsigned int i, textlen; 642 643 /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */ 644 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); 645 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); 646 iv[AES_BLOCK_SIZE - 1] = 1; 647 stm32_cryp_hw_write_iv(cryp, (__be32 *)iv); 648 649 /* Build B0 */ 650 memcpy(b0, iv, AES_BLOCK_SIZE); 651 652 b0[0] |= (8 * ((cryp->authsize - 2) / 2)); 653 654 if (cryp->areq->assoclen) 655 b0[0] |= 0x40; 656 657 textlen = stm32_cryp_get_input_text_len(cryp); 658 659 b0[AES_BLOCK_SIZE - 2] = textlen >> 8; 660 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF; 661 662 /* Enable HW */ 663 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN); 664 665 /* Write B0 */ 666 d = (u32 *)b0; 667 bd = (__be32 *)b0; 668 669 for (i = 0; i < AES_BLOCK_32; i++) { 670 u32 xd = d[i]; 671 672 if (!cryp->caps->padding_wa) 673 xd = be32_to_cpu(bd[i]); 674 stm32_cryp_write(cryp, cryp->caps->din, xd); 675 } 676 677 /* Wait for end of processing */ 678 ret = stm32_cryp_wait_enable(cryp); 679 if (ret) { 680 dev_err(cryp->dev, "Timeout (ccm init)\n"); 681 return ret; 682 } 683 684 /* Prepare next phase */ 685 if (cryp->areq->assoclen) { 686 cfg |= CR_PH_HEADER | CR_CRYPEN; 687 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 688 689 /* Write first (special) block (may move to next phase [payload]) */ 690 stm32_cryp_write_ccm_first_header(cryp); 691 } else if (stm32_cryp_get_input_text_len(cryp)) { 692 cfg |= CR_PH_PAYLOAD; 693 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 694 } 695 696 return 0; 697 } 698 699 static int stm32_cryp_hw_init(struct stm32_cryp *cryp) 700 { 701 int ret; 702 u32 cfg, hw_mode; 703 704 pm_runtime_get_sync(cryp->dev); 705 706 /* Disable interrupt */ 707 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 708 709 /* Set configuration */ 710 cfg = CR_DATA8 | CR_FFLUSH; 711 712 switch (cryp->ctx->keylen) { 713 case AES_KEYSIZE_128: 714 cfg |= CR_KEY128; 715 break; 716 717 case AES_KEYSIZE_192: 718 cfg |= CR_KEY192; 719 break; 720 721 default: 722 case AES_KEYSIZE_256: 723 cfg |= CR_KEY256; 724 break; 725 } 726 727 hw_mode = stm32_cryp_get_hw_mode(cryp); 728 if (hw_mode == CR_AES_UNKNOWN) 729 return -EINVAL; 730 731 /* AES ECB/CBC decrypt: run key preparation first */ 732 if (is_decrypt(cryp) && 733 ((hw_mode == CR_AES_ECB) 
|| (hw_mode == CR_AES_CBC))) { 734 /* Configure in key preparation mode */ 735 if (cryp->caps->kp_mode) 736 stm32_cryp_write(cryp, cryp->caps->cr, 737 cfg | CR_AES_KP); 738 else 739 stm32_cryp_write(cryp, 740 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE); 741 742 /* Set key only after full configuration done */ 743 stm32_cryp_hw_write_key(cryp); 744 745 /* Start prepare key */ 746 stm32_cryp_enable(cryp); 747 /* Wait for end of processing */ 748 ret = stm32_cryp_wait_busy(cryp); 749 if (ret) { 750 dev_err(cryp->dev, "Timeout (key preparation)\n"); 751 return ret; 752 } 753 754 cfg |= hw_mode | CR_DEC_NOT_ENC; 755 756 /* Apply updated config (Decrypt + algo) and flush */ 757 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 758 } else { 759 cfg |= hw_mode; 760 if (is_decrypt(cryp)) 761 cfg |= CR_DEC_NOT_ENC; 762 763 /* Apply config and flush */ 764 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 765 766 /* Set key only after configuration done */ 767 stm32_cryp_hw_write_key(cryp); 768 } 769 770 switch (hw_mode) { 771 case CR_AES_GCM: 772 case CR_AES_CCM: 773 /* Phase 1 : init */ 774 if (hw_mode == CR_AES_CCM) 775 ret = stm32_cryp_ccm_init(cryp, cfg); 776 else 777 ret = stm32_cryp_gcm_init(cryp, cfg); 778 779 if (ret) 780 return ret; 781 782 break; 783 784 case CR_DES_CBC: 785 case CR_TDES_CBC: 786 case CR_AES_CBC: 787 case CR_AES_CTR: 788 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv); 789 break; 790 791 default: 792 break; 793 } 794 795 /* Enable now */ 796 stm32_cryp_enable(cryp); 797 798 return 0; 799 } 800 801 static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err) 802 { 803 if (!err && (is_gcm(cryp) || is_ccm(cryp))) 804 /* Phase 4 : output tag */ 805 err = stm32_cryp_read_auth_tag(cryp); 806 807 if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp)))) 808 stm32_cryp_get_iv(cryp); 809 810 pm_runtime_mark_last_busy(cryp->dev); 811 pm_runtime_put_autosuspend(cryp->dev); 812 813 if (is_gcm(cryp) || is_ccm(cryp)) 814 crypto_finalize_aead_request(cryp->engine, cryp->areq, err); 815 else 816 crypto_finalize_skcipher_request(cryp->engine, cryp->req, 817 err); 818 } 819 820 static int stm32_cryp_cpu_start(struct stm32_cryp *cryp) 821 { 822 /* Enable interrupt and let the IRQ handler do everything */ 823 stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT); 824 825 return 0; 826 } 827 828 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq); 829 static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine, 830 void *areq); 831 832 static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm) 833 { 834 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm); 835 836 crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx)); 837 838 ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req; 839 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req; 840 ctx->enginectx.op.unprepare_request = NULL; 841 return 0; 842 } 843 844 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq); 845 static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, 846 void *areq); 847 848 static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm) 849 { 850 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm); 851 852 tfm->reqsize = sizeof(struct stm32_cryp_reqctx); 853 854 ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req; 855 ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req; 856 ctx->enginectx.op.unprepare_request = NULL; 857 858 return 0; 859 } 860 861 static int stm32_cryp_crypt(struct 
skcipher_request *req, unsigned long mode) 862 { 863 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx( 864 crypto_skcipher_reqtfm(req)); 865 struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req); 866 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); 867 868 if (!cryp) 869 return -ENODEV; 870 871 rctx->mode = mode; 872 873 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req); 874 } 875 876 static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode) 877 { 878 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 879 struct stm32_cryp_reqctx *rctx = aead_request_ctx(req); 880 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx); 881 882 if (!cryp) 883 return -ENODEV; 884 885 rctx->mode = mode; 886 887 return crypto_transfer_aead_request_to_engine(cryp->engine, req); 888 } 889 890 static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key, 891 unsigned int keylen) 892 { 893 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm); 894 895 memcpy(ctx->key, key, keylen); 896 ctx->keylen = keylen; 897 898 return 0; 899 } 900 901 static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, 902 unsigned int keylen) 903 { 904 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 905 keylen != AES_KEYSIZE_256) 906 return -EINVAL; 907 else 908 return stm32_cryp_setkey(tfm, key, keylen); 909 } 910 911 static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key, 912 unsigned int keylen) 913 { 914 return verify_skcipher_des_key(tfm, key) ?: 915 stm32_cryp_setkey(tfm, key, keylen); 916 } 917 918 static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key, 919 unsigned int keylen) 920 { 921 return verify_skcipher_des3_key(tfm, key) ?: 922 stm32_cryp_setkey(tfm, key, keylen); 923 } 924 925 static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key, 926 unsigned int keylen) 927 { 928 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm); 929 930 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 931 keylen != AES_KEYSIZE_256) 932 return -EINVAL; 933 934 memcpy(ctx->key, key, keylen); 935 ctx->keylen = keylen; 936 937 return 0; 938 } 939 940 static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm, 941 unsigned int authsize) 942 { 943 switch (authsize) { 944 case 4: 945 case 8: 946 case 12: 947 case 13: 948 case 14: 949 case 15: 950 case 16: 951 break; 952 default: 953 return -EINVAL; 954 } 955 956 return 0; 957 } 958 959 static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm, 960 unsigned int authsize) 961 { 962 switch (authsize) { 963 case 4: 964 case 6: 965 case 8: 966 case 10: 967 case 12: 968 case 14: 969 case 16: 970 break; 971 default: 972 return -EINVAL; 973 } 974 975 return 0; 976 } 977 978 static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req) 979 { 980 if (req->cryptlen % AES_BLOCK_SIZE) 981 return -EINVAL; 982 983 if (req->cryptlen == 0) 984 return 0; 985 986 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT); 987 } 988 989 static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req) 990 { 991 if (req->cryptlen % AES_BLOCK_SIZE) 992 return -EINVAL; 993 994 if (req->cryptlen == 0) 995 return 0; 996 997 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB); 998 } 999 1000 static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req) 1001 { 1002 if (req->cryptlen % AES_BLOCK_SIZE) 1003 return -EINVAL; 1004 1005 if (req->cryptlen == 0) 1006 return 0; 1007 1008 return stm32_cryp_crypt(req, FLG_AES | 
FLG_CBC | FLG_ENCRYPT); 1009 } 1010 1011 static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req) 1012 { 1013 if (req->cryptlen % AES_BLOCK_SIZE) 1014 return -EINVAL; 1015 1016 if (req->cryptlen == 0) 1017 return 0; 1018 1019 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC); 1020 } 1021 1022 static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req) 1023 { 1024 if (req->cryptlen == 0) 1025 return 0; 1026 1027 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT); 1028 } 1029 1030 static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req) 1031 { 1032 if (req->cryptlen == 0) 1033 return 0; 1034 1035 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR); 1036 } 1037 1038 static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req) 1039 { 1040 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT); 1041 } 1042 1043 static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req) 1044 { 1045 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM); 1046 } 1047 1048 static inline int crypto_ccm_check_iv(const u8 *iv) 1049 { 1050 /* 2 <= L <= 8, so 1 <= L' <= 7. */ 1051 if (iv[0] < 1 || iv[0] > 7) 1052 return -EINVAL; 1053 1054 return 0; 1055 } 1056 1057 static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req) 1058 { 1059 int err; 1060 1061 err = crypto_ccm_check_iv(req->iv); 1062 if (err) 1063 return err; 1064 1065 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT); 1066 } 1067 1068 static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req) 1069 { 1070 int err; 1071 1072 err = crypto_ccm_check_iv(req->iv); 1073 if (err) 1074 return err; 1075 1076 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM); 1077 } 1078 1079 static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req) 1080 { 1081 if (req->cryptlen % DES_BLOCK_SIZE) 1082 return -EINVAL; 1083 1084 if (req->cryptlen == 0) 1085 return 0; 1086 1087 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT); 1088 } 1089 1090 static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req) 1091 { 1092 if (req->cryptlen % DES_BLOCK_SIZE) 1093 return -EINVAL; 1094 1095 if (req->cryptlen == 0) 1096 return 0; 1097 1098 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB); 1099 } 1100 1101 static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req) 1102 { 1103 if (req->cryptlen % DES_BLOCK_SIZE) 1104 return -EINVAL; 1105 1106 if (req->cryptlen == 0) 1107 return 0; 1108 1109 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT); 1110 } 1111 1112 static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req) 1113 { 1114 if (req->cryptlen % DES_BLOCK_SIZE) 1115 return -EINVAL; 1116 1117 if (req->cryptlen == 0) 1118 return 0; 1119 1120 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC); 1121 } 1122 1123 static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req) 1124 { 1125 if (req->cryptlen % DES_BLOCK_SIZE) 1126 return -EINVAL; 1127 1128 if (req->cryptlen == 0) 1129 return 0; 1130 1131 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT); 1132 } 1133 1134 static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req) 1135 { 1136 if (req->cryptlen % DES_BLOCK_SIZE) 1137 return -EINVAL; 1138 1139 if (req->cryptlen == 0) 1140 return 0; 1141 1142 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB); 1143 } 1144 1145 static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req) 1146 { 1147 if (req->cryptlen % DES_BLOCK_SIZE) 1148 return -EINVAL; 1149 1150 if (req->cryptlen == 0) 1151 return 0; 1152 1153 return stm32_cryp_crypt(req, 
FLG_TDES | FLG_CBC | FLG_ENCRYPT); 1154 } 1155 1156 static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req) 1157 { 1158 if (req->cryptlen % DES_BLOCK_SIZE) 1159 return -EINVAL; 1160 1161 if (req->cryptlen == 0) 1162 return 0; 1163 1164 return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC); 1165 } 1166 1167 static int stm32_cryp_prepare_req(struct skcipher_request *req, 1168 struct aead_request *areq) 1169 { 1170 struct stm32_cryp_ctx *ctx; 1171 struct stm32_cryp *cryp; 1172 struct stm32_cryp_reqctx *rctx; 1173 struct scatterlist *in_sg; 1174 int ret; 1175 1176 if (!req && !areq) 1177 return -EINVAL; 1178 1179 ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) : 1180 crypto_aead_ctx(crypto_aead_reqtfm(areq)); 1181 1182 cryp = ctx->cryp; 1183 1184 if (!cryp) 1185 return -ENODEV; 1186 1187 rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq); 1188 rctx->mode &= FLG_MODE_MASK; 1189 1190 ctx->cryp = cryp; 1191 1192 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode; 1193 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE; 1194 cryp->ctx = ctx; 1195 1196 if (req) { 1197 cryp->req = req; 1198 cryp->areq = NULL; 1199 cryp->header_in = 0; 1200 cryp->payload_in = req->cryptlen; 1201 cryp->payload_out = req->cryptlen; 1202 cryp->authsize = 0; 1203 } else { 1204 /* 1205 * Length of input and output data: 1206 * Encryption case: 1207 * INPUT = AssocData || PlainText 1208 * <- assoclen -> <- cryptlen -> 1209 * 1210 * OUTPUT = AssocData || CipherText || AuthTag 1211 * <- assoclen -> <-- cryptlen --> <- authsize -> 1212 * 1213 * Decryption case: 1214 * INPUT = AssocData || CipherText || AuthTag 1215 * <- assoclen ---> <---------- cryptlen ----------> 1216 * 1217 * OUTPUT = AssocData || PlainText 1218 * <- assoclen -> <- cryptlen - authsize -> 1219 */ 1220 cryp->areq = areq; 1221 cryp->req = NULL; 1222 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq)); 1223 if (is_encrypt(cryp)) { 1224 cryp->payload_in = areq->cryptlen; 1225 cryp->header_in = areq->assoclen; 1226 cryp->payload_out = areq->cryptlen; 1227 } else { 1228 cryp->payload_in = areq->cryptlen - cryp->authsize; 1229 cryp->header_in = areq->assoclen; 1230 cryp->payload_out = cryp->payload_in; 1231 } 1232 } 1233 1234 in_sg = req ? req->src : areq->src; 1235 scatterwalk_start(&cryp->in_walk, in_sg); 1236 1237 cryp->out_sg = req ?
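/*
 * From here on the request is consumed through cryp->in_walk and
 * cryp->out_walk: the threaded IRQ handler moves one hw_blocksize chunk
 * (16 bytes for AES, 8 for DES/TDES) per FIFO interrupt until payload_in,
 * header_in and payload_out all reach zero.
 */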
req->dst : areq->dst; 1238 scatterwalk_start(&cryp->out_walk, cryp->out_sg); 1239 1240 if (is_gcm(cryp) || is_ccm(cryp)) { 1241 /* In output, jump after assoc data */ 1242 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2); 1243 } 1244 1245 if (is_ctr(cryp)) 1246 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr)); 1247 1248 ret = stm32_cryp_hw_init(cryp); 1249 return ret; 1250 } 1251 1252 static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine, 1253 void *areq) 1254 { 1255 struct skcipher_request *req = container_of(areq, 1256 struct skcipher_request, 1257 base); 1258 1259 return stm32_cryp_prepare_req(req, NULL); 1260 } 1261 1262 static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq) 1263 { 1264 struct skcipher_request *req = container_of(areq, 1265 struct skcipher_request, 1266 base); 1267 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx( 1268 crypto_skcipher_reqtfm(req)); 1269 struct stm32_cryp *cryp = ctx->cryp; 1270 1271 if (!cryp) 1272 return -ENODEV; 1273 1274 return stm32_cryp_cpu_start(cryp); 1275 } 1276 1277 static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq) 1278 { 1279 struct aead_request *req = container_of(areq, struct aead_request, 1280 base); 1281 1282 return stm32_cryp_prepare_req(NULL, req); 1283 } 1284 1285 static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq) 1286 { 1287 struct aead_request *req = container_of(areq, struct aead_request, 1288 base); 1289 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 1290 struct stm32_cryp *cryp = ctx->cryp; 1291 1292 if (!cryp) 1293 return -ENODEV; 1294 1295 if (unlikely(!cryp->payload_in && !cryp->header_in)) { 1296 /* No input data to process: get tag and finish */ 1297 stm32_cryp_finish_req(cryp, 0); 1298 return 0; 1299 } 1300 1301 return stm32_cryp_cpu_start(cryp); 1302 } 1303 1304 static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp) 1305 { 1306 u32 cfg, size_bit; 1307 unsigned int i; 1308 int ret = 0; 1309 1310 /* Update Config */ 1311 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 1312 1313 cfg &= ~CR_PH_MASK; 1314 cfg |= CR_PH_FINAL; 1315 cfg &= ~CR_DEC_NOT_ENC; 1316 cfg |= CR_CRYPEN; 1317 1318 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1319 1320 if (is_gcm(cryp)) { 1321 /* GCM: write aad and payload size (in bits) */ 1322 size_bit = cryp->areq->assoclen * 8; 1323 if (cryp->caps->swap_final) 1324 size_bit = (__force u32)cpu_to_be32(size_bit); 1325 1326 stm32_cryp_write(cryp, cryp->caps->din, 0); 1327 stm32_cryp_write(cryp, cryp->caps->din, size_bit); 1328 1329 size_bit = is_encrypt(cryp) ? 
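/*
 * Per the GCM spec the final block is len(AAD) || len(ciphertext) as two
 * 64-bit bit counts; the stm32_cryp_write(..., 0) before each length
 * supplies the upper 32 bits, assumed to be zero here.
 */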
cryp->areq->cryptlen : 1330 cryp->areq->cryptlen - cryp->authsize; 1331 size_bit *= 8; 1332 if (cryp->caps->swap_final) 1333 size_bit = (__force u32)cpu_to_be32(size_bit); 1334 1335 stm32_cryp_write(cryp, cryp->caps->din, 0); 1336 stm32_cryp_write(cryp, cryp->caps->din, size_bit); 1337 } else { 1338 /* CCM: write CTR0 */ 1339 u32 iv32[AES_BLOCK_32]; 1340 u8 *iv = (u8 *)iv32; 1341 __be32 *biv = (__be32 *)iv32; 1342 1343 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE); 1344 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1); 1345 1346 for (i = 0; i < AES_BLOCK_32; i++) { 1347 u32 xiv = iv32[i]; 1348 1349 if (!cryp->caps->padding_wa) 1350 xiv = be32_to_cpu(biv[i]); 1351 stm32_cryp_write(cryp, cryp->caps->din, xiv); 1352 } 1353 } 1354 1355 /* Wait for output data */ 1356 ret = stm32_cryp_wait_output(cryp); 1357 if (ret) { 1358 dev_err(cryp->dev, "Timeout (read tag)\n"); 1359 return ret; 1360 } 1361 1362 if (is_encrypt(cryp)) { 1363 u32 out_tag[AES_BLOCK_32]; 1364 1365 /* Get and write tag */ 1366 for (i = 0; i < AES_BLOCK_32; i++) 1367 out_tag[i] = stm32_cryp_read(cryp, cryp->caps->dout); 1368 1369 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1); 1370 } else { 1371 /* Get and check tag */ 1372 u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32]; 1373 1374 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0); 1375 1376 for (i = 0; i < AES_BLOCK_32; i++) 1377 out_tag[i] = stm32_cryp_read(cryp, cryp->caps->dout); 1378 1379 if (crypto_memneq(in_tag, out_tag, cryp->authsize)) 1380 ret = -EBADMSG; 1381 } 1382 1383 /* Disable cryp */ 1384 cfg &= ~CR_CRYPEN; 1385 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1386 1387 return ret; 1388 } 1389 1390 static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp) 1391 { 1392 u32 cr; 1393 1394 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) { 1395 /* 1396 * In this case, we need to increment manually the ctr counter, 1397 * as HW doesn't handle the U32 carry. 
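 * In practice: once the low word of the previous counter reads 0xFFFFFFFF,
 * the saved 128-bit value is bumped with crypto_inc(), CRYPEN is dropped,
 * the new counter is reloaded through the IV registers and CRYPEN is
 * restored before the next block is written.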
1398 */ 1399 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr)); 1400 1401 cr = stm32_cryp_read(cryp, cryp->caps->cr); 1402 stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN); 1403 1404 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr); 1405 1406 stm32_cryp_write(cryp, cryp->caps->cr, cr); 1407 } 1408 1409 /* The IV registers are BE */ 1410 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l)); 1411 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r)); 1412 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l)); 1413 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r)); 1414 } 1415 1416 static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp) 1417 { 1418 unsigned int i; 1419 u32 block[AES_BLOCK_32]; 1420 1421 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) 1422 block[i] = stm32_cryp_read(cryp, cryp->caps->dout); 1423 1424 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, 1425 cryp->payload_out), 1); 1426 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, 1427 cryp->payload_out); 1428 } 1429 1430 static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp) 1431 { 1432 unsigned int i; 1433 u32 block[AES_BLOCK_32] = {0}; 1434 1435 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize, 1436 cryp->payload_in), 0); 1437 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) 1438 stm32_cryp_write(cryp, cryp->caps->din, block[i]); 1439 1440 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in); 1441 } 1442 1443 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp) 1444 { 1445 int err; 1446 u32 cfg, block[AES_BLOCK_32] = {0}; 1447 unsigned int i; 1448 1449 /* 'Special workaround' procedure described in the datasheet */ 1450 1451 /* a) disable ip */ 1452 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 1453 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 1454 cfg &= ~CR_CRYPEN; 1455 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1456 1457 /* b) Update IV1R */ 1458 stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2); 1459 1460 /* c) change mode to CTR */ 1461 cfg &= ~CR_ALGO_MASK; 1462 cfg |= CR_AES_CTR; 1463 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1464 1465 /* a) enable IP */ 1466 cfg |= CR_CRYPEN; 1467 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1468 1469 /* b) pad and write the last block */ 1470 stm32_cryp_irq_write_block(cryp); 1471 /* wait end of process */ 1472 err = stm32_cryp_wait_output(cryp); 1473 if (err) { 1474 dev_err(cryp->dev, "Timeout (write gcm last data)\n"); 1475 return stm32_cryp_finish_req(cryp, err); 1476 } 1477 1478 /* c) get and store encrypted data */ 1479 /* 1480 * Same code as stm32_cryp_irq_read_data(), but we want to store 1481 * block value 1482 */ 1483 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) 1484 block[i] = stm32_cryp_read(cryp, cryp->caps->dout); 1485 1486 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, 1487 cryp->payload_out), 1); 1488 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, 1489 cryp->payload_out); 1490 1491 /* d) change mode back to AES GCM */ 1492 cfg &= ~CR_ALGO_MASK; 1493 cfg |= CR_AES_GCM; 1494 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1495 1496 /* e) change phase to Final */ 1497 cfg &= ~CR_PH_MASK; 1498 cfg |= CR_PH_FINAL; 1499 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1500 1501 /* f) write padded data */ 1502 for (i = 0; i < AES_BLOCK_32; i++) 1503 stm32_cryp_write(cryp, cryp->caps->din, 
block[i]); 1504 1505 /* g) Empty fifo out */ 1506 err = stm32_cryp_wait_output(cryp); 1507 if (err) { 1508 dev_err(cryp->dev, "Timeout (write gcm padded data)\n"); 1509 return stm32_cryp_finish_req(cryp, err); 1510 } 1511 1512 for (i = 0; i < AES_BLOCK_32; i++) 1513 stm32_cryp_read(cryp, cryp->caps->dout); 1514 1515 /* h) run the normal Final phase */ 1516 stm32_cryp_finish_req(cryp, 0); 1517 } 1518 1519 static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp) 1520 { 1521 u32 cfg; 1522 1523 /* disable ip, set NPBLB and re-enable ip */ 1524 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 1525 cfg &= ~CR_CRYPEN; 1526 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1527 1528 cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT; 1529 cfg |= CR_CRYPEN; 1530 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1531 } 1532 1533 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp) 1534 { 1535 int err = 0; 1536 u32 cfg, iv1tmp; 1537 u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32]; 1538 u32 block[AES_BLOCK_32] = {0}; 1539 unsigned int i; 1540 1541 /* 'Special workaround' procedure described in the datasheet */ 1542 1543 /* a) disable ip */ 1544 stm32_cryp_write(cryp, cryp->caps->imsc, 0); 1545 1546 cfg = stm32_cryp_read(cryp, cryp->caps->cr); 1547 cfg &= ~CR_CRYPEN; 1548 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1549 1550 /* b) get IV1 from CRYP_CSGCMCCM7 */ 1551 iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4); 1552 1553 /* c) Load CRYP_CSGCMCCMxR */ 1554 for (i = 0; i < ARRAY_SIZE(cstmp1); i++) 1555 cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4); 1556 1557 /* d) Write IV1R */ 1558 stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp); 1559 1560 /* e) change mode to CTR */ 1561 cfg &= ~CR_ALGO_MASK; 1562 cfg |= CR_AES_CTR; 1563 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1564 1565 /* a) enable IP */ 1566 cfg |= CR_CRYPEN; 1567 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1568 1569 /* b) pad and write the last block */ 1570 stm32_cryp_irq_write_block(cryp); 1571 /* wait end of process */ 1572 err = stm32_cryp_wait_output(cryp); 1573 if (err) { 1574 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); 1575 return stm32_cryp_finish_req(cryp, err); 1576 } 1577 1578 /* c) get and store decrypted data */ 1579 /* 1580 * Same code as stm32_cryp_irq_read_data(), but we want to store 1581 * block value 1582 */ 1583 for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) 1584 block[i] = stm32_cryp_read(cryp, cryp->caps->dout); 1585 1586 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize, 1587 cryp->payload_out), 1); 1588 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out); 1589 1590 /* d) Load again CRYP_CSGCMCCMxR */ 1591 for (i = 0; i < ARRAY_SIZE(cstmp2); i++) 1592 cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4); 1593 1594 /* e) change mode back to AES CCM */ 1595 cfg &= ~CR_ALGO_MASK; 1596 cfg |= CR_AES_CCM; 1597 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1598 1599 /* f) change phase to header */ 1600 cfg &= ~CR_PH_MASK; 1601 cfg |= CR_PH_HEADER; 1602 stm32_cryp_write(cryp, cryp->caps->cr, cfg); 1603 1604 /* g) XOR and write padded data */ 1605 for (i = 0; i < ARRAY_SIZE(block); i++) { 1606 block[i] ^= cstmp1[i]; 1607 block[i] ^= cstmp2[i]; 1608 stm32_cryp_write(cryp, cryp->caps->din, block[i]); 1609 } 1610 1611 /* h) wait for completion */ 1612 err = stm32_cryp_wait_busy(cryp); 1613 if (err) 1614 dev_err(cryp->dev, "Timeout (write ccm padded data)\n"); 1615 1616 /* i) run the normal Final
phase */ 1617 stm32_cryp_finish_req(cryp, err); 1618 } 1619 1620 static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp) 1621 { 1622 if (unlikely(!cryp->payload_in)) { 1623 dev_warn(cryp->dev, "No more data to process\n"); 1624 return; 1625 } 1626 1627 if (unlikely(cryp->payload_in < AES_BLOCK_SIZE && 1628 (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) && 1629 is_encrypt(cryp))) { 1630 /* Padding for AES GCM encryption */ 1631 if (cryp->caps->padding_wa) { 1632 /* Special case 1 */ 1633 stm32_cryp_irq_write_gcm_padded_data(cryp); 1634 return; 1635 } 1636 1637 /* Setting padding bytes (NBBLB) */ 1638 stm32_cryp_irq_set_npblb(cryp); 1639 } 1640 1641 if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) && 1642 (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) && 1643 is_decrypt(cryp))) { 1644 /* Padding for AES CCM decryption */ 1645 if (cryp->caps->padding_wa) { 1646 /* Special case 2 */ 1647 stm32_cryp_irq_write_ccm_padded_data(cryp); 1648 return; 1649 } 1650 1651 /* Setting padding bytes (NBBLB) */ 1652 stm32_cryp_irq_set_npblb(cryp); 1653 } 1654 1655 if (is_aes(cryp) && is_ctr(cryp)) 1656 stm32_cryp_check_ctr_counter(cryp); 1657 1658 stm32_cryp_irq_write_block(cryp); 1659 } 1660 1661 static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp) 1662 { 1663 unsigned int i; 1664 u32 block[AES_BLOCK_32] = {0}; 1665 size_t written; 1666 1667 written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in); 1668 1669 scatterwalk_copychunks(block, &cryp->in_walk, written, 0); 1670 for (i = 0; i < AES_BLOCK_32; i++) 1671 stm32_cryp_write(cryp, cryp->caps->din, block[i]); 1672 1673 cryp->header_in -= written; 1674 1675 stm32_crypt_gcmccm_end_header(cryp); 1676 } 1677 1678 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg) 1679 { 1680 struct stm32_cryp *cryp = arg; 1681 u32 ph; 1682 u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc); 1683 1684 if (cryp->irq_status & MISR_OUT) 1685 /* Output FIFO IRQ: read data */ 1686 stm32_cryp_irq_read_data(cryp); 1687 1688 if (cryp->irq_status & MISR_IN) { 1689 if (is_gcm(cryp) || is_ccm(cryp)) { 1690 ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK; 1691 if (unlikely(ph == CR_PH_HEADER)) 1692 /* Write Header */ 1693 stm32_cryp_irq_write_gcmccm_header(cryp); 1694 else 1695 /* Input FIFO IRQ: write data */ 1696 stm32_cryp_irq_write_data(cryp); 1697 if (is_gcm(cryp)) 1698 cryp->gcm_ctr++; 1699 } else { 1700 /* Input FIFO IRQ: write data */ 1701 stm32_cryp_irq_write_data(cryp); 1702 } 1703 } 1704 1705 /* Mask useless interrupts */ 1706 if (!cryp->payload_in && !cryp->header_in) 1707 it_mask &= ~IMSCR_IN; 1708 if (!cryp->payload_out) 1709 it_mask &= ~IMSCR_OUT; 1710 stm32_cryp_write(cryp, cryp->caps->imsc, it_mask); 1711 1712 if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) 1713 stm32_cryp_finish_req(cryp, 0); 1714 1715 return IRQ_HANDLED; 1716 } 1717 1718 static irqreturn_t stm32_cryp_irq(int irq, void *arg) 1719 { 1720 struct stm32_cryp *cryp = arg; 1721 1722 cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis); 1723 1724 return IRQ_WAKE_THREAD; 1725 } 1726 1727 static struct skcipher_alg crypto_algs[] = { 1728 { 1729 .base.cra_name = "ecb(aes)", 1730 .base.cra_driver_name = "stm32-ecb-aes", 1731 .base.cra_priority = 200, 1732 .base.cra_flags = CRYPTO_ALG_ASYNC, 1733 .base.cra_blocksize = AES_BLOCK_SIZE, 1734 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1735 .base.cra_alignmask = 0, 1736 .base.cra_module = THIS_MODULE, 1737 1738 .init = stm32_cryp_init_tfm, 1739 .min_keysize = AES_MIN_KEY_SIZE, 1740 .max_keysize = 
AES_MAX_KEY_SIZE, 1741 .setkey = stm32_cryp_aes_setkey, 1742 .encrypt = stm32_cryp_aes_ecb_encrypt, 1743 .decrypt = stm32_cryp_aes_ecb_decrypt, 1744 }, 1745 { 1746 .base.cra_name = "cbc(aes)", 1747 .base.cra_driver_name = "stm32-cbc-aes", 1748 .base.cra_priority = 200, 1749 .base.cra_flags = CRYPTO_ALG_ASYNC, 1750 .base.cra_blocksize = AES_BLOCK_SIZE, 1751 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1752 .base.cra_alignmask = 0, 1753 .base.cra_module = THIS_MODULE, 1754 1755 .init = stm32_cryp_init_tfm, 1756 .min_keysize = AES_MIN_KEY_SIZE, 1757 .max_keysize = AES_MAX_KEY_SIZE, 1758 .ivsize = AES_BLOCK_SIZE, 1759 .setkey = stm32_cryp_aes_setkey, 1760 .encrypt = stm32_cryp_aes_cbc_encrypt, 1761 .decrypt = stm32_cryp_aes_cbc_decrypt, 1762 }, 1763 { 1764 .base.cra_name = "ctr(aes)", 1765 .base.cra_driver_name = "stm32-ctr-aes", 1766 .base.cra_priority = 200, 1767 .base.cra_flags = CRYPTO_ALG_ASYNC, 1768 .base.cra_blocksize = 1, 1769 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1770 .base.cra_alignmask = 0, 1771 .base.cra_module = THIS_MODULE, 1772 1773 .init = stm32_cryp_init_tfm, 1774 .min_keysize = AES_MIN_KEY_SIZE, 1775 .max_keysize = AES_MAX_KEY_SIZE, 1776 .ivsize = AES_BLOCK_SIZE, 1777 .setkey = stm32_cryp_aes_setkey, 1778 .encrypt = stm32_cryp_aes_ctr_encrypt, 1779 .decrypt = stm32_cryp_aes_ctr_decrypt, 1780 }, 1781 { 1782 .base.cra_name = "ecb(des)", 1783 .base.cra_driver_name = "stm32-ecb-des", 1784 .base.cra_priority = 200, 1785 .base.cra_flags = CRYPTO_ALG_ASYNC, 1786 .base.cra_blocksize = DES_BLOCK_SIZE, 1787 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1788 .base.cra_alignmask = 0, 1789 .base.cra_module = THIS_MODULE, 1790 1791 .init = stm32_cryp_init_tfm, 1792 .min_keysize = DES_BLOCK_SIZE, 1793 .max_keysize = DES_BLOCK_SIZE, 1794 .setkey = stm32_cryp_des_setkey, 1795 .encrypt = stm32_cryp_des_ecb_encrypt, 1796 .decrypt = stm32_cryp_des_ecb_decrypt, 1797 }, 1798 { 1799 .base.cra_name = "cbc(des)", 1800 .base.cra_driver_name = "stm32-cbc-des", 1801 .base.cra_priority = 200, 1802 .base.cra_flags = CRYPTO_ALG_ASYNC, 1803 .base.cra_blocksize = DES_BLOCK_SIZE, 1804 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1805 .base.cra_alignmask = 0, 1806 .base.cra_module = THIS_MODULE, 1807 1808 .init = stm32_cryp_init_tfm, 1809 .min_keysize = DES_BLOCK_SIZE, 1810 .max_keysize = DES_BLOCK_SIZE, 1811 .ivsize = DES_BLOCK_SIZE, 1812 .setkey = stm32_cryp_des_setkey, 1813 .encrypt = stm32_cryp_des_cbc_encrypt, 1814 .decrypt = stm32_cryp_des_cbc_decrypt, 1815 }, 1816 { 1817 .base.cra_name = "ecb(des3_ede)", 1818 .base.cra_driver_name = "stm32-ecb-des3", 1819 .base.cra_priority = 200, 1820 .base.cra_flags = CRYPTO_ALG_ASYNC, 1821 .base.cra_blocksize = DES_BLOCK_SIZE, 1822 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1823 .base.cra_alignmask = 0, 1824 .base.cra_module = THIS_MODULE, 1825 1826 .init = stm32_cryp_init_tfm, 1827 .min_keysize = 3 * DES_BLOCK_SIZE, 1828 .max_keysize = 3 * DES_BLOCK_SIZE, 1829 .setkey = stm32_cryp_tdes_setkey, 1830 .encrypt = stm32_cryp_tdes_ecb_encrypt, 1831 .decrypt = stm32_cryp_tdes_ecb_decrypt, 1832 }, 1833 { 1834 .base.cra_name = "cbc(des3_ede)", 1835 .base.cra_driver_name = "stm32-cbc-des3", 1836 .base.cra_priority = 200, 1837 .base.cra_flags = CRYPTO_ALG_ASYNC, 1838 .base.cra_blocksize = DES_BLOCK_SIZE, 1839 .base.cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1840 .base.cra_alignmask = 0, 1841 .base.cra_module = THIS_MODULE, 1842 1843 .init = stm32_cryp_init_tfm, 1844 .min_keysize = 3 * DES_BLOCK_SIZE, 1845 .max_keysize = 3 * 
DES_BLOCK_SIZE, 1846 .ivsize = DES_BLOCK_SIZE, 1847 .setkey = stm32_cryp_tdes_setkey, 1848 .encrypt = stm32_cryp_tdes_cbc_encrypt, 1849 .decrypt = stm32_cryp_tdes_cbc_decrypt, 1850 }, 1851 }; 1852 1853 static struct aead_alg aead_algs[] = { 1854 { 1855 .setkey = stm32_cryp_aes_aead_setkey, 1856 .setauthsize = stm32_cryp_aes_gcm_setauthsize, 1857 .encrypt = stm32_cryp_aes_gcm_encrypt, 1858 .decrypt = stm32_cryp_aes_gcm_decrypt, 1859 .init = stm32_cryp_aes_aead_init, 1860 .ivsize = 12, 1861 .maxauthsize = AES_BLOCK_SIZE, 1862 1863 .base = { 1864 .cra_name = "gcm(aes)", 1865 .cra_driver_name = "stm32-gcm-aes", 1866 .cra_priority = 200, 1867 .cra_flags = CRYPTO_ALG_ASYNC, 1868 .cra_blocksize = 1, 1869 .cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1870 .cra_alignmask = 0, 1871 .cra_module = THIS_MODULE, 1872 }, 1873 }, 1874 { 1875 .setkey = stm32_cryp_aes_aead_setkey, 1876 .setauthsize = stm32_cryp_aes_ccm_setauthsize, 1877 .encrypt = stm32_cryp_aes_ccm_encrypt, 1878 .decrypt = stm32_cryp_aes_ccm_decrypt, 1879 .init = stm32_cryp_aes_aead_init, 1880 .ivsize = AES_BLOCK_SIZE, 1881 .maxauthsize = AES_BLOCK_SIZE, 1882 1883 .base = { 1884 .cra_name = "ccm(aes)", 1885 .cra_driver_name = "stm32-ccm-aes", 1886 .cra_priority = 200, 1887 .cra_flags = CRYPTO_ALG_ASYNC, 1888 .cra_blocksize = 1, 1889 .cra_ctxsize = sizeof(struct stm32_cryp_ctx), 1890 .cra_alignmask = 0, 1891 .cra_module = THIS_MODULE, 1892 }, 1893 }, 1894 }; 1895 1896 static const struct stm32_cryp_caps ux500_data = { 1897 .aeads_support = false, 1898 .linear_aes_key = true, 1899 .kp_mode = false, 1900 .iv_protection = true, 1901 .swap_final = true, 1902 .padding_wa = true, 1903 .cr = UX500_CRYP_CR, 1904 .sr = UX500_CRYP_SR, 1905 .din = UX500_CRYP_DIN, 1906 .dout = UX500_CRYP_DOUT, 1907 .imsc = UX500_CRYP_IMSC, 1908 .mis = UX500_CRYP_MIS, 1909 .k1l = UX500_CRYP_K1L, 1910 .k1r = UX500_CRYP_K1R, 1911 .k3r = UX500_CRYP_K3R, 1912 .iv0l = UX500_CRYP_IV0L, 1913 .iv0r = UX500_CRYP_IV0R, 1914 .iv1l = UX500_CRYP_IV1L, 1915 .iv1r = UX500_CRYP_IV1R, 1916 }; 1917 1918 static const struct stm32_cryp_caps f7_data = { 1919 .aeads_support = true, 1920 .linear_aes_key = false, 1921 .kp_mode = true, 1922 .iv_protection = false, 1923 .swap_final = true, 1924 .padding_wa = true, 1925 .cr = CRYP_CR, 1926 .sr = CRYP_SR, 1927 .din = CRYP_DIN, 1928 .dout = CRYP_DOUT, 1929 .imsc = CRYP_IMSCR, 1930 .mis = CRYP_MISR, 1931 .k1l = CRYP_K1LR, 1932 .k1r = CRYP_K1RR, 1933 .k3r = CRYP_K3RR, 1934 .iv0l = CRYP_IV0LR, 1935 .iv0r = CRYP_IV0RR, 1936 .iv1l = CRYP_IV1LR, 1937 .iv1r = CRYP_IV1RR, 1938 }; 1939 1940 static const struct stm32_cryp_caps mp1_data = { 1941 .aeads_support = true, 1942 .linear_aes_key = false, 1943 .kp_mode = true, 1944 .iv_protection = false, 1945 .swap_final = false, 1946 .padding_wa = false, 1947 .cr = CRYP_CR, 1948 .sr = CRYP_SR, 1949 .din = CRYP_DIN, 1950 .dout = CRYP_DOUT, 1951 .imsc = CRYP_IMSCR, 1952 .mis = CRYP_MISR, 1953 .k1l = CRYP_K1LR, 1954 .k1r = CRYP_K1RR, 1955 .k3r = CRYP_K3RR, 1956 .iv0l = CRYP_IV0LR, 1957 .iv0r = CRYP_IV0RR, 1958 .iv1l = CRYP_IV1LR, 1959 .iv1r = CRYP_IV1RR, 1960 }; 1961 1962 static const struct of_device_id stm32_dt_ids[] = { 1963 { .compatible = "stericsson,ux500-cryp", .data = &ux500_data}, 1964 { .compatible = "st,stm32f756-cryp", .data = &f7_data}, 1965 { .compatible = "st,stm32mp1-cryp", .data = &mp1_data}, 1966 {}, 1967 }; 1968 MODULE_DEVICE_TABLE(of, stm32_dt_ids); 1969 1970 static int stm32_cryp_probe(struct platform_device *pdev) 1971 { 1972 struct device *dev = &pdev->dev; 1973 struct stm32_cryp *cryp; 1974 
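/*
 * Probe flow (summary): map the registers, request the threaded IRQ,
 * enable the clock, optionally pulse the reset line, then register the
 * instance in cryp_list and with the crypto engine.
 */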
struct reset_control *rst; 1975 int irq, ret; 1976 1977 cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL); 1978 if (!cryp) 1979 return -ENOMEM; 1980 1981 cryp->caps = of_device_get_match_data(dev); 1982 if (!cryp->caps) 1983 return -ENODEV; 1984 1985 cryp->dev = dev; 1986 1987 cryp->regs = devm_platform_ioremap_resource(pdev, 0); 1988 if (IS_ERR(cryp->regs)) 1989 return PTR_ERR(cryp->regs); 1990 1991 irq = platform_get_irq(pdev, 0); 1992 if (irq < 0) 1993 return irq; 1994 1995 ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq, 1996 stm32_cryp_irq_thread, IRQF_ONESHOT, 1997 dev_name(dev), cryp); 1998 if (ret) { 1999 dev_err(dev, "Cannot grab IRQ\n"); 2000 return ret; 2001 } 2002 2003 cryp->clk = devm_clk_get(dev, NULL); 2004 if (IS_ERR(cryp->clk)) { 2005 dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n"); 2006 2007 return PTR_ERR(cryp->clk); 2008 } 2009 2010 ret = clk_prepare_enable(cryp->clk); 2011 if (ret) { 2012 dev_err(cryp->dev, "Failed to enable clock\n"); 2013 return ret; 2014 } 2015 2016 pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY); 2017 pm_runtime_use_autosuspend(dev); 2018 2019 pm_runtime_get_noresume(dev); 2020 pm_runtime_set_active(dev); 2021 pm_runtime_enable(dev); 2022 2023 rst = devm_reset_control_get(dev, NULL); 2024 if (IS_ERR(rst)) { 2025 ret = PTR_ERR(rst); 2026 if (ret == -EPROBE_DEFER) 2027 goto err_rst; 2028 } else { 2029 reset_control_assert(rst); 2030 udelay(2); 2031 reset_control_deassert(rst); 2032 } 2033 2034 platform_set_drvdata(pdev, cryp); 2035 2036 spin_lock(&cryp_list.lock); 2037 list_add(&cryp->list, &cryp_list.dev_list); 2038 spin_unlock(&cryp_list.lock); 2039 2040 /* Initialize crypto engine */ 2041 cryp->engine = crypto_engine_alloc_init(dev, 1); 2042 if (!cryp->engine) { 2043 dev_err(dev, "Could not init crypto engine\n"); 2044 ret = -ENOMEM; 2045 goto err_engine1; 2046 } 2047 2048 ret = crypto_engine_start(cryp->engine); 2049 if (ret) { 2050 dev_err(dev, "Could not start crypto engine\n"); 2051 goto err_engine2; 2052 } 2053 2054 ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs)); 2055 if (ret) { 2056 dev_err(dev, "Could not register algs\n"); 2057 goto err_algs; 2058 } 2059 2060 if (cryp->caps->aeads_support) { 2061 ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs)); 2062 if (ret) 2063 goto err_aead_algs; 2064 } 2065 2066 dev_info(dev, "Initialized\n"); 2067 2068 pm_runtime_put_sync(dev); 2069 2070 return 0; 2071 2072 err_aead_algs: 2073 crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs)); 2074 err_algs: 2075 err_engine2: 2076 crypto_engine_exit(cryp->engine); 2077 err_engine1: 2078 spin_lock(&cryp_list.lock); 2079 list_del(&cryp->list); 2080 spin_unlock(&cryp_list.lock); 2081 err_rst: 2082 pm_runtime_disable(dev); 2083 pm_runtime_put_noidle(dev); 2084 2085 clk_disable_unprepare(cryp->clk); 2086 2087 return ret; 2088 } 2089 2090 static int stm32_cryp_remove(struct platform_device *pdev) 2091 { 2092 struct stm32_cryp *cryp = platform_get_drvdata(pdev); 2093 int ret; 2094 2095 if (!cryp) 2096 return -ENODEV; 2097 2098 ret = pm_runtime_resume_and_get(cryp->dev); 2099 if (ret < 0) 2100 return ret; 2101 2102 if (cryp->caps->aeads_support) 2103 crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs)); 2104 crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs)); 2105 2106 crypto_engine_exit(cryp->engine); 2107 2108 spin_lock(&cryp_list.lock); 2109 list_del(&cryp->list); 2110 spin_unlock(&cryp_list.lock); 2111 2112 pm_runtime_disable(cryp->dev); 2113 
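/*
 * Drop the reference taken by pm_runtime_resume_and_get() above without
 * triggering a runtime suspend; the clock is switched off explicitly below.
 */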
pm_runtime_put_noidle(cryp->dev); 2114 2115 clk_disable_unprepare(cryp->clk); 2116 2117 return 0; 2118 } 2119 2120 #ifdef CONFIG_PM 2121 static int stm32_cryp_runtime_suspend(struct device *dev) 2122 { 2123 struct stm32_cryp *cryp = dev_get_drvdata(dev); 2124 2125 clk_disable_unprepare(cryp->clk); 2126 2127 return 0; 2128 } 2129 2130 static int stm32_cryp_runtime_resume(struct device *dev) 2131 { 2132 struct stm32_cryp *cryp = dev_get_drvdata(dev); 2133 int ret; 2134 2135 ret = clk_prepare_enable(cryp->clk); 2136 if (ret) { 2137 dev_err(cryp->dev, "Failed to prepare_enable clock\n"); 2138 return ret; 2139 } 2140 2141 return 0; 2142 } 2143 #endif 2144 2145 static const struct dev_pm_ops stm32_cryp_pm_ops = { 2146 SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend, 2147 pm_runtime_force_resume) 2148 SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend, 2149 stm32_cryp_runtime_resume, NULL) 2150 }; 2151 2152 static struct platform_driver stm32_cryp_driver = { 2153 .probe = stm32_cryp_probe, 2154 .remove = stm32_cryp_remove, 2155 .driver = { 2156 .name = DRIVER_NAME, 2157 .pm = &stm32_cryp_pm_ops, 2158 .of_match_table = stm32_dt_ids, 2159 }, 2160 }; 2161 2162 module_platform_driver(stm32_cryp_driver); 2163 2164 MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>"); 2165 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver"); 2166 MODULE_LICENSE("GPL"); 2167