// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ce-core.c - hardware cryptographic offloader for
 * Allwinner H3/A64/H5/H2+/H6/R40 SoC
 *
 * Copyright (C) 2015-2019 Corentin Labbe <clabbe.montjoie@gmail.com>
 *
 * Core file which registers crypto algorithms supported by the CryptoEngine.
 *
 * You could find a link for the datasheet in Documentation/arch/arm/sunxi.rst
 */
#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <crypto/internal/rng.h>
#include <crypto/internal/skcipher.h>

#include "sun8i-ce.h"

/*
 * mod clock is lower on H3 than other SoC due to some DMA timeout occurring
 * with high value.
 * If you want to tune mod clock, loading driver and passing selftest is
 * insufficient, you need to test with some LUKS test (mount and write to it)
 */
/*
 * Per-SoC capability table.
 * NOTE: alg_cipher[], alg_hash[] and op_mode[] are POSITIONAL — they are
 * indexed by the CE_ID_* algorithm/blockmode ids in
 * sun8i_ce_register_algs(), so the initializer order must not be changed.
 * ce_clks[] entries are { name, required freq (0 = keep), max freq (0 = no
 * cap) }, consumed by sun8i_ce_get_clks().
 */
static const struct ce_variant ce_h3_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 50000000, 0 },	/* lower than other SoCs, see note above */
	},
	.esr = ESR_H3,		/* selects the error-decoding scheme in sun8i_ce_run_task() */
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,	/* no TRNG on H3 */
};

/* H5: like H3 but no SHA384/SHA512, and mod clock can run at 300 MHz */
static const struct ce_variant ce_h5_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_H5,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
/*
 * H6: newer task descriptor format — the *_t_dlen_* flags tell the cipher/
 * hash/prng/trng code which unit (bytes vs bits) the hardware expects in the
 * task length fields.  Also the only variant here with TRNG support (V2) and
 * a dedicated "ram" clock.
 */
static const struct ce_variant ce_h6_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
	},
	.esr = ESR_H6,		/* 8 error bits per flow, see sun8i_ce_run_task() */
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};

/* A64: same feature set as H5 */
static const struct ce_variant ce_a64_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_A64,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};

/* D1: full hash set, TRNG (V1) with its own clock */
static const struct ce_variant ce_d1_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
		{ "trng", 0, 0 },
	},
	.esr = ESR_D1,
	.prng = CE_ALG_PRNG,
	.trng = CE_ALG_TRNG,
};

/* R40: same feature set as A64/H5 */
static const struct ce_variant ce_r40_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.esr = ESR_R40,
	.prng = CE_ALG_PRNG,
	.trng = CE_ID_NOTSUPP,
};
/*
 * sun8i_ce_get_engine_number() get the next channel slot
 * This is a simple round-robin way of getting the next channel
 * The flow 3 is reserve for xRNG operations
 */
int sun8i_ce_get_engine_number(struct sun8i_ce_dev *ce)
{
	/* atomic counter modulo MAXFLOW-1 keeps the last flow out of rotation */
	return atomic_inc_return(&ce->flow) % (MAXFLOW - 1);
}

/*
 * sun8i_ce_run_task() - submit the prepared task descriptor of @flow to the
 * hardware and wait for its completion interrupt.
 * @ce:   the device
 * @flow: channel whose task list (chanlist[flow].tl) has been filled by the
 *        caller
 * @name: algorithm name, used only for error reporting
 *
 * Returns 0 on success, -EFAULT on DMA timeout or on a hardware-reported
 * error (decoded from CE_ESR according to the variant's esr scheme).
 * The caller is expected to hold the flow exclusively (see the locking note
 * before the CE_ESR read below).
 */
int sun8i_ce_run_task(struct sun8i_ce_dev *ce, int flow, const char *name)
{
	u32 v;
	int err = 0;
	struct ce_task *cet = ce->chanlist[flow].tl;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	ce->chanlist[flow].stat_req++;
#endif

	/* mlock serializes access to the global submission registers */
	mutex_lock(&ce->mlock);

	/* Enable the completion interrupt for this flow */
	v = readl(ce->base + CE_ICR);
	v |= 1 << flow;
	writel(v, ce->base + CE_ICR);

	reinit_completion(&ce->chanlist[flow].complete);
	/* Point the hardware at this flow's task descriptor (DMA address) */
	writel(ce->chanlist[flow].t_phy, ce->base + CE_TDQ);

	ce->chanlist[flow].status = 0;
	/* Be sure all data is written before enabling the task */
	wmb();

	/* Only H6 needs to write a part of t_common_ctl along with "1", but since it is ignored
	 * on older SoCs, we have no reason to complicate things.
	 */
	v = 1 | ((le32_to_cpu(ce->chanlist[flow].tl->t_common_ctl) & 0x7F) << 8);
	writel(v, ce->base + CE_TLR);
	mutex_unlock(&ce->mlock);

	wait_for_completion_interruptible_timeout(&ce->chanlist[flow].complete,
			msecs_to_jiffies(ce->chanlist[flow].timeout));

	/* status is set to 1 by ce_irq_handler(); still 0 means no IRQ came */
	if (ce->chanlist[flow].status == 0) {
		dev_err(ce->dev, "DMA timeout for %s (tm=%d) on flow %d\n", name,
			ce->chanlist[flow].timeout, flow);
		err = -EFAULT;
	}
	/* No need to lock for this read, the channel is locked so
	 * nothing could modify the error value for this channel
	 */
	v = readl(ce->base + CE_ESR);
	switch (ce->variant->esr) {
	case ESR_H3:
		/* Sadly, the error bit is not per flow */
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			err = -EFAULT;
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_A64:
	case ESR_D1:
	case ESR_H5:
	case ESR_R40:
		/* 4 error bits per flow */
		v >>= (flow * 4);
		v &= 0xF;
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			err = -EFAULT;
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_H6:
		/* 8 error bits per flow, with two extra error conditions */
		v >>= (flow * 8);
		v &= 0xFF;
		if (v) {
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			err = -EFAULT;
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		if (v & CE_ERR_ADDR_INVALID)
			dev_err(ce->dev, "CE ERROR: address invalid\n");
		if (v & CE_ERR_KEYLADDER)
			dev_err(ce->dev, "CE ERROR: key ladder configuration error\n");
		break;
	}

	return err;
}

/*
 * Completion interrupt handler: acknowledge each pending flow bit in CE_ISR
 * and wake the waiter in sun8i_ce_run_task() via the flow's completion.
 */
static irqreturn_t ce_irq_handler(int irq, void *data)
{
	struct sun8i_ce_dev *ce = (struct sun8i_ce_dev *)data;
	int flow = 0;
	u32 p;

	p = readl(ce->base + CE_ISR);
	for (flow = 0; flow < MAXFLOW; flow++) {
		if (p & (BIT(flow))) {
			/* write-1-to-clear the interrupt for this flow */
			writel(BIT(flow), ce->base + CE_ISR);
			ce->chanlist[flow].status = 1;
			complete(&ce->chanlist[flow].complete);
		}
	}

	return IRQ_HANDLED;
}

/*
 * Algorithm templates. Entries are enabled/disabled at runtime by
 * sun8i_ce_register_algs() depending on the variant's capability tables;
 * a disabled entry gets its .ce pointer cleared.
 * All ciphers and hashes declare CRYPTO_ALG_NEED_FALLBACK: requests the
 * hardware cannot handle are delegated to a software implementation (see
 * the stat_fb_* counters in the debugfs output below).
 */
static struct sun8i_ce_alg_template ce_algs[] = {
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= sun8i_ce_aes_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sun8i_ce_aes_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.setkey		= sun8i_ce_des3_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ce",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.setkey		= sun8i_ce_des3_setkey,
		.encrypt	= sun8i_ce_skencrypt,
		.decrypt	= sun8i_ce_skdecrypt,
	}
},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_HASH
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_MD5,
	.alg.hash = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ce",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ce_hash_crainit,
				.cra_exit = sun8i_ce_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA1,
	.alg.hash = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ce",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ce_hash_crainit,
				.cra_exit = sun8i_ce_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA224,
	.alg.hash = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ce",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ce_hash_crainit,
				.cra_exit = sun8i_ce_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA256,
	.alg.hash = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ce",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ce_hash_crainit,
				.cra_exit = sun8i_ce_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA384,
	.alg.hash = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-sun8i-ce",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ce_hash_crainit,
				.cra_exit = sun8i_ce_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA512,
	.alg.hash = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-sun8i-ce",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ce_hash_crainit,
				.cra_exit = sun8i_ce_hash_craexit,
			}
		}
	}
},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG
{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name = "stdrng",
			.cra_driver_name = "sun8i-ce-prng",
			.cra_priority = 300,
			.cra_ctxsize = sizeof(struct sun8i_ce_rng_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = sun8i_ce_prng_init,
			.cra_exit = sun8i_ce_prng_exit,
		},
		.generate = sun8i_ce_prng_generate,
		.seed = sun8i_ce_prng_seed,
		.seedsize = PRNG_SEED_SIZE,
	}
},
#endif
};

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
/*
 * debugfs "stats" file: dumps per-flow request counts, then per-algorithm
 * request/fallback statistics (only for algorithms actually registered,
 * i.e. with a non-NULL .ce).
 */
static int sun8i_ce_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ce_dev *ce = seq->private;
	unsigned int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i, ce->chanlist[i].stat_req);

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.skcipher.base.cra_driver_name,
				   ce_algs[i].alg.skcipher.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length !mod16: %lu\n",
				   ce_algs[i].stat_fb_mod16);
			seq_printf(seq, "\tFallback due to length < IV: %lu\n",
				   ce_algs[i].stat_fb_leniv);
			seq_printf(seq, "\tFallback due to source alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to dest alignment: %lu\n",
				   ce_algs[i].stat_fb_dstali);
			seq_printf(seq, "\tFallback due to source length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to dest length: %lu\n",
				   ce_algs[i].stat_fb_dstlen);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.hash.halg.base.cra_driver_name,
				   ce_algs[i].alg.hash.halg.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu bytes=%lu\n",
				   ce_algs[i].alg.rng.base.cra_driver_name,
				   ce_algs[i].alg.rng.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_bytes);
			break;
		}
	}
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	seq_printf(seq, "HWRNG %lu %lu\n",
		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
#endif
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(sun8i_ce_debugfs);
#endif

/*
 * Free channel resources for channels 0..i (inclusive), in reverse order.
 * Called from the allocate error path (with i = last successful index) and
 * from remove/probe-failure (with i = MAXFLOW - 1).
 * The devm-allocated buffers (chanlist, bounce_iv, backup_iv) are left to
 * devres.
 */
static void sun8i_ce_free_chanlist(struct sun8i_ce_dev *ce, int i)
{
	while (i >= 0) {
		crypto_engine_exit(ce->chanlist[i].engine);
		if (ce->chanlist[i].tl)
			dma_free_coherent(ce->dev, sizeof(struct ce_task),
					  ce->chanlist[i].tl,
					  ce->chanlist[i].t_phy);
		i--;
	}
}

/*
 * Allocate the channel list structure
 */
static int sun8i_ce_allocate_chanlist(struct sun8i_ce_dev *ce)
{
	int i, err;

	ce->chanlist = devm_kcalloc(ce->dev, MAXFLOW,
				    sizeof(struct sun8i_ce_flow), GFP_KERNEL);
	if (!ce->chanlist)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&ce->chanlist[i].complete);

		/* one crypto_engine per flow, started immediately */
		ce->chanlist[i].engine = crypto_engine_alloc_init(ce->dev, true);
		if (!ce->chanlist[i].engine) {
			dev_err(ce->dev, "Cannot allocate engine\n");
			i--;	/* nothing to free for this index */
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(ce->chanlist[i].engine);
		if (err) {
			dev_err(ce->dev, "Cannot start engine\n");
			goto error_engine;
		}
		/* task descriptor is DMA'd by the hardware: coherent memory */
		ce->chanlist[i].tl = dma_alloc_coherent(ce->dev,
							sizeof(struct ce_task),
							&ce->chanlist[i].t_phy,
							GFP_KERNEL);
		if (!ce->chanlist[i].tl) {
			dev_err(ce->dev, "Cannot get DMA memory for task %d\n",
				i);
			err = -ENOMEM;
			goto error_engine;
		}
		/* GFP_DMA: the bounce IV is handed to the hardware */
		ce->chanlist[i].bounce_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
							 GFP_KERNEL | GFP_DMA);
		if (!ce->chanlist[i].bounce_iv) {
			err = -ENOMEM;
			goto error_engine;
		}
		ce->chanlist[i].backup_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
							 GFP_KERNEL);
		if (!ce->chanlist[i].backup_iv) {
			err = -ENOMEM;
			goto error_engine;
		}
	}
	return 0;
error_engine:
	sun8i_ce_free_chanlist(ce, i);
	return err;
}

/*
 * Power management strategy: The device is suspended unless a TFM exists for
 * one of the algorithms proposed by this driver.
 */
static int sun8i_ce_pm_suspend(struct device *dev)
{
	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
	int i;

	reset_control_assert(ce->reset);
	for (i = 0; i < CE_MAX_CLOCKS; i++)
		clk_disable_unprepare(ce->ceclks[i]);
	return 0;
}

/*
 * Resume: enable every clock declared by the variant, then release the
 * reset line. On any failure, roll everything back via pm_suspend.
 */
static int sun8i_ce_pm_resume(struct device *dev)
{
	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
	int err, i;

	for (i = 0; i < CE_MAX_CLOCKS; i++) {
		/* skip clock slots the variant does not declare */
		if (!ce->variant->ce_clks[i].name)
			continue;
		err = clk_prepare_enable(ce->ceclks[i]);
		if (err) {
			dev_err(ce->dev, "Cannot prepare_enable %s\n",
				ce->variant->ce_clks[i].name);
			goto error;
		}
	}
	err = reset_control_deassert(ce->reset);
	if (err) {
		dev_err(ce->dev, "Cannot deassert reset control\n");
		goto error;
	}
	return 0;
error:
	sun8i_ce_pm_suspend(dev);
	return err;
}

static const struct dev_pm_ops sun8i_ce_pm_ops = {
	SET_RUNTIME_PM_OPS(sun8i_ce_pm_suspend, sun8i_ce_pm_resume, NULL)
};

/* Enable runtime PM with autosuspend; the device starts suspended. */
static int sun8i_ce_pm_init(struct sun8i_ce_dev *ce)
{
	int err;

	pm_runtime_use_autosuspend(ce->dev);
	pm_runtime_set_autosuspend_delay(ce->dev, 2000);

	err = pm_runtime_set_suspended(ce->dev);
	if (err)
		return err;
	pm_runtime_enable(ce->dev);
	return err;
}
/* Undo sun8i_ce_pm_init() */
static void sun8i_ce_pm_exit(struct sun8i_ce_dev *ce)
{
	pm_runtime_disable(ce->dev);
}

/*
 * Acquire every clock declared by the variant's ce_clks[] table and apply
 * the table's rate constraints: set the clock to .freq when non-zero, and
 * warn when the current rate exceeds .max_freq.
 * Returns 0 on success or a negative errno.
 */
static int sun8i_ce_get_clks(struct sun8i_ce_dev *ce)
{
	unsigned long cr;
	int err, i;

	for (i = 0; i < CE_MAX_CLOCKS; i++) {
		if (!ce->variant->ce_clks[i].name)
			continue;
		ce->ceclks[i] = devm_clk_get(ce->dev, ce->variant->ce_clks[i].name);
		if (IS_ERR(ce->ceclks[i])) {
			err = PTR_ERR(ce->ceclks[i]);
			dev_err(ce->dev, "Cannot get %s CE clock err=%d\n",
				ce->variant->ce_clks[i].name, err);
			return err;
		}
		cr = clk_get_rate(ce->ceclks[i]);
		if (!cr)
			return -EINVAL;
		if (ce->variant->ce_clks[i].freq > 0 &&
		    cr != ce->variant->ce_clks[i].freq) {
			dev_info(ce->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n",
				 ce->variant->ce_clks[i].name,
				 ce->variant->ce_clks[i].freq,
				 ce->variant->ce_clks[i].freq / 1000000,
				 cr, cr / 1000000);
			/* failure to retune is reported but not fatal */
			err = clk_set_rate(ce->ceclks[i], ce->variant->ce_clks[i].freq);
			if (err)
				dev_err(ce->dev, "Fail to set %s clk speed to %lu hz\n",
					ce->variant->ce_clks[i].name,
					ce->variant->ce_clks[i].freq);
		}
		if (ce->variant->ce_clks[i].max_freq > 0 &&
		    cr > ce->variant->ce_clks[i].max_freq)
			dev_warn(ce->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)",
				 ce->variant->ce_clks[i].name, cr,
				 ce->variant->ce_clks[i].max_freq);
	}
	return 0;
}

/*
 * Register each ce_algs[] entry that the variant supports.
 * ce_algo_id / ce_blockmode index the variant's positional capability
 * tables; unsupported entries get .ce cleared so unregister and debugfs
 * skip them. Skcipher/ahash registration failure aborts probe; RNG
 * registration failure is tolerated (only the entry is disabled).
 */
static int sun8i_ce_register_algs(struct sun8i_ce_dev *ce)
{
	int ce_method, err, id;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		ce_algs[i].ce = ce;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			id = ce_algs[i].ce_algo_id;
			ce_method = ce->variant->alg_cipher[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev,
					"DEBUG: Algo of %s not supported\n",
					ce_algs[i].alg.skcipher.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			id = ce_algs[i].ce_blockmode;
			ce_method = ce->variant->op_mode[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev, "DEBUG: Blockmode of %s not supported\n",
					ce_algs[i].alg.skcipher.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.skcipher.base.cra_name);
			err = crypto_register_skcipher(&ce_algs[i].alg.skcipher);
			if (err) {
				dev_err(ce->dev, "ERROR: Fail to register %s\n",
					ce_algs[i].alg.skcipher.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			id = ce_algs[i].ce_algo_id;
			ce_method = ce->variant->alg_hash[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_info(ce->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ce_algs[i].alg.hash.halg.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.hash.halg.base.cra_name);
			err = crypto_register_ahash(&ce_algs[i].alg.hash);
			if (err) {
				dev_err(ce->dev, "ERROR: Fail to register %s\n",
					ce_algs[i].alg.hash.halg.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_RNG:
			if (ce->variant->prng == CE_ID_NOTSUPP) {
				dev_info(ce->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ce_algs[i].alg.rng.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.rng.base.cra_name);
			err = crypto_register_rng(&ce_algs[i].alg.rng);
			if (err) {
				dev_err(ce->dev, "Fail to register %s\n",
					ce_algs[i].alg.rng.base.cra_name);
				ce_algs[i].ce = NULL;
			}
			break;
		default:
			ce_algs[i].ce = NULL;
			dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}

/* Unregister every ce_algs[] entry that was successfully registered. */
static void sun8i_ce_unregister_algs(struct sun8i_ce_dev *ce)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.skcipher.base.cra_name);
			crypto_unregister_skcipher(&ce_algs[i].alg.skcipher);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.hash.halg.base.cra_name);
			crypto_unregister_ahash(&ce_algs[i].alg.hash);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.rng.base.cra_name);
			crypto_unregister_rng(&ce_algs[i].alg.rng);
			break;
		}
	}
}

/*
 * Probe: map registers, acquire clocks/reset/IRQ, allocate the per-flow
 * channels, enable runtime PM, register algorithms, then briefly power the
 * device up to read its die ID. Error paths unwind strictly in reverse
 * order of setup (error_alg -> error_irq -> error_pm).
 */
static int sun8i_ce_probe(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce;
	int err, irq;
	u32 v;

	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
	if (!ce)
		return -ENOMEM;

	ce->dev = &pdev->dev;
	platform_set_drvdata(pdev, ce);

	ce->variant = of_device_get_match_data(&pdev->dev);
	if (!ce->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ce->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ce->base))
		return PTR_ERR(ce->base);

	err = sun8i_ce_get_clks(ce);
	if (err)
		return err;

	/* Get Non Secure IRQ */
	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ce->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
				     "No reset control found\n");

	mutex_init(&ce->mlock);
	mutex_init(&ce->rnglock);

	err = sun8i_ce_allocate_chanlist(ce);
	if (err)
		return err;

	err = sun8i_ce_pm_init(ce);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0,
			       "sun8i-ce-ns", ce);
	if (err) {
		dev_err(ce->dev, "Cannot request CryptoEngine Non-secure IRQ (err=%d)\n", err);
		goto error_irq;
	}

	err = sun8i_ce_register_algs(ce);
	if (err)
		goto error_alg;

	/* power up just long enough to read the die ID */
	err = pm_runtime_resume_and_get(ce->dev);
	if (err < 0)
		goto error_alg;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_register(ce);
#endif

	v = readl(ce->base + CE_CTR);
	v >>= CE_DIE_ID_SHIFT;
	v &= CE_DIE_ID_MASK;
	dev_info(&pdev->dev, "CryptoEngine Die ID %x\n", v);

	pm_runtime_put_sync(ce->dev);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	/* Ignore error of debugfs */
	ce->dbgfs_dir = debugfs_create_dir("sun8i-ce", NULL);
	ce->dbgfs_stats = debugfs_create_file("stats", 0444,
					      ce->dbgfs_dir, ce,
					      &sun8i_ce_debugfs_fops);
#endif

	return 0;
error_alg:
	sun8i_ce_unregister_algs(ce);
error_irq:
	sun8i_ce_pm_exit(ce);
error_pm:
	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
	return err;
}

/* Remove: tear down in reverse order of probe. */
static int sun8i_ce_remove(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce = platform_get_drvdata(pdev);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_unregister(ce);
#endif

	sun8i_ce_unregister_algs(ce);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	debugfs_remove_recursive(ce->dbgfs_dir);
#endif

	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);

	sun8i_ce_pm_exit(ce);
	return 0;
}

/* DT match table: selects the per-SoC ce_variant capability table */
static const struct of_device_id sun8i_ce_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-h3-crypto",
	  .data = &ce_h3_variant },
	{ .compatible = "allwinner,sun8i-r40-crypto",
	  .data = &ce_r40_variant },
	{ .compatible = "allwinner,sun20i-d1-crypto",
	  .data = &ce_d1_variant },
	{ .compatible = "allwinner,sun50i-a64-crypto",
	  .data = &ce_a64_variant },
	{ .compatible = "allwinner,sun50i-h5-crypto",
	  .data = &ce_h5_variant },
	{ .compatible = "allwinner,sun50i-h6-crypto",
	  .data = &ce_h6_variant },
	{}
};
MODULE_DEVICE_TABLE(of, sun8i_ce_crypto_of_match_table);
static struct platform_driver sun8i_ce_driver = {
	.probe		 = sun8i_ce_probe,
	.remove		 = sun8i_ce_remove,
	.driver		 = {
		.name		= "sun8i-ce",
		.pm		= &sun8i_ce_pm_ops,
		.of_match_table	= sun8i_ce_crypto_of_match_table,
	},
};

module_platform_driver(sun8i_ce_driver);

MODULE_DESCRIPTION("Allwinner Crypto Engine cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <clabbe.montjoie@gmail.com>");