1 // SPDX-License-Identifier: GPL-2.0 2 /* 3 * sun8i-ss-core.c - hardware cryptographic offloader for 4 * Allwinner A80/A83T SoC 5 * 6 * Copyright (C) 2015-2019 Corentin Labbe <clabbe.montjoie@gmail.com> 7 * 8 * Core file which registers crypto algorithms supported by the SecuritySystem 9 * 10 * You could find a link for the datasheet in Documentation/arm/sunxi.rst 11 */ 12 #include <linux/clk.h> 13 #include <linux/crypto.h> 14 #include <linux/delay.h> 15 #include <linux/dma-mapping.h> 16 #include <linux/interrupt.h> 17 #include <linux/io.h> 18 #include <linux/irq.h> 19 #include <linux/module.h> 20 #include <linux/of.h> 21 #include <linux/of_device.h> 22 #include <linux/platform_device.h> 23 #include <linux/pm_runtime.h> 24 #include <linux/reset.h> 25 #include <crypto/internal/rng.h> 26 #include <crypto/internal/skcipher.h> 27 28 #include "sun8i-ss.h" 29 30 static const struct ss_variant ss_a80_variant = { 31 .alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES, 32 }, 33 .op_mode = { SS_OP_ECB, SS_OP_CBC, 34 }, 35 .ss_clks = { 36 { "bus", 0, 300 * 1000 * 1000 }, 37 { "mod", 0, 300 * 1000 * 1000 }, 38 } 39 }; 40 41 static const struct ss_variant ss_a83t_variant = { 42 .alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES, 43 }, 44 .alg_hash = { SS_ALG_MD5, SS_ALG_SHA1, SS_ALG_SHA224, SS_ALG_SHA256, 45 }, 46 .op_mode = { SS_OP_ECB, SS_OP_CBC, 47 }, 48 .ss_clks = { 49 { "bus", 0, 300 * 1000 * 1000 }, 50 { "mod", 0, 300 * 1000 * 1000 }, 51 } 52 }; 53 54 /* 55 * sun8i_ss_get_engine_number() get the next channel slot 56 * This is a simple round-robin way of getting the next channel 57 */ 58 int sun8i_ss_get_engine_number(struct sun8i_ss_dev *ss) 59 { 60 return atomic_inc_return(&ss->flow) % MAXFLOW; 61 } 62 63 int sun8i_ss_run_task(struct sun8i_ss_dev *ss, struct sun8i_cipher_req_ctx *rctx, 64 const char *name) 65 { 66 int flow = rctx->flow; 67 u32 v = SS_START; 68 int i; 69 70 #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG 71 ss->flows[flow].stat_req++; 72 #endif 73 74 /* choose between stream0/stream1 */ 75 if (flow) 76 v |= SS_FLOW1; 77 else 78 v |= SS_FLOW0; 79 80 v |= rctx->op_mode; 81 v |= rctx->method; 82 83 if (rctx->op_dir) 84 v |= SS_DECRYPTION; 85 86 switch (rctx->keylen) { 87 case 128 / 8: 88 v |= SS_AES_128BITS << 7; 89 break; 90 case 192 / 8: 91 v |= SS_AES_192BITS << 7; 92 break; 93 case 256 / 8: 94 v |= SS_AES_256BITS << 7; 95 break; 96 } 97 98 for (i = 0; i < MAX_SG; i++) { 99 if (!rctx->t_dst[i].addr) 100 break; 101 102 mutex_lock(&ss->mlock); 103 writel(rctx->p_key, ss->base + SS_KEY_ADR_REG); 104 105 if (i == 0) { 106 if (rctx->p_iv) 107 writel(rctx->p_iv, ss->base + SS_IV_ADR_REG); 108 } else { 109 if (rctx->biv) { 110 if (rctx->op_dir == SS_ENCRYPTION) 111 writel(rctx->t_dst[i - 1].addr + rctx->t_dst[i - 1].len * 4 - rctx->ivlen, ss->base + SS_IV_ADR_REG); 112 else 113 writel(rctx->t_src[i - 1].addr + rctx->t_src[i - 1].len * 4 - rctx->ivlen, ss->base + SS_IV_ADR_REG); 114 } 115 } 116 117 dev_dbg(ss->dev, 118 "Processing SG %d on flow %d %s ctl=%x %d to %d method=%x opmode=%x opdir=%x srclen=%d\n", 119 i, flow, name, v, 120 rctx->t_src[i].len, rctx->t_dst[i].len, 121 rctx->method, rctx->op_mode, 122 rctx->op_dir, rctx->t_src[i].len); 123 124 writel(rctx->t_src[i].addr, ss->base + SS_SRC_ADR_REG); 125 writel(rctx->t_dst[i].addr, ss->base + SS_DST_ADR_REG); 126 writel(rctx->t_src[i].len, ss->base + SS_LEN_ADR_REG); 127 128 reinit_completion(&ss->flows[flow].complete); 129 ss->flows[flow].status = 0; 130 wmb(); 131 132 writel(v, ss->base + SS_CTL_REG); 133 mutex_unlock(&ss->mlock); 134 wait_for_completion_interruptible_timeout(&ss->flows[flow].complete, 135 msecs_to_jiffies(2000)); 136 if (ss->flows[flow].status == 0) { 137 dev_err(ss->dev, "DMA timeout for %s\n", name); 138 return -EFAULT; 139 } 140 } 141 142 return 0; 143 } 144 145 static irqreturn_t ss_irq_handler(int irq, void *data) 146 { 147 struct sun8i_ss_dev *ss = (struct sun8i_ss_dev *)data; 148 int flow = 0; 149 u32 p; 150 151 p = readl(ss->base + SS_INT_STA_REG); 152 for (flow = 0; flow < MAXFLOW; flow++) { 153 if (p & (BIT(flow))) { 154 writel(BIT(flow), ss->base + SS_INT_STA_REG); 155 ss->flows[flow].status = 1; 156 complete(&ss->flows[flow].complete); 157 } 158 } 159 160 return IRQ_HANDLED; 161 } 162 163 static struct sun8i_ss_alg_template ss_algs[] = { 164 { 165 .type = CRYPTO_ALG_TYPE_SKCIPHER, 166 .ss_algo_id = SS_ID_CIPHER_AES, 167 .ss_blockmode = SS_ID_OP_CBC, 168 .alg.skcipher = { 169 .base = { 170 .cra_name = "cbc(aes)", 171 .cra_driver_name = "cbc-aes-sun8i-ss", 172 .cra_priority = 400, 173 .cra_blocksize = AES_BLOCK_SIZE, 174 .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | 175 CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | 176 CRYPTO_ALG_NEED_FALLBACK, 177 .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx), 178 .cra_module = THIS_MODULE, 179 .cra_alignmask = 0xf, 180 .cra_init = sun8i_ss_cipher_init, 181 .cra_exit = sun8i_ss_cipher_exit, 182 }, 183 .min_keysize = AES_MIN_KEY_SIZE, 184 .max_keysize = AES_MAX_KEY_SIZE, 185 .ivsize = AES_BLOCK_SIZE, 186 .setkey = sun8i_ss_aes_setkey, 187 .encrypt = sun8i_ss_skencrypt, 188 .decrypt = sun8i_ss_skdecrypt, 189 } 190 }, 191 { 192 .type = CRYPTO_ALG_TYPE_SKCIPHER, 193 .ss_algo_id = SS_ID_CIPHER_AES, 194 .ss_blockmode = SS_ID_OP_ECB, 195 .alg.skcipher = { 196 .base = { 197 .cra_name = "ecb(aes)", 198 .cra_driver_name = "ecb-aes-sun8i-ss", 199 .cra_priority = 400, 200 .cra_blocksize = AES_BLOCK_SIZE, 201 .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | 202 CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | 203 CRYPTO_ALG_NEED_FALLBACK, 204 .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx), 205 .cra_module = THIS_MODULE, 206 .cra_alignmask = 0xf, 207 .cra_init = sun8i_ss_cipher_init, 208 .cra_exit = sun8i_ss_cipher_exit, 209 }, 210 .min_keysize = AES_MIN_KEY_SIZE, 211 .max_keysize = AES_MAX_KEY_SIZE, 212 .setkey = sun8i_ss_aes_setkey, 213 .encrypt = sun8i_ss_skencrypt, 214 .decrypt = sun8i_ss_skdecrypt, 215 } 216 }, 217 { 218 .type = CRYPTO_ALG_TYPE_SKCIPHER, 219 .ss_algo_id = SS_ID_CIPHER_DES3, 220 .ss_blockmode = SS_ID_OP_CBC, 221 .alg.skcipher = { 222 .base = { 223 .cra_name = "cbc(des3_ede)", 224 .cra_driver_name = "cbc-des3-sun8i-ss", 225 .cra_priority = 400, 226 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 227 .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | 228 CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | 229 CRYPTO_ALG_NEED_FALLBACK, 230 .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx), 231 .cra_module = THIS_MODULE, 232 .cra_alignmask = 0xf, 233 .cra_init = sun8i_ss_cipher_init, 234 .cra_exit = sun8i_ss_cipher_exit, 235 }, 236 .min_keysize = DES3_EDE_KEY_SIZE, 237 .max_keysize = DES3_EDE_KEY_SIZE, 238 .ivsize = DES3_EDE_BLOCK_SIZE, 239 .setkey = sun8i_ss_des3_setkey, 240 .encrypt = sun8i_ss_skencrypt, 241 .decrypt = sun8i_ss_skdecrypt, 242 } 243 }, 244 { 245 .type = CRYPTO_ALG_TYPE_SKCIPHER, 246 .ss_algo_id = SS_ID_CIPHER_DES3, 247 .ss_blockmode = SS_ID_OP_ECB, 248 .alg.skcipher = { 249 .base = { 250 .cra_name = "ecb(des3_ede)", 251 .cra_driver_name = "ecb-des3-sun8i-ss", 252 .cra_priority = 400, 253 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 254 .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | 255 CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY | 256 CRYPTO_ALG_NEED_FALLBACK, 257 .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx), 258 .cra_module = THIS_MODULE, 259 .cra_alignmask = 0xf, 260 .cra_init = sun8i_ss_cipher_init, 261 .cra_exit = sun8i_ss_cipher_exit, 262 }, 263 .min_keysize = DES3_EDE_KEY_SIZE, 264 .max_keysize = DES3_EDE_KEY_SIZE, 265 .setkey = sun8i_ss_des3_setkey, 266 .encrypt = sun8i_ss_skencrypt, 267 .decrypt = sun8i_ss_skdecrypt, 268 } 269 }, 270 #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_PRNG 271 { 272 .type = CRYPTO_ALG_TYPE_RNG, 273 .alg.rng = { 274 .base = { 275 .cra_name = "stdrng", 276 .cra_driver_name = "sun8i-ss-prng", 277 .cra_priority = 300, 278 .cra_ctxsize = sizeof(struct sun8i_ss_rng_tfm_ctx), 279 .cra_module = THIS_MODULE, 280 .cra_init = sun8i_ss_prng_init, 281 .cra_exit = sun8i_ss_prng_exit, 282 }, 283 .generate = sun8i_ss_prng_generate, 284 .seed = sun8i_ss_prng_seed, 285 .seedsize = PRNG_SEED_SIZE, 286 } 287 }, 288 #endif 289 #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_HASH 290 { .type = CRYPTO_ALG_TYPE_AHASH, 291 .ss_algo_id = SS_ID_HASH_MD5, 292 .alg.hash = { 293 .init = sun8i_ss_hash_init, 294 .update = sun8i_ss_hash_update, 295 .final = sun8i_ss_hash_final, 296 .finup = sun8i_ss_hash_finup, 297 .digest = sun8i_ss_hash_digest, 298 .export = sun8i_ss_hash_export, 299 .import = sun8i_ss_hash_import, 300 .halg = { 301 .digestsize = MD5_DIGEST_SIZE, 302 .statesize = sizeof(struct md5_state), 303 .base = { 304 .cra_name = "md5", 305 .cra_driver_name = "md5-sun8i-ss", 306 .cra_priority = 300, 307 .cra_alignmask = 3, 308 .cra_flags = CRYPTO_ALG_TYPE_AHASH | 309 CRYPTO_ALG_ASYNC | 310 CRYPTO_ALG_NEED_FALLBACK, 311 .cra_blocksize = MD5_HMAC_BLOCK_SIZE, 312 .cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx), 313 .cra_module = THIS_MODULE, 314 .cra_init = sun8i_ss_hash_crainit, 315 .cra_exit = sun8i_ss_hash_craexit, 316 } 317 } 318 } 319 }, 320 { .type = CRYPTO_ALG_TYPE_AHASH, 321 .ss_algo_id = SS_ID_HASH_SHA1, 322 .alg.hash = { 323 .init = sun8i_ss_hash_init, 324 .update = sun8i_ss_hash_update, 325 .final = sun8i_ss_hash_final, 326 .finup = sun8i_ss_hash_finup, 327 .digest = sun8i_ss_hash_digest, 328 .export = sun8i_ss_hash_export, 329 .import = sun8i_ss_hash_import, 330 .halg = { 331 .digestsize = SHA1_DIGEST_SIZE, 332 .statesize = sizeof(struct sha1_state), 333 .base = { 334 .cra_name = "sha1", 335 .cra_driver_name = "sha1-sun8i-ss", 336 .cra_priority = 300, 337 .cra_alignmask = 3, 338 .cra_flags = CRYPTO_ALG_TYPE_AHASH | 339 CRYPTO_ALG_ASYNC | 340 CRYPTO_ALG_NEED_FALLBACK, 341 .cra_blocksize = SHA1_BLOCK_SIZE, 342 .cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx), 343 .cra_module = THIS_MODULE, 344 .cra_init = sun8i_ss_hash_crainit, 345 .cra_exit = sun8i_ss_hash_craexit, 346 } 347 } 348 } 349 }, 350 { .type = CRYPTO_ALG_TYPE_AHASH, 351 .ss_algo_id = SS_ID_HASH_SHA224, 352 .alg.hash = { 353 .init = sun8i_ss_hash_init, 354 .update = sun8i_ss_hash_update, 355 .final = sun8i_ss_hash_final, 356 .finup = sun8i_ss_hash_finup, 357 .digest = sun8i_ss_hash_digest, 358 .export = sun8i_ss_hash_export, 359 .import = sun8i_ss_hash_import, 360 .halg = { 361 .digestsize = SHA224_DIGEST_SIZE, 362 .statesize = sizeof(struct sha256_state), 363 .base = { 364 .cra_name = "sha224", 365 .cra_driver_name = "sha224-sun8i-ss", 366 .cra_priority = 300, 367 .cra_alignmask = 3, 368 .cra_flags = CRYPTO_ALG_TYPE_AHASH | 369 CRYPTO_ALG_ASYNC | 370 CRYPTO_ALG_NEED_FALLBACK, 371 .cra_blocksize = SHA224_BLOCK_SIZE, 372 .cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx), 373 .cra_module = THIS_MODULE, 374 .cra_init = sun8i_ss_hash_crainit, 375 .cra_exit = sun8i_ss_hash_craexit, 376 } 377 } 378 } 379 }, 380 { .type = CRYPTO_ALG_TYPE_AHASH, 381 .ss_algo_id = SS_ID_HASH_SHA256, 382 .alg.hash = { 383 .init = sun8i_ss_hash_init, 384 .update = sun8i_ss_hash_update, 385 .final = sun8i_ss_hash_final, 386 .finup = sun8i_ss_hash_finup, 387 .digest = sun8i_ss_hash_digest, 388 .export = sun8i_ss_hash_export, 389 .import = sun8i_ss_hash_import, 390 .halg = { 391 .digestsize = SHA256_DIGEST_SIZE, 392 .statesize = sizeof(struct sha256_state), 393 .base = { 394 .cra_name = "sha256", 395 .cra_driver_name = "sha256-sun8i-ss", 396 .cra_priority = 300, 397 .cra_alignmask = 3, 398 .cra_flags = CRYPTO_ALG_TYPE_AHASH | 399 CRYPTO_ALG_ASYNC | 400 CRYPTO_ALG_NEED_FALLBACK, 401 .cra_blocksize = SHA256_BLOCK_SIZE, 402 .cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx), 403 .cra_module = THIS_MODULE, 404 .cra_init = sun8i_ss_hash_crainit, 405 .cra_exit = sun8i_ss_hash_craexit, 406 } 407 } 408 } 409 }, 410 #endif 411 }; 412 413 #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG 414 static int sun8i_ss_debugfs_show(struct seq_file *seq, void *v) 415 { 416 struct sun8i_ss_dev *ss = seq->private; 417 unsigned int i; 418 419 for (i = 0; i < MAXFLOW; i++) 420 seq_printf(seq, "Channel %d: nreq %lu\n", i, ss->flows[i].stat_req); 421 422 for (i = 0; i < ARRAY_SIZE(ss_algs); i++) { 423 if (!ss_algs[i].ss) 424 continue; 425 switch (ss_algs[i].type) { 426 case CRYPTO_ALG_TYPE_SKCIPHER: 427 seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n", 428 ss_algs[i].alg.skcipher.base.cra_driver_name, 429 ss_algs[i].alg.skcipher.base.cra_name, 430 ss_algs[i].stat_req, ss_algs[i].stat_fb); 431 break; 432 case CRYPTO_ALG_TYPE_RNG: 433 seq_printf(seq, "%s %s reqs=%lu tsize=%lu\n", 434 ss_algs[i].alg.rng.base.cra_driver_name, 435 ss_algs[i].alg.rng.base.cra_name, 436 ss_algs[i].stat_req, ss_algs[i].stat_bytes); 437 break; 438 case CRYPTO_ALG_TYPE_AHASH: 439 seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n", 440 ss_algs[i].alg.hash.halg.base.cra_driver_name, 441 ss_algs[i].alg.hash.halg.base.cra_name, 442 ss_algs[i].stat_req, ss_algs[i].stat_fb); 443 break; 444 } 445 } 446 return 0; 447 } 448 449 DEFINE_SHOW_ATTRIBUTE(sun8i_ss_debugfs); 450 #endif 451 452 static void sun8i_ss_free_flows(struct sun8i_ss_dev *ss, int i) 453 { 454 while (i >= 0) { 455 crypto_engine_exit(ss->flows[i].engine); 456 i--; 457 } 458 } 459 460 /* 461 * Allocate the flow list structure 462 */ 463 static int allocate_flows(struct sun8i_ss_dev *ss) 464 { 465 int i, err; 466 467 ss->flows = devm_kcalloc(ss->dev, MAXFLOW, sizeof(struct sun8i_ss_flow), 468 GFP_KERNEL); 469 if (!ss->flows) 470 return -ENOMEM; 471 472 for (i = 0; i < MAXFLOW; i++) { 473 init_completion(&ss->flows[i].complete); 474 475 ss->flows[i].engine = crypto_engine_alloc_init(ss->dev, true); 476 if (!ss->flows[i].engine) { 477 dev_err(ss->dev, "Cannot allocate engine\n"); 478 i--; 479 err = -ENOMEM; 480 goto error_engine; 481 } 482 err = crypto_engine_start(ss->flows[i].engine); 483 if (err) { 484 dev_err(ss->dev, "Cannot start engine\n"); 485 goto error_engine; 486 } 487 } 488 return 0; 489 error_engine: 490 sun8i_ss_free_flows(ss, i); 491 return err; 492 } 493 494 /* 495 * Power management strategy: The device is suspended unless a TFM exists for 496 * one of the algorithms proposed by this driver. 497 */ 498 static int sun8i_ss_pm_suspend(struct device *dev) 499 { 500 struct sun8i_ss_dev *ss = dev_get_drvdata(dev); 501 int i; 502 503 reset_control_assert(ss->reset); 504 for (i = 0; i < SS_MAX_CLOCKS; i++) 505 clk_disable_unprepare(ss->ssclks[i]); 506 return 0; 507 } 508 509 static int sun8i_ss_pm_resume(struct device *dev) 510 { 511 struct sun8i_ss_dev *ss = dev_get_drvdata(dev); 512 int err, i; 513 514 for (i = 0; i < SS_MAX_CLOCKS; i++) { 515 if (!ss->variant->ss_clks[i].name) 516 continue; 517 err = clk_prepare_enable(ss->ssclks[i]); 518 if (err) { 519 dev_err(ss->dev, "Cannot prepare_enable %s\n", 520 ss->variant->ss_clks[i].name); 521 goto error; 522 } 523 } 524 err = reset_control_deassert(ss->reset); 525 if (err) { 526 dev_err(ss->dev, "Cannot deassert reset control\n"); 527 goto error; 528 } 529 /* enable interrupts for all flows */ 530 writel(BIT(0) | BIT(1), ss->base + SS_INT_CTL_REG); 531 532 return 0; 533 error: 534 sun8i_ss_pm_suspend(dev); 535 return err; 536 } 537 538 static const struct dev_pm_ops sun8i_ss_pm_ops = { 539 SET_RUNTIME_PM_OPS(sun8i_ss_pm_suspend, sun8i_ss_pm_resume, NULL) 540 }; 541 542 static int sun8i_ss_pm_init(struct sun8i_ss_dev *ss) 543 { 544 int err; 545 546 pm_runtime_use_autosuspend(ss->dev); 547 pm_runtime_set_autosuspend_delay(ss->dev, 2000); 548 549 err = pm_runtime_set_suspended(ss->dev); 550 if (err) 551 return err; 552 pm_runtime_enable(ss->dev); 553 return err; 554 } 555 556 static void sun8i_ss_pm_exit(struct sun8i_ss_dev *ss) 557 { 558 pm_runtime_disable(ss->dev); 559 } 560 561 static int sun8i_ss_register_algs(struct sun8i_ss_dev *ss) 562 { 563 int ss_method, err, id; 564 unsigned int i; 565 566 for (i = 0; i < ARRAY_SIZE(ss_algs); i++) { 567 ss_algs[i].ss = ss; 568 switch (ss_algs[i].type) { 569 case CRYPTO_ALG_TYPE_SKCIPHER: 570 id = ss_algs[i].ss_algo_id; 571 ss_method = ss->variant->alg_cipher[id]; 572 if (ss_method == SS_ID_NOTSUPP) { 573 dev_info(ss->dev, 574 "DEBUG: Algo of %s not supported\n", 575 ss_algs[i].alg.skcipher.base.cra_name); 576 ss_algs[i].ss = NULL; 577 break; 578 } 579 id = ss_algs[i].ss_blockmode; 580 ss_method = ss->variant->op_mode[id]; 581 if (ss_method == SS_ID_NOTSUPP) { 582 dev_info(ss->dev, "DEBUG: Blockmode of %s not supported\n", 583 ss_algs[i].alg.skcipher.base.cra_name); 584 ss_algs[i].ss = NULL; 585 break; 586 } 587 dev_info(ss->dev, "DEBUG: Register %s\n", 588 ss_algs[i].alg.skcipher.base.cra_name); 589 err = crypto_register_skcipher(&ss_algs[i].alg.skcipher); 590 if (err) { 591 dev_err(ss->dev, "Fail to register %s\n", 592 ss_algs[i].alg.skcipher.base.cra_name); 593 ss_algs[i].ss = NULL; 594 return err; 595 } 596 break; 597 case CRYPTO_ALG_TYPE_RNG: 598 err = crypto_register_rng(&ss_algs[i].alg.rng); 599 if (err) { 600 dev_err(ss->dev, "Fail to register %s\n", 601 ss_algs[i].alg.rng.base.cra_name); 602 ss_algs[i].ss = NULL; 603 } 604 break; 605 case CRYPTO_ALG_TYPE_AHASH: 606 id = ss_algs[i].ss_algo_id; 607 ss_method = ss->variant->alg_hash[id]; 608 if (ss_method == SS_ID_NOTSUPP) { 609 dev_info(ss->dev, 610 "DEBUG: Algo of %s not supported\n", 611 ss_algs[i].alg.hash.halg.base.cra_name); 612 ss_algs[i].ss = NULL; 613 break; 614 } 615 dev_info(ss->dev, "Register %s\n", 616 ss_algs[i].alg.hash.halg.base.cra_name); 617 err = crypto_register_ahash(&ss_algs[i].alg.hash); 618 if (err) { 619 dev_err(ss->dev, "ERROR: Fail to register %s\n", 620 ss_algs[i].alg.hash.halg.base.cra_name); 621 ss_algs[i].ss = NULL; 622 return err; 623 } 624 break; 625 default: 626 ss_algs[i].ss = NULL; 627 dev_err(ss->dev, "ERROR: tried to register an unknown algo\n"); 628 } 629 } 630 return 0; 631 } 632 633 static void sun8i_ss_unregister_algs(struct sun8i_ss_dev *ss) 634 { 635 unsigned int i; 636 637 for (i = 0; i < ARRAY_SIZE(ss_algs); i++) { 638 if (!ss_algs[i].ss) 639 continue; 640 switch (ss_algs[i].type) { 641 case CRYPTO_ALG_TYPE_SKCIPHER: 642 dev_info(ss->dev, "Unregister %d %s\n", i, 643 ss_algs[i].alg.skcipher.base.cra_name); 644 crypto_unregister_skcipher(&ss_algs[i].alg.skcipher); 645 break; 646 case CRYPTO_ALG_TYPE_RNG: 647 dev_info(ss->dev, "Unregister %d %s\n", i, 648 ss_algs[i].alg.rng.base.cra_name); 649 crypto_unregister_rng(&ss_algs[i].alg.rng); 650 break; 651 case CRYPTO_ALG_TYPE_AHASH: 652 dev_info(ss->dev, "Unregister %d %s\n", i, 653 ss_algs[i].alg.hash.halg.base.cra_name); 654 crypto_unregister_ahash(&ss_algs[i].alg.hash); 655 break; 656 } 657 } 658 } 659 660 static int sun8i_ss_get_clks(struct sun8i_ss_dev *ss) 661 { 662 unsigned long cr; 663 int err, i; 664 665 for (i = 0; i < SS_MAX_CLOCKS; i++) { 666 if (!ss->variant->ss_clks[i].name) 667 continue; 668 ss->ssclks[i] = devm_clk_get(ss->dev, ss->variant->ss_clks[i].name); 669 if (IS_ERR(ss->ssclks[i])) { 670 err = PTR_ERR(ss->ssclks[i]); 671 dev_err(ss->dev, "Cannot get %s SS clock err=%d\n", 672 ss->variant->ss_clks[i].name, err); 673 return err; 674 } 675 cr = clk_get_rate(ss->ssclks[i]); 676 if (!cr) 677 return -EINVAL; 678 if (ss->variant->ss_clks[i].freq > 0 && 679 cr != ss->variant->ss_clks[i].freq) { 680 dev_info(ss->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n", 681 ss->variant->ss_clks[i].name, 682 ss->variant->ss_clks[i].freq, 683 ss->variant->ss_clks[i].freq / 1000000, 684 cr, cr / 1000000); 685 err = clk_set_rate(ss->ssclks[i], ss->variant->ss_clks[i].freq); 686 if (err) 687 dev_err(ss->dev, "Fail to set %s clk speed to %lu hz\n", 688 ss->variant->ss_clks[i].name, 689 ss->variant->ss_clks[i].freq); 690 } 691 if (ss->variant->ss_clks[i].max_freq > 0 && 692 cr > ss->variant->ss_clks[i].max_freq) 693 dev_warn(ss->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)", 694 ss->variant->ss_clks[i].name, cr, 695 ss->variant->ss_clks[i].max_freq); 696 } 697 return 0; 698 } 699 700 static int sun8i_ss_probe(struct platform_device *pdev) 701 { 702 struct sun8i_ss_dev *ss; 703 int err, irq; 704 u32 v; 705 706 ss = devm_kzalloc(&pdev->dev, sizeof(*ss), GFP_KERNEL); 707 if (!ss) 708 return -ENOMEM; 709 710 ss->dev = &pdev->dev; 711 platform_set_drvdata(pdev, ss); 712 713 ss->variant = of_device_get_match_data(&pdev->dev); 714 if (!ss->variant) { 715 dev_err(&pdev->dev, "Missing Crypto Engine variant\n"); 716 return -EINVAL; 717 } 718 719 ss->base = devm_platform_ioremap_resource(pdev, 0); 720 if (IS_ERR(ss->base)) 721 return PTR_ERR(ss->base); 722 723 err = sun8i_ss_get_clks(ss); 724 if (err) 725 return err; 726 727 irq = platform_get_irq(pdev, 0); 728 if (irq < 0) 729 return irq; 730 731 ss->reset = devm_reset_control_get(&pdev->dev, NULL); 732 if (IS_ERR(ss->reset)) 733 return dev_err_probe(&pdev->dev, PTR_ERR(ss->reset), 734 "No reset control found\n"); 735 736 mutex_init(&ss->mlock); 737 738 err = allocate_flows(ss); 739 if (err) 740 return err; 741 742 err = sun8i_ss_pm_init(ss); 743 if (err) 744 goto error_pm; 745 746 err = devm_request_irq(&pdev->dev, irq, ss_irq_handler, 0, "sun8i-ss", ss); 747 if (err) { 748 dev_err(ss->dev, "Cannot request SecuritySystem IRQ (err=%d)\n", err); 749 goto error_irq; 750 } 751 752 err = sun8i_ss_register_algs(ss); 753 if (err) 754 goto error_alg; 755 756 err = pm_runtime_resume_and_get(ss->dev); 757 if (err < 0) 758 goto error_alg; 759 760 v = readl(ss->base + SS_CTL_REG); 761 v >>= SS_DIE_ID_SHIFT; 762 v &= SS_DIE_ID_MASK; 763 dev_info(&pdev->dev, "Security System Die ID %x\n", v); 764 765 pm_runtime_put_sync(ss->dev); 766 767 #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG 768 /* Ignore error of debugfs */ 769 ss->dbgfs_dir = debugfs_create_dir("sun8i-ss", NULL); 770 ss->dbgfs_stats = debugfs_create_file("stats", 0444, 771 ss->dbgfs_dir, ss, 772 &sun8i_ss_debugfs_fops); 773 #endif 774 775 return 0; 776 error_alg: 777 sun8i_ss_unregister_algs(ss); 778 error_irq: 779 sun8i_ss_pm_exit(ss); 780 error_pm: 781 sun8i_ss_free_flows(ss, MAXFLOW - 1); 782 return err; 783 } 784 785 static int sun8i_ss_remove(struct platform_device *pdev) 786 { 787 struct sun8i_ss_dev *ss = platform_get_drvdata(pdev); 788 789 sun8i_ss_unregister_algs(ss); 790 791 #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG 792 debugfs_remove_recursive(ss->dbgfs_dir); 793 #endif 794 795 sun8i_ss_free_flows(ss, MAXFLOW - 1); 796 797 sun8i_ss_pm_exit(ss); 798 799 return 0; 800 } 801 802 static const struct of_device_id sun8i_ss_crypto_of_match_table[] = { 803 { .compatible = "allwinner,sun8i-a83t-crypto", 804 .data = &ss_a83t_variant }, 805 { .compatible = "allwinner,sun9i-a80-crypto", 806 .data = &ss_a80_variant }, 807 {} 808 }; 809 MODULE_DEVICE_TABLE(of, sun8i_ss_crypto_of_match_table); 810 811 static struct platform_driver sun8i_ss_driver = { 812 .probe = sun8i_ss_probe, 813 .remove = sun8i_ss_remove, 814 .driver = { 815 .name = "sun8i-ss", 816 .pm = &sun8i_ss_pm_ops, 817 .of_match_table = sun8i_ss_crypto_of_match_table, 818 }, 819 }; 820 821 module_platform_driver(sun8i_ss_driver); 822 823 MODULE_DESCRIPTION("Allwinner SecuritySystem cryptographic offloader"); 824 MODULE_LICENSE("GPL"); 825 MODULE_AUTHOR("Corentin Labbe <clabbe.montjoie@gmail.com>"); 826