/**
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

int crypto4xx_encrypt(struct ablkcipher_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	unsigned int ivlen = crypto_ablkcipher_ivsize(
		crypto_ablkcipher_reqtfm(req));
	__le32 iv[ivlen];

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->info, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len, 0);
}

int crypto4xx_decrypt(struct ablkcipher_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	unsigned int ivlen = crypto_ablkcipher_ivsize(
		crypto_ablkcipher_reqtfm(req));
	__le32 iv[ivlen];

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->info, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len, 0);
}
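/*
 * For orientation: the entry points above (and the setkey helpers below)
 * are not called directly; crypto4xx_core.c registers them with the crypto
 * API. A trimmed, illustrative sketch of the kind of table entry involved
 * (the real table, with init/exit hooks and priorities, lives in
 * crypto4xx_core.c):
 *
 *	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
 *		.cra_name	 = "cbc(aes)",
 *		.cra_driver_name = "cbc-aes-ppc4xx",
 *		.cra_flags	 = CRYPTO_ALG_TYPE_ABLKCIPHER |
 *				   CRYPTO_ALG_ASYNC,
 *		.cra_blocksize	 = AES_BLOCK_SIZE,
 *		.cra_ctxsize	 = sizeof(struct crypto4xx_ctx),
 *		.cra_type	 = &crypto_ablkcipher_type,
 *		.cra_module	 = THIS_MODULE,
 *		.cra_u.ablkcipher = {
 *			.min_keysize	= AES_MIN_KEY_SIZE,
 *			.max_keysize	= AES_MAX_KEY_SIZE,
 *			.ivsize		= AES_IV_SIZE,
 *			.setkey		= crypto4xx_setkey_aes_cbc,
 *			.encrypt	= crypto4xx_encrypt,
 *			.decrypt	= crypto4xx_decrypt,
 *		}
 *	}},
 */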
/**
 * AES Functions
 */
static int crypto4xx_setkey_aes(struct crypto_ablkcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
		crypto_ablkcipher_set_flags(cipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

int crypto4xx_setkey_rfc3686(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
				  CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}
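/*
 * RFC 3686 runs AES in CTR mode with a 16-byte counter block laid out as
 * nonce (4 bytes) || per-request IV (8 bytes) || block counter (4 bytes,
 * starting at 1). The nonce is the tail of the key material latched by
 * crypto4xx_setkey_rfc3686() above; the two routines below reassemble
 * that block for every request.
 */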
int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->info),
		cpu_to_le32p((u32 *) (req->info + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->nbytes, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0);
}

int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->info),
		cpu_to_le32p((u32 *) (req->info + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->nbytes, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where cryptlen
	 * is less than a block
	 */
	if (req->cryptlen < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 */
	if (req->assoclen & 0x3)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	/* CCM - fix CBC MAC mismatch in special case */
	if (is_ccm && decrypt && !req->assoclen)
		return true;

	return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
				   struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	char aead_req_data[sizeof(struct aead_request) +
			   crypto_aead_reqsize(ctx->sw_cipher.aead)]
		__aligned(__alignof__(struct aead_request));

	struct aead_request *subreq = (void *) aead_req_data;

	memset(subreq, 0, sizeof(aead_req_data));

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}
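/*
 * A note on the fallback above: the on-stack aead_req_data buffer, sized by
 * crypto_aead_reqsize() and aligned like a struct aead_request, is the
 * conventional way to build a sub-request without a heap allocation - it is
 * essentially what the AEAD_REQUEST_ON_STACK() helper in <crypto/aead.h>
 * expands to. Mirroring the original request's callback and flags keeps the
 * software path's completion semantics identical to the hardware path's.
 */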
static int crypto4xx_setup_fallback(struct crypto4xx_ctx *ctx,
				    struct crypto_aead *cipher,
				    const u8 *key,
				    unsigned int keylen)
{
	int rc;

	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
	crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(cipher,
		crypto_aead_get_flags(ctx->sw_cipher.aead) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

/**
 * AES-CCM Functions
 */

int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}
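/*
 * CCM background for the routine below: req->iv[0] holds the flags octet of
 * the CTR counter block, whose low three bits encode L' = L - 1, where L is
 * the byte length of the counter field (RFC 3610). The need_fallback check
 * above therefore admits only iv[0] == 1 (L = 2) and iv[0] == 3 (L = 4),
 * and the 16 - (req->iv[0] + 1) copy length below transfers exactly the
 * flags octet plus the (15 - L)-byte nonce, leaving the counter bytes
 * (pre-zeroed via iv[3]) for the engine to drive.
 */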
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int len = req->cryptlen;
	__le32 iv[16];
	u32 tmp_sa[ctx->sa_len];	/* ctx->sa_len is in 32-bit words */
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;

	if (crypto4xx_aead_need_fallback(req, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	/* copy the SA (ctx->sa_len * 4 bytes) so it can be patched per-request */
	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/**
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_cipher *aes_tfm = NULL;
	uint8_t src[16] = { 0 };
	int rc = 0;

	aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_ASYNC |
				      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(aes_tfm)) {
		rc = PTR_ERR(aes_tfm);
		pr_warn("could not load aes cipher driver: %d\n", rc);
		return rc;
	}

	rc = crypto_cipher_setkey(aes_tfm, key, keylen);
	if (rc) {
		pr_err("setkey() failed: %d\n", rc);
		goto out;
	}

	crypto_cipher_encrypt_one(aes_tfm, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
	crypto_free_cipher(aes_tfm);
	return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
		crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
					       key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}
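/*
 * GCM notes for the path below, per NIST SP 800-38D: with the standard
 * 96-bit IV, the initial counter block is J0 = IV || 0x00000001, which is
 * what crypto4xx_crypt_aes_gcm() assembles (iv[3] = 1). The GHASH subkey
 * H = AES_K(0^128) that the hardware needs was precomputed into the SA's
 * inner digest by crypto4xx_compute_gcm_hash_key_sw() during setkey.
 */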
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	unsigned int len = req->cryptlen;
	__le32 iv[4];

	if (crypto4xx_aead_need_fallback(req, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/**
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0);
}
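/*
 * crypto4xx_hash_update() above hands the whole scatterlist to the engine
 * in one pass and has the digest written straight to req->result, so there
 * is no partial state left to flush; crypto4xx_hash_final() below is
 * accordingly a no-op that just reports success.
 */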
int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0);
}

/**
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}
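/*
 * For reference, a kernel-side consumer reaches the SHA1 implementation
 * registered above through the generic ahash API. A minimal sketch, with
 * error handling and asynchronous (-EINPROGRESS) completion elided, where
 * "data"/"len" stand for the caller's buffer:
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *	u8 digest[SHA1_DIGEST_SIZE];
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, 0, NULL, NULL);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *	crypto_ahash_digest(req);
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */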