/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * This file implements the Linux crypto algorithm entry points (skcipher,
 * AEAD and ahash) for the PPC4xx security engine.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

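/*
 * Every operation on this engine is driven by a "dynamic SA" (security
 * association) record that is handed to the hardware along with the
 * packet descriptor.  The two helpers below fill in the SA's two command
 * words; the bit-field names mirror struct dynamic_sa_ctl in
 * crypto4xx_sa.h.
 */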
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

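/*
 * Note that the crypto mode (ECB/CBC/CTR/...) is not a contiguous field
 * in command word 1: bit 2 of the mode value lands in crypto_mode31 and
 * the low two bits in crypto_mode9_8, which is why the helper splits
 * "cm" with (cm & 4) >> 2 and cm & 3.
 */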
static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	/* AES_IV_SIZE bytes expressed as 32-bit little-endian words */
	__le32 iv[AES_IV_SIZE / 4];

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false);
}

int crypto4xx_encrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false);
}

int crypto4xx_decrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true);
}

int crypto4xx_decrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true);
}

/*
 * AES Functions
 */
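/*
 * Each setkey helper below builds two SA images: ctx->sa_in carries the
 * inbound (decrypt) command words and ctx->sa_out is a copy patched for
 * the outbound (encrypt) direction.  The SA grows with the key, hence
 * the SA_AES128_LEN + (keylen - 16) / 4 sizing (one extra SA word per
 * four key bytes beyond AES-128); keylen << 2 and keylen >> 3 re-encode
 * the key size for the sa_contents and key_len fields.
 */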
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128) {
		crypto_skcipher_set_flags(cipher,
					  CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_CBC ?
				 SA_SAVE_IV : SA_NOT_SAVE_IV),
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	/* The outbound SA is the inbound one with the direction flipped */
	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

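/*
 * RFC 3686 CTR mode feeds the engine a full 16-byte counter block built
 * from the key material and the per-request IV:
 *
 *	word 0:    32-bit nonce (last four bytes of the setkey material)
 *	words 1-2: 64-bit IV from the request
 *	word 3:    initial block counter, fixed at 1
 */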
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32 bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter.  So fall back if the counter is going
	 * to overflow.
	 */
	if (counter + nblks < counter) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);
		int ret;

		skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
			NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
			req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			: crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv(req)
		       : crypto4xx_decrypt_iv(req);
}

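/*
 * The software fallback lives in ctx->sw_cipher and is assumed to have
 * been allocated at tfm init time (in crypto4xx_core.c), with the tfm's
 * request size extended so that skcipher_request_ctx() above is large
 * enough to hold the fallback request.  The helper below mirrors key
 * material and request flags into it so both paths stay interchangeable.
 */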
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	int rc;

	crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_skcipher_set_flags(cipher,
		crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

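/*
 * Hardware offload limits for AEAD, mirrored by the checks below: the
 * digest length must be a multiple of 32 bits, at least one full AES
 * block of payload is required, the associated data must be word-aligned
 * and no longer than 1020 bytes, and CCM is limited to 2- or 4-byte
 * counter fields.  Anything else is punted to the software fallback.
 */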
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field lengths of 2 or 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	int rc;

	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
	crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(cipher,
		crypto_aead_get_flags(ctx->sw_cipher.aead) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

/*
 * AES-CCM Functions
 */

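/*
 * CCM is CTR encryption combined with a CBC-MAC under the same key.  The
 * inbound SA therefore decrypts and then verifies the tag
 * (SA_OPCODE_HASH_DECRYPT) while the outbound SA authenticates and then
 * encrypts (SA_OPCODE_ENCRYPT_HASH), each running AES in counter mode
 * with the CBC-MAC hash algorithm.
 */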
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	/* Patch the per-request tag length into a scratch copy of the SA */
	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	/*
	 * req->iv[0] holds L' = L - 1, so flags plus nonce occupy
	 * 16 - (iv[0] + 1) bytes of the counter block; the remaining
	 * counter bytes are left zeroed.
	 */
	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

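/*
 * This only programs the tag length into the software fallback; the
 * hardware path picks the tag length up per request instead (see the
 * digest_len patch-up in crypto4xx_crypt_aes_ccm() above).
 */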
int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case AES_KEYSIZE_128:
	case AES_KEYSIZE_192:
	case AES_KEYSIZE_256:
		return 0;
	default:
		return -EINVAL;
	}
}

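/*
 * GHASH needs the hash key H = AES_K(0^128), i.e. the all-zero block
 * encrypted under the session key.  The SA is loaded with a precomputed
 * H, generated here with a synchronous software AES instance and stored
 * in the SA's inner digest.
 */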
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_cipher *aes_tfm = NULL;
	uint8_t src[16] = { 0 };
	int rc = 0;

	aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_ASYNC |
				      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(aes_tfm)) {
		rc = PTR_ERR(aes_tfm);
		pr_warn("could not load aes cipher driver: %d\n", rc);
		return rc;
	}

	rc = crypto_cipher_setkey(aes_tfm, key, keylen);
	if (rc) {
		pr_err("setkey() failed: %d\n", rc);
		goto out;
	}

	crypto_cipher_encrypt_one(aes_tfm, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
	crypto_free_cipher(aes_tfm);
	return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
		crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed: %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	/* Initial counter block J0 for a 96-bit IV: IV || 0^31 || 1 */
	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/*
 * HASH SHA1 Functions
 */
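/*
 * This ahash implementation is effectively one-shot: every ->update
 * builds a packet descriptor that hashes all of req->src straight into
 * req->result, and ->final is a no-op.  Intermediate hash state lives in
 * the SA itself (SA_SAVE_HASH / SA_LOAD_HASH_FROM_SA) rather than in the
 * request context.
 */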
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, hm,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}