/**
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * This file implements the Linux crypto algorithm interface for the
 * AMCC PPC4xx crypto accelerator.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

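/*
 * Helpers that fill in the first two 32-bit control words of a dynamic
 * Security Association (SA) record.  Command word 0 selects the opcode,
 * hash/cipher algorithms and processing direction; command word 1 selects
 * the crypto and feedback modes plus the various copy/mute options.
 */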
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

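/*
 * Common skcipher path: copy the IV (if any) into little-endian form and
 * queue a packet descriptor.  Decryption uses the inbound SA, encryption
 * the outbound one.
 */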
static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false);
}

int crypto4xx_encrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false);
}

int crypto4xx_decrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true);
}

int crypto4xx_decrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true);
}

/**
 * AES Functions
 */
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int    rc;

	if (keylen != AES_KEYSIZE_256 &&
		keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
		crypto_skcipher_set_flags(cipher,
				CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

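	/* Derive the outbound (encrypt) SA from the inbound template. */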
	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT has the same value as SA_OPCODE_DECRYPT;
	 * it's the DIR_(IN|OUT)BOUND that matters.
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

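/*
 * RFC 3686 CTR: the counter block is the 4-byte nonce stored at setkey
 * time, followed by the caller's 8-byte IV and an initial block counter
 * of 1.
 */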
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32 bits of the IV as the counter,
	 * while the kernel tests (aes_ctr_enc_tv_template[4] for example)
	 * expect the whole IV to be a counter.  So fall back if the counter
	 * is going to overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
			NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
			req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			: crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv(req)
		       : crypto4xx_decrypt_iv(req);
}

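/*
 * Key the software fallback: forward the request flags to the fallback
 * tfm and propagate its result flags back to the caller's tfm.
 */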
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	int rc;

	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_skcipher_set_flags(cipher,
		crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

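/*
 * Hand the whole request to the software AEAD fallback.  The request
 * context doubles as the fallback subrequest, so no extra allocation is
 * needed here.
 */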
static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	int rc;

	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
	crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(cipher,
		crypto_aead_get_flags(ctx->sw_cipher.aead) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

/**
 * AES-CCM Functions
 */

int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

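/*
 * Build the CCM counter block: req->iv[0] holds L' (the counter field
 * length minus one), so only the flags/nonce part of the IV is copied
 * below and the counter itself starts at zero.
 */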
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx  = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

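	/*
	 * Work on a stack copy of the SA so that the per-request digest
	 * length and counter-mode tweak below do not modify the shared
	 * template.
	 */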
	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/**
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

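/*
 * GHASH key setup: H is the AES encryption of an all-zero block.  It is
 * computed here with a temporary software AES cipher and stored in the
 * SA's inner digest field.
 */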
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_cipher *aes_tfm = NULL;
	uint8_t src[16] = { 0 };
	int rc = 0;

	aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(aes_tfm)) {
		rc = PTR_ERR(aes_tfm);
		pr_warn("could not load aes cipher driver: %d\n", rc);
		return rc;
	}

	rc = crypto_cipher_setkey(aes_tfm, key, keylen);
	if (rc) {
		pr_err("setkey() failed: %d\n", rc);
		goto out;
	}

	crypto_cipher_encrypt_one(aes_tfm, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
	crypto_free_cipher(aes_tfm);
	return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int    rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
		crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa  = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

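/*
 * GCM uses the 12-byte IV as the counter block prefix, with the 32-bit
 * counter in the last word starting at 1 (J0).
 */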
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/**
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev   = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

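/*
 * The hash entry points below share one packet-descriptor path: each
 * ->update() (and ->digest()) hashes req->src and writes the current
 * digest to req->result, with the hash state carried in the SA between
 * calls, so ->final() is a no-op here.
 */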
int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

/**
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}