xref: /openbmc/linux/drivers/crypto/ccree/cc_cipher.c (revision 3dc4b6fb)
1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */
3 
4 #include <linux/kernel.h>
5 #include <linux/module.h>
6 #include <crypto/algapi.h>
7 #include <crypto/internal/skcipher.h>
8 #include <crypto/internal/des.h>
9 #include <crypto/xts.h>
10 #include <crypto/sm4.h>
11 #include <crypto/scatterwalk.h>
12 
13 #include "cc_driver.h"
14 #include "cc_lli_defs.h"
15 #include "cc_buffer_mgr.h"
16 #include "cc_cipher.h"
17 #include "cc_request_mgr.h"
18 
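/*
 * Note: worst-case descriptor sequence built in cc_cipher_process() is
 * IV/XEX key setup (up to 2), MLLI bypass (1), key load (1), data flow (1)
 * and read-back of the next IV (1), hence the value 6 below.
 */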
19 #define MAX_SKCIPHER_SEQ_LEN 6
20 
21 #define template_skcipher	template_u.skcipher
22 
23 struct cc_cipher_handle {
24 	struct list_head alg_list;
25 };
26 
27 struct cc_user_key_info {
28 	u8 *key;
29 	dma_addr_t key_dma_addr;
30 };
31 
32 struct cc_hw_key_info {
33 	enum cc_hw_crypto_key key1_slot;
34 	enum cc_hw_crypto_key key2_slot;
35 };
36 
37 struct cc_cpp_key_info {
38 	u8 slot;
39 	enum cc_cpp_alg alg;
40 };
41 
42 enum cc_key_type {
43 	CC_UNPROTECTED_KEY,		/* User key */
44 	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
45 	CC_POLICY_PROTECTED_KEY,	/* CPP key */
46 	CC_INVALID_PROTECTED_KEY	/* Invalid key */
47 };
48 
49 struct cc_cipher_ctx {
50 	struct cc_drvdata *drvdata;
51 	int keylen;
52 	int key_round_number;
53 	int cipher_mode;
54 	int flow_mode;
55 	unsigned int flags;
56 	enum cc_key_type key_type;
57 	struct cc_user_key_info user;
58 	union {
59 		struct cc_hw_key_info hw;
60 		struct cc_cpp_key_info cpp;
61 	};
62 	struct crypto_shash *shash_tfm;
63 };
64 
65 static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
66 
67 static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
68 {
69 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
70 
71 	return ctx_p->key_type;
72 }
73 
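/*
 * Check that a key size is valid for the configured flow and cipher mode.
 * XTS, ESSIV and BITLOCKER take a double-sized AES key (two keys
 * concatenated), hence the "* 2" cases below.
 */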
74 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
75 {
76 	switch (ctx_p->flow_mode) {
77 	case S_DIN_to_AES:
78 		switch (size) {
79 		case CC_AES_128_BIT_KEY_SIZE:
80 		case CC_AES_192_BIT_KEY_SIZE:
81 			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
82 			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
83 			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
84 				return 0;
85 			break;
86 		case CC_AES_256_BIT_KEY_SIZE:
87 			return 0;
88 		case (CC_AES_192_BIT_KEY_SIZE * 2):
89 		case (CC_AES_256_BIT_KEY_SIZE * 2):
90 			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
91 			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
92 			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
93 				return 0;
94 			break;
95 		default:
96 			break;
97 		}
98 		break;
99 	case S_DIN_to_DES:
100 		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
101 			return 0;
102 		break;
103 	case S_DIN_to_SM4:
104 		if (size == SM4_KEY_SIZE)
105 			return 0;
		break;
106 	default:
107 		break;
108 	}
109 	return -EINVAL;
110 }
111 
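/*
 * Check that the request length is valid for the configured mode: CTR and
 * OFB accept any length, XTS and CBC-CTS need at least one block, and the
 * remaining block modes need block-aligned data.
 */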
112 static int validate_data_size(struct cc_cipher_ctx *ctx_p,
113 			      unsigned int size)
114 {
115 	switch (ctx_p->flow_mode) {
116 	case S_DIN_to_AES:
117 		switch (ctx_p->cipher_mode) {
118 		case DRV_CIPHER_XTS:
119 		case DRV_CIPHER_CBC_CTS:
120 			if (size >= AES_BLOCK_SIZE)
121 				return 0;
122 			break;
123 		case DRV_CIPHER_OFB:
124 		case DRV_CIPHER_CTR:
125 			return 0;
126 		case DRV_CIPHER_ECB:
127 		case DRV_CIPHER_CBC:
128 		case DRV_CIPHER_ESSIV:
129 		case DRV_CIPHER_BITLOCKER:
130 			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
131 				return 0;
132 			break;
133 		default:
134 			break;
135 		}
136 		break;
137 	case S_DIN_to_DES:
138 		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
139 			return 0;
140 		break;
141 	case S_DIN_to_SM4:
142 		switch (ctx_p->cipher_mode) {
143 		case DRV_CIPHER_CTR:
144 			return 0;
145 		case DRV_CIPHER_ECB:
146 		case DRV_CIPHER_CBC:
147 			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
148 				return 0;
			break;
149 		default:
150 			break;
151 		}
		break;
152 	default:
153 		break;
154 	}
155 	return -EINVAL;
156 }
157 
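/*
 * Per-tfm init: allocate and DMA-map the key staging buffer and, for ESSIV,
 * allocate a software SHA-256 transform used by cc_cipher_setkey() to
 * derive the second key half.
 */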
158 static int cc_cipher_init(struct crypto_tfm *tfm)
159 {
160 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
161 	struct cc_crypto_alg *cc_alg =
162 			container_of(tfm->__crt_alg, struct cc_crypto_alg,
163 				     skcipher_alg.base);
164 	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
165 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
166 	int rc = 0;
167 
168 	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
169 		crypto_tfm_alg_name(tfm));
170 
171 	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
172 				    sizeof(struct cipher_req_ctx));
173 
174 	ctx_p->cipher_mode = cc_alg->cipher_mode;
175 	ctx_p->flow_mode = cc_alg->flow_mode;
176 	ctx_p->drvdata = cc_alg->drvdata;
177 
178 	/* Allocate key buffer, cache line aligned */
179 	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
180 	if (!ctx_p->user.key)
181 		return -ENOMEM;
182 
183 	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
184 		ctx_p->user.key);
185 
186 	/* Map key buffer */
187 	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
188 						  max_key_buf_size,
189 						  DMA_TO_DEVICE);
190 	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
191 		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
192 			max_key_buf_size, ctx_p->user.key);
		kfree(ctx_p->user.key);
193 		return -ENOMEM;
194 	}
195 	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
196 		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);
197 
198 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
199 		/* Alloc hash tfm for essiv */
200 		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
201 		if (IS_ERR(ctx_p->shash_tfm)) {
202 			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			rc = PTR_ERR(ctx_p->shash_tfm);
			dma_unmap_single(dev, ctx_p->user.key_dma_addr,
					 max_key_buf_size, DMA_TO_DEVICE);
			kfree(ctx_p->user.key);
203 			return rc;
204 		}
205 	}
206 
207 	return rc;
208 }
209 
210 static void cc_cipher_exit(struct crypto_tfm *tfm)
211 {
212 	struct crypto_alg *alg = tfm->__crt_alg;
213 	struct cc_crypto_alg *cc_alg =
214 			container_of(alg, struct cc_crypto_alg,
215 				     skcipher_alg.base);
216 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
217 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
218 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
219 
220 	dev_dbg(dev, "Clearing context @%p for %s\n",
221 		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
222 
223 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
224 		/* Free hash tfm for essiv */
225 		crypto_free_shash(ctx_p->shash_tfm);
226 		ctx_p->shash_tfm = NULL;
227 	}
228 
229 	/* Unmap key buffer */
230 	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
231 			 DMA_TO_DEVICE);
232 	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
233 		&ctx_p->user.key_dma_addr);
234 
235 	/* Free key buffer in context */
236 	kzfree(ctx_p->user.key);
237 	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
238 }
239 
240 struct tdes_keys {
241 	u8	key1[DES_KEY_SIZE];
242 	u8	key2[DES_KEY_SIZE];
243 	u8	key3[DES_KEY_SIZE];
244 };
245 
246 static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
247 {
248 	switch (slot_num) {
249 	case 0:
250 		return KFDE0_KEY;
251 	case 1:
252 		return KFDE1_KEY;
253 	case 2:
254 		return KFDE2_KEY;
255 	case 3:
256 		return KFDE3_KEY;
257 	}
258 	return END_OF_KEYS;
259 }
260 
261 static u8 cc_slot_to_cpp_key(u8 slot_num)
262 {
263 	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
264 }
265 
266 static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
267 {
268 	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
269 		return CC_HW_PROTECTED_KEY;
270 	else if (slot_num >=  CC_FIRST_CPP_KEY_SLOT &&
271 		 slot_num <=  CC_LAST_CPP_KEY_SLOT)
272 		return CC_POLICY_PROTECTED_KEY;
273 	else
274 		return CC_INVALID_PROTECTED_KEY;
275 }
276 
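/*
 * setkey handler for the protected-key ("paes"/"psm4") algorithms: no key
 * material is passed in; instead a struct cc_hkey_info token selects the
 * HW (FDE) or CPP key slots and carries the effective key length.
 */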
277 static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
278 			     unsigned int keylen)
279 {
280 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
281 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
282 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
283 	struct cc_hkey_info hki;
284 
285 	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
286 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
287 	dump_byte_array("key", (u8 *)key, keylen);
288 
289 	/* STAT_PHASE_0: Init and sanity checks */
290 
291 	/* This checks the size of the protected key token */
292 	if (keylen != sizeof(hki)) {
293 		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
294 		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
295 		return -EINVAL;
296 	}
297 
298 	memcpy(&hki, key, keylen);
299 
300 	/* The real key len for crypto op is the size of the HW key
301 	 * referenced by the HW key slot, not the hardware key token
302 	 */
303 	keylen = hki.keylen;
304 
305 	if (validate_keys_sizes(ctx_p, keylen)) {
306 		dev_err(dev, "Unsupported key size %d.\n", keylen);
307 		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
308 		return -EINVAL;
309 	}
310 
311 	ctx_p->keylen = keylen;
312 
313 	switch (cc_slot_to_key_type(hki.hw_key1)) {
314 	case CC_HW_PROTECTED_KEY:
315 		if (ctx_p->flow_mode == S_DIN_to_SM4) {
316 			dev_err(dev, "Only AES HW protected keys are supported\n");
317 			return -EINVAL;
318 		}
319 
320 		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
321 		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
322 			dev_err(dev, "Unsupported hw key1 number (%d)\n",
323 				hki.hw_key1);
324 			return -EINVAL;
325 		}
326 
327 		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
328 		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
329 		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
330 			if (hki.hw_key1 == hki.hw_key2) {
331 				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
332 					hki.hw_key1, hki.hw_key2);
333 				return -EINVAL;
334 			}
335 
336 			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
337 			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
338 				dev_err(dev, "Unsupported hw key2 number (%d)\n",
339 					hki.hw_key2);
340 				return -EINVAL;
341 			}
342 		}
343 
344 		ctx_p->key_type = CC_HW_PROTECTED_KEY;
345 	dev_dbg(dev, "HW protected key %d/%d set\n",
346 			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
347 		break;
348 
349 	case CC_POLICY_PROTECTED_KEY:
350 		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
351 			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
352 			return -EINVAL;
353 		}
354 
355 		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
356 		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
357 			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
358 			return -EINVAL;
359 		}
360 
361 		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
362 		if (ctx_p->flow_mode == S_DIN_to_AES)
363 			ctx_p->cpp.alg = CC_CPP_AES;
364 		else /* Must be SM4 due to sethkey registration */
365 			ctx_p->cpp.alg = CC_CPP_SM4;
366 		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
367 		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
368 			ctx_p->cpp.alg, ctx_p->cpp.slot);
369 		break;
370 
371 	default:
372 		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
373 		return -EINVAL;
374 	}
375 
376 	return 0;
377 }
378 
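/*
 * setkey handler for regular (unprotected) keys: validate the key, copy it
 * into the DMA-mapped staging buffer and, for ESSIV, derive the second key
 * half as the SHA-256 digest of the first half using the software shash
 * allocated at init time.
 */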
379 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
380 			    unsigned int keylen)
381 {
382 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
383 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
384 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
385 	struct cc_crypto_alg *cc_alg =
386 			container_of(tfm->__crt_alg, struct cc_crypto_alg,
387 				     skcipher_alg.base);
388 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
389 
390 	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
391 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
392 	dump_byte_array("key", (u8 *)key, keylen);
393 
394 	/* STAT_PHASE_0: Init and sanity checks */
395 
396 	if (validate_keys_sizes(ctx_p, keylen)) {
397 		dev_err(dev, "Unsupported key size %d.\n", keylen);
398 		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
399 		return -EINVAL;
400 	}
401 
402 	ctx_p->key_type = CC_UNPROTECTED_KEY;
403 
404 	/*
405 	 * Verify DES weak keys
406 	 * Note that we're dropping the expanded key since the
407 	 * HW does the expansion on its own.
408 	 */
409 	if (ctx_p->flow_mode == S_DIN_to_DES) {
410 		if ((keylen == DES3_EDE_KEY_SIZE &&
411 		     verify_skcipher_des3_key(sktfm, key)) ||
412 		    verify_skcipher_des_key(sktfm, key)) {
413 			dev_dbg(dev, "weak DES key\n");
414 			return -EINVAL;
415 		}
416 	}
417 
418 	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
419 	    xts_check_key(tfm, key, keylen)) {
420 		dev_dbg(dev, "weak XTS key\n");
421 		return -EINVAL;
422 	}
423 
424 	/* STAT_PHASE_1: Copy key to ctx */
425 	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
426 				max_key_buf_size, DMA_TO_DEVICE);
427 
428 	memcpy(ctx_p->user.key, key, keylen);
429 	if (keylen == 24)
430 		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
431 
432 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
433 		/* sha256 for key2 - use sw implementation */
434 		int key_len = keylen >> 1;
435 		int err;
436 
437 		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
438 
439 		desc->tfm = ctx_p->shash_tfm;
440 
441 		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
442 					  ctx_p->user.key + key_len);
443 		if (err) {
444 			dev_err(dev, "Failed to hash ESSIV key.\n");
445 			return err;
446 		}
447 	}
448 	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
449 				   max_key_buf_size, DMA_TO_DEVICE);
450 	ctx_p->keylen = keylen;
451 
452 	dev_dbg(dev, "return safely");
453 	return 0;
454 }
455 
456 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
457 {
458 	switch (ctx_p->flow_mode) {
459 	case S_DIN_to_AES:
460 		return S_AES_to_DOUT;
461 	case S_DIN_to_DES:
462 		return S_DES_to_DOUT;
463 	case S_DIN_to_SM4:
464 		return S_SM4_to_DOUT;
465 	default:
466 		return ctx_p->flow_mode;
467 	}
468 }
469 
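/*
 * Append a descriptor that writes the updated IV/counter back to the
 * request IV buffer so chaining across requests works. Not needed for ECB
 * and skipped entirely for CPP (policy protected) keys.
 */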
470 static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
471 				 struct cipher_req_ctx *req_ctx,
472 				 unsigned int ivsize, struct cc_hw_desc desc[],
473 				 unsigned int *seq_size)
474 {
475 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
476 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
477 	int cipher_mode = ctx_p->cipher_mode;
478 	int flow_mode = cc_out_setup_mode(ctx_p);
479 	int direction = req_ctx->gen_ctx.op_type;
480 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
481 
482 	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
483 		return;
484 
485 	switch (cipher_mode) {
486 	case DRV_CIPHER_ECB:
487 		break;
488 	case DRV_CIPHER_CBC:
489 	case DRV_CIPHER_CBC_CTS:
490 	case DRV_CIPHER_CTR:
491 	case DRV_CIPHER_OFB:
492 		/* Read next IV */
493 		hw_desc_init(&desc[*seq_size]);
494 		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
495 		set_cipher_config0(&desc[*seq_size], direction);
496 		set_flow_mode(&desc[*seq_size], flow_mode);
497 		set_cipher_mode(&desc[*seq_size], cipher_mode);
498 		if (cipher_mode == DRV_CIPHER_CTR ||
499 		    cipher_mode == DRV_CIPHER_OFB) {
500 			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
501 		} else {
502 			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
503 		}
504 		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
505 		(*seq_size)++;
506 		break;
507 	case DRV_CIPHER_XTS:
508 	case DRV_CIPHER_ESSIV:
509 	case DRV_CIPHER_BITLOCKER:
510 		/*  IV */
511 		hw_desc_init(&desc[*seq_size]);
512 		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
513 		set_cipher_mode(&desc[*seq_size], cipher_mode);
514 		set_cipher_config0(&desc[*seq_size], direction);
515 		set_flow_mode(&desc[*seq_size], flow_mode);
516 		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
517 			     NS_BIT, 1);
518 		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
519 		(*seq_size)++;
520 		break;
521 	default:
522 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
523 	}
524 }
525 
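/*
 * Load the cipher state: the IV for chaining modes, plus the XEX/tweak key
 * and data unit size for XTS, ESSIV and BITLOCKER.
 */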
526 static void cc_setup_state_desc(struct crypto_tfm *tfm,
527 				 struct cipher_req_ctx *req_ctx,
528 				 unsigned int ivsize, unsigned int nbytes,
529 				 struct cc_hw_desc desc[],
530 				 unsigned int *seq_size)
531 {
532 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
533 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
534 	int cipher_mode = ctx_p->cipher_mode;
535 	int flow_mode = ctx_p->flow_mode;
536 	int direction = req_ctx->gen_ctx.op_type;
537 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
538 	unsigned int key_len = ctx_p->keylen;
539 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
540 	unsigned int du_size = nbytes;
541 
542 	struct cc_crypto_alg *cc_alg =
543 		container_of(tfm->__crt_alg, struct cc_crypto_alg,
544 			     skcipher_alg.base);
545 
546 	if (cc_alg->data_unit)
547 		du_size = cc_alg->data_unit;
548 
549 	switch (cipher_mode) {
550 	case DRV_CIPHER_ECB:
551 		break;
552 	case DRV_CIPHER_CBC:
553 	case DRV_CIPHER_CBC_CTS:
554 	case DRV_CIPHER_CTR:
555 	case DRV_CIPHER_OFB:
556 		/* Load IV */
557 		hw_desc_init(&desc[*seq_size]);
558 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
559 			     NS_BIT);
560 		set_cipher_config0(&desc[*seq_size], direction);
561 		set_flow_mode(&desc[*seq_size], flow_mode);
562 		set_cipher_mode(&desc[*seq_size], cipher_mode);
563 		if (cipher_mode == DRV_CIPHER_CTR ||
564 		    cipher_mode == DRV_CIPHER_OFB) {
565 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
566 		} else {
567 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
568 		}
569 		(*seq_size)++;
570 		break;
571 	case DRV_CIPHER_XTS:
572 	case DRV_CIPHER_ESSIV:
573 	case DRV_CIPHER_BITLOCKER:
574 		/* load XEX key */
575 		hw_desc_init(&desc[*seq_size]);
576 		set_cipher_mode(&desc[*seq_size], cipher_mode);
577 		set_cipher_config0(&desc[*seq_size], direction);
578 		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
579 			set_hw_crypto_key(&desc[*seq_size],
580 					  ctx_p->hw.key2_slot);
581 		} else {
582 			set_din_type(&desc[*seq_size], DMA_DLLI,
583 				     (key_dma_addr + (key_len / 2)),
584 				     (key_len / 2), NS_BIT);
585 		}
586 		set_xex_data_unit_size(&desc[*seq_size], du_size);
587 		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
588 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
589 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
590 		(*seq_size)++;
591 
592 		/* Load IV */
593 		hw_desc_init(&desc[*seq_size]);
594 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
595 		set_cipher_mode(&desc[*seq_size], cipher_mode);
596 		set_cipher_config0(&desc[*seq_size], direction);
597 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
598 		set_flow_mode(&desc[*seq_size], flow_mode);
599 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
600 			     CC_AES_BLOCK_SIZE, NS_BIT);
601 		(*seq_size)++;
602 		break;
603 	default:
604 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
605 	}
606 }
607 
608 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
609 {
610 	switch (ctx_p->flow_mode) {
611 	case S_DIN_to_AES:
612 		return DIN_AES_DOUT;
613 	case S_DIN_to_DES:
614 		return DIN_DES_DOUT;
615 	case S_DIN_to_SM4:
616 		return DIN_SM4_DOUT;
617 	default:
618 		return ctx_p->flow_mode;
619 	}
620 }
621 
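/*
 * Load the cipher key: from a CPP slot, from a HW (FDE) key slot, or from
 * the DMA-mapped user key buffer, depending on the key type.
 */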
622 static void cc_setup_key_desc(struct crypto_tfm *tfm,
623 			      struct cipher_req_ctx *req_ctx,
624 			      unsigned int nbytes, struct cc_hw_desc desc[],
625 			      unsigned int *seq_size)
626 {
627 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
628 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
629 	int cipher_mode = ctx_p->cipher_mode;
630 	int flow_mode = ctx_p->flow_mode;
631 	int direction = req_ctx->gen_ctx.op_type;
632 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
633 	unsigned int key_len = ctx_p->keylen;
634 	unsigned int din_size;
635 
636 	switch (cipher_mode) {
637 	case DRV_CIPHER_CBC:
638 	case DRV_CIPHER_CBC_CTS:
639 	case DRV_CIPHER_CTR:
640 	case DRV_CIPHER_OFB:
641 	case DRV_CIPHER_ECB:
642 		/* Load key */
643 		hw_desc_init(&desc[*seq_size]);
644 		set_cipher_mode(&desc[*seq_size], cipher_mode);
645 		set_cipher_config0(&desc[*seq_size], direction);
646 
647 		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
648 			/* We use the AES key size coding for all CPP algs */
649 			set_key_size_aes(&desc[*seq_size], key_len);
650 			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
651 			flow_mode = cc_out_flow_mode(ctx_p);
652 		} else {
653 			if (flow_mode == S_DIN_to_AES) {
654 				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
655 					set_hw_crypto_key(&desc[*seq_size],
656 							  ctx_p->hw.key1_slot);
657 				} else {
658 					/* CC_UNPROTECTED_KEY
659 					 * Invalid key sizes are filtered out
660 					 * in setkey()
661 					 */
662 					din_size = (key_len == 24) ?
663 						AES_MAX_KEY_SIZE : key_len;
664 
665 					set_din_type(&desc[*seq_size], DMA_DLLI,
666 						     key_dma_addr, din_size,
667 						     NS_BIT);
668 				}
669 				set_key_size_aes(&desc[*seq_size], key_len);
670 			} else {
671 				/*des*/
672 				set_din_type(&desc[*seq_size], DMA_DLLI,
673 					     key_dma_addr, key_len, NS_BIT);
674 				set_key_size_des(&desc[*seq_size], key_len);
675 			}
676 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
677 		}
678 		set_flow_mode(&desc[*seq_size], flow_mode);
679 		(*seq_size)++;
680 		break;
681 	case DRV_CIPHER_XTS:
682 	case DRV_CIPHER_ESSIV:
683 	case DRV_CIPHER_BITLOCKER:
684 		/* Load AES key */
685 		hw_desc_init(&desc[*seq_size]);
686 		set_cipher_mode(&desc[*seq_size], cipher_mode);
687 		set_cipher_config0(&desc[*seq_size], direction);
688 		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
689 			set_hw_crypto_key(&desc[*seq_size],
690 					  ctx_p->hw.key1_slot);
691 		} else {
692 			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
693 				     (key_len / 2), NS_BIT);
694 		}
695 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
696 		set_flow_mode(&desc[*seq_size], flow_mode);
697 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
698 		(*seq_size)++;
699 		break;
700 	default:
701 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
702 	}
703 }
704 
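/*
 * If the buffers required an MLLI table, add a BYPASS descriptor that
 * copies the table from host memory into the MLLI SRAM before it is used.
 */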
705 static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
706 			       struct cipher_req_ctx *req_ctx,
707 			       struct scatterlist *dst, struct scatterlist *src,
708 			       unsigned int nbytes, void *areq,
709 			       struct cc_hw_desc desc[], unsigned int *seq_size)
710 {
711 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
712 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
713 
714 	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
715 		/* bypass */
716 		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
717 			&req_ctx->mlli_params.mlli_dma_addr,
718 			req_ctx->mlli_params.mlli_len,
719 			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
720 		hw_desc_init(&desc[*seq_size]);
721 		set_din_type(&desc[*seq_size], DMA_DLLI,
722 			     req_ctx->mlli_params.mlli_dma_addr,
723 			     req_ctx->mlli_params.mlli_len, NS_BIT);
724 		set_dout_sram(&desc[*seq_size],
725 			      ctx_p->drvdata->mlli_sram_addr,
726 			      req_ctx->mlli_params.mlli_len);
727 		set_flow_mode(&desc[*seq_size], BYPASS);
728 		(*seq_size)++;
729 	}
730 }
731 
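/*
 * Add the actual data-processing descriptor: DLLI in/out for contiguous
 * buffers, or MLLI via the SRAM table set up by cc_setup_mlli_desc().
 */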
732 static void cc_setup_flow_desc(struct crypto_tfm *tfm,
733 			       struct cipher_req_ctx *req_ctx,
734 			       struct scatterlist *dst, struct scatterlist *src,
735 			       unsigned int nbytes, struct cc_hw_desc desc[],
736 			       unsigned int *seq_size)
737 {
738 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
739 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
740 	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
741 	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
742 			  ctx_p->cipher_mode == DRV_CIPHER_ECB);
743 
744 	/* Process */
745 	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
746 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
747 			&sg_dma_address(src), nbytes);
748 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
749 			&sg_dma_address(dst), nbytes);
750 		hw_desc_init(&desc[*seq_size]);
751 		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
752 			     nbytes, NS_BIT);
753 		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
754 			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
755 		if (last_desc)
756 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
757 
758 		set_flow_mode(&desc[*seq_size], flow_mode);
759 		(*seq_size)++;
760 	} else {
761 		hw_desc_init(&desc[*seq_size]);
762 		set_din_type(&desc[*seq_size], DMA_MLLI,
763 			     ctx_p->drvdata->mlli_sram_addr,
764 			     req_ctx->in_mlli_nents, NS_BIT);
765 		if (req_ctx->out_nents == 0) {
766 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
767 				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
768 				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
769 			set_dout_mlli(&desc[*seq_size],
770 				      ctx_p->drvdata->mlli_sram_addr,
771 				      req_ctx->in_mlli_nents, NS_BIT,
772 				      (!last_desc ? 0 : 1));
773 		} else {
774 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
775 				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
776 				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
777 				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
778 			set_dout_mlli(&desc[*seq_size],
779 				      (ctx_p->drvdata->mlli_sram_addr +
780 				       (LLI_ENTRY_BYTE_SIZE *
781 					req_ctx->in_mlli_nents)),
782 				      req_ctx->out_mlli_nents, NS_BIT,
783 				      (!last_desc ? 0 : 1));
784 		}
785 		if (last_desc)
786 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
787 
788 		set_flow_mode(&desc[*seq_size], flow_mode);
789 		(*seq_size)++;
790 	}
791 }
792 
793 static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
794 {
795 	struct skcipher_request *req = (struct skcipher_request *)cc_req;
796 	struct scatterlist *dst = req->dst;
797 	struct scatterlist *src = req->src;
798 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
799 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
800 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
801 
802 	if (err != -EINPROGRESS) {
803 		/* Not a BACKLOG notification */
804 		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
805 		memcpy(req->iv, req_ctx->iv, ivsize);
806 		kzfree(req_ctx->iv);
807 	}
808 
809 	skcipher_request_complete(req, err);
810 }
811 
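/*
 * Common encrypt/decrypt path: validate the request, copy the IV to a
 * DMA-able buffer, map the data buffers, build the descriptor sequence
 * (state, MLLI, key, flow, read-IV) and queue it to the engine.
 */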
812 static int cc_cipher_process(struct skcipher_request *req,
813 			     enum drv_crypto_direction direction)
814 {
815 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
816 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
817 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
818 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
819 	struct scatterlist *dst = req->dst;
820 	struct scatterlist *src = req->src;
821 	unsigned int nbytes = req->cryptlen;
822 	void *iv = req->iv;
823 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
824 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
825 	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
826 	struct cc_crypto_req cc_req = {};
827 	int rc;
828 	unsigned int seq_len = 0;
829 	gfp_t flags = cc_gfp_flags(&req->base);
830 
831 	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
832 		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
833 		"Encrypt" : "Decrypt"), req, iv, nbytes);
834 
835 	/* STAT_PHASE_0: Init and sanity checks */
836 
837 	/* TODO: check data length according to mode */
838 	if (validate_data_size(ctx_p, nbytes)) {
839 		dev_err(dev, "Unsupported data size %d.\n", nbytes);
840 		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
841 		rc = -EINVAL;
842 		goto exit_process;
843 	}
844 	if (nbytes == 0) {
845 		/* No data to process is valid */
846 		rc = 0;
847 		goto exit_process;
848 	}
849 
850 	/* The IV we are handed may be allocated from the stack so
851 	 * we must copy it to a DMAable buffer before use.
852 	 */
853 	req_ctx->iv = kmemdup(iv, ivsize, flags);
854 	if (!req_ctx->iv) {
855 		rc = -ENOMEM;
856 		goto exit_process;
857 	}
858 
859 	/* Setup request structure */
860 	cc_req.user_cb = (void *)cc_cipher_complete;
861 	cc_req.user_arg = (void *)req;
862 
863 	/* Setup CPP operation details */
864 	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
865 		cc_req.cpp.is_cpp = true;
866 		cc_req.cpp.alg = ctx_p->cpp.alg;
867 		cc_req.cpp.slot = ctx_p->cpp.slot;
868 	}
869 
870 	/* Setup request context */
871 	req_ctx->gen_ctx.op_type = direction;
872 
873 	/* STAT_PHASE_1: Map buffers */
874 
875 	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
876 				      req_ctx->iv, src, dst, flags);
877 	if (rc) {
878 		dev_err(dev, "map_request() failed\n");
879 		goto exit_process;
880 	}
881 
882 	/* STAT_PHASE_2: Create sequence */
883 
884 	/* Setup IV and XEX key used */
885 	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
886 	/* Setup MLLI line, if needed */
887 	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
888 	/* Setup key */
889 	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
890 	/* Data processing */
891 	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
892 	/* Read next IV */
893 	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);
894 
895 	/* STAT_PHASE_3: Lock HW and push sequence */
896 
897 	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
898 			     &req->base);
899 	if (rc != -EINPROGRESS && rc != -EBUSY) {
900 		/* Failed to send the request or request completed
901 		 * synchronously
902 		 */
903 		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
904 	}
905 
906 exit_process:
907 	if (rc != -EINPROGRESS && rc != -EBUSY)
908 		kzfree(req_ctx->iv);
910 
911 	return rc;
912 }
913 
914 static int cc_cipher_encrypt(struct skcipher_request *req)
915 {
916 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
917 
918 	memset(req_ctx, 0, sizeof(*req_ctx));
919 
920 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
921 }
922 
923 static int cc_cipher_decrypt(struct skcipher_request *req)
924 {
925 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
926 
927 	memset(req_ctx, 0, sizeof(*req_ctx));
928 
929 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
930 }
931 
932 /* Block cipher alg */
933 static const struct cc_alg_template skcipher_algs[] = {
934 	{
935 		.name = "xts(paes)",
936 		.driver_name = "xts-paes-ccree",
937 		.blocksize = 1,
938 		.template_skcipher = {
939 			.setkey = cc_cipher_sethkey,
940 			.encrypt = cc_cipher_encrypt,
941 			.decrypt = cc_cipher_decrypt,
942 			.min_keysize = CC_HW_KEY_SIZE,
943 			.max_keysize = CC_HW_KEY_SIZE,
944 			.ivsize = AES_BLOCK_SIZE,
945 			},
946 		.cipher_mode = DRV_CIPHER_XTS,
947 		.flow_mode = S_DIN_to_AES,
948 		.min_hw_rev = CC_HW_REV_630,
949 		.std_body = CC_STD_NIST,
950 		.sec_func = true,
951 	},
952 	{
953 		.name = "xts512(paes)",
954 		.driver_name = "xts-paes-du512-ccree",
955 		.blocksize = 1,
956 		.template_skcipher = {
957 			.setkey = cc_cipher_sethkey,
958 			.encrypt = cc_cipher_encrypt,
959 			.decrypt = cc_cipher_decrypt,
960 			.min_keysize = CC_HW_KEY_SIZE,
961 			.max_keysize = CC_HW_KEY_SIZE,
962 			.ivsize = AES_BLOCK_SIZE,
963 			},
964 		.cipher_mode = DRV_CIPHER_XTS,
965 		.flow_mode = S_DIN_to_AES,
966 		.data_unit = 512,
967 		.min_hw_rev = CC_HW_REV_712,
968 		.std_body = CC_STD_NIST,
969 		.sec_func = true,
970 	},
971 	{
972 		.name = "xts4096(paes)",
973 		.driver_name = "xts-paes-du4096-ccree",
974 		.blocksize = 1,
975 		.template_skcipher = {
976 			.setkey = cc_cipher_sethkey,
977 			.encrypt = cc_cipher_encrypt,
978 			.decrypt = cc_cipher_decrypt,
979 			.min_keysize = CC_HW_KEY_SIZE,
980 			.max_keysize = CC_HW_KEY_SIZE,
981 			.ivsize = AES_BLOCK_SIZE,
982 			},
983 		.cipher_mode = DRV_CIPHER_XTS,
984 		.flow_mode = S_DIN_to_AES,
985 		.data_unit = 4096,
986 		.min_hw_rev = CC_HW_REV_712,
987 		.std_body = CC_STD_NIST,
988 		.sec_func = true,
989 	},
990 	{
991 		.name = "essiv(paes)",
992 		.driver_name = "essiv-paes-ccree",
993 		.blocksize = AES_BLOCK_SIZE,
994 		.template_skcipher = {
995 			.setkey = cc_cipher_sethkey,
996 			.encrypt = cc_cipher_encrypt,
997 			.decrypt = cc_cipher_decrypt,
998 			.min_keysize = CC_HW_KEY_SIZE,
999 			.max_keysize = CC_HW_KEY_SIZE,
1000 			.ivsize = AES_BLOCK_SIZE,
1001 			},
1002 		.cipher_mode = DRV_CIPHER_ESSIV,
1003 		.flow_mode = S_DIN_to_AES,
1004 		.min_hw_rev = CC_HW_REV_712,
1005 		.std_body = CC_STD_NIST,
1006 		.sec_func = true,
1007 	},
1008 	{
1009 		.name = "essiv512(paes)",
1010 		.driver_name = "essiv-paes-du512-ccree",
1011 		.blocksize = AES_BLOCK_SIZE,
1012 		.template_skcipher = {
1013 			.setkey = cc_cipher_sethkey,
1014 			.encrypt = cc_cipher_encrypt,
1015 			.decrypt = cc_cipher_decrypt,
1016 			.min_keysize = CC_HW_KEY_SIZE,
1017 			.max_keysize = CC_HW_KEY_SIZE,
1018 			.ivsize = AES_BLOCK_SIZE,
1019 			},
1020 		.cipher_mode = DRV_CIPHER_ESSIV,
1021 		.flow_mode = S_DIN_to_AES,
1022 		.data_unit = 512,
1023 		.min_hw_rev = CC_HW_REV_712,
1024 		.std_body = CC_STD_NIST,
1025 		.sec_func = true,
1026 	},
1027 	{
1028 		.name = "essiv4096(paes)",
1029 		.driver_name = "essiv-paes-du4096-ccree",
1030 		.blocksize = AES_BLOCK_SIZE,
1031 		.template_skcipher = {
1032 			.setkey = cc_cipher_sethkey,
1033 			.encrypt = cc_cipher_encrypt,
1034 			.decrypt = cc_cipher_decrypt,
1035 			.min_keysize = CC_HW_KEY_SIZE,
1036 			.max_keysize = CC_HW_KEY_SIZE,
1037 			.ivsize = AES_BLOCK_SIZE,
1038 			},
1039 		.cipher_mode = DRV_CIPHER_ESSIV,
1040 		.flow_mode = S_DIN_to_AES,
1041 		.data_unit = 4096,
1042 		.min_hw_rev = CC_HW_REV_712,
1043 		.std_body = CC_STD_NIST,
1044 		.sec_func = true,
1045 	},
1046 	{
1047 		.name = "bitlocker(paes)",
1048 		.driver_name = "bitlocker-paes-ccree",
1049 		.blocksize = AES_BLOCK_SIZE,
1050 		.template_skcipher = {
1051 			.setkey = cc_cipher_sethkey,
1052 			.encrypt = cc_cipher_encrypt,
1053 			.decrypt = cc_cipher_decrypt,
1054 			.min_keysize = CC_HW_KEY_SIZE,
1055 			.max_keysize = CC_HW_KEY_SIZE,
1056 			.ivsize = AES_BLOCK_SIZE,
1057 			},
1058 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1059 		.flow_mode = S_DIN_to_AES,
1060 		.min_hw_rev = CC_HW_REV_712,
1061 		.std_body = CC_STD_NIST,
1062 		.sec_func = true,
1063 	},
1064 	{
1065 		.name = "bitlocker512(paes)",
1066 		.driver_name = "bitlocker-paes-du512-ccree",
1067 		.blocksize = AES_BLOCK_SIZE,
1068 		.template_skcipher = {
1069 			.setkey = cc_cipher_sethkey,
1070 			.encrypt = cc_cipher_encrypt,
1071 			.decrypt = cc_cipher_decrypt,
1072 			.min_keysize = CC_HW_KEY_SIZE,
1073 			.max_keysize = CC_HW_KEY_SIZE,
1074 			.ivsize = AES_BLOCK_SIZE,
1075 			},
1076 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1077 		.flow_mode = S_DIN_to_AES,
1078 		.data_unit = 512,
1079 		.min_hw_rev = CC_HW_REV_712,
1080 		.std_body = CC_STD_NIST,
1081 		.sec_func = true,
1082 	},
1083 	{
1084 		.name = "bitlocker4096(paes)",
1085 		.driver_name = "bitlocker-paes-du4096-ccree",
1086 		.blocksize = AES_BLOCK_SIZE,
1087 		.template_skcipher = {
1088 			.setkey = cc_cipher_sethkey,
1089 			.encrypt = cc_cipher_encrypt,
1090 			.decrypt = cc_cipher_decrypt,
1091 			.min_keysize = CC_HW_KEY_SIZE,
1092 			.max_keysize =  CC_HW_KEY_SIZE,
1093 			.ivsize = AES_BLOCK_SIZE,
1094 			},
1095 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1096 		.flow_mode = S_DIN_to_AES,
1097 		.data_unit = 4096,
1098 		.min_hw_rev = CC_HW_REV_712,
1099 		.std_body = CC_STD_NIST,
1100 		.sec_func = true,
1101 	},
1102 	{
1103 		.name = "ecb(paes)",
1104 		.driver_name = "ecb-paes-ccree",
1105 		.blocksize = AES_BLOCK_SIZE,
1106 		.template_skcipher = {
1107 			.setkey = cc_cipher_sethkey,
1108 			.encrypt = cc_cipher_encrypt,
1109 			.decrypt = cc_cipher_decrypt,
1110 			.min_keysize = CC_HW_KEY_SIZE,
1111 			.max_keysize = CC_HW_KEY_SIZE,
1112 			.ivsize = 0,
1113 			},
1114 		.cipher_mode = DRV_CIPHER_ECB,
1115 		.flow_mode = S_DIN_to_AES,
1116 		.min_hw_rev = CC_HW_REV_712,
1117 		.std_body = CC_STD_NIST,
1118 		.sec_func = true,
1119 	},
1120 	{
1121 		.name = "cbc(paes)",
1122 		.driver_name = "cbc-paes-ccree",
1123 		.blocksize = AES_BLOCK_SIZE,
1124 		.template_skcipher = {
1125 			.setkey = cc_cipher_sethkey,
1126 			.encrypt = cc_cipher_encrypt,
1127 			.decrypt = cc_cipher_decrypt,
1128 			.min_keysize = CC_HW_KEY_SIZE,
1129 			.max_keysize = CC_HW_KEY_SIZE,
1130 			.ivsize = AES_BLOCK_SIZE,
1131 		},
1132 		.cipher_mode = DRV_CIPHER_CBC,
1133 		.flow_mode = S_DIN_to_AES,
1134 		.min_hw_rev = CC_HW_REV_712,
1135 		.std_body = CC_STD_NIST,
1136 		.sec_func = true,
1137 	},
1138 	{
1139 		.name = "ofb(paes)",
1140 		.driver_name = "ofb-paes-ccree",
1141 		.blocksize = AES_BLOCK_SIZE,
1142 		.template_skcipher = {
1143 			.setkey = cc_cipher_sethkey,
1144 			.encrypt = cc_cipher_encrypt,
1145 			.decrypt = cc_cipher_decrypt,
1146 			.min_keysize = CC_HW_KEY_SIZE,
1147 			.max_keysize = CC_HW_KEY_SIZE,
1148 			.ivsize = AES_BLOCK_SIZE,
1149 			},
1150 		.cipher_mode = DRV_CIPHER_OFB,
1151 		.flow_mode = S_DIN_to_AES,
1152 		.min_hw_rev = CC_HW_REV_712,
1153 		.std_body = CC_STD_NIST,
1154 		.sec_func = true,
1155 	},
1156 	{
1157 		.name = "cts(cbc(paes))",
1158 		.driver_name = "cts-cbc-paes-ccree",
1159 		.blocksize = AES_BLOCK_SIZE,
1160 		.template_skcipher = {
1161 			.setkey = cc_cipher_sethkey,
1162 			.encrypt = cc_cipher_encrypt,
1163 			.decrypt = cc_cipher_decrypt,
1164 			.min_keysize = CC_HW_KEY_SIZE,
1165 			.max_keysize = CC_HW_KEY_SIZE,
1166 			.ivsize = AES_BLOCK_SIZE,
1167 			},
1168 		.cipher_mode = DRV_CIPHER_CBC_CTS,
1169 		.flow_mode = S_DIN_to_AES,
1170 		.min_hw_rev = CC_HW_REV_712,
1171 		.std_body = CC_STD_NIST,
1172 		.sec_func = true,
1173 	},
1174 	{
1175 		.name = "ctr(paes)",
1176 		.driver_name = "ctr-paes-ccree",
1177 		.blocksize = 1,
1178 		.template_skcipher = {
1179 			.setkey = cc_cipher_sethkey,
1180 			.encrypt = cc_cipher_encrypt,
1181 			.decrypt = cc_cipher_decrypt,
1182 			.min_keysize = CC_HW_KEY_SIZE,
1183 			.max_keysize = CC_HW_KEY_SIZE,
1184 			.ivsize = AES_BLOCK_SIZE,
1185 			},
1186 		.cipher_mode = DRV_CIPHER_CTR,
1187 		.flow_mode = S_DIN_to_AES,
1188 		.min_hw_rev = CC_HW_REV_712,
1189 		.std_body = CC_STD_NIST,
1190 		.sec_func = true,
1191 	},
1192 	{
1193 		.name = "xts(aes)",
1194 		.driver_name = "xts-aes-ccree",
1195 		.blocksize = 1,
1196 		.template_skcipher = {
1197 			.setkey = cc_cipher_setkey,
1198 			.encrypt = cc_cipher_encrypt,
1199 			.decrypt = cc_cipher_decrypt,
1200 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1201 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1202 			.ivsize = AES_BLOCK_SIZE,
1203 			},
1204 		.cipher_mode = DRV_CIPHER_XTS,
1205 		.flow_mode = S_DIN_to_AES,
1206 		.min_hw_rev = CC_HW_REV_630,
1207 		.std_body = CC_STD_NIST,
1208 	},
1209 	{
1210 		.name = "xts512(aes)",
1211 		.driver_name = "xts-aes-du512-ccree",
1212 		.blocksize = 1,
1213 		.template_skcipher = {
1214 			.setkey = cc_cipher_setkey,
1215 			.encrypt = cc_cipher_encrypt,
1216 			.decrypt = cc_cipher_decrypt,
1217 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1218 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1219 			.ivsize = AES_BLOCK_SIZE,
1220 			},
1221 		.cipher_mode = DRV_CIPHER_XTS,
1222 		.flow_mode = S_DIN_to_AES,
1223 		.data_unit = 512,
1224 		.min_hw_rev = CC_HW_REV_712,
1225 		.std_body = CC_STD_NIST,
1226 	},
1227 	{
1228 		.name = "xts4096(aes)",
1229 		.driver_name = "xts-aes-du4096-ccree",
1230 		.blocksize = 1,
1231 		.template_skcipher = {
1232 			.setkey = cc_cipher_setkey,
1233 			.encrypt = cc_cipher_encrypt,
1234 			.decrypt = cc_cipher_decrypt,
1235 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1236 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1237 			.ivsize = AES_BLOCK_SIZE,
1238 			},
1239 		.cipher_mode = DRV_CIPHER_XTS,
1240 		.flow_mode = S_DIN_to_AES,
1241 		.data_unit = 4096,
1242 		.min_hw_rev = CC_HW_REV_712,
1243 		.std_body = CC_STD_NIST,
1244 	},
1245 	{
1246 		.name = "essiv(aes)",
1247 		.driver_name = "essiv-aes-ccree",
1248 		.blocksize = AES_BLOCK_SIZE,
1249 		.template_skcipher = {
1250 			.setkey = cc_cipher_setkey,
1251 			.encrypt = cc_cipher_encrypt,
1252 			.decrypt = cc_cipher_decrypt,
1253 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1254 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1255 			.ivsize = AES_BLOCK_SIZE,
1256 			},
1257 		.cipher_mode = DRV_CIPHER_ESSIV,
1258 		.flow_mode = S_DIN_to_AES,
1259 		.min_hw_rev = CC_HW_REV_712,
1260 		.std_body = CC_STD_NIST,
1261 	},
1262 	{
1263 		.name = "essiv512(aes)",
1264 		.driver_name = "essiv-aes-du512-ccree",
1265 		.blocksize = AES_BLOCK_SIZE,
1266 		.template_skcipher = {
1267 			.setkey = cc_cipher_setkey,
1268 			.encrypt = cc_cipher_encrypt,
1269 			.decrypt = cc_cipher_decrypt,
1270 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1271 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1272 			.ivsize = AES_BLOCK_SIZE,
1273 			},
1274 		.cipher_mode = DRV_CIPHER_ESSIV,
1275 		.flow_mode = S_DIN_to_AES,
1276 		.data_unit = 512,
1277 		.min_hw_rev = CC_HW_REV_712,
1278 		.std_body = CC_STD_NIST,
1279 	},
1280 	{
1281 		.name = "essiv4096(aes)",
1282 		.driver_name = "essiv-aes-du4096-ccree",
1283 		.blocksize = AES_BLOCK_SIZE,
1284 		.template_skcipher = {
1285 			.setkey = cc_cipher_setkey,
1286 			.encrypt = cc_cipher_encrypt,
1287 			.decrypt = cc_cipher_decrypt,
1288 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1289 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1290 			.ivsize = AES_BLOCK_SIZE,
1291 			},
1292 		.cipher_mode = DRV_CIPHER_ESSIV,
1293 		.flow_mode = S_DIN_to_AES,
1294 		.data_unit = 4096,
1295 		.min_hw_rev = CC_HW_REV_712,
1296 		.std_body = CC_STD_NIST,
1297 	},
1298 	{
1299 		.name = "bitlocker(aes)",
1300 		.driver_name = "bitlocker-aes-ccree",
1301 		.blocksize = AES_BLOCK_SIZE,
1302 		.template_skcipher = {
1303 			.setkey = cc_cipher_setkey,
1304 			.encrypt = cc_cipher_encrypt,
1305 			.decrypt = cc_cipher_decrypt,
1306 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1307 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1308 			.ivsize = AES_BLOCK_SIZE,
1309 			},
1310 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1311 		.flow_mode = S_DIN_to_AES,
1312 		.min_hw_rev = CC_HW_REV_712,
1313 		.std_body = CC_STD_NIST,
1314 	},
1315 	{
1316 		.name = "bitlocker512(aes)",
1317 		.driver_name = "bitlocker-aes-du512-ccree",
1318 		.blocksize = AES_BLOCK_SIZE,
1319 		.template_skcipher = {
1320 			.setkey = cc_cipher_setkey,
1321 			.encrypt = cc_cipher_encrypt,
1322 			.decrypt = cc_cipher_decrypt,
1323 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1324 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1325 			.ivsize = AES_BLOCK_SIZE,
1326 			},
1327 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1328 		.flow_mode = S_DIN_to_AES,
1329 		.data_unit = 512,
1330 		.min_hw_rev = CC_HW_REV_712,
1331 		.std_body = CC_STD_NIST,
1332 	},
1333 	{
1334 		.name = "bitlocker4096(aes)",
1335 		.driver_name = "bitlocker-aes-du4096-ccree",
1336 		.blocksize = AES_BLOCK_SIZE,
1337 		.template_skcipher = {
1338 			.setkey = cc_cipher_setkey,
1339 			.encrypt = cc_cipher_encrypt,
1340 			.decrypt = cc_cipher_decrypt,
1341 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1342 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1343 			.ivsize = AES_BLOCK_SIZE,
1344 			},
1345 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1346 		.flow_mode = S_DIN_to_AES,
1347 		.data_unit = 4096,
1348 		.min_hw_rev = CC_HW_REV_712,
1349 		.std_body = CC_STD_NIST,
1350 	},
1351 	{
1352 		.name = "ecb(aes)",
1353 		.driver_name = "ecb-aes-ccree",
1354 		.blocksize = AES_BLOCK_SIZE,
1355 		.template_skcipher = {
1356 			.setkey = cc_cipher_setkey,
1357 			.encrypt = cc_cipher_encrypt,
1358 			.decrypt = cc_cipher_decrypt,
1359 			.min_keysize = AES_MIN_KEY_SIZE,
1360 			.max_keysize = AES_MAX_KEY_SIZE,
1361 			.ivsize = 0,
1362 			},
1363 		.cipher_mode = DRV_CIPHER_ECB,
1364 		.flow_mode = S_DIN_to_AES,
1365 		.min_hw_rev = CC_HW_REV_630,
1366 		.std_body = CC_STD_NIST,
1367 	},
1368 	{
1369 		.name = "cbc(aes)",
1370 		.driver_name = "cbc-aes-ccree",
1371 		.blocksize = AES_BLOCK_SIZE,
1372 		.template_skcipher = {
1373 			.setkey = cc_cipher_setkey,
1374 			.encrypt = cc_cipher_encrypt,
1375 			.decrypt = cc_cipher_decrypt,
1376 			.min_keysize = AES_MIN_KEY_SIZE,
1377 			.max_keysize = AES_MAX_KEY_SIZE,
1378 			.ivsize = AES_BLOCK_SIZE,
1379 		},
1380 		.cipher_mode = DRV_CIPHER_CBC,
1381 		.flow_mode = S_DIN_to_AES,
1382 		.min_hw_rev = CC_HW_REV_630,
1383 		.std_body = CC_STD_NIST,
1384 	},
1385 	{
1386 		.name = "ofb(aes)",
1387 		.driver_name = "ofb-aes-ccree",
1388 		.blocksize = AES_BLOCK_SIZE,
1389 		.template_skcipher = {
1390 			.setkey = cc_cipher_setkey,
1391 			.encrypt = cc_cipher_encrypt,
1392 			.decrypt = cc_cipher_decrypt,
1393 			.min_keysize = AES_MIN_KEY_SIZE,
1394 			.max_keysize = AES_MAX_KEY_SIZE,
1395 			.ivsize = AES_BLOCK_SIZE,
1396 			},
1397 		.cipher_mode = DRV_CIPHER_OFB,
1398 		.flow_mode = S_DIN_to_AES,
1399 		.min_hw_rev = CC_HW_REV_630,
1400 		.std_body = CC_STD_NIST,
1401 	},
1402 	{
1403 		.name = "cts(cbc(aes))",
1404 		.driver_name = "cts-cbc-aes-ccree",
1405 		.blocksize = AES_BLOCK_SIZE,
1406 		.template_skcipher = {
1407 			.setkey = cc_cipher_setkey,
1408 			.encrypt = cc_cipher_encrypt,
1409 			.decrypt = cc_cipher_decrypt,
1410 			.min_keysize = AES_MIN_KEY_SIZE,
1411 			.max_keysize = AES_MAX_KEY_SIZE,
1412 			.ivsize = AES_BLOCK_SIZE,
1413 			},
1414 		.cipher_mode = DRV_CIPHER_CBC_CTS,
1415 		.flow_mode = S_DIN_to_AES,
1416 		.min_hw_rev = CC_HW_REV_630,
1417 		.std_body = CC_STD_NIST,
1418 	},
1419 	{
1420 		.name = "ctr(aes)",
1421 		.driver_name = "ctr-aes-ccree",
1422 		.blocksize = 1,
1423 		.template_skcipher = {
1424 			.setkey = cc_cipher_setkey,
1425 			.encrypt = cc_cipher_encrypt,
1426 			.decrypt = cc_cipher_decrypt,
1427 			.min_keysize = AES_MIN_KEY_SIZE,
1428 			.max_keysize = AES_MAX_KEY_SIZE,
1429 			.ivsize = AES_BLOCK_SIZE,
1430 			},
1431 		.cipher_mode = DRV_CIPHER_CTR,
1432 		.flow_mode = S_DIN_to_AES,
1433 		.min_hw_rev = CC_HW_REV_630,
1434 		.std_body = CC_STD_NIST,
1435 	},
1436 	{
1437 		.name = "cbc(des3_ede)",
1438 		.driver_name = "cbc-3des-ccree",
1439 		.blocksize = DES3_EDE_BLOCK_SIZE,
1440 		.template_skcipher = {
1441 			.setkey = cc_cipher_setkey,
1442 			.encrypt = cc_cipher_encrypt,
1443 			.decrypt = cc_cipher_decrypt,
1444 			.min_keysize = DES3_EDE_KEY_SIZE,
1445 			.max_keysize = DES3_EDE_KEY_SIZE,
1446 			.ivsize = DES3_EDE_BLOCK_SIZE,
1447 			},
1448 		.cipher_mode = DRV_CIPHER_CBC,
1449 		.flow_mode = S_DIN_to_DES,
1450 		.min_hw_rev = CC_HW_REV_630,
1451 		.std_body = CC_STD_NIST,
1452 	},
1453 	{
1454 		.name = "ecb(des3_ede)",
1455 		.driver_name = "ecb-3des-ccree",
1456 		.blocksize = DES3_EDE_BLOCK_SIZE,
1457 		.template_skcipher = {
1458 			.setkey = cc_cipher_setkey,
1459 			.encrypt = cc_cipher_encrypt,
1460 			.decrypt = cc_cipher_decrypt,
1461 			.min_keysize = DES3_EDE_KEY_SIZE,
1462 			.max_keysize = DES3_EDE_KEY_SIZE,
1463 			.ivsize = 0,
1464 			},
1465 		.cipher_mode = DRV_CIPHER_ECB,
1466 		.flow_mode = S_DIN_to_DES,
1467 		.min_hw_rev = CC_HW_REV_630,
1468 		.std_body = CC_STD_NIST,
1469 	},
1470 	{
1471 		.name = "cbc(des)",
1472 		.driver_name = "cbc-des-ccree",
1473 		.blocksize = DES_BLOCK_SIZE,
1474 		.template_skcipher = {
1475 			.setkey = cc_cipher_setkey,
1476 			.encrypt = cc_cipher_encrypt,
1477 			.decrypt = cc_cipher_decrypt,
1478 			.min_keysize = DES_KEY_SIZE,
1479 			.max_keysize = DES_KEY_SIZE,
1480 			.ivsize = DES_BLOCK_SIZE,
1481 			},
1482 		.cipher_mode = DRV_CIPHER_CBC,
1483 		.flow_mode = S_DIN_to_DES,
1484 		.min_hw_rev = CC_HW_REV_630,
1485 		.std_body = CC_STD_NIST,
1486 	},
1487 	{
1488 		.name = "ecb(des)",
1489 		.driver_name = "ecb-des-ccree",
1490 		.blocksize = DES_BLOCK_SIZE,
1491 		.template_skcipher = {
1492 			.setkey = cc_cipher_setkey,
1493 			.encrypt = cc_cipher_encrypt,
1494 			.decrypt = cc_cipher_decrypt,
1495 			.min_keysize = DES_KEY_SIZE,
1496 			.max_keysize = DES_KEY_SIZE,
1497 			.ivsize = 0,
1498 			},
1499 		.cipher_mode = DRV_CIPHER_ECB,
1500 		.flow_mode = S_DIN_to_DES,
1501 		.min_hw_rev = CC_HW_REV_630,
1502 		.std_body = CC_STD_NIST,
1503 	},
1504 	{
1505 		.name = "cbc(sm4)",
1506 		.driver_name = "cbc-sm4-ccree",
1507 		.blocksize = SM4_BLOCK_SIZE,
1508 		.template_skcipher = {
1509 			.setkey = cc_cipher_setkey,
1510 			.encrypt = cc_cipher_encrypt,
1511 			.decrypt = cc_cipher_decrypt,
1512 			.min_keysize = SM4_KEY_SIZE,
1513 			.max_keysize = SM4_KEY_SIZE,
1514 			.ivsize = SM4_BLOCK_SIZE,
1515 			},
1516 		.cipher_mode = DRV_CIPHER_CBC,
1517 		.flow_mode = S_DIN_to_SM4,
1518 		.min_hw_rev = CC_HW_REV_713,
1519 		.std_body = CC_STD_OSCCA,
1520 	},
1521 	{
1522 		.name = "ecb(sm4)",
1523 		.driver_name = "ecb-sm4-ccree",
1524 		.blocksize = SM4_BLOCK_SIZE,
1525 		.template_skcipher = {
1526 			.setkey = cc_cipher_setkey,
1527 			.encrypt = cc_cipher_encrypt,
1528 			.decrypt = cc_cipher_decrypt,
1529 			.min_keysize = SM4_KEY_SIZE,
1530 			.max_keysize = SM4_KEY_SIZE,
1531 			.ivsize = 0,
1532 			},
1533 		.cipher_mode = DRV_CIPHER_ECB,
1534 		.flow_mode = S_DIN_to_SM4,
1535 		.min_hw_rev = CC_HW_REV_713,
1536 		.std_body = CC_STD_OSCCA,
1537 	},
1538 	{
1539 		.name = "ctr(sm4)",
1540 		.driver_name = "ctr-sm4-ccree",
1541 		.blocksize = SM4_BLOCK_SIZE,
1542 		.template_skcipher = {
1543 			.setkey = cc_cipher_setkey,
1544 			.encrypt = cc_cipher_encrypt,
1545 			.decrypt = cc_cipher_decrypt,
1546 			.min_keysize = SM4_KEY_SIZE,
1547 			.max_keysize = SM4_KEY_SIZE,
1548 			.ivsize = SM4_BLOCK_SIZE,
1549 			},
1550 		.cipher_mode = DRV_CIPHER_CTR,
1551 		.flow_mode = S_DIN_to_SM4,
1552 		.min_hw_rev = CC_HW_REV_713,
1553 		.std_body = CC_STD_OSCCA,
1554 	},
1555 	{
1556 		.name = "cbc(psm4)",
1557 		.driver_name = "cbc-psm4-ccree",
1558 		.blocksize = SM4_BLOCK_SIZE,
1559 		.template_skcipher = {
1560 			.setkey = cc_cipher_sethkey,
1561 			.encrypt = cc_cipher_encrypt,
1562 			.decrypt = cc_cipher_decrypt,
1563 			.min_keysize = CC_HW_KEY_SIZE,
1564 			.max_keysize = CC_HW_KEY_SIZE,
1565 			.ivsize = SM4_BLOCK_SIZE,
1566 			},
1567 		.cipher_mode = DRV_CIPHER_CBC,
1568 		.flow_mode = S_DIN_to_SM4,
1569 		.min_hw_rev = CC_HW_REV_713,
1570 		.std_body = CC_STD_OSCCA,
1571 		.sec_func = true,
1572 	},
1573 	{
1574 		.name = "ctr(psm4)",
1575 		.driver_name = "ctr-psm4-ccree",
1576 		.blocksize = SM4_BLOCK_SIZE,
1577 		.template_skcipher = {
1578 			.setkey = cc_cipher_sethkey,
1579 			.encrypt = cc_cipher_encrypt,
1580 			.decrypt = cc_cipher_decrypt,
1581 			.min_keysize = CC_HW_KEY_SIZE,
1582 			.max_keysize = CC_HW_KEY_SIZE,
1583 			.ivsize = SM4_BLOCK_SIZE,
1584 			},
1585 		.cipher_mode = DRV_CIPHER_CTR,
1586 		.flow_mode = S_DIN_to_SM4,
1587 		.min_hw_rev = CC_HW_REV_713,
1588 		.std_body = CC_STD_OSCCA,
1589 		.sec_func = true,
1590 	},
1591 };
1592 
1593 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
1594 					   struct device *dev)
1595 {
1596 	struct cc_crypto_alg *t_alg;
1597 	struct skcipher_alg *alg;
1598 
1599 	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
1600 	if (!t_alg)
1601 		return ERR_PTR(-ENOMEM);
1602 
1603 	alg = &t_alg->skcipher_alg;
1604 
1605 	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));
1606 
1607 	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
1608 	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1609 		 tmpl->driver_name);
1610 	alg->base.cra_module = THIS_MODULE;
1611 	alg->base.cra_priority = CC_CRA_PRIO;
1612 	alg->base.cra_blocksize = tmpl->blocksize;
1613 	alg->base.cra_alignmask = 0;
1614 	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);
1615 
1616 	alg->base.cra_init = cc_cipher_init;
1617 	alg->base.cra_exit = cc_cipher_exit;
1618 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1619 
1620 	t_alg->cipher_mode = tmpl->cipher_mode;
1621 	t_alg->flow_mode = tmpl->flow_mode;
1622 	t_alg->data_unit = tmpl->data_unit;
1623 
1624 	return t_alg;
1625 }
1626 
1627 int cc_cipher_free(struct cc_drvdata *drvdata)
1628 {
1629 	struct cc_crypto_alg *t_alg, *n;
1630 	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;
1631 
1632 	if (cipher_handle) {
1633 		/* Remove registered algs */
1634 		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
1635 					 entry) {
1636 			crypto_unregister_skcipher(&t_alg->skcipher_alg);
1637 			list_del(&t_alg->entry);
1638 			kfree(t_alg);
1639 		}
1640 		kfree(cipher_handle);
1641 		drvdata->cipher_handle = NULL;
1642 	}
1643 	return 0;
1644 }
1645 
1646 int cc_cipher_alloc(struct cc_drvdata *drvdata)
1647 {
1648 	struct cc_cipher_handle *cipher_handle;
1649 	struct cc_crypto_alg *t_alg;
1650 	struct device *dev = drvdata_to_dev(drvdata);
1651 	int rc = -ENOMEM;
1652 	int alg;
1653 
1654 	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
1655 	if (!cipher_handle)
1656 		return -ENOMEM;
1657 
1658 	INIT_LIST_HEAD(&cipher_handle->alg_list);
1659 	drvdata->cipher_handle = cipher_handle;
1660 
1661 	/* Linux crypto */
1662 	dev_dbg(dev, "Number of algorithms = %zu\n",
1663 		ARRAY_SIZE(skcipher_algs));
1664 	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
1665 		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
1666 		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
1667 		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
1668 			continue;
1669 
1670 		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
1671 		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
1672 		if (IS_ERR(t_alg)) {
1673 			rc = PTR_ERR(t_alg);
1674 			dev_err(dev, "%s alg allocation failed\n",
1675 				skcipher_algs[alg].driver_name);
1676 			goto fail0;
1677 		}
1678 		t_alg->drvdata = drvdata;
1679 
1680 		dev_dbg(dev, "registering %s\n",
1681 			skcipher_algs[alg].driver_name);
1682 		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
1683 		dev_dbg(dev, "%s alg registration rc = %x\n",
1684 			t_alg->skcipher_alg.base.cra_driver_name, rc);
1685 		if (rc) {
1686 			dev_err(dev, "%s alg registration failed\n",
1687 				t_alg->skcipher_alg.base.cra_driver_name);
1688 			kfree(t_alg);
1689 			goto fail0;
1690 		} else {
1691 			list_add_tail(&t_alg->entry,
1692 				      &cipher_handle->alg_list);
1693 			dev_dbg(dev, "Registered %s\n",
1694 				t_alg->skcipher_alg.base.cra_driver_name);
1695 		}
1696 	}
1697 	return 0;
1698 
1699 fail0:
1700 	cc_cipher_free(drvdata);
1701 	return rc;
1702 }
1703