xref: /openbmc/linux/drivers/crypto/ccree/cc_cipher.c (revision 1372a51b)
1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */
3 
4 #include <linux/kernel.h>
5 #include <linux/module.h>
6 #include <crypto/algapi.h>
7 #include <crypto/internal/skcipher.h>
8 #include <crypto/internal/des.h>
9 #include <crypto/xts.h>
10 #include <crypto/sm4.h>
11 #include <crypto/scatterwalk.h>
12 
13 #include "cc_driver.h"
14 #include "cc_lli_defs.h"
15 #include "cc_buffer_mgr.h"
16 #include "cc_cipher.h"
17 #include "cc_request_mgr.h"
18 
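/* Upper bound on HW descriptors queued per skcipher request: IV/XEX state
 * setup (up to 2), MLLI bypass, key load, data flow and IV read-back.
 */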
19 #define MAX_SKCIPHER_SEQ_LEN 6
20 
21 #define template_skcipher	template_u.skcipher
22 
23 struct cc_cipher_handle {
24 	struct list_head alg_list;
25 };
26 
27 struct cc_user_key_info {
28 	u8 *key;
29 	dma_addr_t key_dma_addr;
30 };
31 
32 struct cc_hw_key_info {
33 	enum cc_hw_crypto_key key1_slot;
34 	enum cc_hw_crypto_key key2_slot;
35 };
36 
37 struct cc_cpp_key_info {
38 	u8 slot;
39 	enum cc_cpp_alg alg;
40 };
41 
42 enum cc_key_type {
43 	CC_UNPROTECTED_KEY,		/* User key */
44 	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
45 	CC_POLICY_PROTECTED_KEY,	/* CPP key */
46 	CC_INVALID_PROTECTED_KEY	/* Invalid key */
47 };
48 
49 struct cc_cipher_ctx {
50 	struct cc_drvdata *drvdata;
51 	int keylen;
52 	int key_round_number;
53 	int cipher_mode;
54 	int flow_mode;
55 	unsigned int flags;
56 	enum cc_key_type key_type;
57 	struct cc_user_key_info user;
58 	union {
59 		struct cc_hw_key_info hw;
60 		struct cc_cpp_key_info cpp;
61 	};
62 	struct crypto_shash *shash_tfm;
63 };
64 
65 static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
66 
67 static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
68 {
69 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
70 
71 	return ctx_p->key_type;
72 }
73 
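/*
 * Check that @size is a valid key size for the configured flow and cipher
 * mode: for AES, 128/192-bit keys are allowed except in XTS/ESSIV/BITLOCKER,
 * 256-bit keys are always allowed, and double 192/256-bit keys only in the
 * tweaked (XTS/ESSIV/BITLOCKER) modes; DES takes single or triple DES sizes;
 * SM4 takes SM4_KEY_SIZE. Returns 0 if valid, -EINVAL otherwise.
 */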
74 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
75 {
76 	switch (ctx_p->flow_mode) {
77 	case S_DIN_to_AES:
78 		switch (size) {
79 		case CC_AES_128_BIT_KEY_SIZE:
80 		case CC_AES_192_BIT_KEY_SIZE:
81 			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
82 			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
83 			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
84 				return 0;
85 			break;
86 		case CC_AES_256_BIT_KEY_SIZE:
87 			return 0;
88 		case (CC_AES_192_BIT_KEY_SIZE * 2):
89 		case (CC_AES_256_BIT_KEY_SIZE * 2):
90 			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
91 			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
92 			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
93 				return 0;
94 			break;
95 		default:
96 			break;
97 		}
98 		break;
99 	case S_DIN_to_DES:
100 		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
101 			return 0;
102 		break;
103 	case S_DIN_to_SM4:
104 		if (size == SM4_KEY_SIZE)
105 			return 0;
106 	default:
107 		break;
108 	}
109 	return -EINVAL;
110 }
111 
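/*
 * Check the request length against the cipher mode: XTS and CBC-CTS need at
 * least one AES block, OFB/CTR (and SM4 CTR) accept any length, and the
 * remaining modes require block-aligned input. Returns 0 or -EINVAL.
 */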
112 static int validate_data_size(struct cc_cipher_ctx *ctx_p,
113 			      unsigned int size)
114 {
115 	switch (ctx_p->flow_mode) {
116 	case S_DIN_to_AES:
117 		switch (ctx_p->cipher_mode) {
118 		case DRV_CIPHER_XTS:
119 		case DRV_CIPHER_CBC_CTS:
120 			if (size >= AES_BLOCK_SIZE)
121 				return 0;
122 			break;
123 		case DRV_CIPHER_OFB:
124 		case DRV_CIPHER_CTR:
125 			return 0;
126 		case DRV_CIPHER_ECB:
127 		case DRV_CIPHER_CBC:
128 		case DRV_CIPHER_ESSIV:
129 		case DRV_CIPHER_BITLOCKER:
130 			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
131 				return 0;
132 			break;
133 		default:
134 			break;
135 		}
136 		break;
137 	case S_DIN_to_DES:
138 		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
139 			return 0;
140 		break;
141 	case S_DIN_to_SM4:
142 		switch (ctx_p->cipher_mode) {
143 		case DRV_CIPHER_CTR:
144 			return 0;
145 		case DRV_CIPHER_ECB:
146 		case DRV_CIPHER_CBC:
147 			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
148 				return 0;
149 		default:
150 			break;
151 		}
152 	default:
153 		break;
154 	}
155 	return -EINVAL;
156 }
157 
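/*
 * tfm init: record the mode/flow from the registering template, allocate and
 * DMA-map a key buffer sized for the largest key this alg accepts, and for
 * ESSIV allocate the software sha256 used to derive the tweak key at setkey
 * time.
 */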
158 static int cc_cipher_init(struct crypto_tfm *tfm)
159 {
160 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
161 	struct cc_crypto_alg *cc_alg =
162 			container_of(tfm->__crt_alg, struct cc_crypto_alg,
163 				     skcipher_alg.base);
164 	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
165 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
166 	int rc = 0;
167 
168 	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
169 		crypto_tfm_alg_name(tfm));
170 
171 	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
172 				    sizeof(struct cipher_req_ctx));
173 
174 	ctx_p->cipher_mode = cc_alg->cipher_mode;
175 	ctx_p->flow_mode = cc_alg->flow_mode;
176 	ctx_p->drvdata = cc_alg->drvdata;
177 
178 	/* Allocate key buffer, cache line aligned */
179 	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
180 	if (!ctx_p->user.key)
181 		return -ENOMEM;
182 
183 	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
184 		ctx_p->user.key);
185 
186 	/* Map key buffer */
187 	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
188 						  max_key_buf_size,
189 						  DMA_TO_DEVICE);
190 	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
191 		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
192 			max_key_buf_size, ctx_p->user.key);
		/* Free the key buffer we just allocated to avoid leaking it */
		kfree(ctx_p->user.key);
193 		return -ENOMEM;
194 	}
195 	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
196 		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);
197 
198 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
199 		/* Alloc hash tfm for essiv */
200 		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
201 		if (IS_ERR(ctx_p->shash_tfm)) {
202 			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			/* Unwind the key buffer mapping and allocation */
			dma_unmap_single(dev, ctx_p->user.key_dma_addr,
					 max_key_buf_size, DMA_TO_DEVICE);
			kfree(ctx_p->user.key);
203 			return PTR_ERR(ctx_p->shash_tfm);
204 		}
205 	}
206 
207 	return rc;
208 }
209 
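/* tfm exit: drop the ESSIV hash tfm, unmap and scrub the key buffer. */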
210 static void cc_cipher_exit(struct crypto_tfm *tfm)
211 {
212 	struct crypto_alg *alg = tfm->__crt_alg;
213 	struct cc_crypto_alg *cc_alg =
214 			container_of(alg, struct cc_crypto_alg,
215 				     skcipher_alg.base);
216 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
217 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
218 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
219 
220 	dev_dbg(dev, "Clearing context @%p for %s\n",
221 		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
222 
223 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
224 		/* Free hash tfm for essiv */
225 		crypto_free_shash(ctx_p->shash_tfm);
226 		ctx_p->shash_tfm = NULL;
227 	}
228 
229 	/* Unmap key buffer */
230 	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
231 			 DMA_TO_DEVICE);
232 	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
233 		&ctx_p->user.key_dma_addr);
234 
235 	/* Free key buffer in context */
236 	kzfree(ctx_p->user.key);
237 	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
238 }
239 
240 struct tdes_keys {
241 	u8	key1[DES_KEY_SIZE];
242 	u8	key2[DES_KEY_SIZE];
243 	u8	key3[DES_KEY_SIZE];
244 };
245 
246 static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
247 {
248 	switch (slot_num) {
249 	case 0:
250 		return KFDE0_KEY;
251 	case 1:
252 		return KFDE1_KEY;
253 	case 2:
254 		return KFDE2_KEY;
255 	case 3:
256 		return KFDE3_KEY;
257 	}
258 	return END_OF_KEYS;
259 }
260 
261 static u8 cc_slot_to_cpp_key(u8 slot_num)
262 {
263 	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
264 }
265 
266 static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
267 {
268 	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
269 		return CC_HW_PROTECTED_KEY;
270 	else if (slot_num >=  CC_FIRST_CPP_KEY_SLOT &&
271 		 slot_num <=  CC_LAST_CPP_KEY_SLOT)
272 		return CC_POLICY_PROTECTED_KEY;
273 	else
274 		return CC_INVALID_PROTECTED_KEY;
275 }
276 
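/*
 * .setkey handler for the protected key ("paes"/"psm4") algs. The "key" is
 * not key material but a struct cc_hkey_info token (see cc_cipher.h) naming
 * the HW key slot(s) and the real key length. Illustrative sketch of a
 * caller (field layout as used below, not a stable ABI):
 *
 *	struct cc_hkey_info hki = { .keylen = AES_KEYSIZE_256,
 *				    .hw_key1 = 0, .hw_key2 = 1 };
 *	crypto_skcipher_setkey(tfm, (u8 *)&hki, sizeof(hki));
 *
 * Slots in the HW key range select FDE keys; slots in the CPP range select
 * policy protected keys, which additionally require CC713 or later hardware
 * and CBC/CTR mode.
 */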
277 static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
278 			     unsigned int keylen)
279 {
280 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
281 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
282 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
283 	struct cc_hkey_info hki;
284 
285 	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
286 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
287 	dump_byte_array("key", (u8 *)key, keylen);
288 
289 	/* STAT_PHASE_0: Init and sanity checks */
290 
291 	/* This checks the size of the protected key token */
292 	if (keylen != sizeof(hki)) {
293 		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
294 		return -EINVAL;
295 	}
296 
297 	memcpy(&hki, key, keylen);
298 
299 	/* The real key len for crypto op is the size of the HW key
300 	 * referenced by the HW key slot, not the hardware key token
301 	 */
302 	keylen = hki.keylen;
303 
304 	if (validate_keys_sizes(ctx_p, keylen)) {
305 		dev_err(dev, "Unsupported key size %d.\n", keylen);
306 		return -EINVAL;
307 	}
308 
309 	ctx_p->keylen = keylen;
310 
311 	switch (cc_slot_to_key_type(hki.hw_key1)) {
312 	case CC_HW_PROTECTED_KEY:
313 		if (ctx_p->flow_mode == S_DIN_to_SM4) {
314 			dev_err(dev, "Only AES HW protected keys are supported\n");
315 			return -EINVAL;
316 		}
317 
318 		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
319 		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
320 			dev_err(dev, "Unsupported hw key1 number (%d)\n",
321 				hki.hw_key1);
322 			return -EINVAL;
323 		}
324 
325 		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
326 		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
327 		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
328 			if (hki.hw_key1 == hki.hw_key2) {
329 				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
330 					hki.hw_key1, hki.hw_key2);
331 				return -EINVAL;
332 			}
333 
334 			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
335 			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
336 				dev_err(dev, "Unsupported hw key2 number (%d)\n",
337 					hki.hw_key2);
338 				return -EINVAL;
339 			}
340 		}
341 
342 		ctx_p->key_type = CC_HW_PROTECTED_KEY;
343 	dev_dbg(dev, "HW protected key %d/%d set.\n",
344 			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
345 		break;
346 
347 	case CC_POLICY_PROTECTED_KEY:
348 		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
349 			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
350 			return -EINVAL;
351 		}
352 
353 		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
354 		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
355 			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
356 			return -EINVAL;
357 		}
358 
359 		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
360 		if (ctx_p->flow_mode == S_DIN_to_AES)
361 			ctx_p->cpp.alg = CC_CPP_AES;
362 		else /* Must be SM4 due to sethkey registration */
363 			ctx_p->cpp.alg = CC_CPP_SM4;
364 		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
365 		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
366 			ctx_p->cpp.alg, ctx_p->cpp.slot);
367 		break;
368 
369 	default:
370 		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
371 		return -EINVAL;
372 	}
373 
374 	return 0;
375 }
376 
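/*
 * .setkey handler for regular (unprotected) keys: validate the size, reject
 * weak DES and XTS keys, copy the key into the pre-mapped DMA buffer and,
 * for ESSIV, overwrite the second half of that buffer with the sha256 digest
 * of the first half.
 */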
377 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
378 			    unsigned int keylen)
379 {
380 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
381 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
382 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
383 	struct cc_crypto_alg *cc_alg =
384 			container_of(tfm->__crt_alg, struct cc_crypto_alg,
385 				     skcipher_alg.base);
386 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
387 
388 	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
389 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
390 	dump_byte_array("key", (u8 *)key, keylen);
391 
392 	/* STAT_PHASE_0: Init and sanity checks */
393 
394 	if (validate_keys_sizes(ctx_p, keylen)) {
395 		dev_err(dev, "Unsupported key size %d.\n", keylen);
396 		return -EINVAL;
397 	}
398 
399 	ctx_p->key_type = CC_UNPROTECTED_KEY;
400 
401 	/*
402 	 * Verify DES weak keys
403 	 * Note that we're dropping the expanded key since the
404 	 * HW does the expansion on its own.
405 	 */
406 	if (ctx_p->flow_mode == S_DIN_to_DES) {
407 		if ((keylen == DES3_EDE_KEY_SIZE &&
408 		     verify_skcipher_des3_key(sktfm, key)) ||
409 		    verify_skcipher_des_key(sktfm, key)) {
410 			dev_dbg(dev, "weak DES key");
411 			return -EINVAL;
412 		}
413 	}
414 
415 	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
416 	    xts_check_key(tfm, key, keylen)) {
417 		dev_dbg(dev, "weak XTS key");
418 		return -EINVAL;
419 	}
420 
421 	/* STAT_PHASE_1: Copy key to ctx */
422 	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
423 				max_key_buf_size, DMA_TO_DEVICE);
424 
425 	memcpy(ctx_p->user.key, key, keylen);
426 	if (keylen == 24)
427 		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
428 
429 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
430 		/* sha256 for key2 - use sw implementation */
431 		int key_len = keylen >> 1;
432 		int err;
433 
434 		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
435 
436 		desc->tfm = ctx_p->shash_tfm;
437 
438 		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
439 					  ctx_p->user.key + key_len);
440 		if (err) {
441 			dev_err(dev, "Failed to hash ESSIV key.\n");
442 			return err;
443 		}
444 	}
445 	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
446 				   max_key_buf_size, DMA_TO_DEVICE);
447 	ctx_p->keylen = keylen;
448 
449 	dev_dbg(dev, "return safely");
450 	return 0;
451 }
452 
453 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
454 {
455 	switch (ctx_p->flow_mode) {
456 	case S_DIN_to_AES:
457 		return S_AES_to_DOUT;
458 	case S_DIN_to_DES:
459 		return S_DES_to_DOUT;
460 	case S_DIN_to_SM4:
461 		return S_SM4_to_DOUT;
462 	default:
463 		return ctx_p->flow_mode;
464 	}
465 }
466 
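/*
 * Queue a descriptor that writes the updated IV/counter back from the HW
 * state into the request IV buffer when processing ends, so chained requests
 * see the correct next IV. Skipped for ECB (no IV) and for CPP keys.
 */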
467 static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
468 				 struct cipher_req_ctx *req_ctx,
469 				 unsigned int ivsize, struct cc_hw_desc desc[],
470 				 unsigned int *seq_size)
471 {
472 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
473 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
474 	int cipher_mode = ctx_p->cipher_mode;
475 	int flow_mode = cc_out_setup_mode(ctx_p);
476 	int direction = req_ctx->gen_ctx.op_type;
477 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
478 
479 	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
480 		return;
481 
482 	switch (cipher_mode) {
483 	case DRV_CIPHER_ECB:
484 		break;
485 	case DRV_CIPHER_CBC:
486 	case DRV_CIPHER_CBC_CTS:
487 	case DRV_CIPHER_CTR:
488 	case DRV_CIPHER_OFB:
489 		/* Read next IV */
490 		hw_desc_init(&desc[*seq_size]);
491 		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
492 		set_cipher_config0(&desc[*seq_size], direction);
493 		set_flow_mode(&desc[*seq_size], flow_mode);
494 		set_cipher_mode(&desc[*seq_size], cipher_mode);
495 		if (cipher_mode == DRV_CIPHER_CTR ||
496 		    cipher_mode == DRV_CIPHER_OFB) {
497 			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
498 		} else {
499 			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
500 		}
501 		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
502 		(*seq_size)++;
503 		break;
504 	case DRV_CIPHER_XTS:
505 	case DRV_CIPHER_ESSIV:
506 	case DRV_CIPHER_BITLOCKER:
507 		/*  IV */
508 		hw_desc_init(&desc[*seq_size]);
509 		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
510 		set_cipher_mode(&desc[*seq_size], cipher_mode);
511 		set_cipher_config0(&desc[*seq_size], direction);
512 		set_flow_mode(&desc[*seq_size], flow_mode);
513 		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
514 			     NS_BIT, 1);
515 		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
516 		(*seq_size)++;
517 		break;
518 	default:
519 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
520 	}
521 }
522 
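/*
 * Queue the cipher state setup: load the IV for the plain modes, or, for
 * XTS/ESSIV/BITLOCKER, load the XEX/tweak key (second key half or second HW
 * slot) together with the data unit size and then the IV.
 */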
523 static void cc_setup_state_desc(struct crypto_tfm *tfm,
524 				 struct cipher_req_ctx *req_ctx,
525 				 unsigned int ivsize, unsigned int nbytes,
526 				 struct cc_hw_desc desc[],
527 				 unsigned int *seq_size)
528 {
529 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
530 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
531 	int cipher_mode = ctx_p->cipher_mode;
532 	int flow_mode = ctx_p->flow_mode;
533 	int direction = req_ctx->gen_ctx.op_type;
534 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
535 	unsigned int key_len = ctx_p->keylen;
536 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
537 	unsigned int du_size = nbytes;
538 
539 	struct cc_crypto_alg *cc_alg =
540 		container_of(tfm->__crt_alg, struct cc_crypto_alg,
541 			     skcipher_alg.base);
542 
543 	if (cc_alg->data_unit)
544 		du_size = cc_alg->data_unit;
545 
546 	switch (cipher_mode) {
547 	case DRV_CIPHER_ECB:
548 		break;
549 	case DRV_CIPHER_CBC:
550 	case DRV_CIPHER_CBC_CTS:
551 	case DRV_CIPHER_CTR:
552 	case DRV_CIPHER_OFB:
553 		/* Load IV */
554 		hw_desc_init(&desc[*seq_size]);
555 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
556 			     NS_BIT);
557 		set_cipher_config0(&desc[*seq_size], direction);
558 		set_flow_mode(&desc[*seq_size], flow_mode);
559 		set_cipher_mode(&desc[*seq_size], cipher_mode);
560 		if (cipher_mode == DRV_CIPHER_CTR ||
561 		    cipher_mode == DRV_CIPHER_OFB) {
562 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
563 		} else {
564 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
565 		}
566 		(*seq_size)++;
567 		break;
568 	case DRV_CIPHER_XTS:
569 	case DRV_CIPHER_ESSIV:
570 	case DRV_CIPHER_BITLOCKER:
571 		/* load XEX key */
572 		hw_desc_init(&desc[*seq_size]);
573 		set_cipher_mode(&desc[*seq_size], cipher_mode);
574 		set_cipher_config0(&desc[*seq_size], direction);
575 		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
576 			set_hw_crypto_key(&desc[*seq_size],
577 					  ctx_p->hw.key2_slot);
578 		} else {
579 			set_din_type(&desc[*seq_size], DMA_DLLI,
580 				     (key_dma_addr + (key_len / 2)),
581 				     (key_len / 2), NS_BIT);
582 		}
583 		set_xex_data_unit_size(&desc[*seq_size], du_size);
584 		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
585 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
586 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
587 		(*seq_size)++;
588 
589 		/* Load IV */
590 		hw_desc_init(&desc[*seq_size]);
591 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
592 		set_cipher_mode(&desc[*seq_size], cipher_mode);
593 		set_cipher_config0(&desc[*seq_size], direction);
594 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
595 		set_flow_mode(&desc[*seq_size], flow_mode);
596 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
597 			     CC_AES_BLOCK_SIZE, NS_BIT);
598 		(*seq_size)++;
599 		break;
600 	default:
601 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
602 	}
603 }
604 
605 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
606 {
607 	switch (ctx_p->flow_mode) {
608 	case S_DIN_to_AES:
609 		return DIN_AES_DOUT;
610 	case S_DIN_to_DES:
611 		return DIN_DES_DOUT;
612 	case S_DIN_to_SM4:
613 		return DIN_SM4_DOUT;
614 	default:
615 		return ctx_p->flow_mode;
616 	}
617 }
618 
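/*
 * Queue the key load: a CPP slot reference, a HW (FDE) key slot, or a DMA
 * read of the user key from the context buffer. For the tweaked modes only
 * the first key half is loaded here; the XEX half is loaded by
 * cc_setup_state_desc().
 */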
619 static void cc_setup_key_desc(struct crypto_tfm *tfm,
620 			      struct cipher_req_ctx *req_ctx,
621 			      unsigned int nbytes, struct cc_hw_desc desc[],
622 			      unsigned int *seq_size)
623 {
624 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
625 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
626 	int cipher_mode = ctx_p->cipher_mode;
627 	int flow_mode = ctx_p->flow_mode;
628 	int direction = req_ctx->gen_ctx.op_type;
629 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
630 	unsigned int key_len = ctx_p->keylen;
631 	unsigned int din_size;
632 
633 	switch (cipher_mode) {
634 	case DRV_CIPHER_CBC:
635 	case DRV_CIPHER_CBC_CTS:
636 	case DRV_CIPHER_CTR:
637 	case DRV_CIPHER_OFB:
638 	case DRV_CIPHER_ECB:
639 		/* Load key */
640 		hw_desc_init(&desc[*seq_size]);
641 		set_cipher_mode(&desc[*seq_size], cipher_mode);
642 		set_cipher_config0(&desc[*seq_size], direction);
643 
644 		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
645 			/* We use the AES key size coding for all CPP algs */
646 			set_key_size_aes(&desc[*seq_size], key_len);
647 			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
648 			flow_mode = cc_out_flow_mode(ctx_p);
649 		} else {
650 			if (flow_mode == S_DIN_to_AES) {
651 				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
652 					set_hw_crypto_key(&desc[*seq_size],
653 							  ctx_p->hw.key1_slot);
654 				} else {
655 					/* CC_UNPROTECTED_KEY
656 					 * Invalid keys are filtered out in
657 					 * sethkey()
658 					 */
659 					din_size = (key_len == 24) ?
660 						AES_MAX_KEY_SIZE : key_len;
661 
662 					set_din_type(&desc[*seq_size], DMA_DLLI,
663 						     key_dma_addr, din_size,
664 						     NS_BIT);
665 				}
666 				set_key_size_aes(&desc[*seq_size], key_len);
667 			} else {
668 				/*des*/
669 				set_din_type(&desc[*seq_size], DMA_DLLI,
670 					     key_dma_addr, key_len, NS_BIT);
671 				set_key_size_des(&desc[*seq_size], key_len);
672 			}
673 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
674 		}
675 		set_flow_mode(&desc[*seq_size], flow_mode);
676 		(*seq_size)++;
677 		break;
678 	case DRV_CIPHER_XTS:
679 	case DRV_CIPHER_ESSIV:
680 	case DRV_CIPHER_BITLOCKER:
681 		/* Load AES key */
682 		hw_desc_init(&desc[*seq_size]);
683 		set_cipher_mode(&desc[*seq_size], cipher_mode);
684 		set_cipher_config0(&desc[*seq_size], direction);
685 		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
686 			set_hw_crypto_key(&desc[*seq_size],
687 					  ctx_p->hw.key1_slot);
688 		} else {
689 			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
690 				     (key_len / 2), NS_BIT);
691 		}
692 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
693 		set_flow_mode(&desc[*seq_size], flow_mode);
694 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
695 		(*seq_size)++;
696 		break;
697 	default:
698 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
699 	}
700 }
701 
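/*
 * If the buffers were mapped as an MLLI table, queue a BYPASS descriptor
 * that copies the table from host memory into the engine SRAM so the data
 * flow descriptor can reference it there.
 */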
702 static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
703 			       struct cipher_req_ctx *req_ctx,
704 			       struct scatterlist *dst, struct scatterlist *src,
705 			       unsigned int nbytes, void *areq,
706 			       struct cc_hw_desc desc[], unsigned int *seq_size)
707 {
708 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
709 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
710 
711 	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
712 		/* bypass */
713 		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
714 			&req_ctx->mlli_params.mlli_dma_addr,
715 			req_ctx->mlli_params.mlli_len,
716 			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
717 		hw_desc_init(&desc[*seq_size]);
718 		set_din_type(&desc[*seq_size], DMA_DLLI,
719 			     req_ctx->mlli_params.mlli_dma_addr,
720 			     req_ctx->mlli_params.mlli_len, NS_BIT);
721 		set_dout_sram(&desc[*seq_size],
722 			      ctx_p->drvdata->mlli_sram_addr,
723 			      req_ctx->mlli_params.mlli_len);
724 		set_flow_mode(&desc[*seq_size], BYPASS);
725 		(*seq_size)++;
726 	}
727 }
728 
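/*
 * Queue the data processing descriptor, DLLI for contiguous buffers or MLLI
 * (table in SRAM) otherwise. The queue-last completion indication is set
 * here only when no IV read-back descriptor will follow (ECB or CPP keys).
 */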
729 static void cc_setup_flow_desc(struct crypto_tfm *tfm,
730 			       struct cipher_req_ctx *req_ctx,
731 			       struct scatterlist *dst, struct scatterlist *src,
732 			       unsigned int nbytes, struct cc_hw_desc desc[],
733 			       unsigned int *seq_size)
734 {
735 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
736 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
737 	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
738 	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
739 			  ctx_p->cipher_mode == DRV_CIPHER_ECB);
740 
741 	/* Process */
742 	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
743 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
744 			&sg_dma_address(src), nbytes);
745 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
746 			&sg_dma_address(dst), nbytes);
747 		hw_desc_init(&desc[*seq_size]);
748 		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
749 			     nbytes, NS_BIT);
750 		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
751 			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
752 		if (last_desc)
753 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
754 
755 		set_flow_mode(&desc[*seq_size], flow_mode);
756 		(*seq_size)++;
757 	} else {
758 		hw_desc_init(&desc[*seq_size]);
759 		set_din_type(&desc[*seq_size], DMA_MLLI,
760 			     ctx_p->drvdata->mlli_sram_addr,
761 			     req_ctx->in_mlli_nents, NS_BIT);
762 		if (req_ctx->out_nents == 0) {
763 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
764 				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
765 				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
766 			set_dout_mlli(&desc[*seq_size],
767 				      ctx_p->drvdata->mlli_sram_addr,
768 				      req_ctx->in_mlli_nents, NS_BIT,
769 				      (!last_desc ? 0 : 1));
770 		} else {
771 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
772 				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
773 				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
774 				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
775 			set_dout_mlli(&desc[*seq_size],
776 				      (ctx_p->drvdata->mlli_sram_addr +
777 				       (LLI_ENTRY_BYTE_SIZE *
778 					req_ctx->in_mlli_nents)),
779 				      req_ctx->out_mlli_nents, NS_BIT,
780 				      (!last_desc ? 0 : 1));
781 		}
782 		if (last_desc)
783 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
784 
785 		set_flow_mode(&desc[*seq_size], flow_mode);
786 		(*seq_size)++;
787 	}
788 }
789 
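/* Completion callback: unless this is just a backlog notification, unmap the
 * request, copy the returned IV back to the caller and free its bounce
 * buffer, then complete the skcipher request.
 */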
790 static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
791 {
792 	struct skcipher_request *req = (struct skcipher_request *)cc_req;
793 	struct scatterlist *dst = req->dst;
794 	struct scatterlist *src = req->src;
795 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
796 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
797 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
798 
799 	if (err != -EINPROGRESS) {
800 		/* Not a BACKLOG notification */
801 		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
802 		memcpy(req->iv, req_ctx->iv, ivsize);
803 		kzfree(req_ctx->iv);
804 	}
805 
806 	skcipher_request_complete(req, err);
807 }
808 
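/*
 * Common encrypt/decrypt path: validate the length, copy the IV to a
 * DMA-able bounce buffer, map src/dst, build the descriptor sequence
 * (state, MLLI, key, data, IV read-back) and hand it to the request
 * manager. Completion is reported asynchronously via cc_cipher_complete().
 */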
809 static int cc_cipher_process(struct skcipher_request *req,
810 			     enum drv_crypto_direction direction)
811 {
812 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
813 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
814 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
815 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
816 	struct scatterlist *dst = req->dst;
817 	struct scatterlist *src = req->src;
818 	unsigned int nbytes = req->cryptlen;
819 	void *iv = req->iv;
820 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
821 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
822 	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
823 	struct cc_crypto_req cc_req = {};
824 	int rc;
825 	unsigned int seq_len = 0;
826 	gfp_t flags = cc_gfp_flags(&req->base);
827 
828 	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
829 		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
830 		"Encrypt" : "Decrypt"), req, iv, nbytes);
831 
832 	/* STAT_PHASE_0: Init and sanity checks */
833 
834 	/* TODO: check data length according to mode */
835 	if (validate_data_size(ctx_p, nbytes)) {
836 		dev_err(dev, "Unsupported data size %d.\n", nbytes);
837 		rc = -EINVAL;
838 		goto exit_process;
839 	}
840 	if (nbytes == 0) {
841 		/* No data to process is valid */
842 		rc = 0;
843 		goto exit_process;
844 	}
845 
846 	/* The IV we are handed may be allocated from the stack so
847 	 * we must copy it to a DMAable buffer before use.
848 	 */
849 	req_ctx->iv = kmemdup(iv, ivsize, flags);
850 	if (!req_ctx->iv) {
851 		rc = -ENOMEM;
852 		goto exit_process;
853 	}
854 
855 	/* Setup request structure */
856 	cc_req.user_cb = (void *)cc_cipher_complete;
857 	cc_req.user_arg = (void *)req;
858 
859 	/* Setup CPP operation details */
860 	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
861 		cc_req.cpp.is_cpp = true;
862 		cc_req.cpp.alg = ctx_p->cpp.alg;
863 		cc_req.cpp.slot = ctx_p->cpp.slot;
864 	}
865 
866 	/* Setup request context */
867 	req_ctx->gen_ctx.op_type = direction;
868 
869 	/* STAT_PHASE_1: Map buffers */
870 
871 	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
872 				      req_ctx->iv, src, dst, flags);
873 	if (rc) {
874 		dev_err(dev, "map_request() failed\n");
875 		goto exit_process;
876 	}
877 
878 	/* STAT_PHASE_2: Create sequence */
879 
880 	/* Setup IV and XEX key used */
881 	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
882 	/* Setup MLLI line, if needed */
883 	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
884 	/* Setup key */
885 	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
886 	/* Data processing */
887 	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
888 	/* Read next IV */
889 	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);
890 
891 	/* STAT_PHASE_3: Lock HW and push sequence */
892 
893 	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
894 			     &req->base);
895 	if (rc != -EINPROGRESS && rc != -EBUSY) {
896 		/* Failed to send the request or request completed
897 		 * synchronously
898 		 */
899 		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
900 	}
901 
902 exit_process:
903 	if (rc != -EINPROGRESS && rc != -EBUSY) {
904 		kzfree(req_ctx->iv);
905 	}
906 
907 	return rc;
908 }
909 
910 static int cc_cipher_encrypt(struct skcipher_request *req)
911 {
912 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
913 
914 	memset(req_ctx, 0, sizeof(*req_ctx));
915 
916 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
917 }
918 
919 static int cc_cipher_decrypt(struct skcipher_request *req)
920 {
921 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
922 
923 	memset(req_ctx, 0, sizeof(*req_ctx));
924 
925 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
926 }
927 
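/*
 * Template table of every skcipher this driver can register. Consumers use
 * the results through the regular kernel crypto API; a minimal synchronous
 * sketch (caller-provided key/sg_src/sg_dst/nbytes/iv, error handling
 * elided, not part of this driver):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_256);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, sg_src, sg_dst, nbytes, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * Whether ccree backs the returned tfm depends on cra_priority and on which
 * entries below get registered for the detected HW revision.
 */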
928 /* Block cipher alg */
929 static const struct cc_alg_template skcipher_algs[] = {
930 	{
931 		.name = "xts(paes)",
932 		.driver_name = "xts-paes-ccree",
933 		.blocksize = 1,
934 		.template_skcipher = {
935 			.setkey = cc_cipher_sethkey,
936 			.encrypt = cc_cipher_encrypt,
937 			.decrypt = cc_cipher_decrypt,
938 			.min_keysize = CC_HW_KEY_SIZE,
939 			.max_keysize = CC_HW_KEY_SIZE,
940 			.ivsize = AES_BLOCK_SIZE,
941 			},
942 		.cipher_mode = DRV_CIPHER_XTS,
943 		.flow_mode = S_DIN_to_AES,
944 		.min_hw_rev = CC_HW_REV_630,
945 		.std_body = CC_STD_NIST,
946 		.sec_func = true,
947 	},
948 	{
949 		.name = "xts512(paes)",
950 		.driver_name = "xts-paes-du512-ccree",
951 		.blocksize = 1,
952 		.template_skcipher = {
953 			.setkey = cc_cipher_sethkey,
954 			.encrypt = cc_cipher_encrypt,
955 			.decrypt = cc_cipher_decrypt,
956 			.min_keysize = CC_HW_KEY_SIZE,
957 			.max_keysize = CC_HW_KEY_SIZE,
958 			.ivsize = AES_BLOCK_SIZE,
959 			},
960 		.cipher_mode = DRV_CIPHER_XTS,
961 		.flow_mode = S_DIN_to_AES,
962 		.data_unit = 512,
963 		.min_hw_rev = CC_HW_REV_712,
964 		.std_body = CC_STD_NIST,
965 		.sec_func = true,
966 	},
967 	{
968 		.name = "xts4096(paes)",
969 		.driver_name = "xts-paes-du4096-ccree",
970 		.blocksize = 1,
971 		.template_skcipher = {
972 			.setkey = cc_cipher_sethkey,
973 			.encrypt = cc_cipher_encrypt,
974 			.decrypt = cc_cipher_decrypt,
975 			.min_keysize = CC_HW_KEY_SIZE,
976 			.max_keysize = CC_HW_KEY_SIZE,
977 			.ivsize = AES_BLOCK_SIZE,
978 			},
979 		.cipher_mode = DRV_CIPHER_XTS,
980 		.flow_mode = S_DIN_to_AES,
981 		.data_unit = 4096,
982 		.min_hw_rev = CC_HW_REV_712,
983 		.std_body = CC_STD_NIST,
984 		.sec_func = true,
985 	},
986 	{
987 		.name = "essiv(paes)",
988 		.driver_name = "essiv-paes-ccree",
989 		.blocksize = AES_BLOCK_SIZE,
990 		.template_skcipher = {
991 			.setkey = cc_cipher_sethkey,
992 			.encrypt = cc_cipher_encrypt,
993 			.decrypt = cc_cipher_decrypt,
994 			.min_keysize = CC_HW_KEY_SIZE,
995 			.max_keysize = CC_HW_KEY_SIZE,
996 			.ivsize = AES_BLOCK_SIZE,
997 			},
998 		.cipher_mode = DRV_CIPHER_ESSIV,
999 		.flow_mode = S_DIN_to_AES,
1000 		.min_hw_rev = CC_HW_REV_712,
1001 		.std_body = CC_STD_NIST,
1002 		.sec_func = true,
1003 	},
1004 	{
1005 		.name = "essiv512(paes)",
1006 		.driver_name = "essiv-paes-du512-ccree",
1007 		.blocksize = AES_BLOCK_SIZE,
1008 		.template_skcipher = {
1009 			.setkey = cc_cipher_sethkey,
1010 			.encrypt = cc_cipher_encrypt,
1011 			.decrypt = cc_cipher_decrypt,
1012 			.min_keysize = CC_HW_KEY_SIZE,
1013 			.max_keysize = CC_HW_KEY_SIZE,
1014 			.ivsize = AES_BLOCK_SIZE,
1015 			},
1016 		.cipher_mode = DRV_CIPHER_ESSIV,
1017 		.flow_mode = S_DIN_to_AES,
1018 		.data_unit = 512,
1019 		.min_hw_rev = CC_HW_REV_712,
1020 		.std_body = CC_STD_NIST,
1021 		.sec_func = true,
1022 	},
1023 	{
1024 		.name = "essiv4096(paes)",
1025 		.driver_name = "essiv-paes-du4096-ccree",
1026 		.blocksize = AES_BLOCK_SIZE,
1027 		.template_skcipher = {
1028 			.setkey = cc_cipher_sethkey,
1029 			.encrypt = cc_cipher_encrypt,
1030 			.decrypt = cc_cipher_decrypt,
1031 			.min_keysize = CC_HW_KEY_SIZE,
1032 			.max_keysize = CC_HW_KEY_SIZE,
1033 			.ivsize = AES_BLOCK_SIZE,
1034 			},
1035 		.cipher_mode = DRV_CIPHER_ESSIV,
1036 		.flow_mode = S_DIN_to_AES,
1037 		.data_unit = 4096,
1038 		.min_hw_rev = CC_HW_REV_712,
1039 		.std_body = CC_STD_NIST,
1040 		.sec_func = true,
1041 	},
1042 	{
1043 		.name = "bitlocker(paes)",
1044 		.driver_name = "bitlocker-paes-ccree",
1045 		.blocksize = AES_BLOCK_SIZE,
1046 		.template_skcipher = {
1047 			.setkey = cc_cipher_sethkey,
1048 			.encrypt = cc_cipher_encrypt,
1049 			.decrypt = cc_cipher_decrypt,
1050 			.min_keysize = CC_HW_KEY_SIZE,
1051 			.max_keysize = CC_HW_KEY_SIZE,
1052 			.ivsize = AES_BLOCK_SIZE,
1053 			},
1054 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1055 		.flow_mode = S_DIN_to_AES,
1056 		.min_hw_rev = CC_HW_REV_712,
1057 		.std_body = CC_STD_NIST,
1058 		.sec_func = true,
1059 	},
1060 	{
1061 		.name = "bitlocker512(paes)",
1062 		.driver_name = "bitlocker-paes-du512-ccree",
1063 		.blocksize = AES_BLOCK_SIZE,
1064 		.template_skcipher = {
1065 			.setkey = cc_cipher_sethkey,
1066 			.encrypt = cc_cipher_encrypt,
1067 			.decrypt = cc_cipher_decrypt,
1068 			.min_keysize = CC_HW_KEY_SIZE,
1069 			.max_keysize = CC_HW_KEY_SIZE,
1070 			.ivsize = AES_BLOCK_SIZE,
1071 			},
1072 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1073 		.flow_mode = S_DIN_to_AES,
1074 		.data_unit = 512,
1075 		.min_hw_rev = CC_HW_REV_712,
1076 		.std_body = CC_STD_NIST,
1077 		.sec_func = true,
1078 	},
1079 	{
1080 		.name = "bitlocker4096(paes)",
1081 		.driver_name = "bitlocker-paes-du4096-ccree",
1082 		.blocksize = AES_BLOCK_SIZE,
1083 		.template_skcipher = {
1084 			.setkey = cc_cipher_sethkey,
1085 			.encrypt = cc_cipher_encrypt,
1086 			.decrypt = cc_cipher_decrypt,
1087 			.min_keysize = CC_HW_KEY_SIZE,
1088 			.max_keysize = CC_HW_KEY_SIZE,
1089 			.ivsize = AES_BLOCK_SIZE,
1090 			},
1091 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1092 		.flow_mode = S_DIN_to_AES,
1093 		.data_unit = 4096,
1094 		.min_hw_rev = CC_HW_REV_712,
1095 		.std_body = CC_STD_NIST,
1096 		.sec_func = true,
1097 	},
1098 	{
1099 		.name = "ecb(paes)",
1100 		.driver_name = "ecb-paes-ccree",
1101 		.blocksize = AES_BLOCK_SIZE,
1102 		.template_skcipher = {
1103 			.setkey = cc_cipher_sethkey,
1104 			.encrypt = cc_cipher_encrypt,
1105 			.decrypt = cc_cipher_decrypt,
1106 			.min_keysize = CC_HW_KEY_SIZE,
1107 			.max_keysize = CC_HW_KEY_SIZE,
1108 			.ivsize = 0,
1109 			},
1110 		.cipher_mode = DRV_CIPHER_ECB,
1111 		.flow_mode = S_DIN_to_AES,
1112 		.min_hw_rev = CC_HW_REV_712,
1113 		.std_body = CC_STD_NIST,
1114 		.sec_func = true,
1115 	},
1116 	{
1117 		.name = "cbc(paes)",
1118 		.driver_name = "cbc-paes-ccree",
1119 		.blocksize = AES_BLOCK_SIZE,
1120 		.template_skcipher = {
1121 			.setkey = cc_cipher_sethkey,
1122 			.encrypt = cc_cipher_encrypt,
1123 			.decrypt = cc_cipher_decrypt,
1124 			.min_keysize = CC_HW_KEY_SIZE,
1125 			.max_keysize = CC_HW_KEY_SIZE,
1126 			.ivsize = AES_BLOCK_SIZE,
1127 		},
1128 		.cipher_mode = DRV_CIPHER_CBC,
1129 		.flow_mode = S_DIN_to_AES,
1130 		.min_hw_rev = CC_HW_REV_712,
1131 		.std_body = CC_STD_NIST,
1132 		.sec_func = true,
1133 	},
1134 	{
1135 		.name = "ofb(paes)",
1136 		.driver_name = "ofb-paes-ccree",
1137 		.blocksize = AES_BLOCK_SIZE,
1138 		.template_skcipher = {
1139 			.setkey = cc_cipher_sethkey,
1140 			.encrypt = cc_cipher_encrypt,
1141 			.decrypt = cc_cipher_decrypt,
1142 			.min_keysize = CC_HW_KEY_SIZE,
1143 			.max_keysize = CC_HW_KEY_SIZE,
1144 			.ivsize = AES_BLOCK_SIZE,
1145 			},
1146 		.cipher_mode = DRV_CIPHER_OFB,
1147 		.flow_mode = S_DIN_to_AES,
1148 		.min_hw_rev = CC_HW_REV_712,
1149 		.std_body = CC_STD_NIST,
1150 		.sec_func = true,
1151 	},
1152 	{
1153 		.name = "cts(cbc(paes))",
1154 		.driver_name = "cts-cbc-paes-ccree",
1155 		.blocksize = AES_BLOCK_SIZE,
1156 		.template_skcipher = {
1157 			.setkey = cc_cipher_sethkey,
1158 			.encrypt = cc_cipher_encrypt,
1159 			.decrypt = cc_cipher_decrypt,
1160 			.min_keysize = CC_HW_KEY_SIZE,
1161 			.max_keysize = CC_HW_KEY_SIZE,
1162 			.ivsize = AES_BLOCK_SIZE,
1163 			},
1164 		.cipher_mode = DRV_CIPHER_CBC_CTS,
1165 		.flow_mode = S_DIN_to_AES,
1166 		.min_hw_rev = CC_HW_REV_712,
1167 		.std_body = CC_STD_NIST,
1168 		.sec_func = true,
1169 	},
1170 	{
1171 		.name = "ctr(paes)",
1172 		.driver_name = "ctr-paes-ccree",
1173 		.blocksize = 1,
1174 		.template_skcipher = {
1175 			.setkey = cc_cipher_sethkey,
1176 			.encrypt = cc_cipher_encrypt,
1177 			.decrypt = cc_cipher_decrypt,
1178 			.min_keysize = CC_HW_KEY_SIZE,
1179 			.max_keysize = CC_HW_KEY_SIZE,
1180 			.ivsize = AES_BLOCK_SIZE,
1181 			},
1182 		.cipher_mode = DRV_CIPHER_CTR,
1183 		.flow_mode = S_DIN_to_AES,
1184 		.min_hw_rev = CC_HW_REV_712,
1185 		.std_body = CC_STD_NIST,
1186 		.sec_func = true,
1187 	},
1188 	{
1189 		.name = "xts(aes)",
1190 		.driver_name = "xts-aes-ccree",
1191 		.blocksize = 1,
1192 		.template_skcipher = {
1193 			.setkey = cc_cipher_setkey,
1194 			.encrypt = cc_cipher_encrypt,
1195 			.decrypt = cc_cipher_decrypt,
1196 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1197 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1198 			.ivsize = AES_BLOCK_SIZE,
1199 			},
1200 		.cipher_mode = DRV_CIPHER_XTS,
1201 		.flow_mode = S_DIN_to_AES,
1202 		.min_hw_rev = CC_HW_REV_630,
1203 		.std_body = CC_STD_NIST,
1204 	},
1205 	{
1206 		.name = "xts512(aes)",
1207 		.driver_name = "xts-aes-du512-ccree",
1208 		.blocksize = 1,
1209 		.template_skcipher = {
1210 			.setkey = cc_cipher_setkey,
1211 			.encrypt = cc_cipher_encrypt,
1212 			.decrypt = cc_cipher_decrypt,
1213 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1214 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1215 			.ivsize = AES_BLOCK_SIZE,
1216 			},
1217 		.cipher_mode = DRV_CIPHER_XTS,
1218 		.flow_mode = S_DIN_to_AES,
1219 		.data_unit = 512,
1220 		.min_hw_rev = CC_HW_REV_712,
1221 		.std_body = CC_STD_NIST,
1222 	},
1223 	{
1224 		.name = "xts4096(aes)",
1225 		.driver_name = "xts-aes-du4096-ccree",
1226 		.blocksize = 1,
1227 		.template_skcipher = {
1228 			.setkey = cc_cipher_setkey,
1229 			.encrypt = cc_cipher_encrypt,
1230 			.decrypt = cc_cipher_decrypt,
1231 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1232 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1233 			.ivsize = AES_BLOCK_SIZE,
1234 			},
1235 		.cipher_mode = DRV_CIPHER_XTS,
1236 		.flow_mode = S_DIN_to_AES,
1237 		.data_unit = 4096,
1238 		.min_hw_rev = CC_HW_REV_712,
1239 		.std_body = CC_STD_NIST,
1240 	},
1241 	{
1242 		.name = "essiv(aes)",
1243 		.driver_name = "essiv-aes-ccree",
1244 		.blocksize = AES_BLOCK_SIZE,
1245 		.template_skcipher = {
1246 			.setkey = cc_cipher_setkey,
1247 			.encrypt = cc_cipher_encrypt,
1248 			.decrypt = cc_cipher_decrypt,
1249 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1250 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1251 			.ivsize = AES_BLOCK_SIZE,
1252 			},
1253 		.cipher_mode = DRV_CIPHER_ESSIV,
1254 		.flow_mode = S_DIN_to_AES,
1255 		.min_hw_rev = CC_HW_REV_712,
1256 		.std_body = CC_STD_NIST,
1257 	},
1258 	{
1259 		.name = "essiv512(aes)",
1260 		.driver_name = "essiv-aes-du512-ccree",
1261 		.blocksize = AES_BLOCK_SIZE,
1262 		.template_skcipher = {
1263 			.setkey = cc_cipher_setkey,
1264 			.encrypt = cc_cipher_encrypt,
1265 			.decrypt = cc_cipher_decrypt,
1266 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1267 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1268 			.ivsize = AES_BLOCK_SIZE,
1269 			},
1270 		.cipher_mode = DRV_CIPHER_ESSIV,
1271 		.flow_mode = S_DIN_to_AES,
1272 		.data_unit = 512,
1273 		.min_hw_rev = CC_HW_REV_712,
1274 		.std_body = CC_STD_NIST,
1275 	},
1276 	{
1277 		.name = "essiv4096(aes)",
1278 		.driver_name = "essiv-aes-du4096-ccree",
1279 		.blocksize = AES_BLOCK_SIZE,
1280 		.template_skcipher = {
1281 			.setkey = cc_cipher_setkey,
1282 			.encrypt = cc_cipher_encrypt,
1283 			.decrypt = cc_cipher_decrypt,
1284 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1285 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1286 			.ivsize = AES_BLOCK_SIZE,
1287 			},
1288 		.cipher_mode = DRV_CIPHER_ESSIV,
1289 		.flow_mode = S_DIN_to_AES,
1290 		.data_unit = 4096,
1291 		.min_hw_rev = CC_HW_REV_712,
1292 		.std_body = CC_STD_NIST,
1293 	},
1294 	{
1295 		.name = "bitlocker(aes)",
1296 		.driver_name = "bitlocker-aes-ccree",
1297 		.blocksize = AES_BLOCK_SIZE,
1298 		.template_skcipher = {
1299 			.setkey = cc_cipher_setkey,
1300 			.encrypt = cc_cipher_encrypt,
1301 			.decrypt = cc_cipher_decrypt,
1302 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1303 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1304 			.ivsize = AES_BLOCK_SIZE,
1305 			},
1306 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1307 		.flow_mode = S_DIN_to_AES,
1308 		.min_hw_rev = CC_HW_REV_712,
1309 		.std_body = CC_STD_NIST,
1310 	},
1311 	{
1312 		.name = "bitlocker512(aes)",
1313 		.driver_name = "bitlocker-aes-du512-ccree",
1314 		.blocksize = AES_BLOCK_SIZE,
1315 		.template_skcipher = {
1316 			.setkey = cc_cipher_setkey,
1317 			.encrypt = cc_cipher_encrypt,
1318 			.decrypt = cc_cipher_decrypt,
1319 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1320 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1321 			.ivsize = AES_BLOCK_SIZE,
1322 			},
1323 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1324 		.flow_mode = S_DIN_to_AES,
1325 		.data_unit = 512,
1326 		.min_hw_rev = CC_HW_REV_712,
1327 		.std_body = CC_STD_NIST,
1328 	},
1329 	{
1330 		.name = "bitlocker4096(aes)",
1331 		.driver_name = "bitlocker-aes-du4096-ccree",
1332 		.blocksize = AES_BLOCK_SIZE,
1333 		.template_skcipher = {
1334 			.setkey = cc_cipher_setkey,
1335 			.encrypt = cc_cipher_encrypt,
1336 			.decrypt = cc_cipher_decrypt,
1337 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1338 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1339 			.ivsize = AES_BLOCK_SIZE,
1340 			},
1341 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1342 		.flow_mode = S_DIN_to_AES,
1343 		.data_unit = 4096,
1344 		.min_hw_rev = CC_HW_REV_712,
1345 		.std_body = CC_STD_NIST,
1346 	},
1347 	{
1348 		.name = "ecb(aes)",
1349 		.driver_name = "ecb-aes-ccree",
1350 		.blocksize = AES_BLOCK_SIZE,
1351 		.template_skcipher = {
1352 			.setkey = cc_cipher_setkey,
1353 			.encrypt = cc_cipher_encrypt,
1354 			.decrypt = cc_cipher_decrypt,
1355 			.min_keysize = AES_MIN_KEY_SIZE,
1356 			.max_keysize = AES_MAX_KEY_SIZE,
1357 			.ivsize = 0,
1358 			},
1359 		.cipher_mode = DRV_CIPHER_ECB,
1360 		.flow_mode = S_DIN_to_AES,
1361 		.min_hw_rev = CC_HW_REV_630,
1362 		.std_body = CC_STD_NIST,
1363 	},
1364 	{
1365 		.name = "cbc(aes)",
1366 		.driver_name = "cbc-aes-ccree",
1367 		.blocksize = AES_BLOCK_SIZE,
1368 		.template_skcipher = {
1369 			.setkey = cc_cipher_setkey,
1370 			.encrypt = cc_cipher_encrypt,
1371 			.decrypt = cc_cipher_decrypt,
1372 			.min_keysize = AES_MIN_KEY_SIZE,
1373 			.max_keysize = AES_MAX_KEY_SIZE,
1374 			.ivsize = AES_BLOCK_SIZE,
1375 		},
1376 		.cipher_mode = DRV_CIPHER_CBC,
1377 		.flow_mode = S_DIN_to_AES,
1378 		.min_hw_rev = CC_HW_REV_630,
1379 		.std_body = CC_STD_NIST,
1380 	},
1381 	{
1382 		.name = "ofb(aes)",
1383 		.driver_name = "ofb-aes-ccree",
1384 		.blocksize = AES_BLOCK_SIZE,
1385 		.template_skcipher = {
1386 			.setkey = cc_cipher_setkey,
1387 			.encrypt = cc_cipher_encrypt,
1388 			.decrypt = cc_cipher_decrypt,
1389 			.min_keysize = AES_MIN_KEY_SIZE,
1390 			.max_keysize = AES_MAX_KEY_SIZE,
1391 			.ivsize = AES_BLOCK_SIZE,
1392 			},
1393 		.cipher_mode = DRV_CIPHER_OFB,
1394 		.flow_mode = S_DIN_to_AES,
1395 		.min_hw_rev = CC_HW_REV_630,
1396 		.std_body = CC_STD_NIST,
1397 	},
1398 	{
1399 		.name = "cts(cbc(aes))",
1400 		.driver_name = "cts-cbc-aes-ccree",
1401 		.blocksize = AES_BLOCK_SIZE,
1402 		.template_skcipher = {
1403 			.setkey = cc_cipher_setkey,
1404 			.encrypt = cc_cipher_encrypt,
1405 			.decrypt = cc_cipher_decrypt,
1406 			.min_keysize = AES_MIN_KEY_SIZE,
1407 			.max_keysize = AES_MAX_KEY_SIZE,
1408 			.ivsize = AES_BLOCK_SIZE,
1409 			},
1410 		.cipher_mode = DRV_CIPHER_CBC_CTS,
1411 		.flow_mode = S_DIN_to_AES,
1412 		.min_hw_rev = CC_HW_REV_630,
1413 		.std_body = CC_STD_NIST,
1414 	},
1415 	{
1416 		.name = "ctr(aes)",
1417 		.driver_name = "ctr-aes-ccree",
1418 		.blocksize = 1,
1419 		.template_skcipher = {
1420 			.setkey = cc_cipher_setkey,
1421 			.encrypt = cc_cipher_encrypt,
1422 			.decrypt = cc_cipher_decrypt,
1423 			.min_keysize = AES_MIN_KEY_SIZE,
1424 			.max_keysize = AES_MAX_KEY_SIZE,
1425 			.ivsize = AES_BLOCK_SIZE,
1426 			},
1427 		.cipher_mode = DRV_CIPHER_CTR,
1428 		.flow_mode = S_DIN_to_AES,
1429 		.min_hw_rev = CC_HW_REV_630,
1430 		.std_body = CC_STD_NIST,
1431 	},
1432 	{
1433 		.name = "cbc(des3_ede)",
1434 		.driver_name = "cbc-3des-ccree",
1435 		.blocksize = DES3_EDE_BLOCK_SIZE,
1436 		.template_skcipher = {
1437 			.setkey = cc_cipher_setkey,
1438 			.encrypt = cc_cipher_encrypt,
1439 			.decrypt = cc_cipher_decrypt,
1440 			.min_keysize = DES3_EDE_KEY_SIZE,
1441 			.max_keysize = DES3_EDE_KEY_SIZE,
1442 			.ivsize = DES3_EDE_BLOCK_SIZE,
1443 			},
1444 		.cipher_mode = DRV_CIPHER_CBC,
1445 		.flow_mode = S_DIN_to_DES,
1446 		.min_hw_rev = CC_HW_REV_630,
1447 		.std_body = CC_STD_NIST,
1448 	},
1449 	{
1450 		.name = "ecb(des3_ede)",
1451 		.driver_name = "ecb-3des-ccree",
1452 		.blocksize = DES3_EDE_BLOCK_SIZE,
1453 		.template_skcipher = {
1454 			.setkey = cc_cipher_setkey,
1455 			.encrypt = cc_cipher_encrypt,
1456 			.decrypt = cc_cipher_decrypt,
1457 			.min_keysize = DES3_EDE_KEY_SIZE,
1458 			.max_keysize = DES3_EDE_KEY_SIZE,
1459 			.ivsize = 0,
1460 			},
1461 		.cipher_mode = DRV_CIPHER_ECB,
1462 		.flow_mode = S_DIN_to_DES,
1463 		.min_hw_rev = CC_HW_REV_630,
1464 		.std_body = CC_STD_NIST,
1465 	},
1466 	{
1467 		.name = "cbc(des)",
1468 		.driver_name = "cbc-des-ccree",
1469 		.blocksize = DES_BLOCK_SIZE,
1470 		.template_skcipher = {
1471 			.setkey = cc_cipher_setkey,
1472 			.encrypt = cc_cipher_encrypt,
1473 			.decrypt = cc_cipher_decrypt,
1474 			.min_keysize = DES_KEY_SIZE,
1475 			.max_keysize = DES_KEY_SIZE,
1476 			.ivsize = DES_BLOCK_SIZE,
1477 			},
1478 		.cipher_mode = DRV_CIPHER_CBC,
1479 		.flow_mode = S_DIN_to_DES,
1480 		.min_hw_rev = CC_HW_REV_630,
1481 		.std_body = CC_STD_NIST,
1482 	},
1483 	{
1484 		.name = "ecb(des)",
1485 		.driver_name = "ecb-des-ccree",
1486 		.blocksize = DES_BLOCK_SIZE,
1487 		.template_skcipher = {
1488 			.setkey = cc_cipher_setkey,
1489 			.encrypt = cc_cipher_encrypt,
1490 			.decrypt = cc_cipher_decrypt,
1491 			.min_keysize = DES_KEY_SIZE,
1492 			.max_keysize = DES_KEY_SIZE,
1493 			.ivsize = 0,
1494 			},
1495 		.cipher_mode = DRV_CIPHER_ECB,
1496 		.flow_mode = S_DIN_to_DES,
1497 		.min_hw_rev = CC_HW_REV_630,
1498 		.std_body = CC_STD_NIST,
1499 	},
1500 	{
1501 		.name = "cbc(sm4)",
1502 		.driver_name = "cbc-sm4-ccree",
1503 		.blocksize = SM4_BLOCK_SIZE,
1504 		.template_skcipher = {
1505 			.setkey = cc_cipher_setkey,
1506 			.encrypt = cc_cipher_encrypt,
1507 			.decrypt = cc_cipher_decrypt,
1508 			.min_keysize = SM4_KEY_SIZE,
1509 			.max_keysize = SM4_KEY_SIZE,
1510 			.ivsize = SM4_BLOCK_SIZE,
1511 			},
1512 		.cipher_mode = DRV_CIPHER_CBC,
1513 		.flow_mode = S_DIN_to_SM4,
1514 		.min_hw_rev = CC_HW_REV_713,
1515 		.std_body = CC_STD_OSCCA,
1516 	},
1517 	{
1518 		.name = "ecb(sm4)",
1519 		.driver_name = "ecb-sm4-ccree",
1520 		.blocksize = SM4_BLOCK_SIZE,
1521 		.template_skcipher = {
1522 			.setkey = cc_cipher_setkey,
1523 			.encrypt = cc_cipher_encrypt,
1524 			.decrypt = cc_cipher_decrypt,
1525 			.min_keysize = SM4_KEY_SIZE,
1526 			.max_keysize = SM4_KEY_SIZE,
1527 			.ivsize = 0,
1528 			},
1529 		.cipher_mode = DRV_CIPHER_ECB,
1530 		.flow_mode = S_DIN_to_SM4,
1531 		.min_hw_rev = CC_HW_REV_713,
1532 		.std_body = CC_STD_OSCCA,
1533 	},
1534 	{
1535 		.name = "ctr(sm4)",
1536 		.driver_name = "ctr-sm4-ccree",
1537 		.blocksize = SM4_BLOCK_SIZE,
1538 		.template_skcipher = {
1539 			.setkey = cc_cipher_setkey,
1540 			.encrypt = cc_cipher_encrypt,
1541 			.decrypt = cc_cipher_decrypt,
1542 			.min_keysize = SM4_KEY_SIZE,
1543 			.max_keysize = SM4_KEY_SIZE,
1544 			.ivsize = SM4_BLOCK_SIZE,
1545 			},
1546 		.cipher_mode = DRV_CIPHER_CTR,
1547 		.flow_mode = S_DIN_to_SM4,
1548 		.min_hw_rev = CC_HW_REV_713,
1549 		.std_body = CC_STD_OSCCA,
1550 	},
1551 	{
1552 		.name = "cbc(psm4)",
1553 		.driver_name = "cbc-psm4-ccree",
1554 		.blocksize = SM4_BLOCK_SIZE,
1555 		.template_skcipher = {
1556 			.setkey = cc_cipher_sethkey,
1557 			.encrypt = cc_cipher_encrypt,
1558 			.decrypt = cc_cipher_decrypt,
1559 			.min_keysize = CC_HW_KEY_SIZE,
1560 			.max_keysize = CC_HW_KEY_SIZE,
1561 			.ivsize = SM4_BLOCK_SIZE,
1562 			},
1563 		.cipher_mode = DRV_CIPHER_CBC,
1564 		.flow_mode = S_DIN_to_SM4,
1565 		.min_hw_rev = CC_HW_REV_713,
1566 		.std_body = CC_STD_OSCCA,
1567 		.sec_func = true,
1568 	},
1569 	{
1570 		.name = "ctr(psm4)",
1571 		.driver_name = "ctr-psm4-ccree",
1572 		.blocksize = SM4_BLOCK_SIZE,
1573 		.template_skcipher = {
1574 			.setkey = cc_cipher_sethkey,
1575 			.encrypt = cc_cipher_encrypt,
1576 			.decrypt = cc_cipher_decrypt,
1577 			.min_keysize = CC_HW_KEY_SIZE,
1578 			.max_keysize = CC_HW_KEY_SIZE,
1579 			.ivsize = SM4_BLOCK_SIZE,
1580 			},
1581 		.cipher_mode = DRV_CIPHER_CTR,
1582 		.flow_mode = S_DIN_to_SM4,
1583 		.min_hw_rev = CC_HW_REV_713,
1584 		.std_body = CC_STD_OSCCA,
1585 		.sec_func = true,
1586 	},
1587 };
1588 
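/* Turn one template entry into a registrable struct cc_crypto_alg. */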
1589 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
1590 					   struct device *dev)
1591 {
1592 	struct cc_crypto_alg *t_alg;
1593 	struct skcipher_alg *alg;
1594 
1595 	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
1596 	if (!t_alg)
1597 		return ERR_PTR(-ENOMEM);
1598 
1599 	alg = &t_alg->skcipher_alg;
1600 
1601 	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));
1602 
1603 	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
1604 	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1605 		 tmpl->driver_name);
1606 	alg->base.cra_module = THIS_MODULE;
1607 	alg->base.cra_priority = CC_CRA_PRIO;
1608 	alg->base.cra_blocksize = tmpl->blocksize;
1609 	alg->base.cra_alignmask = 0;
1610 	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);
1611 
1612 	alg->base.cra_init = cc_cipher_init;
1613 	alg->base.cra_exit = cc_cipher_exit;
1614 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1615 
1616 	t_alg->cipher_mode = tmpl->cipher_mode;
1617 	t_alg->flow_mode = tmpl->flow_mode;
1618 	t_alg->data_unit = tmpl->data_unit;
1619 
1620 	return t_alg;
1621 }
1622 
1623 int cc_cipher_free(struct cc_drvdata *drvdata)
1624 {
1625 	struct cc_crypto_alg *t_alg, *n;
1626 	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;
1627 
1628 	if (cipher_handle) {
1629 		/* Remove registered algs */
1630 		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
1631 					 entry) {
1632 			crypto_unregister_skcipher(&t_alg->skcipher_alg);
1633 			list_del(&t_alg->entry);
1634 			kfree(t_alg);
1635 		}
1636 		kfree(cipher_handle);
1637 		drvdata->cipher_handle = NULL;
1638 	}
1639 	return 0;
1640 }
1641 
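/*
 * Register every template whose minimum HW revision, standards body and
 * security function requirements are met by the probed device; on failure,
 * roll back everything registered so far via cc_cipher_free().
 */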
1642 int cc_cipher_alloc(struct cc_drvdata *drvdata)
1643 {
1644 	struct cc_cipher_handle *cipher_handle;
1645 	struct cc_crypto_alg *t_alg;
1646 	struct device *dev = drvdata_to_dev(drvdata);
1647 	int rc = -ENOMEM;
1648 	int alg;
1649 
1650 	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
1651 	if (!cipher_handle)
1652 		return -ENOMEM;
1653 
1654 	INIT_LIST_HEAD(&cipher_handle->alg_list);
1655 	drvdata->cipher_handle = cipher_handle;
1656 
1657 	/* Linux crypto */
1658 	dev_dbg(dev, "Number of algorithms = %zu\n",
1659 		ARRAY_SIZE(skcipher_algs));
1660 	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
1661 		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
1662 		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
1663 		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
1664 			continue;
1665 
1666 		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
1667 		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
1668 		if (IS_ERR(t_alg)) {
1669 			rc = PTR_ERR(t_alg);
1670 			dev_err(dev, "%s alg allocation failed\n",
1671 				skcipher_algs[alg].driver_name);
1672 			goto fail0;
1673 		}
1674 		t_alg->drvdata = drvdata;
1675 
1676 		dev_dbg(dev, "registering %s\n",
1677 			skcipher_algs[alg].driver_name);
1678 		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
1679 		dev_dbg(dev, "%s alg registration rc = %x\n",
1680 			t_alg->skcipher_alg.base.cra_driver_name, rc);
1681 		if (rc) {
1682 			dev_err(dev, "%s alg registration failed\n",
1683 				t_alg->skcipher_alg.base.cra_driver_name);
1684 			kfree(t_alg);
1685 			goto fail0;
1686 		} else {
1687 			list_add_tail(&t_alg->entry,
1688 				      &cipher_handle->alg_list);
1689 			dev_dbg(dev, "Registered %s\n",
1690 				t_alg->skcipher_alg.base.cra_driver_name);
1691 		}
1692 	}
1693 	return 0;
1694 
1695 fail0:
1696 	cc_cipher_free(drvdata);
1697 	return rc;
1698 }
1699