xref: /openbmc/linux/drivers/crypto/ccree/cc_cipher.c (revision 165f2d28)
1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */
3 
4 #include <linux/kernel.h>
5 #include <linux/module.h>
6 #include <crypto/algapi.h>
7 #include <crypto/internal/skcipher.h>
8 #include <crypto/internal/des.h>
9 #include <crypto/xts.h>
10 #include <crypto/sm4.h>
11 #include <crypto/scatterwalk.h>
12 
13 #include "cc_driver.h"
14 #include "cc_lli_defs.h"
15 #include "cc_buffer_mgr.h"
16 #include "cc_cipher.h"
17 #include "cc_request_mgr.h"
18 
19 #define MAX_SKCIPHER_SEQ_LEN 6
20 
21 #define template_skcipher	template_u.skcipher
22 
23 struct cc_user_key_info {
24 	u8 *key;
25 	dma_addr_t key_dma_addr;
26 };
27 
28 struct cc_hw_key_info {
29 	enum cc_hw_crypto_key key1_slot;
30 	enum cc_hw_crypto_key key2_slot;
31 };
32 
33 struct cc_cpp_key_info {
34 	u8 slot;
35 	enum cc_cpp_alg alg;
36 };
37 
38 enum cc_key_type {
39 	CC_UNPROTECTED_KEY,		/* User key */
40 	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
41 	CC_POLICY_PROTECTED_KEY,	/* CPP key */
42 	CC_INVALID_PROTECTED_KEY	/* Invalid key */
43 };
44 
45 struct cc_cipher_ctx {
46 	struct cc_drvdata *drvdata;
47 	int keylen;
48 	int key_round_number;
49 	int cipher_mode;
50 	int flow_mode;
51 	unsigned int flags;
52 	enum cc_key_type key_type;
53 	struct cc_user_key_info user;
54 	union {
55 		struct cc_hw_key_info hw;
56 		struct cc_cpp_key_info cpp;
57 	};
58 	struct crypto_shash *shash_tfm;
59 };
60 
61 static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
62 
63 static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
64 {
65 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
66 
67 	return ctx_p->key_type;
68 }
69 
70 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
71 {
72 	switch (ctx_p->flow_mode) {
73 	case S_DIN_to_AES:
74 		switch (size) {
75 		case CC_AES_128_BIT_KEY_SIZE:
76 		case CC_AES_192_BIT_KEY_SIZE:
77 			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
78 			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
79 			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
80 				return 0;
81 			break;
82 		case CC_AES_256_BIT_KEY_SIZE:
83 			return 0;
84 		case (CC_AES_192_BIT_KEY_SIZE * 2):
85 		case (CC_AES_256_BIT_KEY_SIZE * 2):
86 			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
87 			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
88 			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
89 				return 0;
90 			break;
91 		default:
92 			break;
93 		}
94 		break;
95 	case S_DIN_to_DES:
96 		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
97 			return 0;
98 		break;
99 	case S_DIN_to_SM4:
100 		if (size == SM4_KEY_SIZE)
101 			return 0;
102 	default:
103 		break;
104 	}
105 	return -EINVAL;
106 }
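/*
 * Illustration of what validate_keys_sizes() accepts (summary added for
 * clarity, not part of the original source).  XTS/ESSIV/BITLOCKER consume
 * two keys of equal size, hence the doubled AES lengths:
 *
 *   S_DIN_to_AES, ECB/CBC/CTR/OFB/CTS:  16, 24 or 32 bytes
 *   S_DIN_to_AES, XTS/ESSIV/BITLOCKER:  32, 48 or 64 bytes (2 x 128/192/256)
 *   S_DIN_to_DES:                        8 (DES) or 24 (3DES) bytes
 *   S_DIN_to_SM4:                       16 bytes
 */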
107 
108 static int validate_data_size(struct cc_cipher_ctx *ctx_p,
109 			      unsigned int size)
110 {
111 	switch (ctx_p->flow_mode) {
112 	case S_DIN_to_AES:
113 		switch (ctx_p->cipher_mode) {
114 		case DRV_CIPHER_XTS:
115 		case DRV_CIPHER_CBC_CTS:
116 			if (size >= AES_BLOCK_SIZE)
117 				return 0;
118 			break;
119 		case DRV_CIPHER_OFB:
120 		case DRV_CIPHER_CTR:
121 				return 0;
122 		case DRV_CIPHER_ECB:
123 		case DRV_CIPHER_CBC:
124 		case DRV_CIPHER_ESSIV:
125 		case DRV_CIPHER_BITLOCKER:
126 			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
127 				return 0;
128 			break;
129 		default:
130 			break;
131 		}
132 		break;
133 	case S_DIN_to_DES:
134 		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
135 			return 0;
136 		break;
137 	case S_DIN_to_SM4:
138 		switch (ctx_p->cipher_mode) {
139 		case DRV_CIPHER_CTR:
140 			return 0;
141 		case DRV_CIPHER_ECB:
142 		case DRV_CIPHER_CBC:
143 			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
144 				return 0;
145 		default:
146 			break;
147 		}
148 	default:
149 		break;
150 	}
151 	return -EINVAL;
152 }
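/*
 * Summary (added for clarity): validate_data_size() encodes the usual mode
 * constraints - CTR and OFB behave as stream ciphers and accept any length,
 * ECB/CBC/ESSIV/BITLOCKER require a block-size multiple, and XTS/CTS only
 * require at least one full block since ciphertext stealing handles the
 * tail.  A 511-byte cbc(aes) request is therefore rejected with -EINVAL,
 * while the same length is fine for ctr(aes) or xts(aes).
 */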
153 
154 static int cc_cipher_init(struct crypto_tfm *tfm)
155 {
156 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
157 	struct cc_crypto_alg *cc_alg =
158 			container_of(tfm->__crt_alg, struct cc_crypto_alg,
159 				     skcipher_alg.base);
160 	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
161 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
162 	int rc = 0;
163 
164 	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
165 		crypto_tfm_alg_name(tfm));
166 
167 	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
168 				    sizeof(struct cipher_req_ctx));
169 
170 	ctx_p->cipher_mode = cc_alg->cipher_mode;
171 	ctx_p->flow_mode = cc_alg->flow_mode;
172 	ctx_p->drvdata = cc_alg->drvdata;
173 
174 	/* Allocate key buffer, cache line aligned */
175 	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
176 	if (!ctx_p->user.key)
177 		return -ENOMEM;
178 
179 	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
180 		ctx_p->user.key);
181 
182 	/* Map key buffer */
183 	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
184 						  max_key_buf_size,
185 						  DMA_TO_DEVICE);
186 	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
187 		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
188 			max_key_buf_size, ctx_p->user.key);
189 		return -ENOMEM;
190 	}
191 	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
192 		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);
193 
194 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
195 		/* Alloc hash tfm for essiv */
196 		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
197 		if (IS_ERR(ctx_p->shash_tfm)) {
198 			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
199 			return PTR_ERR(ctx_p->shash_tfm);
200 		}
201 	}
202 
203 	return rc;
204 }
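/*
 * Note (added for clarity): the key buffer is allocated and DMA mapped once
 * per transform, sized for the template's max_keysize.  Later setkey() calls
 * only dma_sync_single_for_cpu()/for_device() the existing mapping instead
 * of remapping it, which is also what lets the ESSIV path hash the second
 * key half in place before handing the buffer back to the device.
 */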
205 
206 static void cc_cipher_exit(struct crypto_tfm *tfm)
207 {
208 	struct crypto_alg *alg = tfm->__crt_alg;
209 	struct cc_crypto_alg *cc_alg =
210 			container_of(alg, struct cc_crypto_alg,
211 				     skcipher_alg.base);
212 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
213 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
214 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
215 
216 	dev_dbg(dev, "Clearing context @%p for %s\n",
217 		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
218 
219 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
220 		/* Free hash tfm for essiv */
221 		crypto_free_shash(ctx_p->shash_tfm);
222 		ctx_p->shash_tfm = NULL;
223 	}
224 
225 	/* Unmap key buffer */
226 	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
227 			 DMA_TO_DEVICE);
228 	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
229 		&ctx_p->user.key_dma_addr);
230 
231 	/* Free key buffer in context */
232 	kzfree(ctx_p->user.key);
233 	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
234 }
235 
236 struct tdes_keys {
237 	u8	key1[DES_KEY_SIZE];
238 	u8	key2[DES_KEY_SIZE];
239 	u8	key3[DES_KEY_SIZE];
240 };
241 
242 static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
243 {
244 	switch (slot_num) {
245 	case 0:
246 		return KFDE0_KEY;
247 	case 1:
248 		return KFDE1_KEY;
249 	case 2:
250 		return KFDE2_KEY;
251 	case 3:
252 		return KFDE3_KEY;
253 	}
254 	return END_OF_KEYS;
255 }
256 
257 static u8 cc_slot_to_cpp_key(u8 slot_num)
258 {
259 	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
260 }
261 
262 static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
263 {
264 	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
265 		return CC_HW_PROTECTED_KEY;
266 	else if (slot_num >=  CC_FIRST_CPP_KEY_SLOT &&
267 		 slot_num <=  CC_LAST_CPP_KEY_SLOT)
268 		return CC_POLICY_PROTECTED_KEY;
269 	else
270 		return CC_INVALID_PROTECTED_KEY;
271 }
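/*
 * Note (added for clarity): protected keys are addressed purely by slot
 * number.  Slots in [CC_FIRST_HW_KEY_SLOT, CC_LAST_HW_KEY_SLOT] select one
 * of the FDE key registers (see cc_slot_to_hw_key() above), slots in
 * [CC_FIRST_CPP_KEY_SLOT, CC_LAST_CPP_KEY_SLOT] select a CPP (policy
 * protected) key, and anything else is rejected by the sethkey path as
 * CC_INVALID_PROTECTED_KEY.
 */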
272 
273 static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
274 			     unsigned int keylen)
275 {
276 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
277 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
278 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
279 	struct cc_hkey_info hki;
280 
281 	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
282 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
283 	dump_byte_array("key", key, keylen);
284 
285 	/* STAT_PHASE_0: Init and sanity checks */
286 
287 	/* This checks the size of the protected key token */
288 	if (keylen != sizeof(hki)) {
289 		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
290 		return -EINVAL;
291 	}
292 
293 	memcpy(&hki, key, keylen);
294 
295 	/* The real key length for the crypto operation is the size of the
296 	 * HW key referenced by the HW key slot, not of the hardware key token
297 	 */
298 	keylen = hki.keylen;
299 
300 	if (validate_keys_sizes(ctx_p, keylen)) {
301 		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
302 		return -EINVAL;
303 	}
304 
305 	ctx_p->keylen = keylen;
306 
307 	switch (cc_slot_to_key_type(hki.hw_key1)) {
308 	case CC_HW_PROTECTED_KEY:
309 		if (ctx_p->flow_mode == S_DIN_to_SM4) {
310 			dev_err(dev, "Only AES HW protected keys are supported\n");
311 			return -EINVAL;
312 		}
313 
314 		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
315 		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
316 			dev_err(dev, "Unsupported hw key1 number (%d)\n",
317 				hki.hw_key1);
318 			return -EINVAL;
319 		}
320 
321 		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
322 		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
323 		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
324 			if (hki.hw_key1 == hki.hw_key2) {
325 				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
326 					hki.hw_key1, hki.hw_key2);
327 				return -EINVAL;
328 			}
329 
330 			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
331 			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
332 				dev_err(dev, "Unsupported hw key2 number (%d)\n",
333 					hki.hw_key2);
334 				return -EINVAL;
335 			}
336 		}
337 
338 		ctx_p->key_type = CC_HW_PROTECTED_KEY;
339 		dev_dbg(dev, "HW protected key  %d/%d set\n.",
340 			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
341 		break;
342 
343 	case CC_POLICY_PROTECTED_KEY:
344 		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
345 			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
346 			return -EINVAL;
347 		}
348 
349 		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
350 		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
351 			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
352 			return -EINVAL;
353 		}
354 
355 		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
356 		if (ctx_p->flow_mode == S_DIN_to_AES)
357 			ctx_p->cpp.alg = CC_CPP_AES;
358 		else /* Must be SM4 due to sethkey registration */
359 			ctx_p->cpp.alg = CC_CPP_SM4;
360 		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
361 		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
362 			ctx_p->cpp.alg, ctx_p->cpp.slot);
363 		break;
364 
365 	default:
366 		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
367 		return -EINVAL;
368 	}
369 
370 	return 0;
371 }
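/*
 * Illustrative sketch (not part of the driver): a "paes" caller passes a
 * small token naming HW key slots to setkey(), not key material.  Assuming
 * the cc_hkey_info layout used above (keylen plus two slot numbers, see
 * cc_cipher.h) and that FDE slots 0 and 1 hold provisioned 256-bit keys,
 * keying xts(paes) would look roughly like this:
 */
static int __maybe_unused cc_doc_set_paes_key(struct crypto_skcipher *tfm)
{
	struct cc_hkey_info hki = {
		.keylen = AES_KEYSIZE_256 * 2,	/* real key size: 2 x 256 bit */
		.hw_key1 = 0,			/* first FDE key slot */
		.hw_key2 = 1,			/* must differ from key1 for XTS */
	};

	/* The length passed here is the token size, not the key size */
	return crypto_skcipher_setkey(tfm, (u8 *)&hki, sizeof(hki));
}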
372 
373 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
374 			    unsigned int keylen)
375 {
376 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
377 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
378 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
379 	struct cc_crypto_alg *cc_alg =
380 			container_of(tfm->__crt_alg, struct cc_crypto_alg,
381 				     skcipher_alg.base);
382 	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
383 
384 	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
385 		ctx_p, crypto_tfm_alg_name(tfm), keylen);
386 	dump_byte_array("key", key, keylen);
387 
388 	/* STAT_PHASE_0: Init and sanity checks */
389 
390 	if (validate_keys_sizes(ctx_p, keylen)) {
391 		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
392 		return -EINVAL;
393 	}
394 
395 	ctx_p->key_type = CC_UNPROTECTED_KEY;
396 
397 	/*
398 	 * Check for weak DES keys
399 	 * Note that we're dropping the expanded key since the
400 	 * HW does the expansion on its own.
401 	 */
402 	if (ctx_p->flow_mode == S_DIN_to_DES) {
403 		if ((keylen == DES3_EDE_KEY_SIZE &&
404 		     verify_skcipher_des3_key(sktfm, key)) ||
405 		    verify_skcipher_des_key(sktfm, key)) {
406 			dev_dbg(dev, "weak DES key");
407 			return -EINVAL;
408 		}
409 	}
410 
411 	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
412 	    xts_check_key(tfm, key, keylen)) {
413 		dev_dbg(dev, "weak XTS key");
414 		return -EINVAL;
415 	}
416 
417 	/* STAT_PHASE_1: Copy key to ctx */
418 	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
419 				max_key_buf_size, DMA_TO_DEVICE);
420 
421 	memcpy(ctx_p->user.key, key, keylen);
422 	if (keylen == 24)
423 		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
424 
425 	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
426 		/* sha256 for key2 - use sw implementation */
427 		int key_len = keylen >> 1;
428 		int err;
429 
430 		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
431 
432 		desc->tfm = ctx_p->shash_tfm;
433 
434 		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
435 					  ctx_p->user.key + key_len);
436 		if (err) {
437 			dev_err(dev, "Failed to hash ESSIV key.\n");
438 			return err;
439 		}
440 	}
441 	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
442 				   max_key_buf_size, DMA_TO_DEVICE);
443 	ctx_p->keylen = keylen;
444 
445 	dev_dbg(dev, "return safely");
446 	return 0;
447 }
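/*
 * Note (added for clarity): for ESSIV the second half of the user-supplied
 * key is discarded; the code above overwrites user.key[keylen/2..keylen) in
 * place with SHA-256 of the first half, so the hardware ends up loading
 *
 *   key1 = user.key[0 .. keylen/2)              data encryption key
 *   key2 = sha256(key1), truncated to keylen/2  IV/tweak key
 *
 * matching the classic ESSIV construction in which the per-sector IV is the
 * sector number encrypted under a hash of the data key.
 */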
448 
449 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
450 {
451 	switch (ctx_p->flow_mode) {
452 	case S_DIN_to_AES:
453 		return S_AES_to_DOUT;
454 	case S_DIN_to_DES:
455 		return S_DES_to_DOUT;
456 	case S_DIN_to_SM4:
457 		return S_SM4_to_DOUT;
458 	default:
459 		return ctx_p->flow_mode;
460 	}
461 }
462 
463 static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
464 				 struct cipher_req_ctx *req_ctx,
465 				 unsigned int ivsize, struct cc_hw_desc desc[],
466 				 unsigned int *seq_size)
467 {
468 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
469 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
470 	int cipher_mode = ctx_p->cipher_mode;
471 	int flow_mode = cc_out_setup_mode(ctx_p);
472 	int direction = req_ctx->gen_ctx.op_type;
473 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
474 
475 	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
476 		return;
477 
478 	switch (cipher_mode) {
479 	case DRV_CIPHER_ECB:
480 		break;
481 	case DRV_CIPHER_CBC:
482 	case DRV_CIPHER_CBC_CTS:
483 	case DRV_CIPHER_CTR:
484 	case DRV_CIPHER_OFB:
485 		/* Read next IV */
486 		hw_desc_init(&desc[*seq_size]);
487 		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
488 		set_cipher_config0(&desc[*seq_size], direction);
489 		set_flow_mode(&desc[*seq_size], flow_mode);
490 		set_cipher_mode(&desc[*seq_size], cipher_mode);
491 		if (cipher_mode == DRV_CIPHER_CTR ||
492 		    cipher_mode == DRV_CIPHER_OFB) {
493 			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
494 		} else {
495 			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
496 		}
497 		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
498 		(*seq_size)++;
499 		break;
500 	case DRV_CIPHER_XTS:
501 	case DRV_CIPHER_ESSIV:
502 	case DRV_CIPHER_BITLOCKER:
503 		/*  IV */
504 		hw_desc_init(&desc[*seq_size]);
505 		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
506 		set_cipher_mode(&desc[*seq_size], cipher_mode);
507 		set_cipher_config0(&desc[*seq_size], direction);
508 		set_flow_mode(&desc[*seq_size], flow_mode);
509 		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
510 			     NS_BIT, 1);
511 		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
512 		(*seq_size)++;
513 		break;
514 	default:
515 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
516 	}
517 }
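/*
 * Note (added for clarity): this write-back is what makes request chaining
 * work - once the data flow finishes, the engine's updated IV or counter is
 * DMAed back into req_ctx->iv, and cc_cipher_complete() copies it to req->iv
 * as the skcipher API expects.  The SETUP_WRITE_STATE1 vs SETUP_WRITE_STATE0
 * split mirrors the SETUP_LOAD_STATE1/STATE0 selectors used when the IV is
 * loaded in cc_setup_state_desc() below.
 */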
518 
519 
520 static void cc_setup_state_desc(struct crypto_tfm *tfm,
521 				 struct cipher_req_ctx *req_ctx,
522 				 unsigned int ivsize, unsigned int nbytes,
523 				 struct cc_hw_desc desc[],
524 				 unsigned int *seq_size)
525 {
526 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
527 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
528 	int cipher_mode = ctx_p->cipher_mode;
529 	int flow_mode = ctx_p->flow_mode;
530 	int direction = req_ctx->gen_ctx.op_type;
531 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
532 
533 	switch (cipher_mode) {
534 	case DRV_CIPHER_ECB:
535 		break;
536 	case DRV_CIPHER_CBC:
537 	case DRV_CIPHER_CBC_CTS:
538 	case DRV_CIPHER_CTR:
539 	case DRV_CIPHER_OFB:
540 		/* Load IV */
541 		hw_desc_init(&desc[*seq_size]);
542 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
543 			     NS_BIT);
544 		set_cipher_config0(&desc[*seq_size], direction);
545 		set_flow_mode(&desc[*seq_size], flow_mode);
546 		set_cipher_mode(&desc[*seq_size], cipher_mode);
547 		if (cipher_mode == DRV_CIPHER_CTR ||
548 		    cipher_mode == DRV_CIPHER_OFB) {
549 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
550 		} else {
551 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
552 		}
553 		(*seq_size)++;
554 		break;
555 	case DRV_CIPHER_XTS:
556 	case DRV_CIPHER_ESSIV:
557 	case DRV_CIPHER_BITLOCKER:
558 		break;
559 	default:
560 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
561 	}
562 }
563 
564 
565 static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
566 				 struct cipher_req_ctx *req_ctx,
567 				 unsigned int ivsize, unsigned int nbytes,
568 				 struct cc_hw_desc desc[],
569 				 unsigned int *seq_size)
570 {
571 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
572 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
573 	int cipher_mode = ctx_p->cipher_mode;
574 	int flow_mode = ctx_p->flow_mode;
575 	int direction = req_ctx->gen_ctx.op_type;
576 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
577 	unsigned int key_len = ctx_p->keylen;
578 	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
579 	unsigned int du_size = nbytes;
580 
581 	struct cc_crypto_alg *cc_alg =
582 		container_of(tfm->__crt_alg, struct cc_crypto_alg,
583 			     skcipher_alg.base);
584 
585 	if (cc_alg->data_unit)
586 		du_size = cc_alg->data_unit;
587 
588 	switch (cipher_mode) {
589 	case DRV_CIPHER_ECB:
590 		break;
591 	case DRV_CIPHER_CBC:
592 	case DRV_CIPHER_CBC_CTS:
593 	case DRV_CIPHER_CTR:
594 	case DRV_CIPHER_OFB:
595 		break;
596 	case DRV_CIPHER_XTS:
597 	case DRV_CIPHER_ESSIV:
598 	case DRV_CIPHER_BITLOCKER:
599 		/* load XEX key */
600 		hw_desc_init(&desc[*seq_size]);
601 		set_cipher_mode(&desc[*seq_size], cipher_mode);
602 		set_cipher_config0(&desc[*seq_size], direction);
603 		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
604 			set_hw_crypto_key(&desc[*seq_size],
605 					  ctx_p->hw.key2_slot);
606 		} else {
607 			set_din_type(&desc[*seq_size], DMA_DLLI,
608 				     (key_dma_addr + (key_len / 2)),
609 				     (key_len / 2), NS_BIT);
610 		}
611 		set_xex_data_unit_size(&desc[*seq_size], du_size);
612 		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
613 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
614 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
615 		(*seq_size)++;
616 
617 		/* Load IV */
618 		hw_desc_init(&desc[*seq_size]);
619 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
620 		set_cipher_mode(&desc[*seq_size], cipher_mode);
621 		set_cipher_config0(&desc[*seq_size], direction);
622 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
623 		set_flow_mode(&desc[*seq_size], flow_mode);
624 		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
625 			     CC_AES_BLOCK_SIZE, NS_BIT);
626 		(*seq_size)++;
627 		break;
628 	default:
629 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
630 	}
631 }
632 
633 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
634 {
635 	switch (ctx_p->flow_mode) {
636 	case S_DIN_to_AES:
637 		return DIN_AES_DOUT;
638 	case S_DIN_to_DES:
639 		return DIN_DES_DOUT;
640 	case S_DIN_to_SM4:
641 		return DIN_SM4_DOUT;
642 	default:
643 		return ctx_p->flow_mode;
644 	}
645 }
646 
647 static void cc_setup_key_desc(struct crypto_tfm *tfm,
648 			      struct cipher_req_ctx *req_ctx,
649 			      unsigned int nbytes, struct cc_hw_desc desc[],
650 			      unsigned int *seq_size)
651 {
652 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
653 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
654 	int cipher_mode = ctx_p->cipher_mode;
655 	int flow_mode = ctx_p->flow_mode;
656 	int direction = req_ctx->gen_ctx.op_type;
657 	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
658 	unsigned int key_len = ctx_p->keylen;
659 	unsigned int din_size;
660 
661 	switch (cipher_mode) {
662 	case DRV_CIPHER_CBC:
663 	case DRV_CIPHER_CBC_CTS:
664 	case DRV_CIPHER_CTR:
665 	case DRV_CIPHER_OFB:
666 	case DRV_CIPHER_ECB:
667 		/* Load key */
668 		hw_desc_init(&desc[*seq_size]);
669 		set_cipher_mode(&desc[*seq_size], cipher_mode);
670 		set_cipher_config0(&desc[*seq_size], direction);
671 
672 		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
673 			/* We use the AES key size coding for all CPP algs */
674 			set_key_size_aes(&desc[*seq_size], key_len);
675 			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
676 			flow_mode = cc_out_flow_mode(ctx_p);
677 		} else {
678 			if (flow_mode == S_DIN_to_AES) {
679 				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
680 					set_hw_crypto_key(&desc[*seq_size],
681 							  ctx_p->hw.key1_slot);
682 				} else {
683 					/* CC_POLICY_UNPROTECTED_KEY
684 					 * Invalid keys are filtered out in
685 					 * sethkey()
686 					 */
687 					din_size = (key_len == 24) ?
688 						AES_MAX_KEY_SIZE : key_len;
689 
690 					set_din_type(&desc[*seq_size], DMA_DLLI,
691 						     key_dma_addr, din_size,
692 						     NS_BIT);
693 				}
694 				set_key_size_aes(&desc[*seq_size], key_len);
695 			} else {
696 				/*des*/
697 				set_din_type(&desc[*seq_size], DMA_DLLI,
698 					     key_dma_addr, key_len, NS_BIT);
699 				set_key_size_des(&desc[*seq_size], key_len);
700 			}
701 			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
702 		}
703 		set_flow_mode(&desc[*seq_size], flow_mode);
704 		(*seq_size)++;
705 		break;
706 	case DRV_CIPHER_XTS:
707 	case DRV_CIPHER_ESSIV:
708 	case DRV_CIPHER_BITLOCKER:
709 		/* Load AES key */
710 		hw_desc_init(&desc[*seq_size]);
711 		set_cipher_mode(&desc[*seq_size], cipher_mode);
712 		set_cipher_config0(&desc[*seq_size], direction);
713 		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
714 			set_hw_crypto_key(&desc[*seq_size],
715 					  ctx_p->hw.key1_slot);
716 		} else {
717 			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
718 				     (key_len / 2), NS_BIT);
719 		}
720 		set_key_size_aes(&desc[*seq_size], (key_len / 2));
721 		set_flow_mode(&desc[*seq_size], flow_mode);
722 		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
723 		(*seq_size)++;
724 		break;
725 	default:
726 		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
727 	}
728 }
729 
730 static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
731 			       struct cipher_req_ctx *req_ctx,
732 			       struct scatterlist *dst, struct scatterlist *src,
733 			       unsigned int nbytes, void *areq,
734 			       struct cc_hw_desc desc[], unsigned int *seq_size)
735 {
736 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
737 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
738 
739 	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
740 		/* bypass */
741 		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
742 			&req_ctx->mlli_params.mlli_dma_addr,
743 			req_ctx->mlli_params.mlli_len,
744 			ctx_p->drvdata->mlli_sram_addr);
745 		hw_desc_init(&desc[*seq_size]);
746 		set_din_type(&desc[*seq_size], DMA_DLLI,
747 			     req_ctx->mlli_params.mlli_dma_addr,
748 			     req_ctx->mlli_params.mlli_len, NS_BIT);
749 		set_dout_sram(&desc[*seq_size],
750 			      ctx_p->drvdata->mlli_sram_addr,
751 			      req_ctx->mlli_params.mlli_len);
752 		set_flow_mode(&desc[*seq_size], BYPASS);
753 		(*seq_size)++;
754 	}
755 }
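/*
 * Note (added for clarity): when the source/destination scatterlists cannot
 * be described by a single DLLI entry, the buffer manager builds an MLLI
 * (multi-LLI) table in host memory.  The BYPASS descriptor above DMAs that
 * table into the dedicated SRAM window so the data descriptors built in
 * cc_setup_flow_desc() can fetch their DIN/DOUT entries from
 * ctx_p->drvdata->mlli_sram_addr.
 */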
756 
757 static void cc_setup_flow_desc(struct crypto_tfm *tfm,
758 			       struct cipher_req_ctx *req_ctx,
759 			       struct scatterlist *dst, struct scatterlist *src,
760 			       unsigned int nbytes, struct cc_hw_desc desc[],
761 			       unsigned int *seq_size)
762 {
763 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
764 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
765 	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
766 	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
767 			  ctx_p->cipher_mode == DRV_CIPHER_ECB);
768 
769 	/* Process */
770 	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
771 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
772 			&sg_dma_address(src), nbytes);
773 		dev_dbg(dev, " data params addr %pad length 0x%X\n",
774 			&sg_dma_address(dst), nbytes);
775 		hw_desc_init(&desc[*seq_size]);
776 		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
777 			     nbytes, NS_BIT);
778 		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
779 			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
780 		if (last_desc)
781 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
782 
783 		set_flow_mode(&desc[*seq_size], flow_mode);
784 		(*seq_size)++;
785 	} else {
786 		hw_desc_init(&desc[*seq_size]);
787 		set_din_type(&desc[*seq_size], DMA_MLLI,
788 			     ctx_p->drvdata->mlli_sram_addr,
789 			     req_ctx->in_mlli_nents, NS_BIT);
790 		if (req_ctx->out_nents == 0) {
791 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
792 				ctx_p->drvdata->mlli_sram_addr,
793 				ctx_p->drvdata->mlli_sram_addr);
794 			set_dout_mlli(&desc[*seq_size],
795 				      ctx_p->drvdata->mlli_sram_addr,
796 				      req_ctx->in_mlli_nents, NS_BIT,
797 				      (!last_desc ? 0 : 1));
798 		} else {
799 			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
800 				ctx_p->drvdata->mlli_sram_addr,
801 				ctx_p->drvdata->mlli_sram_addr +
802 				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
803 			set_dout_mlli(&desc[*seq_size],
804 				      (ctx_p->drvdata->mlli_sram_addr +
805 				       (LLI_ENTRY_BYTE_SIZE *
806 					req_ctx->in_mlli_nents)),
807 				      req_ctx->out_mlli_nents, NS_BIT,
808 				      (!last_desc ? 0 : 1));
809 		}
810 		if (last_desc)
811 			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
812 
813 		set_flow_mode(&desc[*seq_size], flow_mode);
814 		(*seq_size)++;
815 	}
816 }
817 
818 static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
819 {
820 	struct skcipher_request *req = (struct skcipher_request *)cc_req;
821 	struct scatterlist *dst = req->dst;
822 	struct scatterlist *src = req->src;
823 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
824 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
825 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
826 
827 	if (err != -EINPROGRESS) {
828 		/* Not a BACKLOG notification */
829 		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
830 		memcpy(req->iv, req_ctx->iv, ivsize);
831 		kzfree(req_ctx->iv);
832 	}
833 
834 	skcipher_request_complete(req, err);
835 }
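/*
 * Note (added for clarity): err == -EINPROGRESS is the backlog notification
 * issued when a previously backlogged request starts executing; the real
 * completion with the final status arrives in a second call, so the unmap
 * and IV copy-back above are deliberately skipped on the first one.
 */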
836 
837 static int cc_cipher_process(struct skcipher_request *req,
838 			     enum drv_crypto_direction direction)
839 {
840 	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
841 	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
842 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
843 	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
844 	struct scatterlist *dst = req->dst;
845 	struct scatterlist *src = req->src;
846 	unsigned int nbytes = req->cryptlen;
847 	void *iv = req->iv;
848 	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
849 	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
850 	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
851 	struct cc_crypto_req cc_req = {};
852 	int rc;
853 	unsigned int seq_len = 0;
854 	gfp_t flags = cc_gfp_flags(&req->base);
855 
856 	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
857 		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
858 		"Encrypt" : "Decrypt"), req, iv, nbytes);
859 
860 	/* STAT_PHASE_0: Init and sanity checks */
861 
862 	if (validate_data_size(ctx_p, nbytes)) {
863 		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
864 		rc = -EINVAL;
865 		goto exit_process;
866 	}
867 	if (nbytes == 0) {
868 		/* A zero-length request is valid; nothing to process */
869 		rc = 0;
870 		goto exit_process;
871 	}
872 
873 	/* The IV we are handed may be allocated from the stack so
874 	 * we must copy it to a DMAable buffer before use.
875 	 */
876 	req_ctx->iv = kmemdup(iv, ivsize, flags);
877 	if (!req_ctx->iv) {
878 		rc = -ENOMEM;
879 		goto exit_process;
880 	}
881 
882 	/* Setup request structure */
883 	cc_req.user_cb = cc_cipher_complete;
884 	cc_req.user_arg = req;
885 
886 	/* Setup CPP operation details */
887 	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
888 		cc_req.cpp.is_cpp = true;
889 		cc_req.cpp.alg = ctx_p->cpp.alg;
890 		cc_req.cpp.slot = ctx_p->cpp.slot;
891 	}
892 
893 	/* Setup request context */
894 	req_ctx->gen_ctx.op_type = direction;
895 
896 	/* STAT_PHASE_1: Map buffers */
897 
898 	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
899 				      req_ctx->iv, src, dst, flags);
900 	if (rc) {
901 		dev_err(dev, "map_request() failed\n");
902 		goto exit_process;
903 	}
904 
905 	/* STAT_PHASE_2: Create sequence */
906 
907 	/* Setup state (IV)  */
908 	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
909 	/* Setup MLLI line, if needed */
910 	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
911 	/* Setup key */
912 	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
913 	/* Setup state (IV and XEX key)  */
914 	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
915 	/* Data processing */
916 	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
917 	/* Read next IV */
918 	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);
919 
920 	/* STAT_PHASE_3: Lock HW and push sequence */
921 
922 	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
923 			     &req->base);
924 	if (rc != -EINPROGRESS && rc != -EBUSY) {
925 		/* Failed to send the request or request completed
926 		 * synchronously
927 		 */
928 		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
929 	}
930 
931 exit_process:
932 	if (rc != -EINPROGRESS && rc != -EBUSY) {
933 		kzfree(req_ctx->iv);
934 	}
935 
936 	return rc;
937 }
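/*
 * Illustrative sketch (not part of the driver): a minimal synchronous caller
 * of the request path above through the generic skcipher API.  The
 * "cbc-aes-ccree" name (or plain "cbc(aes)" if this driver wins priority
 * resolution) selects the CBC template registered below; the helper name and
 * the trimmed error handling are purely for illustration.
 */
static int __maybe_unused cc_doc_encrypt_one_block(struct scatterlist *sg,
						   const u8 *key, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int rc;

	tfm = crypto_alloc_skcipher("cbc-aes-ccree", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	rc = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_256);
	if (rc)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		rc = -ENOMEM;
		goto out_free_tfm;
	}

	/* In-place encryption of one AES block, waiting for the async result */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, AES_BLOCK_SIZE, iv);
	rc = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return rc;
}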
938 
939 static int cc_cipher_encrypt(struct skcipher_request *req)
940 {
941 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
942 
943 	memset(req_ctx, 0, sizeof(*req_ctx));
944 
945 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
946 }
947 
948 static int cc_cipher_decrypt(struct skcipher_request *req)
949 {
950 	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
951 
952 	memset(req_ctx, 0, sizeof(*req_ctx));
953 
954 	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
955 }
956 
957 /* Block cipher alg */
958 static const struct cc_alg_template skcipher_algs[] = {
959 	{
960 		.name = "xts(paes)",
961 		.driver_name = "xts-paes-ccree",
962 		.blocksize = 1,
963 		.template_skcipher = {
964 			.setkey = cc_cipher_sethkey,
965 			.encrypt = cc_cipher_encrypt,
966 			.decrypt = cc_cipher_decrypt,
967 			.min_keysize = CC_HW_KEY_SIZE,
968 			.max_keysize = CC_HW_KEY_SIZE,
969 			.ivsize = AES_BLOCK_SIZE,
970 			},
971 		.cipher_mode = DRV_CIPHER_XTS,
972 		.flow_mode = S_DIN_to_AES,
973 		.min_hw_rev = CC_HW_REV_630,
974 		.std_body = CC_STD_NIST,
975 		.sec_func = true,
976 	},
977 	{
978 		.name = "xts512(paes)",
979 		.driver_name = "xts-paes-du512-ccree",
980 		.blocksize = 1,
981 		.template_skcipher = {
982 			.setkey = cc_cipher_sethkey,
983 			.encrypt = cc_cipher_encrypt,
984 			.decrypt = cc_cipher_decrypt,
985 			.min_keysize = CC_HW_KEY_SIZE,
986 			.max_keysize = CC_HW_KEY_SIZE,
987 			.ivsize = AES_BLOCK_SIZE,
988 			},
989 		.cipher_mode = DRV_CIPHER_XTS,
990 		.flow_mode = S_DIN_to_AES,
991 		.data_unit = 512,
992 		.min_hw_rev = CC_HW_REV_712,
993 		.std_body = CC_STD_NIST,
994 		.sec_func = true,
995 	},
996 	{
997 		.name = "xts4096(paes)",
998 		.driver_name = "xts-paes-du4096-ccree",
999 		.blocksize = 1,
1000 		.template_skcipher = {
1001 			.setkey = cc_cipher_sethkey,
1002 			.encrypt = cc_cipher_encrypt,
1003 			.decrypt = cc_cipher_decrypt,
1004 			.min_keysize = CC_HW_KEY_SIZE,
1005 			.max_keysize = CC_HW_KEY_SIZE,
1006 			.ivsize = AES_BLOCK_SIZE,
1007 			},
1008 		.cipher_mode = DRV_CIPHER_XTS,
1009 		.flow_mode = S_DIN_to_AES,
1010 		.data_unit = 4096,
1011 		.min_hw_rev = CC_HW_REV_712,
1012 		.std_body = CC_STD_NIST,
1013 		.sec_func = true,
1014 	},
1015 	{
1016 		.name = "essiv(paes)",
1017 		.driver_name = "essiv-paes-ccree",
1018 		.blocksize = AES_BLOCK_SIZE,
1019 		.template_skcipher = {
1020 			.setkey = cc_cipher_sethkey,
1021 			.encrypt = cc_cipher_encrypt,
1022 			.decrypt = cc_cipher_decrypt,
1023 			.min_keysize = CC_HW_KEY_SIZE,
1024 			.max_keysize = CC_HW_KEY_SIZE,
1025 			.ivsize = AES_BLOCK_SIZE,
1026 			},
1027 		.cipher_mode = DRV_CIPHER_ESSIV,
1028 		.flow_mode = S_DIN_to_AES,
1029 		.min_hw_rev = CC_HW_REV_712,
1030 		.std_body = CC_STD_NIST,
1031 		.sec_func = true,
1032 	},
1033 	{
1034 		.name = "essiv512(paes)",
1035 		.driver_name = "essiv-paes-du512-ccree",
1036 		.blocksize = AES_BLOCK_SIZE,
1037 		.template_skcipher = {
1038 			.setkey = cc_cipher_sethkey,
1039 			.encrypt = cc_cipher_encrypt,
1040 			.decrypt = cc_cipher_decrypt,
1041 			.min_keysize = CC_HW_KEY_SIZE,
1042 			.max_keysize = CC_HW_KEY_SIZE,
1043 			.ivsize = AES_BLOCK_SIZE,
1044 			},
1045 		.cipher_mode = DRV_CIPHER_ESSIV,
1046 		.flow_mode = S_DIN_to_AES,
1047 		.data_unit = 512,
1048 		.min_hw_rev = CC_HW_REV_712,
1049 		.std_body = CC_STD_NIST,
1050 		.sec_func = true,
1051 	},
1052 	{
1053 		.name = "essiv4096(paes)",
1054 		.driver_name = "essiv-paes-du4096-ccree",
1055 		.blocksize = AES_BLOCK_SIZE,
1056 		.template_skcipher = {
1057 			.setkey = cc_cipher_sethkey,
1058 			.encrypt = cc_cipher_encrypt,
1059 			.decrypt = cc_cipher_decrypt,
1060 			.min_keysize = CC_HW_KEY_SIZE,
1061 			.max_keysize = CC_HW_KEY_SIZE,
1062 			.ivsize = AES_BLOCK_SIZE,
1063 			},
1064 		.cipher_mode = DRV_CIPHER_ESSIV,
1065 		.flow_mode = S_DIN_to_AES,
1066 		.data_unit = 4096,
1067 		.min_hw_rev = CC_HW_REV_712,
1068 		.std_body = CC_STD_NIST,
1069 		.sec_func = true,
1070 	},
1071 	{
1072 		.name = "bitlocker(paes)",
1073 		.driver_name = "bitlocker-paes-ccree",
1074 		.blocksize = AES_BLOCK_SIZE,
1075 		.template_skcipher = {
1076 			.setkey = cc_cipher_sethkey,
1077 			.encrypt = cc_cipher_encrypt,
1078 			.decrypt = cc_cipher_decrypt,
1079 			.min_keysize = CC_HW_KEY_SIZE,
1080 			.max_keysize = CC_HW_KEY_SIZE,
1081 			.ivsize = AES_BLOCK_SIZE,
1082 			},
1083 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1084 		.flow_mode = S_DIN_to_AES,
1085 		.min_hw_rev = CC_HW_REV_712,
1086 		.std_body = CC_STD_NIST,
1087 		.sec_func = true,
1088 	},
1089 	{
1090 		.name = "bitlocker512(paes)",
1091 		.driver_name = "bitlocker-paes-du512-ccree",
1092 		.blocksize = AES_BLOCK_SIZE,
1093 		.template_skcipher = {
1094 			.setkey = cc_cipher_sethkey,
1095 			.encrypt = cc_cipher_encrypt,
1096 			.decrypt = cc_cipher_decrypt,
1097 			.min_keysize = CC_HW_KEY_SIZE,
1098 			.max_keysize = CC_HW_KEY_SIZE,
1099 			.ivsize = AES_BLOCK_SIZE,
1100 			},
1101 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1102 		.flow_mode = S_DIN_to_AES,
1103 		.data_unit = 512,
1104 		.min_hw_rev = CC_HW_REV_712,
1105 		.std_body = CC_STD_NIST,
1106 		.sec_func = true,
1107 	},
1108 	{
1109 		.name = "bitlocker4096(paes)",
1110 		.driver_name = "bitlocker-paes-du4096-ccree",
1111 		.blocksize = AES_BLOCK_SIZE,
1112 		.template_skcipher = {
1113 			.setkey = cc_cipher_sethkey,
1114 			.encrypt = cc_cipher_encrypt,
1115 			.decrypt = cc_cipher_decrypt,
1116 			.min_keysize = CC_HW_KEY_SIZE,
1117 			.max_keysize =  CC_HW_KEY_SIZE,
1118 			.ivsize = AES_BLOCK_SIZE,
1119 			},
1120 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1121 		.flow_mode = S_DIN_to_AES,
1122 		.data_unit = 4096,
1123 		.min_hw_rev = CC_HW_REV_712,
1124 		.std_body = CC_STD_NIST,
1125 		.sec_func = true,
1126 	},
1127 	{
1128 		.name = "ecb(paes)",
1129 		.driver_name = "ecb-paes-ccree",
1130 		.blocksize = AES_BLOCK_SIZE,
1131 		.template_skcipher = {
1132 			.setkey = cc_cipher_sethkey,
1133 			.encrypt = cc_cipher_encrypt,
1134 			.decrypt = cc_cipher_decrypt,
1135 			.min_keysize = CC_HW_KEY_SIZE,
1136 			.max_keysize = CC_HW_KEY_SIZE,
1137 			.ivsize = 0,
1138 			},
1139 		.cipher_mode = DRV_CIPHER_ECB,
1140 		.flow_mode = S_DIN_to_AES,
1141 		.min_hw_rev = CC_HW_REV_712,
1142 		.std_body = CC_STD_NIST,
1143 		.sec_func = true,
1144 	},
1145 	{
1146 		.name = "cbc(paes)",
1147 		.driver_name = "cbc-paes-ccree",
1148 		.blocksize = AES_BLOCK_SIZE,
1149 		.template_skcipher = {
1150 			.setkey = cc_cipher_sethkey,
1151 			.encrypt = cc_cipher_encrypt,
1152 			.decrypt = cc_cipher_decrypt,
1153 			.min_keysize = CC_HW_KEY_SIZE,
1154 			.max_keysize = CC_HW_KEY_SIZE,
1155 			.ivsize = AES_BLOCK_SIZE,
1156 		},
1157 		.cipher_mode = DRV_CIPHER_CBC,
1158 		.flow_mode = S_DIN_to_AES,
1159 		.min_hw_rev = CC_HW_REV_712,
1160 		.std_body = CC_STD_NIST,
1161 		.sec_func = true,
1162 	},
1163 	{
1164 		.name = "ofb(paes)",
1165 		.driver_name = "ofb-paes-ccree",
1166 		.blocksize = AES_BLOCK_SIZE,
1167 		.template_skcipher = {
1168 			.setkey = cc_cipher_sethkey,
1169 			.encrypt = cc_cipher_encrypt,
1170 			.decrypt = cc_cipher_decrypt,
1171 			.min_keysize = CC_HW_KEY_SIZE,
1172 			.max_keysize = CC_HW_KEY_SIZE,
1173 			.ivsize = AES_BLOCK_SIZE,
1174 			},
1175 		.cipher_mode = DRV_CIPHER_OFB,
1176 		.flow_mode = S_DIN_to_AES,
1177 		.min_hw_rev = CC_HW_REV_712,
1178 		.std_body = CC_STD_NIST,
1179 		.sec_func = true,
1180 	},
1181 	{
1182 		.name = "cts(cbc(paes))",
1183 		.driver_name = "cts-cbc-paes-ccree",
1184 		.blocksize = AES_BLOCK_SIZE,
1185 		.template_skcipher = {
1186 			.setkey = cc_cipher_sethkey,
1187 			.encrypt = cc_cipher_encrypt,
1188 			.decrypt = cc_cipher_decrypt,
1189 			.min_keysize = CC_HW_KEY_SIZE,
1190 			.max_keysize = CC_HW_KEY_SIZE,
1191 			.ivsize = AES_BLOCK_SIZE,
1192 			},
1193 		.cipher_mode = DRV_CIPHER_CBC_CTS,
1194 		.flow_mode = S_DIN_to_AES,
1195 		.min_hw_rev = CC_HW_REV_712,
1196 		.std_body = CC_STD_NIST,
1197 		.sec_func = true,
1198 	},
1199 	{
1200 		.name = "ctr(paes)",
1201 		.driver_name = "ctr-paes-ccree",
1202 		.blocksize = 1,
1203 		.template_skcipher = {
1204 			.setkey = cc_cipher_sethkey,
1205 			.encrypt = cc_cipher_encrypt,
1206 			.decrypt = cc_cipher_decrypt,
1207 			.min_keysize = CC_HW_KEY_SIZE,
1208 			.max_keysize = CC_HW_KEY_SIZE,
1209 			.ivsize = AES_BLOCK_SIZE,
1210 			},
1211 		.cipher_mode = DRV_CIPHER_CTR,
1212 		.flow_mode = S_DIN_to_AES,
1213 		.min_hw_rev = CC_HW_REV_712,
1214 		.std_body = CC_STD_NIST,
1215 		.sec_func = true,
1216 	},
1217 	{
1218 		/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
1219 		 * for the reason why this differs from the generic
1220 		 * implementation.
1221 		 */
1222 		.name = "xts(aes)",
1223 		.driver_name = "xts-aes-ccree",
1224 		.blocksize = 1,
1225 		.template_skcipher = {
1226 			.setkey = cc_cipher_setkey,
1227 			.encrypt = cc_cipher_encrypt,
1228 			.decrypt = cc_cipher_decrypt,
1229 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1230 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1231 			.ivsize = AES_BLOCK_SIZE,
1232 			},
1233 		.cipher_mode = DRV_CIPHER_XTS,
1234 		.flow_mode = S_DIN_to_AES,
1235 		.min_hw_rev = CC_HW_REV_630,
1236 		.std_body = CC_STD_NIST,
1237 	},
1238 	{
1239 		.name = "xts512(aes)",
1240 		.driver_name = "xts-aes-du512-ccree",
1241 		.blocksize = 1,
1242 		.template_skcipher = {
1243 			.setkey = cc_cipher_setkey,
1244 			.encrypt = cc_cipher_encrypt,
1245 			.decrypt = cc_cipher_decrypt,
1246 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1247 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1248 			.ivsize = AES_BLOCK_SIZE,
1249 			},
1250 		.cipher_mode = DRV_CIPHER_XTS,
1251 		.flow_mode = S_DIN_to_AES,
1252 		.data_unit = 512,
1253 		.min_hw_rev = CC_HW_REV_712,
1254 		.std_body = CC_STD_NIST,
1255 	},
1256 	{
1257 		.name = "xts4096(aes)",
1258 		.driver_name = "xts-aes-du4096-ccree",
1259 		.blocksize = 1,
1260 		.template_skcipher = {
1261 			.setkey = cc_cipher_setkey,
1262 			.encrypt = cc_cipher_encrypt,
1263 			.decrypt = cc_cipher_decrypt,
1264 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1265 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1266 			.ivsize = AES_BLOCK_SIZE,
1267 			},
1268 		.cipher_mode = DRV_CIPHER_XTS,
1269 		.flow_mode = S_DIN_to_AES,
1270 		.data_unit = 4096,
1271 		.min_hw_rev = CC_HW_REV_712,
1272 		.std_body = CC_STD_NIST,
1273 	},
1274 	{
1275 		.name = "essiv(aes)",
1276 		.driver_name = "essiv-aes-ccree",
1277 		.blocksize = AES_BLOCK_SIZE,
1278 		.template_skcipher = {
1279 			.setkey = cc_cipher_setkey,
1280 			.encrypt = cc_cipher_encrypt,
1281 			.decrypt = cc_cipher_decrypt,
1282 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1283 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1284 			.ivsize = AES_BLOCK_SIZE,
1285 			},
1286 		.cipher_mode = DRV_CIPHER_ESSIV,
1287 		.flow_mode = S_DIN_to_AES,
1288 		.min_hw_rev = CC_HW_REV_712,
1289 		.std_body = CC_STD_NIST,
1290 	},
1291 	{
1292 		.name = "essiv512(aes)",
1293 		.driver_name = "essiv-aes-du512-ccree",
1294 		.blocksize = AES_BLOCK_SIZE,
1295 		.template_skcipher = {
1296 			.setkey = cc_cipher_setkey,
1297 			.encrypt = cc_cipher_encrypt,
1298 			.decrypt = cc_cipher_decrypt,
1299 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1300 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1301 			.ivsize = AES_BLOCK_SIZE,
1302 			},
1303 		.cipher_mode = DRV_CIPHER_ESSIV,
1304 		.flow_mode = S_DIN_to_AES,
1305 		.data_unit = 512,
1306 		.min_hw_rev = CC_HW_REV_712,
1307 		.std_body = CC_STD_NIST,
1308 	},
1309 	{
1310 		.name = "essiv4096(aes)",
1311 		.driver_name = "essiv-aes-du4096-ccree",
1312 		.blocksize = AES_BLOCK_SIZE,
1313 		.template_skcipher = {
1314 			.setkey = cc_cipher_setkey,
1315 			.encrypt = cc_cipher_encrypt,
1316 			.decrypt = cc_cipher_decrypt,
1317 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1318 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1319 			.ivsize = AES_BLOCK_SIZE,
1320 			},
1321 		.cipher_mode = DRV_CIPHER_ESSIV,
1322 		.flow_mode = S_DIN_to_AES,
1323 		.data_unit = 4096,
1324 		.min_hw_rev = CC_HW_REV_712,
1325 		.std_body = CC_STD_NIST,
1326 	},
1327 	{
1328 		.name = "bitlocker(aes)",
1329 		.driver_name = "bitlocker-aes-ccree",
1330 		.blocksize = AES_BLOCK_SIZE,
1331 		.template_skcipher = {
1332 			.setkey = cc_cipher_setkey,
1333 			.encrypt = cc_cipher_encrypt,
1334 			.decrypt = cc_cipher_decrypt,
1335 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1336 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1337 			.ivsize = AES_BLOCK_SIZE,
1338 			},
1339 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1340 		.flow_mode = S_DIN_to_AES,
1341 		.min_hw_rev = CC_HW_REV_712,
1342 		.std_body = CC_STD_NIST,
1343 	},
1344 	{
1345 		.name = "bitlocker512(aes)",
1346 		.driver_name = "bitlocker-aes-du512-ccree",
1347 		.blocksize = AES_BLOCK_SIZE,
1348 		.template_skcipher = {
1349 			.setkey = cc_cipher_setkey,
1350 			.encrypt = cc_cipher_encrypt,
1351 			.decrypt = cc_cipher_decrypt,
1352 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1353 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1354 			.ivsize = AES_BLOCK_SIZE,
1355 			},
1356 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1357 		.flow_mode = S_DIN_to_AES,
1358 		.data_unit = 512,
1359 		.min_hw_rev = CC_HW_REV_712,
1360 		.std_body = CC_STD_NIST,
1361 	},
1362 	{
1363 		.name = "bitlocker4096(aes)",
1364 		.driver_name = "bitlocker-aes-du4096-ccree",
1365 		.blocksize = AES_BLOCK_SIZE,
1366 		.template_skcipher = {
1367 			.setkey = cc_cipher_setkey,
1368 			.encrypt = cc_cipher_encrypt,
1369 			.decrypt = cc_cipher_decrypt,
1370 			.min_keysize = AES_MIN_KEY_SIZE * 2,
1371 			.max_keysize = AES_MAX_KEY_SIZE * 2,
1372 			.ivsize = AES_BLOCK_SIZE,
1373 			},
1374 		.cipher_mode = DRV_CIPHER_BITLOCKER,
1375 		.flow_mode = S_DIN_to_AES,
1376 		.data_unit = 4096,
1377 		.min_hw_rev = CC_HW_REV_712,
1378 		.std_body = CC_STD_NIST,
1379 	},
1380 	{
1381 		.name = "ecb(aes)",
1382 		.driver_name = "ecb-aes-ccree",
1383 		.blocksize = AES_BLOCK_SIZE,
1384 		.template_skcipher = {
1385 			.setkey = cc_cipher_setkey,
1386 			.encrypt = cc_cipher_encrypt,
1387 			.decrypt = cc_cipher_decrypt,
1388 			.min_keysize = AES_MIN_KEY_SIZE,
1389 			.max_keysize = AES_MAX_KEY_SIZE,
1390 			.ivsize = 0,
1391 			},
1392 		.cipher_mode = DRV_CIPHER_ECB,
1393 		.flow_mode = S_DIN_to_AES,
1394 		.min_hw_rev = CC_HW_REV_630,
1395 		.std_body = CC_STD_NIST,
1396 	},
1397 	{
1398 		.name = "cbc(aes)",
1399 		.driver_name = "cbc-aes-ccree",
1400 		.blocksize = AES_BLOCK_SIZE,
1401 		.template_skcipher = {
1402 			.setkey = cc_cipher_setkey,
1403 			.encrypt = cc_cipher_encrypt,
1404 			.decrypt = cc_cipher_decrypt,
1405 			.min_keysize = AES_MIN_KEY_SIZE,
1406 			.max_keysize = AES_MAX_KEY_SIZE,
1407 			.ivsize = AES_BLOCK_SIZE,
1408 		},
1409 		.cipher_mode = DRV_CIPHER_CBC,
1410 		.flow_mode = S_DIN_to_AES,
1411 		.min_hw_rev = CC_HW_REV_630,
1412 		.std_body = CC_STD_NIST,
1413 	},
1414 	{
1415 		.name = "ofb(aes)",
1416 		.driver_name = "ofb-aes-ccree",
1417 		.blocksize = 1,
1418 		.template_skcipher = {
1419 			.setkey = cc_cipher_setkey,
1420 			.encrypt = cc_cipher_encrypt,
1421 			.decrypt = cc_cipher_decrypt,
1422 			.min_keysize = AES_MIN_KEY_SIZE,
1423 			.max_keysize = AES_MAX_KEY_SIZE,
1424 			.ivsize = AES_BLOCK_SIZE,
1425 			},
1426 		.cipher_mode = DRV_CIPHER_OFB,
1427 		.flow_mode = S_DIN_to_AES,
1428 		.min_hw_rev = CC_HW_REV_630,
1429 		.std_body = CC_STD_NIST,
1430 	},
1431 	{
1432 		.name = "cts(cbc(aes))",
1433 		.driver_name = "cts-cbc-aes-ccree",
1434 		.blocksize = AES_BLOCK_SIZE,
1435 		.template_skcipher = {
1436 			.setkey = cc_cipher_setkey,
1437 			.encrypt = cc_cipher_encrypt,
1438 			.decrypt = cc_cipher_decrypt,
1439 			.min_keysize = AES_MIN_KEY_SIZE,
1440 			.max_keysize = AES_MAX_KEY_SIZE,
1441 			.ivsize = AES_BLOCK_SIZE,
1442 			},
1443 		.cipher_mode = DRV_CIPHER_CBC_CTS,
1444 		.flow_mode = S_DIN_to_AES,
1445 		.min_hw_rev = CC_HW_REV_630,
1446 		.std_body = CC_STD_NIST,
1447 	},
1448 	{
1449 		.name = "ctr(aes)",
1450 		.driver_name = "ctr-aes-ccree",
1451 		.blocksize = 1,
1452 		.template_skcipher = {
1453 			.setkey = cc_cipher_setkey,
1454 			.encrypt = cc_cipher_encrypt,
1455 			.decrypt = cc_cipher_decrypt,
1456 			.min_keysize = AES_MIN_KEY_SIZE,
1457 			.max_keysize = AES_MAX_KEY_SIZE,
1458 			.ivsize = AES_BLOCK_SIZE,
1459 			},
1460 		.cipher_mode = DRV_CIPHER_CTR,
1461 		.flow_mode = S_DIN_to_AES,
1462 		.min_hw_rev = CC_HW_REV_630,
1463 		.std_body = CC_STD_NIST,
1464 	},
1465 	{
1466 		.name = "cbc(des3_ede)",
1467 		.driver_name = "cbc-3des-ccree",
1468 		.blocksize = DES3_EDE_BLOCK_SIZE,
1469 		.template_skcipher = {
1470 			.setkey = cc_cipher_setkey,
1471 			.encrypt = cc_cipher_encrypt,
1472 			.decrypt = cc_cipher_decrypt,
1473 			.min_keysize = DES3_EDE_KEY_SIZE,
1474 			.max_keysize = DES3_EDE_KEY_SIZE,
1475 			.ivsize = DES3_EDE_BLOCK_SIZE,
1476 			},
1477 		.cipher_mode = DRV_CIPHER_CBC,
1478 		.flow_mode = S_DIN_to_DES,
1479 		.min_hw_rev = CC_HW_REV_630,
1480 		.std_body = CC_STD_NIST,
1481 	},
1482 	{
1483 		.name = "ecb(des3_ede)",
1484 		.driver_name = "ecb-3des-ccree",
1485 		.blocksize = DES3_EDE_BLOCK_SIZE,
1486 		.template_skcipher = {
1487 			.setkey = cc_cipher_setkey,
1488 			.encrypt = cc_cipher_encrypt,
1489 			.decrypt = cc_cipher_decrypt,
1490 			.min_keysize = DES3_EDE_KEY_SIZE,
1491 			.max_keysize = DES3_EDE_KEY_SIZE,
1492 			.ivsize = 0,
1493 			},
1494 		.cipher_mode = DRV_CIPHER_ECB,
1495 		.flow_mode = S_DIN_to_DES,
1496 		.min_hw_rev = CC_HW_REV_630,
1497 		.std_body = CC_STD_NIST,
1498 	},
1499 	{
1500 		.name = "cbc(des)",
1501 		.driver_name = "cbc-des-ccree",
1502 		.blocksize = DES_BLOCK_SIZE,
1503 		.template_skcipher = {
1504 			.setkey = cc_cipher_setkey,
1505 			.encrypt = cc_cipher_encrypt,
1506 			.decrypt = cc_cipher_decrypt,
1507 			.min_keysize = DES_KEY_SIZE,
1508 			.max_keysize = DES_KEY_SIZE,
1509 			.ivsize = DES_BLOCK_SIZE,
1510 			},
1511 		.cipher_mode = DRV_CIPHER_CBC,
1512 		.flow_mode = S_DIN_to_DES,
1513 		.min_hw_rev = CC_HW_REV_630,
1514 		.std_body = CC_STD_NIST,
1515 	},
1516 	{
1517 		.name = "ecb(des)",
1518 		.driver_name = "ecb-des-ccree",
1519 		.blocksize = DES_BLOCK_SIZE,
1520 		.template_skcipher = {
1521 			.setkey = cc_cipher_setkey,
1522 			.encrypt = cc_cipher_encrypt,
1523 			.decrypt = cc_cipher_decrypt,
1524 			.min_keysize = DES_KEY_SIZE,
1525 			.max_keysize = DES_KEY_SIZE,
1526 			.ivsize = 0,
1527 			},
1528 		.cipher_mode = DRV_CIPHER_ECB,
1529 		.flow_mode = S_DIN_to_DES,
1530 		.min_hw_rev = CC_HW_REV_630,
1531 		.std_body = CC_STD_NIST,
1532 	},
1533 	{
1534 		.name = "cbc(sm4)",
1535 		.driver_name = "cbc-sm4-ccree",
1536 		.blocksize = SM4_BLOCK_SIZE,
1537 		.template_skcipher = {
1538 			.setkey = cc_cipher_setkey,
1539 			.encrypt = cc_cipher_encrypt,
1540 			.decrypt = cc_cipher_decrypt,
1541 			.min_keysize = SM4_KEY_SIZE,
1542 			.max_keysize = SM4_KEY_SIZE,
1543 			.ivsize = SM4_BLOCK_SIZE,
1544 			},
1545 		.cipher_mode = DRV_CIPHER_CBC,
1546 		.flow_mode = S_DIN_to_SM4,
1547 		.min_hw_rev = CC_HW_REV_713,
1548 		.std_body = CC_STD_OSCCA,
1549 	},
1550 	{
1551 		.name = "ecb(sm4)",
1552 		.driver_name = "ecb-sm4-ccree",
1553 		.blocksize = SM4_BLOCK_SIZE,
1554 		.template_skcipher = {
1555 			.setkey = cc_cipher_setkey,
1556 			.encrypt = cc_cipher_encrypt,
1557 			.decrypt = cc_cipher_decrypt,
1558 			.min_keysize = SM4_KEY_SIZE,
1559 			.max_keysize = SM4_KEY_SIZE,
1560 			.ivsize = 0,
1561 			},
1562 		.cipher_mode = DRV_CIPHER_ECB,
1563 		.flow_mode = S_DIN_to_SM4,
1564 		.min_hw_rev = CC_HW_REV_713,
1565 		.std_body = CC_STD_OSCCA,
1566 	},
1567 	{
1568 		.name = "ctr(sm4)",
1569 		.driver_name = "ctr-sm4-ccree",
1570 		.blocksize = 1,
1571 		.template_skcipher = {
1572 			.setkey = cc_cipher_setkey,
1573 			.encrypt = cc_cipher_encrypt,
1574 			.decrypt = cc_cipher_decrypt,
1575 			.min_keysize = SM4_KEY_SIZE,
1576 			.max_keysize = SM4_KEY_SIZE,
1577 			.ivsize = SM4_BLOCK_SIZE,
1578 			},
1579 		.cipher_mode = DRV_CIPHER_CTR,
1580 		.flow_mode = S_DIN_to_SM4,
1581 		.min_hw_rev = CC_HW_REV_713,
1582 		.std_body = CC_STD_OSCCA,
1583 	},
1584 	{
1585 		.name = "cbc(psm4)",
1586 		.driver_name = "cbc-psm4-ccree",
1587 		.blocksize = SM4_BLOCK_SIZE,
1588 		.template_skcipher = {
1589 			.setkey = cc_cipher_sethkey,
1590 			.encrypt = cc_cipher_encrypt,
1591 			.decrypt = cc_cipher_decrypt,
1592 			.min_keysize = CC_HW_KEY_SIZE,
1593 			.max_keysize = CC_HW_KEY_SIZE,
1594 			.ivsize = SM4_BLOCK_SIZE,
1595 			},
1596 		.cipher_mode = DRV_CIPHER_CBC,
1597 		.flow_mode = S_DIN_to_SM4,
1598 		.min_hw_rev = CC_HW_REV_713,
1599 		.std_body = CC_STD_OSCCA,
1600 		.sec_func = true,
1601 	},
1602 	{
1603 		.name = "ctr(psm4)",
1604 		.driver_name = "ctr-psm4-ccree",
1605 		.blocksize = SM4_BLOCK_SIZE,
1606 		.template_skcipher = {
1607 			.setkey = cc_cipher_sethkey,
1608 			.encrypt = cc_cipher_encrypt,
1609 			.decrypt = cc_cipher_decrypt,
1610 			.min_keysize = CC_HW_KEY_SIZE,
1611 			.max_keysize = CC_HW_KEY_SIZE,
1612 			.ivsize = SM4_BLOCK_SIZE,
1613 			},
1614 		.cipher_mode = DRV_CIPHER_CTR,
1615 		.flow_mode = S_DIN_to_SM4,
1616 		.min_hw_rev = CC_HW_REV_713,
1617 		.std_body = CC_STD_OSCCA,
1618 		.sec_func = true,
1619 	},
1620 };
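/*
 * Note (added for clarity): the protected key ("paes"/"psm4") templates
 * above share the encrypt/decrypt entry points with the standard ones and
 * differ only in their setkey handler (cc_cipher_sethkey instead of
 * cc_cipher_setkey) and in their key size, CC_HW_KEY_SIZE, which is the size
 * of the slot token rather than of real key material.  The min_hw_rev,
 * std_body and sec_func fields are the filters cc_cipher_alloc() applies
 * below when deciding which entries to register.
 */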
1621 
1622 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
1623 					   struct device *dev)
1624 {
1625 	struct cc_crypto_alg *t_alg;
1626 	struct skcipher_alg *alg;
1627 
1628 	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
1629 	if (!t_alg)
1630 		return ERR_PTR(-ENOMEM);
1631 
1632 	alg = &t_alg->skcipher_alg;
1633 
1634 	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));
1635 
1636 	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
1637 	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1638 		 tmpl->driver_name);
1639 	alg->base.cra_module = THIS_MODULE;
1640 	alg->base.cra_priority = CC_CRA_PRIO;
1641 	alg->base.cra_blocksize = tmpl->blocksize;
1642 	alg->base.cra_alignmask = 0;
1643 	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);
1644 
1645 	alg->base.cra_init = cc_cipher_init;
1646 	alg->base.cra_exit = cc_cipher_exit;
1647 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1648 
1649 	t_alg->cipher_mode = tmpl->cipher_mode;
1650 	t_alg->flow_mode = tmpl->flow_mode;
1651 	t_alg->data_unit = tmpl->data_unit;
1652 
1653 	return t_alg;
1654 }
1655 
1656 int cc_cipher_free(struct cc_drvdata *drvdata)
1657 {
1658 	struct cc_crypto_alg *t_alg, *n;
1659 
1660 	/* Remove registered algs */
1661 	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
1662 		crypto_unregister_skcipher(&t_alg->skcipher_alg);
1663 		list_del(&t_alg->entry);
1664 	}
1665 	return 0;
1666 }
1667 
1668 int cc_cipher_alloc(struct cc_drvdata *drvdata)
1669 {
1670 	struct cc_crypto_alg *t_alg;
1671 	struct device *dev = drvdata_to_dev(drvdata);
1672 	int rc = -ENOMEM;
1673 	int alg;
1674 
1675 	INIT_LIST_HEAD(&drvdata->alg_list);
1676 
1677 	/* Linux crypto */
1678 	dev_dbg(dev, "Number of algorithms = %zu\n",
1679 		ARRAY_SIZE(skcipher_algs));
1680 	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
1681 		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
1682 		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
1683 		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
1684 			continue;
1685 
1686 		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
1687 		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
1688 		if (IS_ERR(t_alg)) {
1689 			rc = PTR_ERR(t_alg);
1690 			dev_err(dev, "%s alg allocation failed\n",
1691 				skcipher_algs[alg].driver_name);
1692 			goto fail0;
1693 		}
1694 		t_alg->drvdata = drvdata;
1695 
1696 		dev_dbg(dev, "registering %s\n",
1697 			skcipher_algs[alg].driver_name);
1698 		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
1699 		dev_dbg(dev, "%s alg registration rc = %x\n",
1700 			t_alg->skcipher_alg.base.cra_driver_name, rc);
1701 		if (rc) {
1702 			dev_err(dev, "%s alg registration failed\n",
1703 				t_alg->skcipher_alg.base.cra_driver_name);
1704 			goto fail0;
1705 		}
1706 
1707 		list_add_tail(&t_alg->entry, &drvdata->alg_list);
1708 		dev_dbg(dev, "Registered %s\n",
1709 			t_alg->skcipher_alg.base.cra_driver_name);
1710 	}
1711 	return 0;
1712 
1713 fail0:
1714 	cc_cipher_free(drvdata);
1715 	return rc;
1716 }
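/*
 * Note (added for clarity): this registration loop is where the template
 * table is pruned per device instance.  On a CryptoCell 703, for example,
 * which reports hw_rev 713 but an OSCCA-only std_bodies mask (per
 * cc_driver.c), only the sm4/psm4 entries survive, and an instance with
 * security disabled additionally drops every sec_func entry, i.e. all of
 * the protected key algorithms.
 */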
1717