xref: /openbmc/linux/drivers/crypto/inside-secure/safexcel_cipher.c (revision 1188f7f111c61394ec56beb8e30322305a8220b6)
1  // SPDX-License-Identifier: GPL-2.0
2  /*
3   * Copyright (C) 2017 Marvell
4   *
5   * Antoine Tenart <antoine.tenart@free-electrons.com>
6   */
7  
8  #include <asm/unaligned.h>
9  #include <linux/device.h>
10  #include <linux/dma-mapping.h>
11  #include <linux/dmapool.h>
12  #include <crypto/aead.h>
13  #include <crypto/aes.h>
14  #include <crypto/authenc.h>
15  #include <crypto/chacha.h>
16  #include <crypto/ctr.h>
17  #include <crypto/internal/des.h>
18  #include <crypto/gcm.h>
19  #include <crypto/ghash.h>
20  #include <crypto/poly1305.h>
21  #include <crypto/sha1.h>
22  #include <crypto/sha2.h>
23  #include <crypto/sm3.h>
24  #include <crypto/sm4.h>
25  #include <crypto/xts.h>
26  #include <crypto/skcipher.h>
27  #include <crypto/internal/aead.h>
28  #include <crypto/internal/skcipher.h>
29  
30  #include "safexcel.h"
31  
32  enum safexcel_cipher_direction {
33  	SAFEXCEL_ENCRYPT,
34  	SAFEXCEL_DECRYPT,
35  };
36  
37  enum safexcel_cipher_alg {
38  	SAFEXCEL_DES,
39  	SAFEXCEL_3DES,
40  	SAFEXCEL_AES,
41  	SAFEXCEL_CHACHA20,
42  	SAFEXCEL_SM4,
43  };
44  
45  struct safexcel_cipher_ctx {
46  	struct safexcel_context base;
47  	struct safexcel_crypto_priv *priv;
48  
49  	u32 mode;
50  	enum safexcel_cipher_alg alg;
51  	u8 aead; /* !=0=AEAD, 2=IPsec ESP AEAD, 3=IPsec ESP GMAC */
52  	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
53  	u8 aadskip;
54  	u8 blocksz;
55  	u32 ivmask;
56  	u32 ctrinit;
57  
58  	__le32 key[16];
59  	u32 nonce;
60  	unsigned int key_len, xts;
61  
62  	/* All the below is AEAD specific */
63  	u32 hash_alg;
64  	u32 state_sz;
65  
66  	struct crypto_aead *fback;
67  };
68  
69  struct safexcel_cipher_req {
70  	enum safexcel_cipher_direction direction;
71  	/* Number of result descriptors associated to the request */
72  	unsigned int rdescs;
73  	bool needs_inv;
74  	int  nr_src, nr_dst;
75  };
76  
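/*
 * Load the IV into the 4-word token area of the command descriptor.
 * Returns the number of 32-bit token words consumed: 4 for CTR mode
 * (32-bit nonce + 64-bit IV + 32-bit counter) and for ChaCha20
 * (96-bit nonce + 32-bit counter), otherwise blocksz / 4 words of
 * plain block-cipher IV.
 */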
77  static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
78  				struct safexcel_command_desc *cdesc)
79  {
80  	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
81  		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
82  		/* 32 bit nonce */
83  		cdesc->control_data.token[0] = ctx->nonce;
84  		/* 64 bit IV part */
85  		memcpy(&cdesc->control_data.token[1], iv, 8);
86  		/* 32 bit counter, start at 0 or 1 (big endian!) */
87  		cdesc->control_data.token[3] =
88  			(__force u32)cpu_to_be32(ctx->ctrinit);
89  		return 4;
90  	}
91  	if (ctx->alg == SAFEXCEL_CHACHA20) {
92  		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
93  		/* 96 bit nonce part */
94  		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
95  		/* 32 bit counter */
96  		cdesc->control_data.token[3] = *(u32 *)iv;
97  		return 4;
98  	}
99  
100  	cdesc->control_data.options |= ctx->ivmask;
101  	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
102  	return ctx->blocksz / sizeof(u32);
103  }
104  
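/*
 * Build the single crypto "direction" instruction for a plain skcipher
 * request. If the IV already fills all 4 token words of the command
 * descriptor, the instruction spills over into the additional token
 * area (atoken); otherwise it fits inside the descriptor itself and is
 * padded with a NOP.
 */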
105  static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
106  				    struct safexcel_command_desc *cdesc,
107  				    struct safexcel_token *atoken,
108  				    u32 length)
109  {
110  	struct safexcel_token *token;
111  	int ivlen;
112  
113  	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
114  	if (ivlen == 4) {
115  		/* No space in cdesc, instruction moves to atoken */
116  		cdesc->additional_cdata_size = 1;
117  		token = atoken;
118  	} else {
119  		/* Everything fits in cdesc */
120  		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
121  		/* Need to pad with NOP */
122  		eip197_noop_token(&token[1]);
123  	}
124  
125  	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
126  	token->packet_length = length;
127  	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
128  		      EIP197_TOKEN_STAT_LAST_HASH;
129  	token->instructions = EIP197_TOKEN_INS_LAST |
130  			      EIP197_TOKEN_INS_TYPE_CRYPTO |
131  			      EIP197_TOKEN_INS_TYPE_OUTPUT;
132  }
133  
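/*
 * Load the AEAD IV into the command descriptor. CTR and IPsec ESP modes
 * combine the 32-bit salt/nonce, the 64-bit explicit IV and a 32-bit
 * block counter; GCM and ChaCha20-Poly1305 use a 96-bit IV plus a
 * counter; plain CBC just copies one cipher block.
 */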
134  static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
135  			     struct safexcel_command_desc *cdesc)
136  {
137  	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
138  	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
139  		/* 32 bit nonce */
140  		cdesc->control_data.token[0] = ctx->nonce;
141  		/* 64 bit IV part */
142  		memcpy(&cdesc->control_data.token[1], iv, 8);
143  		/* 32 bit counter, start at 0 or 1 (big endian!) */
144  		cdesc->control_data.token[3] =
145  			(__force u32)cpu_to_be32(ctx->ctrinit);
146  		return;
147  	}
148  	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
149  		/* 96 bit IV part */
150  		memcpy(&cdesc->control_data.token[0], iv, 12);
151  		/* 32 bit counter, start at 0 or 1 (big endian!) */
152  		cdesc->control_data.token[3] =
153  			(__force u32)cpu_to_be32(ctx->ctrinit);
154  		return;
155  	}
156  	/* CBC */
157  	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
158  }
159  
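/*
 * Build the full AEAD instruction stream: hash the (adjusted) AAD,
 * optionally skip over the ESP IV, process the crypto payload, and
 * finally insert (encrypt) or retrieve-and-verify (decrypt) the ICV.
 * atoksize counts the extra token words, which are reported back via
 * the command descriptor's additional_cdata_size field.
 */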
160  static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
161  				struct safexcel_command_desc *cdesc,
162  				struct safexcel_token *atoken,
163  				enum safexcel_cipher_direction direction,
164  				u32 cryptlen, u32 assoclen, u32 digestsize)
165  {
166  	struct safexcel_token *aadref;
167  	int atoksize = 2; /* Start with minimum size */
168  	int assocadj = assoclen - ctx->aadskip, aadalign;
169  
170  	/* Always 4 dwords of embedded IV for AEAD modes */
171  	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
172  
173  	if (direction == SAFEXCEL_DECRYPT)
174  		cryptlen -= digestsize;
175  
176  	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
177  		/* Construct IV block B0 for the CBC-MAC */
178  		u8 *final_iv = (u8 *)cdesc->control_data.token;
179  		u8 *cbcmaciv = (u8 *)&atoken[1];
180  		__le32 *aadlen = (__le32 *)&atoken[5];
181  
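		/*
		 * The B0 flags byte follows RFC 3610: bit 6 is the Adata
		 * flag (AAD present), and ((digestsize - 2) << 2) equals
		 * ((digestsize - 2) / 2) << 3, i.e. the M' field encoding
		 * the ICV length.
		 */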
182  		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
183  			/* Length + nonce */
184  			cdesc->control_data.token[0] = ctx->nonce;
185  			/* Fixup flags byte */
186  			*(__le32 *)cbcmaciv =
187  				cpu_to_le32(ctx->nonce |
188  					    ((assocadj > 0) << 6) |
189  					    ((digestsize - 2) << 2));
190  			/* 64 bit IV part */
191  			memcpy(&cdesc->control_data.token[1], iv, 8);
192  			memcpy(cbcmaciv + 4, iv, 8);
193  			/* Start counter at 0 */
194  			cdesc->control_data.token[3] = 0;
195  			/* Message length */
196  			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
197  		} else {
198  			/* Variable length IV part */
199  			memcpy(final_iv, iv, 15 - iv[0]);
200  			memcpy(cbcmaciv, iv, 15 - iv[0]);
201  			/* Start variable length counter at 0 */
202  			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
203  			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
204  			/* fixup flags byte */
205  			cbcmaciv[0] |= ((assocadj > 0) << 6) |
206  				       ((digestsize - 2) << 2);
207  			/* insert lower 2 bytes of message length */
208  			cbcmaciv[14] = cryptlen >> 8;
209  			cbcmaciv[15] = cryptlen & 255;
210  		}
211  
212  		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
213  		atoken->packet_length = AES_BLOCK_SIZE +
214  					((assocadj > 0) << 1);
215  		atoken->stat = 0;
216  		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
217  				       EIP197_TOKEN_INS_TYPE_HASH;
218  
219  		if (likely(assocadj)) {
220  			*aadlen = cpu_to_le32((assocadj >> 8) |
221  					      (assocadj & 255) << 8);
222  			atoken += 6;
223  			atoksize += 7;
224  		} else {
225  			atoken += 5;
226  			atoksize += 6;
227  		}
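		/*
		 * RFC 3610 prepends the AAD length as a 16-bit big-endian
		 * value (the aadlen word built above), which is why the
		 * hash alignment below is computed over assocadj + 2.
		 */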
228  
229  		/* Process AAD data */
230  		aadref = atoken;
231  		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
232  		atoken->packet_length = assocadj;
233  		atoken->stat = 0;
234  		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
235  		atoken++;
236  
237  		/* For CCM only, align AAD data towards hash engine */
238  		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
239  		aadalign = (assocadj + 2) & 15;
240  		atoken->packet_length = assocadj && aadalign ?
241  						16 - aadalign :
242  						0;
243  		if (likely(cryptlen)) {
244  			atoken->stat = 0;
245  			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
246  		} else {
247  			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
248  			atoken->instructions = EIP197_TOKEN_INS_LAST |
249  					       EIP197_TOKEN_INS_TYPE_HASH;
250  		}
251  	} else {
252  		safexcel_aead_iv(ctx, iv, cdesc);
253  
254  		/* Process AAD data */
255  		aadref = atoken;
256  		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
257  		atoken->packet_length = assocadj;
258  		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
259  		atoken->instructions = EIP197_TOKEN_INS_LAST |
260  				       EIP197_TOKEN_INS_TYPE_HASH;
261  	}
262  	atoken++;
263  
264  	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
265  		/* For ESP mode (and not GMAC), skip over the IV */
266  		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
267  		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
268  		atoken->stat = 0;
269  		atoken->instructions = 0;
270  		atoken++;
271  		atoksize++;
272  	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
273  			    direction == SAFEXCEL_DECRYPT)) {
274  		/* Poly-chacha decryption needs a dummy NOP here ... */
275  		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
276  		atoken->packet_length = 16; /* According to Op Manual */
277  		atoken->stat = 0;
278  		atoken->instructions = 0;
279  		atoken++;
280  		atoksize++;
281  	}
282  
283  	if (ctx->xcm) {
284  		/* For GCM and CCM, obtain enc(Y0) */
285  		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
286  		atoken->packet_length = 0;
287  		atoken->stat = 0;
288  		atoken->instructions = AES_BLOCK_SIZE;
289  		atoken++;
290  
291  		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
292  		atoken->packet_length = AES_BLOCK_SIZE;
293  		atoken->stat = 0;
294  		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
295  				       EIP197_TOKEN_INS_TYPE_CRYPTO;
296  		atoken++;
297  		atoksize += 2;
298  	}
299  
300  	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
301  		/* Fixup stat field for AAD direction instruction */
302  		aadref->stat = 0;
303  
304  		/* Process crypto data */
305  		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
306  		atoken->packet_length = cryptlen;
307  
308  		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
309  			/* Fixup instruction field for AAD dir instruction */
310  			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
311  
312  			/* Do not send to crypt engine in case of GMAC */
313  			atoken->instructions = EIP197_TOKEN_INS_LAST |
314  					       EIP197_TOKEN_INS_TYPE_HASH |
315  					       EIP197_TOKEN_INS_TYPE_OUTPUT;
316  		} else {
317  			atoken->instructions = EIP197_TOKEN_INS_LAST |
318  					       EIP197_TOKEN_INS_TYPE_CRYPTO |
319  					       EIP197_TOKEN_INS_TYPE_HASH |
320  					       EIP197_TOKEN_INS_TYPE_OUTPUT;
321  		}
322  
323  		cryptlen &= 15;
324  		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
325  			atoken->stat = 0;
326  			/* For CCM only, pad crypto data to the hash engine */
327  			atoken++;
328  			atoksize++;
329  			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
330  			atoken->packet_length = 16 - cryptlen;
331  			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
332  			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
333  		} else {
334  			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
335  		}
336  		atoken++;
337  		atoksize++;
338  	}
339  
340  	if (direction == SAFEXCEL_ENCRYPT) {
341  		/* Append ICV */
342  		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
343  		atoken->packet_length = digestsize;
344  		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
345  			       EIP197_TOKEN_STAT_LAST_PACKET;
346  		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
347  				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
348  	} else {
349  		/* Extract ICV */
350  		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
351  		atoken->packet_length = digestsize;
352  		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
353  			       EIP197_TOKEN_STAT_LAST_PACKET;
354  		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
355  		atoken++;
356  		atoksize++;
357  
358  		/* Verify ICV */
359  		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
360  		atoken->packet_length = digestsize |
361  					EIP197_TOKEN_HASH_RESULT_VERIFY;
362  		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
363  			       EIP197_TOKEN_STAT_LAST_PACKET;
364  		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
365  	}
366  
367  	/* Fixup length of the token in the command descriptor */
368  	cdesc->additional_cdata_size = atoksize;
369  }
370  
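/*
 * Set the AES key for the plain skcipher modes. aes_expandkey() doubles
 * as key length validation. If the engine caches transform records
 * (EIP197_TRC_CACHE) and the key actually changed, the cached record
 * must be invalidated before it can be reused.
 */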
371  static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
372  					const u8 *key, unsigned int len)
373  {
374  	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
375  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
376  	struct safexcel_crypto_priv *priv = ctx->base.priv;
377  	struct crypto_aes_ctx aes;
378  	int ret, i;
379  
380  	ret = aes_expandkey(&aes, key, len);
381  	if (ret)
382  		return ret;
383  
384  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
385  		for (i = 0; i < len / sizeof(u32); i++) {
386  			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
387  				ctx->base.needs_inv = true;
388  				break;
389  			}
390  		}
391  	}
392  
393  	for (i = 0; i < len / sizeof(u32); i++)
394  		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
395  
396  	ctx->key_len = len;
397  
398  	memzero_explicit(&aes, sizeof(aes));
399  	return 0;
400  }
401  
402  static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
403  				unsigned int len)
404  {
405  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
406  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
407  	struct safexcel_crypto_priv *priv = ctx->base.priv;
408  	struct crypto_authenc_keys keys;
409  	struct crypto_aes_ctx aes;
410  	int err = -EINVAL, i;
411  	const char *alg;
412  
413  	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
414  		goto badkey;
415  
416  	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
417  		/* Must have at least space for the nonce here */
418  		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
419  			goto badkey;
420  		/* last 4 bytes of key are the nonce! */
421  		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
422  				      CTR_RFC3686_NONCE_SIZE);
423  		/* exclude the nonce here */
424  		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
425  	}
426  
427  	/* Encryption key */
428  	switch (ctx->alg) {
429  	case SAFEXCEL_DES:
430  		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
431  		if (unlikely(err))
432  			goto badkey;
433  		break;
434  	case SAFEXCEL_3DES:
435  		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
436  		if (unlikely(err))
437  			goto badkey;
438  		break;
439  	case SAFEXCEL_AES:
440  		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
441  		if (unlikely(err))
442  			goto badkey;
443  		break;
444  	case SAFEXCEL_SM4:
445  		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
446  			goto badkey;
447  		break;
448  	default:
449  		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
450  		goto badkey;
451  	}
452  
453  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
454  		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
455  			if (le32_to_cpu(ctx->key[i]) !=
456  			    ((u32 *)keys.enckey)[i]) {
457  				ctx->base.needs_inv = true;
458  				break;
459  			}
460  		}
461  	}
462  
463  	/* Auth key */
464  	switch (ctx->hash_alg) {
465  	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
466  		alg = "safexcel-sha1";
467  		break;
468  	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
469  		alg = "safexcel-sha224";
470  		break;
471  	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
472  		alg = "safexcel-sha256";
473  		break;
474  	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
475  		alg = "safexcel-sha384";
476  		break;
477  	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
478  		alg = "safexcel-sha512";
479  		break;
480  	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
481  		alg = "safexcel-sm3";
482  		break;
483  	default:
484  		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
485  		goto badkey;
486  	}
487  
488  	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
489  				 alg, ctx->state_sz))
490  		goto badkey;
491  
492  	/* Now copy the keys into the context */
493  	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
494  		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
495  	ctx->key_len = keys.enckeylen;
496  
497  	memzero_explicit(&keys, sizeof(keys));
498  	return 0;
499  
500  badkey:
501  	memzero_explicit(&keys, sizeof(keys));
502  	return err;
503  }
504  
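/*
 * Fill in the two context control words: key/digest configuration and
 * context size (including the ipad/opad or XCM hash state for AEAD),
 * the direction-dependent operation type, and the cipher algorithm and
 * key size.
 */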
505  static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
506  				    struct crypto_async_request *async,
507  				    struct safexcel_cipher_req *sreq,
508  				    struct safexcel_command_desc *cdesc)
509  {
510  	struct safexcel_crypto_priv *priv = ctx->base.priv;
511  	int ctrl_size = ctx->key_len / sizeof(u32);
512  
513  	cdesc->control_data.control1 = ctx->mode;
514  
515  	if (ctx->aead) {
516  		/* Take into account the ipad+opad digests */
517  		if (ctx->xcm) {
518  			ctrl_size += ctx->state_sz / sizeof(u32);
519  			cdesc->control_data.control0 =
520  				CONTEXT_CONTROL_KEY_EN |
521  				CONTEXT_CONTROL_DIGEST_XCM |
522  				ctx->hash_alg |
523  				CONTEXT_CONTROL_SIZE(ctrl_size);
524  		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
525  			/* Chacha20-Poly1305 */
526  			cdesc->control_data.control0 =
527  				CONTEXT_CONTROL_KEY_EN |
528  				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
529  				(sreq->direction == SAFEXCEL_ENCRYPT ?
530  					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
531  					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
532  				ctx->hash_alg |
533  				CONTEXT_CONTROL_SIZE(ctrl_size);
534  			return 0;
535  		} else {
536  			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
537  			cdesc->control_data.control0 =
538  				CONTEXT_CONTROL_KEY_EN |
539  				CONTEXT_CONTROL_DIGEST_HMAC |
540  				ctx->hash_alg |
541  				CONTEXT_CONTROL_SIZE(ctrl_size);
542  		}
543  
544  		if (sreq->direction == SAFEXCEL_ENCRYPT &&
545  		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
546  		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
547  			cdesc->control_data.control0 |=
548  				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
549  		else if (sreq->direction == SAFEXCEL_ENCRYPT)
550  			cdesc->control_data.control0 |=
551  				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
552  		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
553  			cdesc->control_data.control0 |=
554  				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
555  		else
556  			cdesc->control_data.control0 |=
557  				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
558  	} else {
559  		if (sreq->direction == SAFEXCEL_ENCRYPT)
560  			cdesc->control_data.control0 =
561  				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
562  				CONTEXT_CONTROL_KEY_EN |
563  				CONTEXT_CONTROL_SIZE(ctrl_size);
564  		else
565  			cdesc->control_data.control0 =
566  				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
567  				CONTEXT_CONTROL_KEY_EN |
568  				CONTEXT_CONTROL_SIZE(ctrl_size);
569  	}
570  
571  	if (ctx->alg == SAFEXCEL_DES) {
572  		cdesc->control_data.control0 |=
573  			CONTEXT_CONTROL_CRYPTO_ALG_DES;
574  	} else if (ctx->alg == SAFEXCEL_3DES) {
575  		cdesc->control_data.control0 |=
576  			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
577  	} else if (ctx->alg == SAFEXCEL_AES) {
578  		switch (ctx->key_len >> ctx->xts) {
579  		case AES_KEYSIZE_128:
580  			cdesc->control_data.control0 |=
581  				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
582  			break;
583  		case AES_KEYSIZE_192:
584  			cdesc->control_data.control0 |=
585  				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
586  			break;
587  		case AES_KEYSIZE_256:
588  			cdesc->control_data.control0 |=
589  				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
590  			break;
591  		default:
592  			dev_err(priv->dev, "aes keysize not supported: %u\n",
593  				ctx->key_len >> ctx->xts);
594  			return -EINVAL;
595  		}
596  	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
597  		cdesc->control_data.control0 |=
598  			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
599  	} else if (ctx->alg == SAFEXCEL_SM4) {
600  		cdesc->control_data.control0 |=
601  			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
602  	}
603  
604  	return 0;
605  }
606  
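/*
 * Handle the result of a regular cipher request: consume and check all
 * result descriptors, unmap the DMA buffers and, for CBC encryption,
 * copy the last output block back into the request as the next IV.
 */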
607  static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
608  				      struct crypto_async_request *async,
609  				      struct scatterlist *src,
610  				      struct scatterlist *dst,
611  				      unsigned int cryptlen,
612  				      struct safexcel_cipher_req *sreq,
613  				      bool *should_complete, int *ret)
614  {
615  	struct skcipher_request *areq = skcipher_request_cast(async);
616  	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
617  	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
618  	struct safexcel_result_desc *rdesc;
619  	int ndesc = 0;
620  
621  	*ret = 0;
622  
623  	if (unlikely(!sreq->rdescs))
624  		return 0;
625  
626  	while (sreq->rdescs--) {
627  		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
628  		if (IS_ERR(rdesc)) {
629  			dev_err(priv->dev,
630  				"cipher: result: could not retrieve the result descriptor\n");
631  			*ret = PTR_ERR(rdesc);
632  			break;
633  		}
634  
635  		if (likely(!*ret))
636  			*ret = safexcel_rdesc_check_errors(priv, rdesc);
637  
638  		ndesc++;
639  	}
640  
641  	safexcel_complete(priv, ring);
642  
643  	if (src == dst) {
644  		if (sreq->nr_src > 0)
645  			dma_unmap_sg(priv->dev, src, sreq->nr_src,
646  				     DMA_BIDIRECTIONAL);
647  	} else {
648  		if (sreq->nr_src > 0)
649  			dma_unmap_sg(priv->dev, src, sreq->nr_src,
650  				     DMA_TO_DEVICE);
651  		if (sreq->nr_dst > 0)
652  			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
653  				     DMA_FROM_DEVICE);
654  	}
655  
656  	/*
657  	 * Update IV in req from last crypto output word for CBC modes
658  	 */
659  	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
660  	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
661  		/* For encrypt take the last output word */
662  		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
663  				   crypto_skcipher_ivsize(skcipher),
664  				   (cryptlen -
665  				    crypto_skcipher_ivsize(skcipher)));
666  	}
667  
668  	*should_complete = true;
669  
670  	return ndesc;
671  }
672  
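/*
 * Map the request and emit the ring descriptors: one command descriptor
 * per source scatterlist entry and one result descriptor per (non-AAD)
 * destination entry. Zero-length input and AAD-only decryption get
 * dummy descriptors, as the engine always needs at least one of each.
 */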
673  static int safexcel_send_req(struct crypto_async_request *base, int ring,
674  			     struct safexcel_cipher_req *sreq,
675  			     struct scatterlist *src, struct scatterlist *dst,
676  			     unsigned int cryptlen, unsigned int assoclen,
677  			     unsigned int digestsize, u8 *iv, int *commands,
678  			     int *results)
679  {
680  	struct skcipher_request *areq = skcipher_request_cast(base);
681  	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
682  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
683  	struct safexcel_crypto_priv *priv = ctx->base.priv;
684  	struct safexcel_command_desc *cdesc;
685  	struct safexcel_command_desc *first_cdesc = NULL;
686  	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
687  	struct scatterlist *sg;
688  	unsigned int totlen;
689  	unsigned int totlen_src = cryptlen + assoclen;
690  	unsigned int totlen_dst = totlen_src;
691  	struct safexcel_token *atoken;
692  	int n_cdesc = 0, n_rdesc = 0;
693  	int queued, i, ret = 0;
694  	bool first = true;
695  
696  	sreq->nr_src = sg_nents_for_len(src, totlen_src);
697  
698  	if (ctx->aead) {
699  		/*
700  		 * AEAD has auth tag appended to output for encrypt and
701  		 * removed from the output for decrypt!
702  		 */
703  		if (sreq->direction == SAFEXCEL_DECRYPT)
704  			totlen_dst -= digestsize;
705  		else
706  			totlen_dst += digestsize;
707  
708  		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
709  		       &ctx->base.ipad, ctx->state_sz);
710  		if (!ctx->xcm)
711  			memcpy(ctx->base.ctxr->data + (ctx->key_len +
712  			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
713  			       ctx->state_sz);
714  	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
715  		   (sreq->direction == SAFEXCEL_DECRYPT)) {
716  		/*
717  		 * Save IV from last crypto input word for CBC modes in decrypt
718  	 * direction. Need to do this first in case of an in-place operation
719  		 * as it will be overwritten.
720  		 */
721  		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
722  				   crypto_skcipher_ivsize(skcipher),
723  				   (totlen_src -
724  				    crypto_skcipher_ivsize(skcipher)));
725  	}
726  
727  	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
728  
729  	/*
730  	 * Remember the actual input length; the source buffer length may be
731  	 * updated below in case of an in-place operation.
732  	 */
733  	totlen = totlen_src;
734  	queued = totlen_src;
735  
736  	if (src == dst) {
737  		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
738  		sreq->nr_dst = sreq->nr_src;
739  		if (unlikely((totlen_src || totlen_dst) &&
740  		    (sreq->nr_src <= 0))) {
741  			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
742  				max(totlen_src, totlen_dst));
743  			return -EINVAL;
744  		}
745  		if (sreq->nr_src > 0 &&
746  		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
747  			return -EIO;
748  	} else {
749  		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
750  			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
751  				totlen_src);
752  			return -EINVAL;
753  		}
754  
755  		if (sreq->nr_src > 0 &&
756  		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
757  			return -EIO;
758  
759  		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
760  			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
761  				totlen_dst);
762  			ret = -EINVAL;
763  			goto unmap;
764  		}
765  
766  		if (sreq->nr_dst > 0 &&
767  		    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
768  			ret = -EIO;
769  			goto unmap;
770  		}
771  	}
772  
773  	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
774  
775  	if (!totlen) {
776  		/*
777  		 * The EIP97 cannot deal with zero length input packets!
778  		 * So stuff a dummy command descriptor indicating a 1 byte
779  		 * (dummy) input packet, using the context record as source.
780  		 */
781  		first_cdesc = safexcel_add_cdesc(priv, ring,
782  						 1, 1, ctx->base.ctxr_dma,
783  						 1, 1, ctx->base.ctxr_dma,
784  						 &atoken);
785  		if (IS_ERR(first_cdesc)) {
786  			/* No space left in the command descriptor ring */
787  			ret = PTR_ERR(first_cdesc);
788  			goto cdesc_rollback;
789  		}
790  		n_cdesc = 1;
791  		goto skip_cdesc;
792  	}
793  
794  	/* command descriptors */
795  	for_each_sg(src, sg, sreq->nr_src, i) {
796  		int len = sg_dma_len(sg);
797  
798  		/* Do not overflow the request */
799  		if (queued < len)
800  			len = queued;
801  
802  		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
803  					   !(queued - len),
804  					   sg_dma_address(sg), len, totlen,
805  					   ctx->base.ctxr_dma, &atoken);
806  		if (IS_ERR(cdesc)) {
807  			/* No space left in the command descriptor ring */
808  			ret = PTR_ERR(cdesc);
809  			goto cdesc_rollback;
810  		}
811  
812  		if (!n_cdesc)
813  			first_cdesc = cdesc;
814  
815  		n_cdesc++;
816  		queued -= len;
817  		if (!queued)
818  			break;
819  	}
820  skip_cdesc:
821  	/* Add context control words and token to first command descriptor */
822  	safexcel_context_control(ctx, base, sreq, first_cdesc);
823  	if (ctx->aead)
824  		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
825  				    sreq->direction, cryptlen,
826  				    assoclen, digestsize);
827  	else
828  		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
829  					cryptlen);
830  
831  	/* result descriptors */
832  	for_each_sg(dst, sg, sreq->nr_dst, i) {
833  		bool last = (i == sreq->nr_dst - 1);
834  		u32 len = sg_dma_len(sg);
835  
836  		/* only allow the part of the buffer we know we need */
837  		if (len > totlen_dst)
838  			len = totlen_dst;
839  		if (unlikely(!len))
840  			break;
841  		totlen_dst -= len;
842  
843  		/* skip over AAD space in buffer - not written */
844  		if (assoclen) {
845  			if (assoclen >= len) {
846  				assoclen -= len;
847  				continue;
848  			}
849  			rdesc = safexcel_add_rdesc(priv, ring, first, last,
850  						   sg_dma_address(sg) +
851  						   assoclen,
852  						   len - assoclen);
853  			assoclen = 0;
854  		} else {
855  			rdesc = safexcel_add_rdesc(priv, ring, first, last,
856  						   sg_dma_address(sg),
857  						   len);
858  		}
859  		if (IS_ERR(rdesc)) {
860  			/* No space left in the result descriptor ring */
861  			ret = PTR_ERR(rdesc);
862  			goto rdesc_rollback;
863  		}
864  		if (first) {
865  			first_rdesc = rdesc;
866  			first = false;
867  		}
868  		n_rdesc++;
869  	}
870  
871  	if (unlikely(first)) {
872  		/*
873  		 * Special case: AEAD decrypt with only AAD data.
874  		 * In this case there is NO output data from the engine,
875  		 * but the engine still needs a result descriptor!
876  		 * Create a dummy one just for catching the result token.
877  		 */
878  		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
879  		if (IS_ERR(rdesc)) {
880  			/* No space left in the result descriptor ring */
881  			ret = PTR_ERR(rdesc);
882  			goto rdesc_rollback;
883  		}
884  		first_rdesc = rdesc;
885  		n_rdesc = 1;
886  	}
887  
888  	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
889  
890  	*commands = n_cdesc;
891  	*results = n_rdesc;
892  	return 0;
893  
894  rdesc_rollback:
895  	for (i = 0; i < n_rdesc; i++)
896  		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
897  cdesc_rollback:
898  	for (i = 0; i < n_cdesc; i++)
899  		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
900  unmap:
901  	if (src == dst) {
902  		if (sreq->nr_src > 0)
903  			dma_unmap_sg(priv->dev, src, sreq->nr_src,
904  				     DMA_BIDIRECTIONAL);
905  	} else {
906  		if (sreq->nr_src > 0)
907  			dma_unmap_sg(priv->dev, src, sreq->nr_src,
908  				     DMA_TO_DEVICE);
909  		if (sreq->nr_dst > 0)
910  			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
911  				     DMA_FROM_DEVICE);
912  	}
913  
914  	return ret;
915  }
916  
917  static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
918  				      int ring,
919  				      struct crypto_async_request *base,
920  				      struct safexcel_cipher_req *sreq,
921  				      bool *should_complete, int *ret)
922  {
923  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
924  	struct safexcel_result_desc *rdesc;
925  	int ndesc = 0, enq_ret;
926  
927  	*ret = 0;
928  
929  	if (unlikely(!sreq->rdescs))
930  		return 0;
931  
932  	while (sreq->rdescs--) {
933  		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
934  		if (IS_ERR(rdesc)) {
935  			dev_err(priv->dev,
936  				"cipher: invalidate: could not retrieve the result descriptor\n");
937  			*ret = PTR_ERR(rdesc);
938  			break;
939  		}
940  
941  		if (likely(!*ret))
942  			*ret = safexcel_rdesc_check_errors(priv, rdesc);
943  
944  		ndesc++;
945  	}
946  
947  	safexcel_complete(priv, ring);
948  
949  	if (ctx->base.exit_inv) {
950  		dma_pool_free(priv->context_pool, ctx->base.ctxr,
951  			      ctx->base.ctxr_dma);
952  
953  		*should_complete = true;
954  
955  		return ndesc;
956  	}
957  
958  	ring = safexcel_select_ring(priv);
959  	ctx->base.ring = ring;
960  
961  	spin_lock_bh(&priv->ring[ring].queue_lock);
962  	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
963  	spin_unlock_bh(&priv->ring[ring].queue_lock);
964  
965  	if (enq_ret != -EINPROGRESS)
966  		*ret = enq_ret;
967  
968  	queue_work(priv->ring[ring].workqueue,
969  		   &priv->ring[ring].work_data.work);
970  
971  	*should_complete = false;
972  
973  	return ndesc;
974  }
975  
976  static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
977  					   int ring,
978  					   struct crypto_async_request *async,
979  					   bool *should_complete, int *ret)
980  {
981  	struct skcipher_request *req = skcipher_request_cast(async);
982  	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
983  	int err;
984  
985  	if (sreq->needs_inv) {
986  		sreq->needs_inv = false;
987  		err = safexcel_handle_inv_result(priv, ring, async, sreq,
988  						 should_complete, ret);
989  	} else {
990  		err = safexcel_handle_req_result(priv, ring, async, req->src,
991  						 req->dst, req->cryptlen, sreq,
992  						 should_complete, ret);
993  	}
994  
995  	return err;
996  }
997  
998  static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
999  				       int ring,
1000  				       struct crypto_async_request *async,
1001  				       bool *should_complete, int *ret)
1002  {
1003  	struct aead_request *req = aead_request_cast(async);
1004  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1005  	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1006  	int err;
1007  
1008  	if (sreq->needs_inv) {
1009  		sreq->needs_inv = false;
1010  		err = safexcel_handle_inv_result(priv, ring, async, sreq,
1011  						 should_complete, ret);
1012  	} else {
1013  		err = safexcel_handle_req_result(priv, ring, async, req->src,
1014  						 req->dst,
1015  						 req->cryptlen + crypto_aead_authsize(tfm),
1016  						 sreq, should_complete, ret);
1017  	}
1018  
1019  	return err;
1020  }
1021  
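/*
 * Queue a context invalidation command for this transform; it always
 * takes exactly one command and one result descriptor.
 */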
1022  static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1023  				    int ring, int *commands, int *results)
1024  {
1025  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1026  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1027  	int ret;
1028  
1029  	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1030  	if (unlikely(ret))
1031  		return ret;
1032  
1033  	*commands = 1;
1034  	*results = 1;
1035  
1036  	return 0;
1037  }
1038  
1039  static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1040  				  int *commands, int *results)
1041  {
1042  	struct skcipher_request *req = skcipher_request_cast(async);
1043  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1044  	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1045  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1046  	int ret;
1047  
1048  	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1049  
1050  	if (sreq->needs_inv) {
1051  		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1052  	} else {
1053  		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1054  		u8 input_iv[AES_BLOCK_SIZE];
1055  
1056  		/*
1057  		 * Save input IV in case of CBC decrypt mode
1058  		 * Will be overwritten with output IV prior to use!
1059  		 */
1060  		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1061  
1062  		ret = safexcel_send_req(async, ring, sreq, req->src,
1063  					req->dst, req->cryptlen, 0, 0, input_iv,
1064  					commands, results);
1065  	}
1066  
1067  	sreq->rdescs = *results;
1068  	return ret;
1069  }
1070  
1071  static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1072  			      int *commands, int *results)
1073  {
1074  	struct aead_request *req = aead_request_cast(async);
1075  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1076  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1077  	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1078  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1079  	int ret;
1080  
1081  	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1082  
1083  	if (sreq->needs_inv)
1084  		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1085  	else
1086  		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1087  					req->cryptlen, req->assoclen,
1088  					crypto_aead_authsize(tfm), req->iv,
1089  					commands, results);
1090  	sreq->rdescs = *results;
1091  	return ret;
1092  }
1093  
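/*
 * Synchronously invalidate the transform's context record: flag the
 * dummy request as an invalidation, queue it on the transform's ring
 * and wait for the completion callback.
 */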
1094  static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1095  				    struct crypto_async_request *base,
1096  				    struct safexcel_cipher_req *sreq,
1097  				    struct crypto_wait *result)
1098  {
1099  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1100  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1101  	int ring = ctx->base.ring;
1102  	int err;
1103  
1104  	ctx = crypto_tfm_ctx(base->tfm);
1105  	ctx->base.exit_inv = true;
1106  	sreq->needs_inv = true;
1107  
1108  	spin_lock_bh(&priv->ring[ring].queue_lock);
1109  	crypto_enqueue_request(&priv->ring[ring].queue, base);
1110  	spin_unlock_bh(&priv->ring[ring].queue_lock);
1111  
1112  	queue_work(priv->ring[ring].workqueue,
1113  		   &priv->ring[ring].work_data.work);
1114  
1115  	err = crypto_wait_req(-EINPROGRESS, result);
1116  
1117  	if (err) {
1118  		dev_warn(priv->dev,
1119  			"cipher: sync: invalidate: completion error %d\n",
1120  			 err);
1121  		return err;
1122  	}
1123  
1124  	return 0;
1125  }
1126  
1127  static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1128  {
1129  	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1130  	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1131  	DECLARE_CRYPTO_WAIT(result);
1132  
1133  	memset(req, 0, sizeof(struct skcipher_request));
1134  
1135  	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1136  				      crypto_req_done, &result);
1137  	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1138  
1139  	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1140  }
1141  
1142  static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1143  {
1144  	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1145  	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1146  	DECLARE_CRYPTO_WAIT(result);
1147  
1148  	memset(req, 0, sizeof(struct aead_request));
1149  
1150  	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1151  				  crypto_req_done, &result);
1152  	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1153  
1154  	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1155  }
1156  
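/*
 * Common entry point for encrypt/decrypt: allocate the per-transform
 * context record on first use (and schedule an invalidation if the
 * cached record went stale), then enqueue the request on its ring.
 */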
1157  static int safexcel_queue_req(struct crypto_async_request *base,
1158  			struct safexcel_cipher_req *sreq,
1159  			enum safexcel_cipher_direction dir)
1160  {
1161  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1162  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1163  	int ret, ring;
1164  
1165  	sreq->needs_inv = false;
1166  	sreq->direction = dir;
1167  
1168  	if (ctx->base.ctxr) {
1169  		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1170  			sreq->needs_inv = true;
1171  			ctx->base.needs_inv = false;
1172  		}
1173  	} else {
1174  		ctx->base.ring = safexcel_select_ring(priv);
1175  		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1176  						 EIP197_GFP_FLAGS(*base),
1177  						 &ctx->base.ctxr_dma);
1178  		if (!ctx->base.ctxr)
1179  			return -ENOMEM;
1180  	}
1181  
1182  	ring = ctx->base.ring;
1183  
1184  	spin_lock_bh(&priv->ring[ring].queue_lock);
1185  	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1186  	spin_unlock_bh(&priv->ring[ring].queue_lock);
1187  
1188  	queue_work(priv->ring[ring].workqueue,
1189  		   &priv->ring[ring].work_data.work);
1190  
1191  	return ret;
1192  }
1193  
1194  static int safexcel_encrypt(struct skcipher_request *req)
1195  {
1196  	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1197  			SAFEXCEL_ENCRYPT);
1198  }
1199  
1200  static int safexcel_decrypt(struct skcipher_request *req)
1201  {
1202  	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1203  			SAFEXCEL_DECRYPT);
1204  }
1205  
1206  static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1207  {
1208  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1209  	struct safexcel_alg_template *tmpl =
1210  		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1211  			     alg.skcipher.base);
1212  
1213  	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1214  				    sizeof(struct safexcel_cipher_req));
1215  
1216  	ctx->base.priv = tmpl->priv;
1217  
1218  	ctx->base.send = safexcel_skcipher_send;
1219  	ctx->base.handle_result = safexcel_skcipher_handle_result;
1220  	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1221  	ctx->ctrinit = 1;
1222  	return 0;
1223  }
1224  
1225  static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1226  {
1227  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1228  
1229  	memzero_explicit(ctx->key, sizeof(ctx->key));
1230  
1231  	/* context not allocated, skip invalidation */
1232  	if (!ctx->base.ctxr)
1233  		return -ENOMEM;
1234  
1235  	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1236  	return 0;
1237  }
1238  
1239  static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1240  {
1241  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1242  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1243  	int ret;
1244  
1245  	if (safexcel_cipher_cra_exit(tfm))
1246  		return;
1247  
1248  	if (priv->flags & EIP197_TRC_CACHE) {
1249  		ret = safexcel_skcipher_exit_inv(tfm);
1250  		if (ret)
1251  			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1252  				 ret);
1253  	} else {
1254  		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255  			      ctx->base.ctxr_dma);
1256  	}
1257  }
1258  
1259  static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1260  {
1261  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1262  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1263  	int ret;
1264  
1265  	if (safexcel_cipher_cra_exit(tfm))
1266  		return;
1267  
1268  	if (priv->flags & EIP197_TRC_CACHE) {
1269  		ret = safexcel_aead_exit_inv(tfm);
1270  		if (ret)
1271  			dev_warn(priv->dev, "aead: invalidation error %d\n",
1272  				 ret);
1273  	} else {
1274  		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1275  			      ctx->base.ctxr_dma);
1276  	}
1277  }
1278  
1279  static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1280  {
1281  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1282  
1283  	safexcel_skcipher_cra_init(tfm);
1284  	ctx->alg  = SAFEXCEL_AES;
1285  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1286  	ctx->blocksz = 0;
1287  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1288  	return 0;
1289  }
1290  
1291  struct safexcel_alg_template safexcel_alg_ecb_aes = {
1292  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1293  	.algo_mask = SAFEXCEL_ALG_AES,
1294  	.alg.skcipher = {
1295  		.setkey = safexcel_skcipher_aes_setkey,
1296  		.encrypt = safexcel_encrypt,
1297  		.decrypt = safexcel_decrypt,
1298  		.min_keysize = AES_MIN_KEY_SIZE,
1299  		.max_keysize = AES_MAX_KEY_SIZE,
1300  		.base = {
1301  			.cra_name = "ecb(aes)",
1302  			.cra_driver_name = "safexcel-ecb-aes",
1303  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1304  			.cra_flags = CRYPTO_ALG_ASYNC |
1305  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1306  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1307  			.cra_blocksize = AES_BLOCK_SIZE,
1308  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1309  			.cra_alignmask = 0,
1310  			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
1311  			.cra_exit = safexcel_skcipher_cra_exit,
1312  			.cra_module = THIS_MODULE,
1313  		},
1314  	},
1315  };
1316  
1317  static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1318  {
1319  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1320  
1321  	safexcel_skcipher_cra_init(tfm);
1322  	ctx->alg  = SAFEXCEL_AES;
1323  	ctx->blocksz = AES_BLOCK_SIZE;
1324  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1325  	return 0;
1326  }
1327  
1328  struct safexcel_alg_template safexcel_alg_cbc_aes = {
1329  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1330  	.algo_mask = SAFEXCEL_ALG_AES,
1331  	.alg.skcipher = {
1332  		.setkey = safexcel_skcipher_aes_setkey,
1333  		.encrypt = safexcel_encrypt,
1334  		.decrypt = safexcel_decrypt,
1335  		.min_keysize = AES_MIN_KEY_SIZE,
1336  		.max_keysize = AES_MAX_KEY_SIZE,
1337  		.ivsize = AES_BLOCK_SIZE,
1338  		.base = {
1339  			.cra_name = "cbc(aes)",
1340  			.cra_driver_name = "safexcel-cbc-aes",
1341  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1342  			.cra_flags = CRYPTO_ALG_ASYNC |
1343  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1344  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1345  			.cra_blocksize = AES_BLOCK_SIZE,
1346  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1347  			.cra_alignmask = 0,
1348  			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
1349  			.cra_exit = safexcel_skcipher_cra_exit,
1350  			.cra_module = THIS_MODULE,
1351  		},
1352  	},
1353  };
1354  
1355  static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1356  {
1357  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1358  
1359  	safexcel_skcipher_cra_init(tfm);
1360  	ctx->alg  = SAFEXCEL_AES;
1361  	ctx->blocksz = AES_BLOCK_SIZE;
1362  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1363  	return 0;
1364  }
1365  
1366  struct safexcel_alg_template safexcel_alg_cfb_aes = {
1367  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1368  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1369  	.alg.skcipher = {
1370  		.setkey = safexcel_skcipher_aes_setkey,
1371  		.encrypt = safexcel_encrypt,
1372  		.decrypt = safexcel_decrypt,
1373  		.min_keysize = AES_MIN_KEY_SIZE,
1374  		.max_keysize = AES_MAX_KEY_SIZE,
1375  		.ivsize = AES_BLOCK_SIZE,
1376  		.base = {
1377  			.cra_name = "cfb(aes)",
1378  			.cra_driver_name = "safexcel-cfb-aes",
1379  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1380  			.cra_flags = CRYPTO_ALG_ASYNC |
1381  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1382  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1383  			.cra_blocksize = 1,
1384  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1385  			.cra_alignmask = 0,
1386  			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
1387  			.cra_exit = safexcel_skcipher_cra_exit,
1388  			.cra_module = THIS_MODULE,
1389  		},
1390  	},
1391  };
1392  
1393  static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1394  {
1395  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1396  
1397  	safexcel_skcipher_cra_init(tfm);
1398  	ctx->alg  = SAFEXCEL_AES;
1399  	ctx->blocksz = AES_BLOCK_SIZE;
1400  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1401  	return 0;
1402  }
1403  
1404  struct safexcel_alg_template safexcel_alg_ofb_aes = {
1405  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1406  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1407  	.alg.skcipher = {
1408  		.setkey = safexcel_skcipher_aes_setkey,
1409  		.encrypt = safexcel_encrypt,
1410  		.decrypt = safexcel_decrypt,
1411  		.min_keysize = AES_MIN_KEY_SIZE,
1412  		.max_keysize = AES_MAX_KEY_SIZE,
1413  		.ivsize = AES_BLOCK_SIZE,
1414  		.base = {
1415  			.cra_name = "ofb(aes)",
1416  			.cra_driver_name = "safexcel-ofb-aes",
1417  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1418  			.cra_flags = CRYPTO_ALG_ASYNC |
1419  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1420  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1421  			.cra_blocksize = 1,
1422  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1423  			.cra_alignmask = 0,
1424  			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
1425  			.cra_exit = safexcel_skcipher_cra_exit,
1426  			.cra_module = THIS_MODULE,
1427  		},
1428  	},
1429  };
1430  
1431  static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1432  					   const u8 *key, unsigned int len)
1433  {
1434  	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1435  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1436  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1437  	struct crypto_aes_ctx aes;
1438  	int ret, i;
1439  	unsigned int keylen;
1440  
1441  	/* last 4 bytes of key are the nonce! */
1442  	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1443  	/* exclude the nonce here */
1444  	keylen = len - CTR_RFC3686_NONCE_SIZE;
1445  	ret = aes_expandkey(&aes, key, keylen);
1446  	if (ret)
1447  		return ret;
1448  
1449  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1450  		for (i = 0; i < keylen / sizeof(u32); i++) {
1451  			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1452  				ctx->base.needs_inv = true;
1453  				break;
1454  			}
1455  		}
1456  	}
1457  
1458  	for (i = 0; i < keylen / sizeof(u32); i++)
1459  		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1460  
1461  	ctx->key_len = keylen;
1462  
1463  	memzero_explicit(&aes, sizeof(aes));
1464  	return 0;
1465  }
1466  
1467  static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1468  {
1469  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1470  
1471  	safexcel_skcipher_cra_init(tfm);
1472  	ctx->alg  = SAFEXCEL_AES;
1473  	ctx->blocksz = AES_BLOCK_SIZE;
1474  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1475  	return 0;
1476  }
1477  
1478  struct safexcel_alg_template safexcel_alg_ctr_aes = {
1479  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1480  	.algo_mask = SAFEXCEL_ALG_AES,
1481  	.alg.skcipher = {
1482  		.setkey = safexcel_skcipher_aesctr_setkey,
1483  		.encrypt = safexcel_encrypt,
1484  		.decrypt = safexcel_decrypt,
1485  		/* Add nonce size */
1486  		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1487  		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1488  		.ivsize = CTR_RFC3686_IV_SIZE,
1489  		.base = {
1490  			.cra_name = "rfc3686(ctr(aes))",
1491  			.cra_driver_name = "safexcel-ctr-aes",
1492  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1493  			.cra_flags = CRYPTO_ALG_ASYNC |
1494  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1495  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1496  			.cra_blocksize = 1,
1497  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1498  			.cra_alignmask = 0,
1499  			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
1500  			.cra_exit = safexcel_skcipher_cra_exit,
1501  			.cra_module = THIS_MODULE,
1502  		},
1503  	},
1504  };
1505  
1506  static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1507  			       unsigned int len)
1508  {
1509  	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1510  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1511  	int ret;
1512  
1513  	ret = verify_skcipher_des_key(ctfm, key);
1514  	if (ret)
1515  		return ret;
1516  
1517  	/* if the context exists and the key changed, it must be invalidated */
1518  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1519  		if (memcmp(ctx->key, key, len))
1520  			ctx->base.needs_inv = true;
1521  
1522  	memcpy(ctx->key, key, len);
1523  	ctx->key_len = len;
1524  
1525  	return 0;
1526  }
1527  
1528  static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1529  {
1530  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1531  
1532  	safexcel_skcipher_cra_init(tfm);
1533  	ctx->alg  = SAFEXCEL_DES;
1534  	ctx->blocksz = DES_BLOCK_SIZE;
1535  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1536  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1537  	return 0;
1538  }
1539  
1540  struct safexcel_alg_template safexcel_alg_cbc_des = {
1541  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1542  	.algo_mask = SAFEXCEL_ALG_DES,
1543  	.alg.skcipher = {
1544  		.setkey = safexcel_des_setkey,
1545  		.encrypt = safexcel_encrypt,
1546  		.decrypt = safexcel_decrypt,
1547  		.min_keysize = DES_KEY_SIZE,
1548  		.max_keysize = DES_KEY_SIZE,
1549  		.ivsize = DES_BLOCK_SIZE,
1550  		.base = {
1551  			.cra_name = "cbc(des)",
1552  			.cra_driver_name = "safexcel-cbc-des",
1553  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1554  			.cra_flags = CRYPTO_ALG_ASYNC |
1555  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1556  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1557  			.cra_blocksize = DES_BLOCK_SIZE,
1558  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1559  			.cra_alignmask = 0,
1560  			.cra_init = safexcel_skcipher_des_cbc_cra_init,
1561  			.cra_exit = safexcel_skcipher_cra_exit,
1562  			.cra_module = THIS_MODULE,
1563  		},
1564  	},
1565  };
1566  
1567  static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1568  {
1569  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1570  
1571  	safexcel_skcipher_cra_init(tfm);
1572  	ctx->alg  = SAFEXCEL_DES;
1573  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1574  	ctx->blocksz = 0;
1575  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1576  	return 0;
1577  }
1578  
1579  struct safexcel_alg_template safexcel_alg_ecb_des = {
1580  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1581  	.algo_mask = SAFEXCEL_ALG_DES,
1582  	.alg.skcipher = {
1583  		.setkey = safexcel_des_setkey,
1584  		.encrypt = safexcel_encrypt,
1585  		.decrypt = safexcel_decrypt,
1586  		.min_keysize = DES_KEY_SIZE,
1587  		.max_keysize = DES_KEY_SIZE,
1588  		.base = {
1589  			.cra_name = "ecb(des)",
1590  			.cra_driver_name = "safexcel-ecb-des",
1591  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1592  			.cra_flags = CRYPTO_ALG_ASYNC |
1593  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1594  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1595  			.cra_blocksize = DES_BLOCK_SIZE,
1596  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1597  			.cra_alignmask = 0,
1598  			.cra_init = safexcel_skcipher_des_ecb_cra_init,
1599  			.cra_exit = safexcel_skcipher_cra_exit,
1600  			.cra_module = THIS_MODULE,
1601  		},
1602  	},
1603  };
1604  
1605  static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1606  				   const u8 *key, unsigned int len)
1607  {
1608  	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1609  	struct safexcel_crypto_priv *priv = ctx->base.priv;
1610  	int err;
1611  
1612  	err = verify_skcipher_des3_key(ctfm, key);
1613  	if (err)
1614  		return err;
1615  
1616  	/* if context exists and key changed, need to invalidate it */
1617  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1618  		if (memcmp(ctx->key, key, len))
1619  			ctx->base.needs_inv = true;
1620  
1621  	memcpy(ctx->key, key, len);
1622  	ctx->key_len = len;
1623  
1624  	return 0;
1625  }
1626  
1627  static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1628  {
1629  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1630  
1631  	safexcel_skcipher_cra_init(tfm);
1632  	ctx->alg  = SAFEXCEL_3DES;
1633  	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1634  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1635  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1636  	return 0;
1637  }
1638  
1639  struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1640  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1641  	.algo_mask = SAFEXCEL_ALG_DES,
1642  	.alg.skcipher = {
1643  		.setkey = safexcel_des3_ede_setkey,
1644  		.encrypt = safexcel_encrypt,
1645  		.decrypt = safexcel_decrypt,
1646  		.min_keysize = DES3_EDE_KEY_SIZE,
1647  		.max_keysize = DES3_EDE_KEY_SIZE,
1648  		.ivsize = DES3_EDE_BLOCK_SIZE,
1649  		.base = {
1650  			.cra_name = "cbc(des3_ede)",
1651  			.cra_driver_name = "safexcel-cbc-des3_ede",
1652  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1653  			.cra_flags = CRYPTO_ALG_ASYNC |
1654  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1655  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1656  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1657  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1658  			.cra_alignmask = 0,
1659  			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
1660  			.cra_exit = safexcel_skcipher_cra_exit,
1661  			.cra_module = THIS_MODULE,
1662  		},
1663  	},
1664  };
1665  
1666  static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1667  {
1668  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1669  
1670  	safexcel_skcipher_cra_init(tfm);
1671  	ctx->alg  = SAFEXCEL_3DES;
1672  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1673  	ctx->blocksz = 0;
1674  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1675  	return 0;
1676  }
1677  
1678  struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1679  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1680  	.algo_mask = SAFEXCEL_ALG_DES,
1681  	.alg.skcipher = {
1682  		.setkey = safexcel_des3_ede_setkey,
1683  		.encrypt = safexcel_encrypt,
1684  		.decrypt = safexcel_decrypt,
1685  		.min_keysize = DES3_EDE_KEY_SIZE,
1686  		.max_keysize = DES3_EDE_KEY_SIZE,
1687  		.base = {
1688  			.cra_name = "ecb(des3_ede)",
1689  			.cra_driver_name = "safexcel-ecb-des3_ede",
1690  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1691  			.cra_flags = CRYPTO_ALG_ASYNC |
1692  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1693  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1694  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1695  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1696  			.cra_alignmask = 0,
1697  			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
1698  			.cra_exit = safexcel_skcipher_cra_exit,
1699  			.cra_module = THIS_MODULE,
1700  		},
1701  	},
1702  };
1703  
1704  static int safexcel_aead_encrypt(struct aead_request *req)
1705  {
1706  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1707  
1708  	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1709  }
1710  
1711  static int safexcel_aead_decrypt(struct aead_request *req)
1712  {
1713  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1714  
1715  	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1716  }
1717  
1718  static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1719  {
1720  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1721  	struct safexcel_alg_template *tmpl =
1722  		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1723  			     alg.aead.base);
1724  
1725  	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1726  				sizeof(struct safexcel_cipher_req));
1727  
1728  	ctx->base.priv = tmpl->priv;
1729  
1730  	ctx->alg  = SAFEXCEL_AES; /* default */
1731  	ctx->blocksz = AES_BLOCK_SIZE;
1732  	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1733  	ctx->ctrinit = 1;
1734  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1735  	ctx->aead = true;
1736  	ctx->base.send = safexcel_aead_send;
1737  	ctx->base.handle_result = safexcel_aead_handle_result;
1738  	return 0;
1739  }
1740  
1741  static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1742  {
1743  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1744  
1745  	safexcel_aead_cra_init(tfm);
1746  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1747  	ctx->state_sz = SHA1_DIGEST_SIZE;
1748  	return 0;
1749  }
1750  
1751  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1752  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1753  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1754  	.alg.aead = {
1755  		.setkey = safexcel_aead_setkey,
1756  		.encrypt = safexcel_aead_encrypt,
1757  		.decrypt = safexcel_aead_decrypt,
1758  		.ivsize = AES_BLOCK_SIZE,
1759  		.maxauthsize = SHA1_DIGEST_SIZE,
1760  		.base = {
1761  			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1762  			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1763  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1764  			.cra_flags = CRYPTO_ALG_ASYNC |
1765  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1766  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1767  			.cra_blocksize = AES_BLOCK_SIZE,
1768  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1769  			.cra_alignmask = 0,
1770  			.cra_init = safexcel_aead_sha1_cra_init,
1771  			.cra_exit = safexcel_aead_cra_exit,
1772  			.cra_module = THIS_MODULE,
1773  		},
1774  	},
1775  };
1776  
1777  static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1778  {
1779  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1780  
1781  	safexcel_aead_cra_init(tfm);
1782  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1783  	ctx->state_sz = SHA256_DIGEST_SIZE;
1784  	return 0;
1785  }
1786  
1787  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1788  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1789  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1790  	.alg.aead = {
1791  		.setkey = safexcel_aead_setkey,
1792  		.encrypt = safexcel_aead_encrypt,
1793  		.decrypt = safexcel_aead_decrypt,
1794  		.ivsize = AES_BLOCK_SIZE,
1795  		.maxauthsize = SHA256_DIGEST_SIZE,
1796  		.base = {
1797  			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1798  			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1799  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1800  			.cra_flags = CRYPTO_ALG_ASYNC |
1801  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1802  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1803  			.cra_blocksize = AES_BLOCK_SIZE,
1804  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1805  			.cra_alignmask = 0,
1806  			.cra_init = safexcel_aead_sha256_cra_init,
1807  			.cra_exit = safexcel_aead_cra_exit,
1808  			.cra_module = THIS_MODULE,
1809  		},
1810  	},
1811  };
1812  
1813  static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1814  {
1815  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1816  
1817  	safexcel_aead_cra_init(tfm);
1818  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1819  	ctx->state_sz = SHA256_DIGEST_SIZE;
1820  	return 0;
1821  }
1822  
1823  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1824  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1825  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1826  	.alg.aead = {
1827  		.setkey = safexcel_aead_setkey,
1828  		.encrypt = safexcel_aead_encrypt,
1829  		.decrypt = safexcel_aead_decrypt,
1830  		.ivsize = AES_BLOCK_SIZE,
1831  		.maxauthsize = SHA224_DIGEST_SIZE,
1832  		.base = {
1833  			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1834  			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1835  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1836  			.cra_flags = CRYPTO_ALG_ASYNC |
1837  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1838  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1839  			.cra_blocksize = AES_BLOCK_SIZE,
1840  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1841  			.cra_alignmask = 0,
1842  			.cra_init = safexcel_aead_sha224_cra_init,
1843  			.cra_exit = safexcel_aead_cra_exit,
1844  			.cra_module = THIS_MODULE,
1845  		},
1846  	},
1847  };
1848  
1849  static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1850  {
1851  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1852  
1853  	safexcel_aead_cra_init(tfm);
1854  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1855  	ctx->state_sz = SHA512_DIGEST_SIZE;
1856  	return 0;
1857  }
1858  
1859  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1860  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1861  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1862  	.alg.aead = {
1863  		.setkey = safexcel_aead_setkey,
1864  		.encrypt = safexcel_aead_encrypt,
1865  		.decrypt = safexcel_aead_decrypt,
1866  		.ivsize = AES_BLOCK_SIZE,
1867  		.maxauthsize = SHA512_DIGEST_SIZE,
1868  		.base = {
1869  			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1870  			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1871  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1872  			.cra_flags = CRYPTO_ALG_ASYNC |
1873  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1874  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1875  			.cra_blocksize = AES_BLOCK_SIZE,
1876  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1877  			.cra_alignmask = 0,
1878  			.cra_init = safexcel_aead_sha512_cra_init,
1879  			.cra_exit = safexcel_aead_cra_exit,
1880  			.cra_module = THIS_MODULE,
1881  		},
1882  	},
1883  };
1884  
1885  static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1886  {
1887  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1888  
1889  	safexcel_aead_cra_init(tfm);
1890  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1891  	ctx->state_sz = SHA512_DIGEST_SIZE;
1892  	return 0;
1893  }
1894  
1895  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1896  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1897  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1898  	.alg.aead = {
1899  		.setkey = safexcel_aead_setkey,
1900  		.encrypt = safexcel_aead_encrypt,
1901  		.decrypt = safexcel_aead_decrypt,
1902  		.ivsize = AES_BLOCK_SIZE,
1903  		.maxauthsize = SHA384_DIGEST_SIZE,
1904  		.base = {
1905  			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1906  			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1907  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1908  			.cra_flags = CRYPTO_ALG_ASYNC |
1909  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1910  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1911  			.cra_blocksize = AES_BLOCK_SIZE,
1912  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1913  			.cra_alignmask = 0,
1914  			.cra_init = safexcel_aead_sha384_cra_init,
1915  			.cra_exit = safexcel_aead_cra_exit,
1916  			.cra_module = THIS_MODULE,
1917  		},
1918  	},
1919  };
1920  
1921  static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1922  {
1923  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1924  
1925  	safexcel_aead_sha1_cra_init(tfm);
1926  	ctx->alg = SAFEXCEL_3DES; /* override default */
1927  	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1928  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1929  	return 0;
1930  }
1931  
1932  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1933  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1934  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1935  	.alg.aead = {
1936  		.setkey = safexcel_aead_setkey,
1937  		.encrypt = safexcel_aead_encrypt,
1938  		.decrypt = safexcel_aead_decrypt,
1939  		.ivsize = DES3_EDE_BLOCK_SIZE,
1940  		.maxauthsize = SHA1_DIGEST_SIZE,
1941  		.base = {
1942  			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1943  			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1944  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1945  			.cra_flags = CRYPTO_ALG_ASYNC |
1946  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1947  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1948  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1949  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1950  			.cra_alignmask = 0,
1951  			.cra_init = safexcel_aead_sha1_des3_cra_init,
1952  			.cra_exit = safexcel_aead_cra_exit,
1953  			.cra_module = THIS_MODULE,
1954  		},
1955  	},
1956  };
1957  
1958  static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1959  {
1960  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1961  
1962  	safexcel_aead_sha256_cra_init(tfm);
1963  	ctx->alg = SAFEXCEL_3DES; /* override default */
1964  	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1965  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1966  	return 0;
1967  }
1968  
1969  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1970  	.type = SAFEXCEL_ALG_TYPE_AEAD,
1971  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1972  	.alg.aead = {
1973  		.setkey = safexcel_aead_setkey,
1974  		.encrypt = safexcel_aead_encrypt,
1975  		.decrypt = safexcel_aead_decrypt,
1976  		.ivsize = DES3_EDE_BLOCK_SIZE,
1977  		.maxauthsize = SHA256_DIGEST_SIZE,
1978  		.base = {
1979  			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1980  			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1981  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1982  			.cra_flags = CRYPTO_ALG_ASYNC |
1983  				     CRYPTO_ALG_ALLOCATES_MEMORY |
1984  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1985  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1986  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1987  			.cra_alignmask = 0,
1988  			.cra_init = safexcel_aead_sha256_des3_cra_init,
1989  			.cra_exit = safexcel_aead_cra_exit,
1990  			.cra_module = THIS_MODULE,
1991  		},
1992  	},
1993  };
1994  
1995  static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1996  {
1997  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1998  
1999  	safexcel_aead_sha224_cra_init(tfm);
2000  	ctx->alg = SAFEXCEL_3DES; /* override default */
2001  	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2002  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2003  	return 0;
2004  }
2005  
2006  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
2007  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2008  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2009  	.alg.aead = {
2010  		.setkey = safexcel_aead_setkey,
2011  		.encrypt = safexcel_aead_encrypt,
2012  		.decrypt = safexcel_aead_decrypt,
2013  		.ivsize = DES3_EDE_BLOCK_SIZE,
2014  		.maxauthsize = SHA224_DIGEST_SIZE,
2015  		.base = {
2016  			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
2017  			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
2018  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2019  			.cra_flags = CRYPTO_ALG_ASYNC |
2020  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2021  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2022  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2023  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2024  			.cra_alignmask = 0,
2025  			.cra_init = safexcel_aead_sha224_des3_cra_init,
2026  			.cra_exit = safexcel_aead_cra_exit,
2027  			.cra_module = THIS_MODULE,
2028  		},
2029  	},
2030  };
2031  
2032  static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2033  {
2034  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2035  
2036  	safexcel_aead_sha512_cra_init(tfm);
2037  	ctx->alg = SAFEXCEL_3DES; /* override default */
2038  	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2039  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2040  	return 0;
2041  }
2042  
2043  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
2044  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2045  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2046  	.alg.aead = {
2047  		.setkey = safexcel_aead_setkey,
2048  		.encrypt = safexcel_aead_encrypt,
2049  		.decrypt = safexcel_aead_decrypt,
2050  		.ivsize = DES3_EDE_BLOCK_SIZE,
2051  		.maxauthsize = SHA512_DIGEST_SIZE,
2052  		.base = {
2053  			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
2054  			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
2055  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2056  			.cra_flags = CRYPTO_ALG_ASYNC |
2057  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2058  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2059  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2060  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2061  			.cra_alignmask = 0,
2062  			.cra_init = safexcel_aead_sha512_des3_cra_init,
2063  			.cra_exit = safexcel_aead_cra_exit,
2064  			.cra_module = THIS_MODULE,
2065  		},
2066  	},
2067  };
2068  
2069  static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2070  {
2071  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2072  
2073  	safexcel_aead_sha384_cra_init(tfm);
2074  	ctx->alg = SAFEXCEL_3DES; /* override default */
2075  	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2076  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2077  	return 0;
2078  }
2079  
2080  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2081  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2082  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2083  	.alg.aead = {
2084  		.setkey = safexcel_aead_setkey,
2085  		.encrypt = safexcel_aead_encrypt,
2086  		.decrypt = safexcel_aead_decrypt,
2087  		.ivsize = DES3_EDE_BLOCK_SIZE,
2088  		.maxauthsize = SHA384_DIGEST_SIZE,
2089  		.base = {
2090  			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2091  			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2092  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2093  			.cra_flags = CRYPTO_ALG_ASYNC |
2094  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2095  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2096  			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2097  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2098  			.cra_alignmask = 0,
2099  			.cra_init = safexcel_aead_sha384_des3_cra_init,
2100  			.cra_exit = safexcel_aead_cra_exit,
2101  			.cra_module = THIS_MODULE,
2102  		},
2103  	},
2104  };
2105  
2106  static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2107  {
2108  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2109  
2110  	safexcel_aead_sha1_cra_init(tfm);
2111  	ctx->alg = SAFEXCEL_DES; /* override default */
2112  	ctx->blocksz = DES_BLOCK_SIZE;
2113  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2114  	return 0;
2115  }
2116  
2117  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2118  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2119  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2120  	.alg.aead = {
2121  		.setkey = safexcel_aead_setkey,
2122  		.encrypt = safexcel_aead_encrypt,
2123  		.decrypt = safexcel_aead_decrypt,
2124  		.ivsize = DES_BLOCK_SIZE,
2125  		.maxauthsize = SHA1_DIGEST_SIZE,
2126  		.base = {
2127  			.cra_name = "authenc(hmac(sha1),cbc(des))",
2128  			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2129  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2130  			.cra_flags = CRYPTO_ALG_ASYNC |
2131  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2132  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2133  			.cra_blocksize = DES_BLOCK_SIZE,
2134  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2135  			.cra_alignmask = 0,
2136  			.cra_init = safexcel_aead_sha1_des_cra_init,
2137  			.cra_exit = safexcel_aead_cra_exit,
2138  			.cra_module = THIS_MODULE,
2139  		},
2140  	},
2141  };
2142  
2143  static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2144  {
2145  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2146  
2147  	safexcel_aead_sha256_cra_init(tfm);
2148  	ctx->alg = SAFEXCEL_DES; /* override default */
2149  	ctx->blocksz = DES_BLOCK_SIZE;
2150  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2151  	return 0;
2152  }
2153  
2154  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2155  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2156  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2157  	.alg.aead = {
2158  		.setkey = safexcel_aead_setkey,
2159  		.encrypt = safexcel_aead_encrypt,
2160  		.decrypt = safexcel_aead_decrypt,
2161  		.ivsize = DES_BLOCK_SIZE,
2162  		.maxauthsize = SHA256_DIGEST_SIZE,
2163  		.base = {
2164  			.cra_name = "authenc(hmac(sha256),cbc(des))",
2165  			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2166  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2167  			.cra_flags = CRYPTO_ALG_ASYNC |
2168  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2169  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2170  			.cra_blocksize = DES_BLOCK_SIZE,
2171  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2172  			.cra_alignmask = 0,
2173  			.cra_init = safexcel_aead_sha256_des_cra_init,
2174  			.cra_exit = safexcel_aead_cra_exit,
2175  			.cra_module = THIS_MODULE,
2176  		},
2177  	},
2178  };
2179  
2180  static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2181  {
2182  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2183  
2184  	safexcel_aead_sha224_cra_init(tfm);
2185  	ctx->alg = SAFEXCEL_DES; /* override default */
2186  	ctx->blocksz = DES_BLOCK_SIZE;
2187  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2188  	return 0;
2189  }
2190  
2191  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2192  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2193  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2194  	.alg.aead = {
2195  		.setkey = safexcel_aead_setkey,
2196  		.encrypt = safexcel_aead_encrypt,
2197  		.decrypt = safexcel_aead_decrypt,
2198  		.ivsize = DES_BLOCK_SIZE,
2199  		.maxauthsize = SHA224_DIGEST_SIZE,
2200  		.base = {
2201  			.cra_name = "authenc(hmac(sha224),cbc(des))",
2202  			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2203  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2204  			.cra_flags = CRYPTO_ALG_ASYNC |
2205  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2206  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2207  			.cra_blocksize = DES_BLOCK_SIZE,
2208  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2209  			.cra_alignmask = 0,
2210  			.cra_init = safexcel_aead_sha224_des_cra_init,
2211  			.cra_exit = safexcel_aead_cra_exit,
2212  			.cra_module = THIS_MODULE,
2213  		},
2214  	},
2215  };
2216  
2217  static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2218  {
2219  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2220  
2221  	safexcel_aead_sha512_cra_init(tfm);
2222  	ctx->alg = SAFEXCEL_DES; /* override default */
2223  	ctx->blocksz = DES_BLOCK_SIZE;
2224  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2225  	return 0;
2226  }
2227  
2228  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2229  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2230  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2231  	.alg.aead = {
2232  		.setkey = safexcel_aead_setkey,
2233  		.encrypt = safexcel_aead_encrypt,
2234  		.decrypt = safexcel_aead_decrypt,
2235  		.ivsize = DES_BLOCK_SIZE,
2236  		.maxauthsize = SHA512_DIGEST_SIZE,
2237  		.base = {
2238  			.cra_name = "authenc(hmac(sha512),cbc(des))",
2239  			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2240  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2241  			.cra_flags = CRYPTO_ALG_ASYNC |
2242  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2243  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2244  			.cra_blocksize = DES_BLOCK_SIZE,
2245  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2246  			.cra_alignmask = 0,
2247  			.cra_init = safexcel_aead_sha512_des_cra_init,
2248  			.cra_exit = safexcel_aead_cra_exit,
2249  			.cra_module = THIS_MODULE,
2250  		},
2251  	},
2252  };
2253  
2254  static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2255  {
2256  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2257  
2258  	safexcel_aead_sha384_cra_init(tfm);
2259  	ctx->alg = SAFEXCEL_DES; /* override default */
2260  	ctx->blocksz = DES_BLOCK_SIZE;
2261  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2262  	return 0;
2263  }
2264  
2265  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2266  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2267  	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2268  	.alg.aead = {
2269  		.setkey = safexcel_aead_setkey,
2270  		.encrypt = safexcel_aead_encrypt,
2271  		.decrypt = safexcel_aead_decrypt,
2272  		.ivsize = DES_BLOCK_SIZE,
2273  		.maxauthsize = SHA384_DIGEST_SIZE,
2274  		.base = {
2275  			.cra_name = "authenc(hmac(sha384),cbc(des))",
2276  			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2277  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2278  			.cra_flags = CRYPTO_ALG_ASYNC |
2279  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2280  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2281  			.cra_blocksize = DES_BLOCK_SIZE,
2282  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2283  			.cra_alignmask = 0,
2284  			.cra_init = safexcel_aead_sha384_des_cra_init,
2285  			.cra_exit = safexcel_aead_cra_exit,
2286  			.cra_module = THIS_MODULE,
2287  		},
2288  	},
2289  };
2290  
2291  static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2292  {
2293  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2294  
2295  	safexcel_aead_sha1_cra_init(tfm);
2296  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2297  	return 0;
2298  }
2299  
2300  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2301  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2302  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2303  	.alg.aead = {
2304  		.setkey = safexcel_aead_setkey,
2305  		.encrypt = safexcel_aead_encrypt,
2306  		.decrypt = safexcel_aead_decrypt,
2307  		.ivsize = CTR_RFC3686_IV_SIZE,
2308  		.maxauthsize = SHA1_DIGEST_SIZE,
2309  		.base = {
2310  			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2311  			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2312  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2313  			.cra_flags = CRYPTO_ALG_ASYNC |
2314  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2315  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2316  			.cra_blocksize = 1,
2317  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2318  			.cra_alignmask = 0,
2319  			.cra_init = safexcel_aead_sha1_ctr_cra_init,
2320  			.cra_exit = safexcel_aead_cra_exit,
2321  			.cra_module = THIS_MODULE,
2322  		},
2323  	},
2324  };
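/*
 * Editor's note (assumption, not from the driver source): for the
 * rfc3686(ctr(aes)) templates here, the transform key carries a 4-byte
 * nonce appended to the AES key and the IV is 8 bytes, so the 16-byte
 * counter block is assembled as nonce || IV || counter, with the
 * 32-bit big-endian counter starting at 1 per RFC 3686 (matching the
 * driver's ctx->ctrinit default of 1).
 */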
2325  
2326  static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2327  {
2328  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2329  
2330  	safexcel_aead_sha256_cra_init(tfm);
2331  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2332  	return 0;
2333  }
2334  
2335  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2336  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2337  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2338  	.alg.aead = {
2339  		.setkey = safexcel_aead_setkey,
2340  		.encrypt = safexcel_aead_encrypt,
2341  		.decrypt = safexcel_aead_decrypt,
2342  		.ivsize = CTR_RFC3686_IV_SIZE,
2343  		.maxauthsize = SHA256_DIGEST_SIZE,
2344  		.base = {
2345  			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2346  			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2347  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2348  			.cra_flags = CRYPTO_ALG_ASYNC |
2349  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2350  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2351  			.cra_blocksize = 1,
2352  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2353  			.cra_alignmask = 0,
2354  			.cra_init = safexcel_aead_sha256_ctr_cra_init,
2355  			.cra_exit = safexcel_aead_cra_exit,
2356  			.cra_module = THIS_MODULE,
2357  		},
2358  	},
2359  };
2360  
2361  static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2362  {
2363  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2364  
2365  	safexcel_aead_sha224_cra_init(tfm);
2366  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2367  	return 0;
2368  }
2369  
2370  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2371  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2372  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2373  	.alg.aead = {
2374  		.setkey = safexcel_aead_setkey,
2375  		.encrypt = safexcel_aead_encrypt,
2376  		.decrypt = safexcel_aead_decrypt,
2377  		.ivsize = CTR_RFC3686_IV_SIZE,
2378  		.maxauthsize = SHA224_DIGEST_SIZE,
2379  		.base = {
2380  			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2381  			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2382  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2383  			.cra_flags = CRYPTO_ALG_ASYNC |
2384  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2385  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2386  			.cra_blocksize = 1,
2387  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2388  			.cra_alignmask = 0,
2389  			.cra_init = safexcel_aead_sha224_ctr_cra_init,
2390  			.cra_exit = safexcel_aead_cra_exit,
2391  			.cra_module = THIS_MODULE,
2392  		},
2393  	},
2394  };
2395  
2396  static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2397  {
2398  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2399  
2400  	safexcel_aead_sha512_cra_init(tfm);
2401  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2402  	return 0;
2403  }
2404  
2405  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2406  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2407  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2408  	.alg.aead = {
2409  		.setkey = safexcel_aead_setkey,
2410  		.encrypt = safexcel_aead_encrypt,
2411  		.decrypt = safexcel_aead_decrypt,
2412  		.ivsize = CTR_RFC3686_IV_SIZE,
2413  		.maxauthsize = SHA512_DIGEST_SIZE,
2414  		.base = {
2415  			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2416  			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2417  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2418  			.cra_flags = CRYPTO_ALG_ASYNC |
2419  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2420  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2421  			.cra_blocksize = 1,
2422  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2423  			.cra_alignmask = 0,
2424  			.cra_init = safexcel_aead_sha512_ctr_cra_init,
2425  			.cra_exit = safexcel_aead_cra_exit,
2426  			.cra_module = THIS_MODULE,
2427  		},
2428  	},
2429  };
2430  
2431  static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2432  {
2433  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2434  
2435  	safexcel_aead_sha384_cra_init(tfm);
2436  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2437  	return 0;
2438  }
2439  
2440  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2441  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2442  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2443  	.alg.aead = {
2444  		.setkey = safexcel_aead_setkey,
2445  		.encrypt = safexcel_aead_encrypt,
2446  		.decrypt = safexcel_aead_decrypt,
2447  		.ivsize = CTR_RFC3686_IV_SIZE,
2448  		.maxauthsize = SHA384_DIGEST_SIZE,
2449  		.base = {
2450  			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2451  			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2452  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2453  			.cra_flags = CRYPTO_ALG_ASYNC |
2454  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2455  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2456  			.cra_blocksize = 1,
2457  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2458  			.cra_alignmask = 0,
2459  			.cra_init = safexcel_aead_sha384_ctr_cra_init,
2460  			.cra_exit = safexcel_aead_cra_exit,
2461  			.cra_module = THIS_MODULE,
2462  		},
2463  	},
2464  };
2465  
2466  static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2467  					   const u8 *key, unsigned int len)
2468  {
2469  	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2470  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2471  	struct safexcel_crypto_priv *priv = ctx->base.priv;
2472  	struct crypto_aes_ctx aes;
2473  	int ret, i;
2474  	unsigned int keylen;
2475  
2476  	/* Check for illegal XTS keys */
2477  	ret = xts_verify_key(ctfm, key, len);
2478  	if (ret)
2479  		return ret;
2480  
2481  	/* Only half of the key data is cipher key */
2482  	keylen = (len >> 1);
2483  	ret = aes_expandkey(&aes, key, keylen);
2484  	if (ret)
2485  		return ret;
2486  
2487  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2488  		for (i = 0; i < keylen / sizeof(u32); i++) {
2489  			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2490  				ctx->base.needs_inv = true;
2491  				break;
2492  			}
2493  		}
2494  	}
2495  
2496  	for (i = 0; i < keylen / sizeof(u32); i++)
2497  		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2498  
2499  	/* The other half is the tweak key */
2500  	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2501  	if (ret)
2502  		return ret;
2503  
2504  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2505  		for (i = 0; i < keylen / sizeof(u32); i++) {
2506  			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2507  			    aes.key_enc[i]) {
2508  				ctx->base.needs_inv = true;
2509  				break;
2510  			}
2511  		}
2512  	}
2513  
2514  	for (i = 0; i < keylen / sizeof(u32); i++)
2515  		ctx->key[i + keylen / sizeof(u32)] =
2516  			cpu_to_le32(aes.key_enc[i]);
2517  
2518  	ctx->key_len = keylen << 1;
2519  
2520  	memzero_explicit(&aes, sizeof(aes));
2521  	return 0;
2522  }
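/*
 * Editor's sketch (not part of the upstream driver): exercising the
 * XTS setkey above. The key blob is two equal-length AES keys glued
 * together; the low half becomes the data key and the high half the
 * tweak key. The helper name and the 64-byte key size are
 * illustrative.
 */
#if 0	/* illustrative only */
static int example_xts_setkey(const u8 *key64)
{
	struct crypto_skcipher *tfm;
	int ret;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* 64 bytes = AES-256 data key + AES-256 tweak key */
	ret = crypto_skcipher_setkey(tfm, key64, 2 * AES_KEYSIZE_256);

	crypto_free_skcipher(tfm);
	return ret;
}
#endif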
2523  
2524  static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2525  {
2526  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2527  
2528  	safexcel_skcipher_cra_init(tfm);
2529  	ctx->alg  = SAFEXCEL_AES;
2530  	ctx->blocksz = AES_BLOCK_SIZE;
2531  	ctx->xts  = 1;
2532  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2533  	return 0;
2534  }
2535  
2536  static int safexcel_encrypt_xts(struct skcipher_request *req)
2537  {
2538  	if (req->cryptlen < XTS_BLOCK_SIZE)
2539  		return -EINVAL;
2540  	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2541  				  SAFEXCEL_ENCRYPT);
2542  }
2543  
2544  static int safexcel_decrypt_xts(struct skcipher_request *req)
2545  {
2546  	if (req->cryptlen < XTS_BLOCK_SIZE)
2547  		return -EINVAL;
2548  	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2549  				  SAFEXCEL_DECRYPT);
2550  }
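/*
 * Editor's note (assumption, not from the driver source): the
 * XTS_BLOCK_SIZE checks above reject sub-block requests because XTS is
 * only defined for inputs of at least one full cipher block; a partial
 * trailing block is handled via ciphertext stealing, which still
 * requires one complete leading block.
 */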
2551  
2552  struct safexcel_alg_template safexcel_alg_xts_aes = {
2553  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2554  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2555  	.alg.skcipher = {
2556  		.setkey = safexcel_skcipher_aesxts_setkey,
2557  		.encrypt = safexcel_encrypt_xts,
2558  		.decrypt = safexcel_decrypt_xts,
2559  		/* XTS actually uses 2 AES keys glued together */
2560  		.min_keysize = AES_MIN_KEY_SIZE * 2,
2561  		.max_keysize = AES_MAX_KEY_SIZE * 2,
2562  		.ivsize = XTS_BLOCK_SIZE,
2563  		.base = {
2564  			.cra_name = "xts(aes)",
2565  			.cra_driver_name = "safexcel-xts-aes",
2566  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2567  			.cra_flags = CRYPTO_ALG_ASYNC |
2568  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2569  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2570  			.cra_blocksize = XTS_BLOCK_SIZE,
2571  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2572  			.cra_alignmask = 0,
2573  			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2574  			.cra_exit = safexcel_skcipher_cra_exit,
2575  			.cra_module = THIS_MODULE,
2576  		},
2577  	},
2578  };
2579  
2580  static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2581  				    unsigned int len)
2582  {
2583  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2584  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2585  	struct safexcel_crypto_priv *priv = ctx->base.priv;
2586  	struct crypto_aes_ctx aes;
2587  	u32 hashkey[AES_BLOCK_SIZE >> 2];
2588  	int ret, i;
2589  
2590  	ret = aes_expandkey(&aes, key, len);
2591  	if (ret) {
2592  		memzero_explicit(&aes, sizeof(aes));
2593  		return ret;
2594  	}
2595  
2596  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2597  		for (i = 0; i < len / sizeof(u32); i++) {
2598  			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2599  				ctx->base.needs_inv = true;
2600  				break;
2601  			}
2602  		}
2603  	}
2604  
2605  	for (i = 0; i < len / sizeof(u32); i++)
2606  		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2607  
2608  	ctx->key_len = len;
2609  
2610  	/* Compute hash key by encrypting zeroes with cipher key */
2611  	memset(hashkey, 0, AES_BLOCK_SIZE);
2612  	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
2613  
2614  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2615  		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2616  			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2617  				ctx->base.needs_inv = true;
2618  				break;
2619  			}
2620  		}
2621  	}
2622  
2623  	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2624  		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2625  
2626  	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2627  	memzero_explicit(&aes, sizeof(aes));
2628  	return 0;
2629  }
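/*
 * Editor's sketch (not part of the upstream driver): the value cached
 * in ctx->base.ipad above is the GHASH hash key H = E_K(0^128), the
 * all-zero block encrypted under the cipher key. A software
 * cross-check with the kernel AES library could look like:
 *
 *	struct crypto_aes_ctx aes;
 *	u8 h[AES_BLOCK_SIZE] = { 0 };
 *
 *	aes_expandkey(&aes, key, len);
 *	aes_encrypt(&aes, h, h);	// h now equals the hash key H
 */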
2630  
2631  static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2632  {
2633  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2634  
2635  	safexcel_aead_cra_init(tfm);
2636  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2637  	ctx->state_sz = GHASH_BLOCK_SIZE;
2638  	ctx->xcm = EIP197_XCM_MODE_GCM;
2639  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2640  
2641  	return 0;
2642  }
2643  
2644  static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2645  {
2646  	safexcel_aead_cra_exit(tfm);
2647  }
2648  
2649  static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2650  					 unsigned int authsize)
2651  {
2652  	return crypto_gcm_check_authsize(authsize);
2653  }
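/*
 * Editor's note (assumption about the generic helper, not this file):
 * crypto_gcm_check_authsize() permits the GCM tag lengths of 4, 8 and
 * 12..16 bytes, so truncated tags outside that set are rejected before
 * a request ever reaches the engine.
 */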
2654  
2655  struct safexcel_alg_template safexcel_alg_gcm = {
2656  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2657  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2658  	.alg.aead = {
2659  		.setkey = safexcel_aead_gcm_setkey,
2660  		.setauthsize = safexcel_aead_gcm_setauthsize,
2661  		.encrypt = safexcel_aead_encrypt,
2662  		.decrypt = safexcel_aead_decrypt,
2663  		.ivsize = GCM_AES_IV_SIZE,
2664  		.maxauthsize = GHASH_DIGEST_SIZE,
2665  		.base = {
2666  			.cra_name = "gcm(aes)",
2667  			.cra_driver_name = "safexcel-gcm-aes",
2668  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2669  			.cra_flags = CRYPTO_ALG_ASYNC |
2670  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2671  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2672  			.cra_blocksize = 1,
2673  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2674  			.cra_alignmask = 0,
2675  			.cra_init = safexcel_aead_gcm_cra_init,
2676  			.cra_exit = safexcel_aead_gcm_cra_exit,
2677  			.cra_module = THIS_MODULE,
2678  		},
2679  	},
2680  };
2681  
2682  static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2683  				    unsigned int len)
2684  {
2685  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2686  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2687  	struct safexcel_crypto_priv *priv = ctx->base.priv;
2688  	struct crypto_aes_ctx aes;
2689  	int ret, i;
2690  
2691  	ret = aes_expandkey(&aes, key, len);
2692  	if (ret) {
2693  		memzero_explicit(&aes, sizeof(aes));
2694  		return ret;
2695  	}
2696  
2697  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2698  		for (i = 0; i < len / sizeof(u32); i++) {
2699  			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2700  				ctx->base.needs_inv = true;
2701  				break;
2702  			}
2703  		}
2704  	}
2705  
2706  	for (i = 0; i < len / sizeof(u32); i++) {
2707  		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2708  		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2709  			cpu_to_be32(aes.key_enc[i]);
2710  	}
2711  
2712  	ctx->key_len = len;
2713  	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2714  
2715  	if (len == AES_KEYSIZE_192)
2716  		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2717  	else if (len == AES_KEYSIZE_256)
2718  		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2719  	else
2720  		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2721  
2722  	memzero_explicit(&aes, sizeof(aes));
2723  	return 0;
2724  }
2725  
2726  static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2727  {
2728  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2729  
2730  	safexcel_aead_cra_init(tfm);
2731  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2732  	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2733  	ctx->xcm = EIP197_XCM_MODE_CCM;
2734  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2735  	ctx->ctrinit = 0;
2736  	return 0;
2737  }
2738  
2739  static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2740  					 unsigned int authsize)
2741  {
2742  	/* Borrowed from crypto/ccm.c */
2743  	switch (authsize) {
2744  	case 4:
2745  	case 6:
2746  	case 8:
2747  	case 10:
2748  	case 12:
2749  	case 14:
2750  	case 16:
2751  		break;
2752  	default:
2753  		return -EINVAL;
2754  	}
2755  
2756  	return 0;
2757  }
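/*
 * Editor's note (assumption, not from the driver source): the accepted
 * sizes above are the even CCM tag lengths 4..16 defined by RFC 3610 /
 * NIST SP 800-38C; odd lengths and anything below 4 bytes are invalid
 * for CCM by construction.
 */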
2758  
2759  static int safexcel_ccm_encrypt(struct aead_request *req)
2760  {
2761  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2762  
2763  	if (req->iv[0] < 1 || req->iv[0] > 7)
2764  		return -EINVAL;
2765  
2766  	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2767  }
2768  
2769  static int safexcel_ccm_decrypt(struct aead_request *req)
2770  {
2771  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2772  
2773  	if (req->iv[0] < 1 || req->iv[0] > 7)
2774  		return -EINVAL;
2775  
2776  	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2777  }
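/*
 * Editor's note (assumption, not from the driver source): for CCM the
 * first IV octet encodes L' = L - 1, where L is the byte width of the
 * message length field; RFC 3610 allows L = 2..8, hence the iv[0]
 * range checks of 1..7 in the encrypt/decrypt paths above.
 */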
2778  
2779  struct safexcel_alg_template safexcel_alg_ccm = {
2780  	.type = SAFEXCEL_ALG_TYPE_AEAD,
2781  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2782  	.alg.aead = {
2783  		.setkey = safexcel_aead_ccm_setkey,
2784  		.setauthsize = safexcel_aead_ccm_setauthsize,
2785  		.encrypt = safexcel_ccm_encrypt,
2786  		.decrypt = safexcel_ccm_decrypt,
2787  		.ivsize = AES_BLOCK_SIZE,
2788  		.maxauthsize = AES_BLOCK_SIZE,
2789  		.base = {
2790  			.cra_name = "ccm(aes)",
2791  			.cra_driver_name = "safexcel-ccm-aes",
2792  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2793  			.cra_flags = CRYPTO_ALG_ASYNC |
2794  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2795  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2796  			.cra_blocksize = 1,
2797  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2798  			.cra_alignmask = 0,
2799  			.cra_init = safexcel_aead_ccm_cra_init,
2800  			.cra_exit = safexcel_aead_cra_exit,
2801  			.cra_module = THIS_MODULE,
2802  		},
2803  	},
2804  };
2805  
2806  static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2807  				     const u8 *key)
2808  {
2809  	struct safexcel_crypto_priv *priv = ctx->base.priv;
2810  
2811  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2812  		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2813  			ctx->base.needs_inv = true;
2814  
2815  	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2816  	ctx->key_len = CHACHA_KEY_SIZE;
2817  }
2818  
2819  static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2820  					     const u8 *key, unsigned int len)
2821  {
2822  	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2823  
2824  	if (len != CHACHA_KEY_SIZE)
2825  		return -EINVAL;
2826  
2827  	safexcel_chacha20_setkey(ctx, key);
2828  
2829  	return 0;
2830  }
2831  
2832  static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2833  {
2834  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2835  
2836  	safexcel_skcipher_cra_init(tfm);
2837  	ctx->alg  = SAFEXCEL_CHACHA20;
2838  	ctx->ctrinit = 0;
2839  	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2840  	return 0;
2841  }
2842  
2843  struct safexcel_alg_template safexcel_alg_chacha20 = {
2844  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2845  	.algo_mask = SAFEXCEL_ALG_CHACHA20,
2846  	.alg.skcipher = {
2847  		.setkey = safexcel_skcipher_chacha20_setkey,
2848  		.encrypt = safexcel_encrypt,
2849  		.decrypt = safexcel_decrypt,
2850  		.min_keysize = CHACHA_KEY_SIZE,
2851  		.max_keysize = CHACHA_KEY_SIZE,
2852  		.ivsize = CHACHA_IV_SIZE,
2853  		.base = {
2854  			.cra_name = "chacha20",
2855  			.cra_driver_name = "safexcel-chacha20",
2856  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2857  			.cra_flags = CRYPTO_ALG_ASYNC |
2858  				     CRYPTO_ALG_ALLOCATES_MEMORY |
2859  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2860  			.cra_blocksize = 1,
2861  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2862  			.cra_alignmask = 0,
2863  			.cra_init = safexcel_skcipher_chacha20_cra_init,
2864  			.cra_exit = safexcel_skcipher_cra_exit,
2865  			.cra_module = THIS_MODULE,
2866  		},
2867  	},
2868  };
2869  
2870  static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2871  				    const u8 *key, unsigned int len)
2872  {
2873  	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2874  
2875  	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2876  	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2877  		/* ESP variant has nonce appended to key */
2878  		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2879  		ctx->nonce = *(u32 *)(key + len);
2880  	}
2881  	if (len != CHACHA_KEY_SIZE)
2882  		return -EINVAL;
2883  
2884  	safexcel_chacha20_setkey(ctx, key);
2885  
2886  	return 0;
2887  }
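/*
 * Editor's note (assumption, not from the driver source): for the
 * rfc7539esp() variant the caller supplies 36 bytes, the 32-byte
 * ChaCha20 key with the 4-byte ESP salt appended (the rfc4106-style
 * convention); the salt is peeled off into ctx->nonce above before the
 * plain 32-byte key length check runs.
 */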
2888  
2889  static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2890  					 unsigned int authsize)
2891  {
2892  	if (authsize != POLY1305_DIGEST_SIZE)
2893  		return -EINVAL;
2894  	return 0;
2895  }
2896  
2897  static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2898  					  enum safexcel_cipher_direction dir)
2899  {
2900  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2901  	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2902  	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2903  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2904  	struct aead_request *subreq = aead_request_ctx(req);
2905  	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2906  	int ret = 0;
2907  
2908  	/*
2909  	 * Instead of wasting time detecting umpteen silly corner cases,
2910  	 * just dump all "small" requests to the fallback implementation.
2911  	 * HW would not be faster on such small requests anyway.
2912  	 */
2913  	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2914  		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2915  		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
2916  		return safexcel_queue_req(&req->base, creq, dir);
2917  	}
2918  
2919  	/* HW cannot do full (AAD+payload) zero length, use fallback */
2920  	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2921  	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2922  		/* ESP variant has nonce appended to the key */
2923  		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2924  		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2925  					 CHACHA_KEY_SIZE +
2926  					 EIP197_AEAD_IPSEC_NONCE_SIZE);
2927  	} else {
2928  		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2929  					 CHACHA_KEY_SIZE);
2930  	}
2931  	if (ret) {
2932  		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2933  		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2934  					    CRYPTO_TFM_REQ_MASK);
2935  		return ret;
2936  	}
2937  
2938  	aead_request_set_tfm(subreq, ctx->fback);
2939  	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2940  				  req->base.data);
2941  	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2942  			       req->iv);
2943  	aead_request_set_ad(subreq, req->assoclen);
2944  
2945  	return (dir ==  SAFEXCEL_ENCRYPT) ?
2946  		crypto_aead_encrypt(subreq) :
2947  		crypto_aead_decrypt(subreq);
2948  }
2949  
2950  static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2951  {
2952  	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2953  }
2954  
2955  static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2956  {
2957  	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2958  }
2959  
2960  static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2961  {
2962  	struct crypto_aead *aead = __crypto_aead_cast(tfm);
2963  	struct aead_alg *alg = crypto_aead_alg(aead);
2964  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2965  
2966  	safexcel_aead_cra_init(tfm);
2967  
2968  	/* Allocate fallback implementation */
2969  	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2970  				       CRYPTO_ALG_ASYNC |
2971  				       CRYPTO_ALG_NEED_FALLBACK);
2972  	if (IS_ERR(ctx->fback))
2973  		return PTR_ERR(ctx->fback);
2974  
2975  	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2976  					  sizeof(struct aead_request) +
2977  					  crypto_aead_reqsize(ctx->fback)));
2978  
2979  	return 0;
2980  }
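/*
 * Editor's note (not from the driver source): the reqsize set above is
 * the max of the driver's own request context and a full fallback
 * aead_request, so safexcel_aead_chachapoly_crypt() can reuse
 * aead_request_ctx(req) as the fallback subrequest without a separate
 * allocation.
 */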
2981  
2982  static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2983  {
2984  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2985  
2986  	safexcel_aead_fallback_cra_init(tfm);
2987  	ctx->alg  = SAFEXCEL_CHACHA20;
2988  	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2989  		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2990  	ctx->ctrinit = 0;
2991  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2992  	ctx->state_sz = 0; /* Precomputed by HW */
2993  	return 0;
2994  }
2995  
2996  static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2997  {
2998  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2999  
3000  	crypto_free_aead(ctx->fback);
3001  	safexcel_aead_cra_exit(tfm);
3002  }
3003  
3004  struct safexcel_alg_template safexcel_alg_chachapoly = {
3005  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3006  	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3007  	.alg.aead = {
3008  		.setkey = safexcel_aead_chachapoly_setkey,
3009  		.setauthsize = safexcel_aead_chachapoly_setauthsize,
3010  		.encrypt = safexcel_aead_chachapoly_encrypt,
3011  		.decrypt = safexcel_aead_chachapoly_decrypt,
3012  		.ivsize = CHACHAPOLY_IV_SIZE,
3013  		.maxauthsize = POLY1305_DIGEST_SIZE,
3014  		.base = {
3015  			.cra_name = "rfc7539(chacha20,poly1305)",
3016  			.cra_driver_name = "safexcel-chacha20-poly1305",
3017  			/* +1 to put it above HW chacha + SW poly */
3018  			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3019  			.cra_flags = CRYPTO_ALG_ASYNC |
3020  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3021  				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3022  				     CRYPTO_ALG_NEED_FALLBACK,
3023  			.cra_blocksize = 1,
3024  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3025  			.cra_alignmask = 0,
3026  			.cra_init = safexcel_aead_chachapoly_cra_init,
3027  			.cra_exit = safexcel_aead_fallback_cra_exit,
3028  			.cra_module = THIS_MODULE,
3029  		},
3030  	},
3031  };
3032  
3033  static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3034  {
3035  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3036  	int ret;
3037  
3038  	ret = safexcel_aead_chachapoly_cra_init(tfm);
3039  	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3040  	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3041  	return ret;
3042  }
3043  
3044  struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3045  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3046  	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3047  	.alg.aead = {
3048  		.setkey = safexcel_aead_chachapoly_setkey,
3049  		.setauthsize = safexcel_aead_chachapoly_setauthsize,
3050  		.encrypt = safexcel_aead_chachapoly_encrypt,
3051  		.decrypt = safexcel_aead_chachapoly_decrypt,
3052  		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3053  		.maxauthsize = POLY1305_DIGEST_SIZE,
3054  		.base = {
3055  			.cra_name = "rfc7539esp(chacha20,poly1305)",
3056  			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
3057  			/* +1 to put it above HW chacha + SW poly */
3058  			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3059  			.cra_flags = CRYPTO_ALG_ASYNC |
3060  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3061  				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3062  				     CRYPTO_ALG_NEED_FALLBACK,
3063  			.cra_blocksize = 1,
3064  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3065  			.cra_alignmask = 0,
3066  			.cra_init = safexcel_aead_chachapolyesp_cra_init,
3067  			.cra_exit = safexcel_aead_fallback_cra_exit,
3068  			.cra_module = THIS_MODULE,
3069  		},
3070  	},
3071  };
3072  
3073  static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3074  					const u8 *key, unsigned int len)
3075  {
3076  	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3077  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3078  	struct safexcel_crypto_priv *priv = ctx->base.priv;
3079  
3080  	if (len != SM4_KEY_SIZE)
3081  		return -EINVAL;
3082  
3083  	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3084  		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3085  			ctx->base.needs_inv = true;
3086  
3087  	memcpy(ctx->key, key, SM4_KEY_SIZE);
3088  	ctx->key_len = SM4_KEY_SIZE;
3089  
3090  	return 0;
3091  }
3092  
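/*
 * Block mode (ECB/CBC) wrappers: EIP96 4.3 silently accepts input that is
 * not a multiple of the cipher block size, so the length check has to be
 * done in software before the request is queued.
 */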
3093  static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3094  {
3095  	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3096  	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3097  		return -EINVAL;
3098  	else
3099  		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3100  					  SAFEXCEL_ENCRYPT);
3101  }
3102  
3103  static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3104  {
3105  	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3106  	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3107  		return -EINVAL;
3108  	else
3109  		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3110  					  SAFEXCEL_DECRYPT);
3111  }
3112  
3113  static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3114  {
3115  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3116  
3117  	safexcel_skcipher_cra_init(tfm);
3118  	ctx->alg  = SAFEXCEL_SM4;
3119  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
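	/* ECB takes no IV: a zero IV block size means no IV words are copied */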
3120  	ctx->blocksz = 0;
3121  	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3122  	return 0;
3123  }
3124  
3125  struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3126  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3127  	.algo_mask = SAFEXCEL_ALG_SM4,
3128  	.alg.skcipher = {
3129  		.setkey = safexcel_skcipher_sm4_setkey,
3130  		.encrypt = safexcel_sm4_blk_encrypt,
3131  		.decrypt = safexcel_sm4_blk_decrypt,
3132  		.min_keysize = SM4_KEY_SIZE,
3133  		.max_keysize = SM4_KEY_SIZE,
3134  		.base = {
3135  			.cra_name = "ecb(sm4)",
3136  			.cra_driver_name = "safexcel-ecb-sm4",
3137  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3138  			.cra_flags = CRYPTO_ALG_ASYNC |
3139  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3140  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3141  			.cra_blocksize = SM4_BLOCK_SIZE,
3142  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3143  			.cra_alignmask = 0,
3144  			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3145  			.cra_exit = safexcel_skcipher_cra_exit,
3146  			.cra_module = THIS_MODULE,
3147  		},
3148  	},
3149  };
3150  
3151  static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3152  {
3153  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3154  
3155  	safexcel_skcipher_cra_init(tfm);
3156  	ctx->alg  = SAFEXCEL_SM4;
3157  	ctx->blocksz = SM4_BLOCK_SIZE;
3158  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3159  	return 0;
3160  }
3161  
3162  struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3163  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3164  	.algo_mask = SAFEXCEL_ALG_SM4,
3165  	.alg.skcipher = {
3166  		.setkey = safexcel_skcipher_sm4_setkey,
3167  		.encrypt = safexcel_sm4_blk_encrypt,
3168  		.decrypt = safexcel_sm4_blk_decrypt,
3169  		.min_keysize = SM4_KEY_SIZE,
3170  		.max_keysize = SM4_KEY_SIZE,
3171  		.ivsize = SM4_BLOCK_SIZE,
3172  		.base = {
3173  			.cra_name = "cbc(sm4)",
3174  			.cra_driver_name = "safexcel-cbc-sm4",
3175  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3176  			.cra_flags = CRYPTO_ALG_ASYNC |
3177  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3178  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3179  			.cra_blocksize = SM4_BLOCK_SIZE,
3180  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3181  			.cra_alignmask = 0,
3182  			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3183  			.cra_exit = safexcel_skcipher_cra_exit,
3184  			.cra_module = THIS_MODULE,
3185  		},
3186  	},
3187  };
3188  
3189  static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3190  {
3191  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3192  
3193  	safexcel_skcipher_cra_init(tfm);
3194  	ctx->alg  = SAFEXCEL_SM4;
3195  	ctx->blocksz = SM4_BLOCK_SIZE;
3196  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3197  	return 0;
3198  }
3199  
3200  struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3201  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3202  	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3203  	.alg.skcipher = {
3204  		.setkey = safexcel_skcipher_sm4_setkey,
3205  		.encrypt = safexcel_encrypt,
3206  		.decrypt = safexcel_decrypt,
3207  		.min_keysize = SM4_KEY_SIZE,
3208  		.max_keysize = SM4_KEY_SIZE,
3209  		.ivsize = SM4_BLOCK_SIZE,
3210  		.base = {
3211  			.cra_name = "ofb(sm4)",
3212  			.cra_driver_name = "safexcel-ofb-sm4",
3213  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3214  			.cra_flags = CRYPTO_ALG_ASYNC |
3215  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3216  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3217  			.cra_blocksize = 1,
3218  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3219  			.cra_alignmask = 0,
3220  			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3221  			.cra_exit = safexcel_skcipher_cra_exit,
3222  			.cra_module = THIS_MODULE,
3223  		},
3224  	},
3225  };
3226  
3227  static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3228  {
3229  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3230  
3231  	safexcel_skcipher_cra_init(tfm);
3232  	ctx->alg  = SAFEXCEL_SM4;
3233  	ctx->blocksz = SM4_BLOCK_SIZE;
3234  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3235  	return 0;
3236  }
3237  
3238  struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3239  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3240  	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3241  	.alg.skcipher = {
3242  		.setkey = safexcel_skcipher_sm4_setkey,
3243  		.encrypt = safexcel_encrypt,
3244  		.decrypt = safexcel_decrypt,
3245  		.min_keysize = SM4_KEY_SIZE,
3246  		.max_keysize = SM4_KEY_SIZE,
3247  		.ivsize = SM4_BLOCK_SIZE,
3248  		.base = {
3249  			.cra_name = "cfb(sm4)",
3250  			.cra_driver_name = "safexcel-cfb-sm4",
3251  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3252  			.cra_flags = CRYPTO_ALG_ASYNC |
3253  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3254  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3255  			.cra_blocksize = 1,
3256  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3257  			.cra_alignmask = 0,
3258  			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3259  			.cra_exit = safexcel_skcipher_cra_exit,
3260  			.cra_module = THIS_MODULE,
3261  		},
3262  	},
3263  };
3264  
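/*
 * rfc3686(ctr(sm4)): the API key blob carries the 4 byte CTR nonce
 * appended to the SM4 key; split it off and keep it in the context.
 */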
3265  static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3266  					   const u8 *key, unsigned int len)
3267  {
3268  	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3269  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3270  
3271  	/* last 4 bytes of key are the nonce! */
3272  	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3273  	/* exclude the nonce here */
3274  	len -= CTR_RFC3686_NONCE_SIZE;
3275  
3276  	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3277  }
3278  
3279  static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3280  {
3281  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3282  
3283  	safexcel_skcipher_cra_init(tfm);
3284  	ctx->alg  = SAFEXCEL_SM4;
3285  	ctx->blocksz = SM4_BLOCK_SIZE;
3286  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3287  	return 0;
3288  }
3289  
3290  struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3291  	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3292  	.algo_mask = SAFEXCEL_ALG_SM4,
3293  	.alg.skcipher = {
3294  		.setkey = safexcel_skcipher_sm4ctr_setkey,
3295  		.encrypt = safexcel_encrypt,
3296  		.decrypt = safexcel_decrypt,
3297  		/* Add nonce size */
3298  		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3299  		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3300  		.ivsize = CTR_RFC3686_IV_SIZE,
3301  		.base = {
3302  			.cra_name = "rfc3686(ctr(sm4))",
3303  			.cra_driver_name = "safexcel-ctr-sm4",
3304  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3305  			.cra_flags = CRYPTO_ALG_ASYNC |
3306  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3307  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3308  			.cra_blocksize = 1,
3309  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3310  			.cra_alignmask = 0,
3311  			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3312  			.cra_exit = safexcel_skcipher_cra_exit,
3313  			.cra_module = THIS_MODULE,
3314  		},
3315  	},
3316  };
3317  
3318  static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3319  {
3320  	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3321  	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3322  		return -EINVAL;
3323  
3324  	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3325  				  SAFEXCEL_ENCRYPT);
3326  }
3327  
3328  static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3329  {
3330  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3331  
3332  	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3333  	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3334  		return -EINVAL;
3335  
3336  	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3337  				  SAFEXCEL_DECRYPT);
3338  }
3339  
3340  static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3341  {
3342  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3343  
3344  	safexcel_aead_cra_init(tfm);
3345  	ctx->alg = SAFEXCEL_SM4;
3346  	ctx->blocksz = SM4_BLOCK_SIZE;
3347  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3348  	ctx->state_sz = SHA1_DIGEST_SIZE;
3349  	return 0;
3350  }
3351  
3352  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3353  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3354  	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3355  	.alg.aead = {
3356  		.setkey = safexcel_aead_setkey,
3357  		.encrypt = safexcel_aead_sm4_blk_encrypt,
3358  		.decrypt = safexcel_aead_sm4_blk_decrypt,
3359  		.ivsize = SM4_BLOCK_SIZE,
3360  		.maxauthsize = SHA1_DIGEST_SIZE,
3361  		.base = {
3362  			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
3363  			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3364  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3365  			.cra_flags = CRYPTO_ALG_ASYNC |
3366  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3367  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3368  			.cra_blocksize = SM4_BLOCK_SIZE,
3369  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3370  			.cra_alignmask = 0,
3371  			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3372  			.cra_exit = safexcel_aead_cra_exit,
3373  			.cra_module = THIS_MODULE,
3374  		},
3375  	},
3376  };
3377  
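/*
 * Software fallback plumbing: requests the hardware cannot process (such
 * as zero length AAD+payload, checked by the callers further down) are
 * redirected to the AEAD in ctx->fback, which therefore has to track our
 * key and authsize.
 */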
3378  static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3379  					 const u8 *key, unsigned int len)
3380  {
3381  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3382  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3383  
3384  	/* Keep fallback cipher synchronized */
3385  	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3386  	       safexcel_aead_setkey(ctfm, key, len);
3387  }
3388  
3389  static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3390  					      unsigned int authsize)
3391  {
3392  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3393  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3394  
3395  	/* Keep fallback cipher synchronized */
3396  	return crypto_aead_setauthsize(ctx->fback, authsize);
3397  }
3398  
3399  static int safexcel_aead_fallback_crypt(struct aead_request *req,
3400  					enum safexcel_cipher_direction dir)
3401  {
3402  	struct crypto_aead *aead = crypto_aead_reqtfm(req);
3403  	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3404  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3405  	struct aead_request *subreq = aead_request_ctx(req);
3406  
3407  	aead_request_set_tfm(subreq, ctx->fback);
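	/* Mirror the original request onto a subrequest for the fallback tfm */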
3408  	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3409  				  req->base.data);
3410  	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3411  			       req->iv);
3412  	aead_request_set_ad(subreq, req->assoclen);
3413  
3414  	return (dir == SAFEXCEL_ENCRYPT) ?
3415  		crypto_aead_encrypt(subreq) :
3416  		crypto_aead_decrypt(subreq);
3417  }
3418  
3419  static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3420  {
3421  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3422  
3423  	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3424  	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3425  		return -EINVAL;
3426  	else if (req->cryptlen || req->assoclen) /* Only queue to HW if there is any input */
3427  		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3428  
3429  	/* HW cannot do full (AAD+payload) zero length, use fallback */
3430  	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3431  }
3432  
3433  static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3434  {
3435  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3436  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3437  
3438  	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3439  	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3440  		return -EINVAL;
3441  	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3442  		/* Only queue to HW if there is any input */
3443  		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3444  
3445  	/* HW cannot do full (AAD+payload) zero length, use fallback */
3446  	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3447  }
3448  
3449  static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3450  {
3451  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3452  
3453  	safexcel_aead_fallback_cra_init(tfm);
3454  	ctx->alg = SAFEXCEL_SM4;
3455  	ctx->blocksz = SM4_BLOCK_SIZE;
3456  	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3457  	ctx->state_sz = SM3_DIGEST_SIZE;
3458  	return 0;
3459  }
3460  
3461  struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3462  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3463  	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3464  	.alg.aead = {
3465  		.setkey = safexcel_aead_fallback_setkey,
3466  		.setauthsize = safexcel_aead_fallback_setauthsize,
3467  		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3468  		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3469  		.ivsize = SM4_BLOCK_SIZE,
3470  		.maxauthsize = SM3_DIGEST_SIZE,
3471  		.base = {
3472  			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
3473  			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3474  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3475  			.cra_flags = CRYPTO_ALG_ASYNC |
3476  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3477  				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3478  				     CRYPTO_ALG_NEED_FALLBACK,
3479  			.cra_blocksize = SM4_BLOCK_SIZE,
3480  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3481  			.cra_alignmask = 0,
3482  			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3483  			.cra_exit = safexcel_aead_fallback_cra_exit,
3484  			.cra_module = THIS_MODULE,
3485  		},
3486  	},
3487  };
3488  
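/* The rfc3686 CTR AEAD variants reuse the CBC init and only switch mode */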
3489  static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3490  {
3491  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3492  
3493  	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3494  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3495  	return 0;
3496  }
3497  
3498  struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
3499  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3500  	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3501  	.alg.aead = {
3502  		.setkey = safexcel_aead_setkey,
3503  		.encrypt = safexcel_aead_encrypt,
3504  		.decrypt = safexcel_aead_decrypt,
3505  		.ivsize = CTR_RFC3686_IV_SIZE,
3506  		.maxauthsize = SHA1_DIGEST_SIZE,
3507  		.base = {
3508  			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
3509  			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
3510  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3511  			.cra_flags = CRYPTO_ALG_ASYNC |
3512  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3513  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3514  			.cra_blocksize = 1,
3515  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3516  			.cra_alignmask = 0,
3517  			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
3518  			.cra_exit = safexcel_aead_cra_exit,
3519  			.cra_module = THIS_MODULE,
3520  		},
3521  	},
3522  };
3523  
3524  static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3525  {
3526  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3527  
3528  	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3529  	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3530  	return 0;
3531  }
3532  
3533  struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
3534  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3535  	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3536  	.alg.aead = {
3537  		.setkey = safexcel_aead_setkey,
3538  		.encrypt = safexcel_aead_encrypt,
3539  		.decrypt = safexcel_aead_decrypt,
3540  		.ivsize = CTR_RFC3686_IV_SIZE,
3541  		.maxauthsize = SM3_DIGEST_SIZE,
3542  		.base = {
3543  			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
3544  			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
3545  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3546  			.cra_flags = CRYPTO_ALG_ASYNC |
3547  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3548  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3549  			.cra_blocksize = 1,
3550  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3551  			.cra_alignmask = 0,
3552  			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
3553  			.cra_exit = safexcel_aead_cra_exit,
3554  			.cra_module = THIS_MODULE,
3555  		},
3556  	},
3557  };
3558  
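/*
 * rfc4106(gcm(aes)) for IPsec ESP: the 4 byte salt sits at the end of the
 * key blob and the associated data length is validated for ESP layouts.
 */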
3559  static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3560  				       unsigned int len)
3561  {
3562  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3563  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3564  
3565  	/* last 4 bytes of key are the nonce! */
3566  	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3567  
3568  	len -= CTR_RFC3686_NONCE_SIZE;
3569  	return safexcel_aead_gcm_setkey(ctfm, key, len);
3570  }
3571  
3572  static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
3573  					    unsigned int authsize)
3574  {
3575  	return crypto_rfc4106_check_authsize(authsize);
3576  }
3577  
3578  static int safexcel_rfc4106_encrypt(struct aead_request *req)
3579  {
3580  	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3581  	       safexcel_aead_encrypt(req);
3582  }
3583  
3584  static int safexcel_rfc4106_decrypt(struct aead_request *req)
3585  {
3586  	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3587  	       safexcel_aead_decrypt(req);
3588  }
3589  
3590  static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3591  {
3592  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3593  	int ret;
3594  
3595  	ret = safexcel_aead_gcm_cra_init(tfm);
3596  	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3597  	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3598  	return ret;
3599  }
3600  
3601  struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3602  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3603  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3604  	.alg.aead = {
3605  		.setkey = safexcel_rfc4106_gcm_setkey,
3606  		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3607  		.encrypt = safexcel_rfc4106_encrypt,
3608  		.decrypt = safexcel_rfc4106_decrypt,
3609  		.ivsize = GCM_RFC4106_IV_SIZE,
3610  		.maxauthsize = GHASH_DIGEST_SIZE,
3611  		.base = {
3612  			.cra_name = "rfc4106(gcm(aes))",
3613  			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3614  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3615  			.cra_flags = CRYPTO_ALG_ASYNC |
3616  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3617  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3618  			.cra_blocksize = 1,
3619  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3620  			.cra_alignmask = 0,
3621  			.cra_init = safexcel_rfc4106_gcm_cra_init,
3622  			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
3623  		},
3624  	},
3625  };
3626  
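/*
 * rfc4543(gcm(aes)), i.e. GMAC: authentication-only ESP. The tag length
 * is fixed at the full GHASH size and the rfc4106 setkey/encrypt paths
 * are reused.
 */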
3627  static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3628  					    unsigned int authsize)
3629  {
3630  	if (authsize != GHASH_DIGEST_SIZE)
3631  		return -EINVAL;
3632  
3633  	return 0;
3634  }
3635  
3636  static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3637  {
3638  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3639  	int ret;
3640  
3641  	ret = safexcel_aead_gcm_cra_init(tfm);
3642  	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3643  	return ret;
3644  }
3645  
3646  struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3647  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3648  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3649  	.alg.aead = {
3650  		.setkey = safexcel_rfc4106_gcm_setkey,
3651  		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
3652  		.encrypt = safexcel_rfc4106_encrypt,
3653  		.decrypt = safexcel_rfc4106_decrypt,
3654  		.ivsize = GCM_RFC4543_IV_SIZE,
3655  		.maxauthsize = GHASH_DIGEST_SIZE,
3656  		.base = {
3657  			.cra_name = "rfc4543(gcm(aes))",
3658  			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
3659  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3660  			.cra_flags = CRYPTO_ALG_ASYNC |
3661  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3662  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3663  			.cra_blocksize = 1,
3664  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3665  			.cra_alignmask = 0,
3666  			.cra_init = safexcel_rfc4543_gcm_cra_init,
3667  			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
3668  		},
3669  	},
3670  };
3671  
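/*
 * rfc4309(ccm(aes)) for IPsec ESP: a 3 byte salt at the end of the key,
 * a fixed 4 byte counter, and assoclen restricted to the ESP cases.
 */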
3672  static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3673  				       unsigned int len)
3674  {
3675  	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3676  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3677  
3678  	/* First nonce byte = CCM L encoding (L - 1) = 3 for RFC4309 (4 byte ctr) */
3679  	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3680  	/* last 3 bytes of key are the nonce! */
3681  	memcpy((u8 *)&ctx->nonce + 1, key + len -
3682  	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3683  	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3684  
3685  	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3686  	return safexcel_aead_ccm_setkey(ctfm, key, len);
3687  }
3688  
3689  static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3690  					    unsigned int authsize)
3691  {
3692  	/* Borrowed from crypto/ccm.c */
3693  	switch (authsize) {
3694  	case 8:
3695  	case 12:
3696  	case 16:
3697  		break;
3698  	default:
3699  		return -EINVAL;
3700  	}
3701  
3702  	return 0;
3703  }
3704  
3705  static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3706  {
3707  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3708  
3709  	/* Borrowed from crypto/ccm.c */
3710  	if (req->assoclen != 16 && req->assoclen != 20)
3711  		return -EINVAL;
3712  
3713  	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3714  }
3715  
3716  static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3717  {
3718  	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3719  
3720  	/* Borrowed from crypto/ccm.c */
3721  	if (req->assoclen != 16 && req->assoclen != 20)
3722  		return -EINVAL;
3723  
3724  	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3725  }
3726  
3727  static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3728  {
3729  	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3730  	int ret;
3731  
3732  	ret = safexcel_aead_ccm_cra_init(tfm);
3733  	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3734  	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3735  	return ret;
3736  }
3737  
3738  struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3739  	.type = SAFEXCEL_ALG_TYPE_AEAD,
3740  	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3741  	.alg.aead = {
3742  		.setkey = safexcel_rfc4309_ccm_setkey,
3743  		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
3744  		.encrypt = safexcel_rfc4309_ccm_encrypt,
3745  		.decrypt = safexcel_rfc4309_ccm_decrypt,
3746  		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3747  		.maxauthsize = AES_BLOCK_SIZE,
3748  		.base = {
3749  			.cra_name = "rfc4309(ccm(aes))",
3750  			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
3751  			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3752  			.cra_flags = CRYPTO_ALG_ASYNC |
3753  				     CRYPTO_ALG_ALLOCATES_MEMORY |
3754  				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3755  			.cra_blocksize = 1,
3756  			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3757  			.cra_alignmask = 0,
3758  			.cra_init = safexcel_rfc4309_ccm_cra_init,
3759  			.cra_exit = safexcel_aead_cra_exit,
3760  			.cra_module = THIS_MODULE,
3761  		},
3762  	},
3763  };
3764