/*
 * This file is part of the Chelsio T6 Crypto driver for Linux.
 *
 * Copyright (c) 2003-2016 Chelsio Communications, Inc. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Written and Maintained by:
 *	Manoj Malviya (manojmalviya@chelsio.com)
 *	Atul Gupta (atul.gupta@chelsio.com)
 *	Jitendra Lulla (jlulla@chelsio.com)
 *	Yeshaswi M R Gowda (yeshaswi@chelsio.com)
 *	Harsh Jain (harsh@chelsio.com)
 */

#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/gcm.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "t4fw_api.h"
#include "t4_msg.h"
#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"

#define IV AES_BLOCK_SIZE

static unsigned int sgl_ent_len[] = {
	0, 0, 16, 24, 40, 48, 64, 72, 88,
	96, 112, 120, 136, 144, 160, 168, 184,
	192, 208, 216, 232, 240, 256, 264, 280,
	288, 304, 312, 328, 336, 352, 360, 376
};

static unsigned int dsgl_ent_len[] = {
	0, 32, 32, 48, 48, 64, 64, 80, 80,
	112, 112, 128, 128, 144, 144, 160, 160,
	192, 192, 208, 208, 224, 224, 240, 240,
	272, 272, 288, 288, 304, 304, 320, 320
};

static u32 round_constant[11] = {
	0x01000000, 0x02000000, 0x04000000, 0x08000000,
	0x10000000, 0x20000000, 0x40000000, 0x80000000,
	0x1B000000, 0x36000000, 0x6C000000
};

static int chcr_handle_cipher_resp(struct skcipher_request *req,
				   unsigned char *input, int err);

static inline  struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
	return &ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
	return &ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
	return &ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
	return container_of(ctx->dev, struct uld_ctx, dev);
}

static inline void chcr_init_hctx_per_wr(struct chcr_ahash_req_ctx *reqctx)
{
	memset(&reqctx->hctx_wr, 0, sizeof(struct chcr_hctx_per_wr));
}

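/*
 * sg_nents_xlen - count the SGL entries needed to cover @reqlen bytes of
 * @sg after skipping the first @skip bytes, with each entry limited to
 * @entlen bytes.
 */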
static int sg_nents_xlen(struct scatterlist *sg, unsigned int reqlen,
			 unsigned int entlen,
			 unsigned int skip)
{
	int nents = 0;
	unsigned int less;
	unsigned int skip_len = 0;

	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}

	while (sg && reqlen) {
		less = min(reqlen, sg_dma_len(sg) - skip_len);
		nents += DIV_ROUND_UP(less, entlen);
		reqlen -= less;
		skip_len = 0;
		sg = sg_next(sg);
	}
	return nents;
}

static inline int get_aead_subtype(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.aead);
	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

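/*
 * chcr_verify_tag - compare the authentication tag of a decrypted AEAD
 * request against the tag computed by the hardware and set *err to
 * -EBADMSG on mismatch.
 */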
void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
	u8 temp[SHA512_DIGEST_SIZE];
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	struct cpl_fw6_pld *fw6_pld;
	int cmp = 0;

	fw6_pld = (struct cpl_fw6_pld *)input;
	if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
	    (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
		cmp = crypto_memneq(&fw6_pld->data[2], (fw6_pld + 1), authsize);
	} else {

		sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
				authsize, req->assoclen +
				req->cryptlen - authsize);
		cmp = crypto_memneq(temp, (fw6_pld + 1), authsize);
	}
	if (cmp)
		*err = -EBADMSG;
	else
		*err = 0;
}

static int chcr_inc_wrcount(struct chcr_dev *dev)
{
	if (dev->state == CHCR_DETACH)
		return 1;
	atomic_inc(&dev->inflight);
	return 0;
}

static inline void chcr_dec_wrcount(struct chcr_dev *dev)
{
	atomic_dec(&dev->inflight);
}

static inline int chcr_handle_aead_resp(struct aead_request *req,
					 unsigned char *input,
					 int err)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_dev *dev = a_ctx(tfm)->dev;

	chcr_aead_common_exit(req);
	if (reqctx->verify == VERIFY_SW) {
		chcr_verify_tag(req, input, &err);
		reqctx->verify = VERIFY_HW;
	}
	chcr_dec_wrcount(dev);
	aead_request_complete(req, err);

	return err;
}

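/*
 * get_aes_decrypt_key - run the AES key schedule on @key and store the
 * final Nk expanded-key words, in reverse order, into @dec_key.  This is
 * the "reverse round key" (rrkey) used for CBC decryption.
 */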
static void get_aes_decrypt_key(unsigned char *dec_key,
				       const unsigned char *key,
				       unsigned int keylength)
{
	u32 temp;
	u32 w_ring[MAX_NK];
	int i, j, k;
	u8  nr, nk;

	switch (keylength) {
	case AES_KEYLENGTH_128BIT:
		nk = KEYLENGTH_4BYTES;
		nr = NUMBER_OF_ROUNDS_10;
		break;
	case AES_KEYLENGTH_192BIT:
		nk = KEYLENGTH_6BYTES;
		nr = NUMBER_OF_ROUNDS_12;
		break;
	case AES_KEYLENGTH_256BIT:
		nk = KEYLENGTH_8BYTES;
		nr = NUMBER_OF_ROUNDS_14;
		break;
	default:
		return;
	}
	for (i = 0; i < nk; i++)
		w_ring[i] = get_unaligned_be32(&key[i * 4]);

	i = 0;
	temp = w_ring[nk - 1];
	while (i + nk < (nr + 1) * 4) {
		if (!(i % nk)) {
			/* RotWord(temp) */
			temp = (temp << 8) | (temp >> 24);
			temp = aes_ks_subword(temp);
			temp ^= round_constant[i / nk];
		} else if (nk == 8 && (i % 4 == 0)) {
			temp = aes_ks_subword(temp);
		}
		w_ring[i % nk] ^= temp;
		temp = w_ring[i % nk];
		i++;
	}
	i--;
	for (k = 0, j = i % nk; k < nk; k++) {
		put_unaligned_be32(w_ring[j], &dec_key[k * 4]);
		j--;
		if (j < 0)
			j += nk;
	}
}

static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);

	switch (ds) {
	case SHA1_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha1", 0, 0);
		break;
	case SHA224_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha224", 0, 0);
		break;
	case SHA256_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha256", 0, 0);
		break;
	case SHA384_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha384", 0, 0);
		break;
	case SHA512_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha512", 0, 0);
		break;
	}

	return base_hash;
}

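/*
 * chcr_compute_partial_hash - hash one block (@iopad) with the software
 * shash and export the intermediate state into @result_hash, as needed to
 * precompute the HMAC ipad/opad partial hashes.
 */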
static int chcr_compute_partial_hash(struct shash_desc *desc,
				     char *iopad, char *result_hash,
				     int digest_size)
{
	struct sha1_state sha1_st;
	struct sha256_state sha256_st;
	struct sha512_state sha512_st;
	int error;

	if (digest_size == SHA1_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha1_st);
		memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
	} else if (digest_size == SHA224_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);

	} else if (digest_size == SHA256_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);

	} else if (digest_size == SHA384_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);

	} else if (digest_size == SHA512_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else {
		error = -EINVAL;
		pr_err("Unknown digest size %d\n", digest_size);
	}
	return error;
}

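/*
 * chcr_change_order - convert the hash state in @buf to big-endian byte
 * order: 64-bit words when @ds is SHA512_DIGEST_SIZE, 32-bit words
 * otherwise.
 */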
static void chcr_change_order(char *buf, int ds)
{
	int i;

	if (ds == SHA512_DIGEST_SIZE) {
		for (i = 0; i < (ds / sizeof(u64)); i++)
			*((__be64 *)buf + i) =
				cpu_to_be64(*((u64 *)buf + i));
	} else {
		for (i = 0; i < (ds / sizeof(u32)); i++)
			*((__be32 *)buf + i) =
				cpu_to_be32(*((u32 *)buf + i));
	}
}

static inline int is_hmac(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
			     alg.hash);
	if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
		return 1;
	return 0;
}

static inline void dsgl_walk_init(struct dsgl_walk *walk,
				   struct cpl_rx_phys_dsgl *dsgl)
{
	walk->dsgl = dsgl;
	walk->nents = 0;
	walk->to = (struct phys_sge_pairs *)(dsgl + 1);
}

static inline void dsgl_walk_end(struct dsgl_walk *walk, unsigned short qid,
				 int pci_chan_id)
{
	struct cpl_rx_phys_dsgl *phys_cpl;

	phys_cpl = walk->dsgl;

	phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
				    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
	phys_cpl->pcirlxorder_to_noofsgentr =
		htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
		      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
		      CPL_RX_PHYS_DSGL_DCAID_V(0) |
		      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents));
	phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
	phys_cpl->rss_hdr_int.qid = htons(qid);
	phys_cpl->rss_hdr_int.hash_val = 0;
	phys_cpl->rss_hdr_int.channel = pci_chan_id;
}

static inline void dsgl_walk_add_page(struct dsgl_walk *walk,
					size_t size,
					dma_addr_t addr)
{
	int j;

	if (!size)
		return;
	j = walk->nents;
	walk->to->len[j % 8] = htons(size);
	walk->to->addr[j % 8] = cpu_to_be64(addr);
	j++;
	if ((j % 8) == 0)
		walk->to++;
	walk->nents = j;
}

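/*
 * dsgl_walk_add_sg - append @slen bytes of @sg (after skipping @skip bytes)
 * to the destination SGL, splitting entries larger than CHCR_DST_SG_SIZE.
 */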
static void  dsgl_walk_add_sg(struct dsgl_walk *walk,
			   struct scatterlist *sg,
			      unsigned int slen,
			      unsigned int skip)
{
	int skip_len = 0;
	unsigned int left_size = slen, len = 0;
	unsigned int j = walk->nents;
	int offset, ent_len;

	if (!slen)
		return;
	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}

	while (left_size && sg) {
		len = min_t(u32, left_size, sg_dma_len(sg) - skip_len);
		offset = 0;
		while (len) {
			ent_len =  min_t(u32, len, CHCR_DST_SG_SIZE);
			walk->to->len[j % 8] = htons(ent_len);
			walk->to->addr[j % 8] = cpu_to_be64(sg_dma_address(sg) +
						      offset + skip_len);
			offset += ent_len;
			len -= ent_len;
			j++;
			if ((j % 8) == 0)
				walk->to++;
		}
		walk->last_sg = sg;
		walk->last_sg_len = min_t(u32, left_size, sg_dma_len(sg) -
					  skip_len) + skip_len;
		left_size -= min_t(u32, left_size, sg_dma_len(sg) - skip_len);
		skip_len = 0;
		sg = sg_next(sg);
	}
	walk->nents = j;
}

static inline void ulptx_walk_init(struct ulptx_walk *walk,
				   struct ulptx_sgl *ulp)
{
	walk->sgl = ulp;
	walk->nents = 0;
	walk->pair_idx = 0;
	walk->pair = ulp->sge;
	walk->last_sg = NULL;
	walk->last_sg_len = 0;
}

static inline void ulptx_walk_end(struct ulptx_walk *walk)
{
	walk->sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) |
			      ULPTX_NSGE_V(walk->nents));
}


static inline void ulptx_walk_add_page(struct ulptx_walk *walk,
					size_t size,
					dma_addr_t addr)
{
	if (!size)
		return;

	if (walk->nents == 0) {
		walk->sgl->len0 = cpu_to_be32(size);
		walk->sgl->addr0 = cpu_to_be64(addr);
	} else {
		walk->pair->addr[walk->pair_idx] = cpu_to_be64(addr);
		walk->pair->len[walk->pair_idx] = cpu_to_be32(size);
		walk->pair_idx = !walk->pair_idx;
		if (!walk->pair_idx)
			walk->pair++;
	}
	walk->nents++;
}

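/*
 * ulptx_walk_add_sg - append @len bytes of @sg (after skipping @skip bytes)
 * to the ULPTX source SGL, splitting entries larger than CHCR_SRC_SG_SIZE.
 */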
static void  ulptx_walk_add_sg(struct ulptx_walk *walk,
					struct scatterlist *sg,
			       unsigned int len,
			       unsigned int skip)
{
	int small;
	int skip_len = 0;
	unsigned int sgmin;

	if (!len)
		return;
	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}
	WARN(!sg, "SG should not be null here\n");
	if (sg && (walk->nents == 0)) {
		small = min_t(unsigned int, sg_dma_len(sg) - skip_len, len);
		sgmin = min_t(unsigned int, small, CHCR_SRC_SG_SIZE);
		walk->sgl->len0 = cpu_to_be32(sgmin);
		walk->sgl->addr0 = cpu_to_be64(sg_dma_address(sg) + skip_len);
		walk->nents++;
		len -= sgmin;
		walk->last_sg = sg;
		walk->last_sg_len = sgmin + skip_len;
		skip_len += sgmin;
		if (sg_dma_len(sg) == skip_len) {
			sg = sg_next(sg);
			skip_len = 0;
		}
	}

	while (sg && len) {
		small = min(sg_dma_len(sg) - skip_len, len);
		sgmin = min_t(unsigned int, small, CHCR_SRC_SG_SIZE);
		walk->pair->len[walk->pair_idx] = cpu_to_be32(sgmin);
		walk->pair->addr[walk->pair_idx] =
			cpu_to_be64(sg_dma_address(sg) + skip_len);
		walk->pair_idx = !walk->pair_idx;
		walk->nents++;
		if (!walk->pair_idx)
			walk->pair++;
		len -= sgmin;
		skip_len += sgmin;
		walk->last_sg = sg;
		walk->last_sg_len = skip_len;
		if (sg_dma_len(sg) == skip_len) {
			sg = sg_next(sg);
			skip_len = 0;
		}
	}
}

static inline int get_cryptoalg_subtype(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.skcipher);

	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
	struct adapter *adap = netdev2adap(dev);
	struct sge_uld_txq_info *txq_info =
		adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
	struct sge_uld_txq *txq;
	int ret = 0;

	local_bh_disable();
	txq = &txq_info->uldtxq[idx];
	spin_lock(&txq->sendq.lock);
	if (txq->full)
		ret = -1;
	spin_unlock(&txq->sendq.lock);
	local_bh_enable();
	return ret;
}

static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
			       struct _key_ctx *key_ctx)
{
	if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
		memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
	} else {
		memcpy(key_ctx->key,
		       ablkctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->enckey_len >> 1);
		memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->rrkey, ablkctx->enckey_len >> 1);
	}
	return 0;
}

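/*
 * chcr_hash_ent_in_wr - return how many source bytes can be carried in one
 * work request, given @space bytes of room for SGL entries and @minsg
 * entries already accounted for.
 */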
static int chcr_hash_ent_in_wr(struct scatterlist *src,
			     unsigned int minsg,
			     unsigned int space,
			     unsigned int srcskip)
{
	int srclen = 0;
	int srcsg = minsg;
	int soffset = 0, sless;

	if (sg_dma_len(src) == srcskip) {
		src = sg_next(src);
		srcskip = 0;
	}
	while (src && space > (sgl_ent_len[srcsg + 1])) {
		sless = min_t(unsigned int, sg_dma_len(src) - soffset - srcskip,
							CHCR_SRC_SG_SIZE);
		srclen += sless;
		soffset += sless;
		srcsg++;
		if (sg_dma_len(src) == (soffset + srcskip)) {
			src = sg_next(src);
			soffset = 0;
			srcskip = 0;
		}
	}
	return srclen;
}

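/*
 * chcr_sg_ent_in_wr - return how many bytes of @src/@dst can be handled in
 * one work request, bounded by @space bytes of SGL room and the
 * MAX_DSGL_ENT limit on destination entries.
 */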
static int chcr_sg_ent_in_wr(struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int minsg,
			     unsigned int space,
			     unsigned int srcskip,
			     unsigned int dstskip)
{
	int srclen = 0, dstlen = 0;
	int srcsg = minsg, dstsg = minsg;
	int offset = 0, soffset = 0, less, sless = 0;

	if (sg_dma_len(src) == srcskip) {
		src = sg_next(src);
		srcskip = 0;
	}
	if (sg_dma_len(dst) == dstskip) {
		dst = sg_next(dst);
		dstskip = 0;
	}

	while (src && dst &&
	       space > (sgl_ent_len[srcsg + 1] + dsgl_ent_len[dstsg])) {
		sless = min_t(unsigned int, sg_dma_len(src) - srcskip - soffset,
				CHCR_SRC_SG_SIZE);
		srclen += sless;
		srcsg++;
		offset = 0;
		while (dst && ((dstsg + 1) <= MAX_DSGL_ENT) &&
		       space > (sgl_ent_len[srcsg] + dsgl_ent_len[dstsg + 1])) {
			if (srclen <= dstlen)
				break;
			less = min_t(unsigned int, sg_dma_len(dst) - offset -
				     dstskip, CHCR_DST_SG_SIZE);
			dstlen += less;
			offset += less;
			if ((offset + dstskip) == sg_dma_len(dst)) {
				dst = sg_next(dst);
				offset = 0;
			}
			dstsg++;
			dstskip = 0;
		}
		soffset += sless;
		if ((soffset + srcskip) == sg_dma_len(src)) {
			src = sg_next(src);
			srcskip = 0;
			soffset = 0;
		}

	}
	return min(srclen, dstlen);
}

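/*
 * chcr_cipher_fallback - hand the request over to the software fallback
 * skcipher, preserving the caller's completion callback and request flags.
 */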
static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
				struct skcipher_request *req,
				u8 *iv,
				unsigned short op_type)
{
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	int err;

	skcipher_request_set_tfm(&reqctx->fallback_req, cipher);
	skcipher_request_set_callback(&reqctx->fallback_req, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(&reqctx->fallback_req, req->src, req->dst,
				   req->cryptlen, iv);

	err = op_type ? crypto_skcipher_decrypt(&reqctx->fallback_req) :
			crypto_skcipher_encrypt(&reqctx->fallback_req);

	return err;

}

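/*
 * get_qidxs - fetch the tx/rx queue indices stored in the request context
 * of an AEAD, skcipher or ahash request.
 */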
static inline int get_qidxs(struct crypto_async_request *req,
			    unsigned int *txqidx, unsigned int *rxqidx)
{
	struct crypto_tfm *tfm = req->tfm;
	int ret = 0;

	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
	{
		struct aead_request *aead_req =
			container_of(req, struct aead_request, base);
		struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(aead_req);
		*txqidx = reqctx->txqidx;
		*rxqidx = reqctx->rxqidx;
		break;
	}
	case CRYPTO_ALG_TYPE_SKCIPHER:
	{
		struct skcipher_request *sk_req =
			container_of(req, struct skcipher_request, base);
		struct chcr_skcipher_req_ctx *reqctx =
			skcipher_request_ctx(sk_req);
		*txqidx = reqctx->txqidx;
		*rxqidx = reqctx->rxqidx;
		break;
	}
	case CRYPTO_ALG_TYPE_AHASH:
	{
		struct ahash_request *ahash_req =
			container_of(req, struct ahash_request, base);
		struct chcr_ahash_req_ctx *reqctx =
			ahash_request_ctx(ahash_req);
		*txqidx = reqctx->txqidx;
		*rxqidx = reqctx->rxqidx;
		break;
	}
	default:
		ret = -EINVAL;
		/* should never get here */
		BUG();
		break;
	}
	return ret;
}

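/*
 * create_wreq - fill in the common FW_CRYPTO_LOOKASIDE_WR and ULPTX header
 * fields (queue ids, lengths, cookie) shared by cipher, hash and AEAD work
 * requests.
 */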
static inline void create_wreq(struct chcr_context *ctx,
			       struct chcr_wr *chcr_req,
			       struct crypto_async_request *req,
			       unsigned int imm,
			       int hash_sz,
			       unsigned int len16,
			       unsigned int sc_len,
			       unsigned int lcb)
{
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	unsigned int tx_channel_id, rx_channel_id;
	unsigned int txqidx = 0, rxqidx = 0;
	unsigned int qid, fid, portno;

	get_qidxs(req, &txqidx, &rxqidx);
	qid = u_ctx->lldi.rxq_ids[rxqidx];
	fid = u_ctx->lldi.rxq_ids[0];
	portno = rxqidx / ctx->rxq_perchan;
	tx_channel_id = txqidx / ctx->txq_perchan;
	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]);


	chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE;
	chcr_req->wreq.pld_size_hash_size =
		htonl(FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
	chcr_req->wreq.len16_pkd =
		htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(len16, 16)));
	chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
	chcr_req->wreq.rx_chid_to_rx_q_id = FILL_WR_RX_Q_ID(rx_channel_id, qid,
							    !!lcb, txqidx);

	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(tx_channel_id, fid);
	chcr_req->ulptx.len = htonl((DIV_ROUND_UP(len16, 16) -
				((sizeof(chcr_req->wreq)) >> 4)));
	chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(!imm);
	chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
					   sizeof(chcr_req->key_ctx) + sc_len);
}

/**
 *	create_cipher_wr - form the WR for cipher operations
 *	@wrparam: Container for create_cipher_wr()'s parameters
 */
static struct sk_buff *create_cipher_wr(struct cipher_wr_param *wrparam)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(wrparam->req);
	struct chcr_context *ctx = c_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct ulptx_sgl *ulptx;
	struct chcr_skcipher_req_ctx *reqctx =
		skcipher_request_ctx(wrparam->req);
	unsigned int temp = 0, transhdr_len, dst_size;
	int error;
	int nents;
	unsigned int kctx_len;
	gfp_t flags = wrparam->req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
			GFP_KERNEL : GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	nents = sg_nents_xlen(reqctx->dstsg,  wrparam->bytes, CHCR_DST_SG_SIZE,
			      reqctx->dst_ofst);
	dst_size = get_space_for_phys_dsgl(nents);
	kctx_len = roundup(ablkctx->enckey_len, 16);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	nents = sg_nents_xlen(reqctx->srcsg, wrparam->bytes,
				  CHCR_SRC_SG_SIZE, reqctx->src_ofst);
	temp = reqctx->imm ? roundup(wrparam->bytes, 16) :
				     (sgl_len(nents) * 8);
	transhdr_len += temp;
	transhdr_len = roundup(transhdr_len, 16);
	skb = alloc_skb(SGE_MAX_WR_LEN, flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}
	chcr_req = __skb_put_zero(skb, transhdr_len);
	chcr_req->sec_cpl.op_ivinsrtofst =
			FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 1);

	chcr_req->sec_cpl.pldlen = htonl(IV + wrparam->bytes);
	chcr_req->sec_cpl.aadstart_cipherstop_hi =
			FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, IV + 1, 0);

	chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, 0,
							 ablkctx->ciph_mode,
							 0, 0, IV >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
							  0, 1, dst_size);

	chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
	if ((reqctx->op == CHCR_DECRYPT_OP) &&
	    (!(get_cryptoalg_subtype(tfm) ==
	       CRYPTO_ALG_SUB_TYPE_CTR)) &&
	    (!(get_cryptoalg_subtype(tfm) ==
	       CRYPTO_ALG_SUB_TYPE_CTR_RFC3686))) {
		generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
	} else {
		if ((ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) ||
		    (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CTR)) {
			memcpy(chcr_req->key_ctx.key, ablkctx->key,
			       ablkctx->enckey_len);
		} else {
			memcpy(chcr_req->key_ctx.key, ablkctx->key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->enckey_len >> 1);
			memcpy(chcr_req->key_ctx.key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->key,
			       ablkctx->enckey_len >> 1);
		}
	}
	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	ulptx = (struct ulptx_sgl *)((u8 *)(phys_cpl + 1) + dst_size);
	chcr_add_cipher_src_ent(wrparam->req, ulptx, wrparam);
	chcr_add_cipher_dst_ent(wrparam->req, phys_cpl, wrparam, wrparam->qid);

	atomic_inc(&adap->chcr_stats.cipher_rqst);
	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + kctx_len + IV
		+ (reqctx->imm ? (wrparam->bytes) : 0);
	create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0,
		    transhdr_len, temp,
			ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC);
	reqctx->skb = skb;

	if (reqctx->op && (ablkctx->ciph_mode ==
			   CHCR_SCMD_CIPHER_MODE_AES_CBC))
		sg_pcopy_to_buffer(wrparam->req->src,
			sg_nents(wrparam->req->src), wrparam->req->iv, 16,
			reqctx->processed + wrparam->bytes - AES_BLOCK_SIZE);

	return skb;
err:
	return ERR_PTR(error);
}

static inline int chcr_keyctx_ck_size(unsigned int keylen)
{
	int ck_size = 0;

	if (keylen == AES_KEYSIZE_128)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	else if (keylen == AES_KEYSIZE_256)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	else
		ck_size = 0;

	return ck_size;
}
static int chcr_cipher_fallback_setkey(struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));

	crypto_skcipher_clear_flags(ablkctx->sw_cipher,
				CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ablkctx->sw_cipher,
				cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
}

static int chcr_aes_cbc_setkey(struct crypto_skcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192 ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_ctr_setkey(struct crypto_skcipher *cipher,
				   const u8 *key,
				   unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;
	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_rfc3686_setkey(struct crypto_skcipher *cipher,
				   const u8 *key,
				   unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;
	memcpy(ablkctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;
	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}
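
/*
 * ctr_add_iv - copy @srciv to @dstiv and add @add to it, treating the IV
 * as a 128-bit big-endian counter with carry propagation.
 */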
ctr_add_iv(u8 * dstiv,u8 * srciv,u32 add)1023b8fd1f41SHarsh Jain static void ctr_add_iv(u8 *dstiv, u8 *srciv, u32 add)
1024b8fd1f41SHarsh Jain {
1025b8fd1f41SHarsh Jain 	unsigned int size = AES_BLOCK_SIZE;
1026b8fd1f41SHarsh Jain 	__be32 *b = (__be32 *)(dstiv + size);
1027b8fd1f41SHarsh Jain 	u32 c, prev;
1028b8fd1f41SHarsh Jain 
1029b8fd1f41SHarsh Jain 	memcpy(dstiv, srciv, AES_BLOCK_SIZE);
1030b8fd1f41SHarsh Jain 	for (; size >= 4; size -= 4) {
1031b8fd1f41SHarsh Jain 		prev = be32_to_cpu(*--b);
1032b8fd1f41SHarsh Jain 		c = prev + add;
1033b8fd1f41SHarsh Jain 		*b = cpu_to_be32(c);
1034b8fd1f41SHarsh Jain 		if (prev < c)
1035b8fd1f41SHarsh Jain 			break;
1036b8fd1f41SHarsh Jain 		add = 1;
1037b8fd1f41SHarsh Jain 	}
1038b8fd1f41SHarsh Jain 
1039b8fd1f41SHarsh Jain }
1040b8fd1f41SHarsh Jain 
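/* Clamp @bytes so that the low 32-bit counter in @iv cannot wrap
 * within a single work request.
 */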
1041b8fd1f41SHarsh Jain static unsigned int adjust_ctr_overflow(u8 *iv, u32 bytes)
1042b8fd1f41SHarsh Jain {
1043b8fd1f41SHarsh Jain 	__be32 *b = (__be32 *)(iv + AES_BLOCK_SIZE);
1044b8fd1f41SHarsh Jain 	u64 c;
1045b8fd1f41SHarsh Jain 	u32 temp = be32_to_cpu(*--b);
1046b8fd1f41SHarsh Jain 
1047b8fd1f41SHarsh Jain 	temp = ~temp;
10486b363a28SDevulapally Shiva Krishna 	c = (u64)temp + 1; // Number of blocks that can be processed without overflow
10496b363a28SDevulapally Shiva Krishna 	if ((bytes / AES_BLOCK_SIZE) >= c)
1050b8fd1f41SHarsh Jain 		bytes = c * AES_BLOCK_SIZE;
1051b8fd1f41SHarsh Jain 	return bytes;
1052b8fd1f41SHarsh Jain }
1053b8fd1f41SHarsh Jain 
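/* Recompute the XTS tweak for the next work request: encrypt the request
 * IV with the second half of the key, then advance it by one GF(2^128)
 * multiplication per block already processed. For non-final requests the
 * result is decrypted back before it is handed to the hardware.
 */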
10547cea6d3eSArd Biesheuvel static int chcr_update_tweak(struct skcipher_request *req, u8 *iv,
1055209897d5SHarsh Jain 			     u32 isfinal)
1056b8fd1f41SHarsh Jain {
10577cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
10582f47d580SHarsh Jain 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
10597cea6d3eSArd Biesheuvel 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
1060571c47abSArd Biesheuvel 	struct crypto_aes_ctx aes;
1061b8fd1f41SHarsh Jain 	int ret, i;
1062b8fd1f41SHarsh Jain 	u8 *key;
1063b8fd1f41SHarsh Jain 	unsigned int keylen;
1064de1a00acSHarsh Jain 	int round = reqctx->last_req_len / AES_BLOCK_SIZE;
1065de1a00acSHarsh Jain 	int round8 = round / 8;
1066b8fd1f41SHarsh Jain 
1067de1a00acSHarsh Jain 	memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);
1068b8fd1f41SHarsh Jain 
1069b8fd1f41SHarsh Jain 	keylen = ablkctx->enckey_len / 2;
1070b8fd1f41SHarsh Jain 	key = ablkctx->key + keylen;
1071ee91ac1bSDevulapally Shiva Krishna 	/* For a 192 bit key remove the padded zeroes which were
1072ee91ac1bSDevulapally Shiva Krishna 	 * added in chcr_xts_setkey
1073ee91ac1bSDevulapally Shiva Krishna 	 */
1074ee91ac1bSDevulapally Shiva Krishna 	if (KEY_CONTEXT_CK_SIZE_G(ntohl(ablkctx->key_ctx_hdr))
1075ee91ac1bSDevulapally Shiva Krishna 			== CHCR_KEYCTX_CIPHER_KEY_SIZE_192)
1076ee91ac1bSDevulapally Shiva Krishna 		ret = aes_expandkey(&aes, key, keylen - 8);
1077ee91ac1bSDevulapally Shiva Krishna 	else
1078571c47abSArd Biesheuvel 		ret = aes_expandkey(&aes, key, keylen);
1079b8fd1f41SHarsh Jain 	if (ret)
1080571c47abSArd Biesheuvel 		return ret;
1081571c47abSArd Biesheuvel 	aes_encrypt(&aes, iv, iv);
1082de1a00acSHarsh Jain 	for (i = 0; i < round8; i++)
1083de1a00acSHarsh Jain 		gf128mul_x8_ble((le128 *)iv, (le128 *)iv);
1084de1a00acSHarsh Jain 
1085de1a00acSHarsh Jain 	for (i = 0; i < (round % 8); i++)
1086b8fd1f41SHarsh Jain 		gf128mul_x_ble((le128 *)iv, (le128 *)iv);
1087b8fd1f41SHarsh Jain 
1088209897d5SHarsh Jain 	if (!isfinal)
1089571c47abSArd Biesheuvel 		aes_decrypt(&aes, iv, iv);
1090571c47abSArd Biesheuvel 
1091571c47abSArd Biesheuvel 	memzero_explicit(&aes, sizeof(aes));
1092571c47abSArd Biesheuvel 	return 0;
1093b8fd1f41SHarsh Jain }
1094b8fd1f41SHarsh Jain 
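/* Derive the IV to use for the next work request of a partially
 * processed cipher request, based on the algorithm subtype.
 */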
10957cea6d3eSArd Biesheuvel static int chcr_update_cipher_iv(struct skcipher_request *req,
1096b8fd1f41SHarsh Jain 				   struct cpl_fw6_pld *fw6_pld, u8 *iv)
1097b8fd1f41SHarsh Jain {
10987cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
10997cea6d3eSArd Biesheuvel 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
11007cea6d3eSArd Biesheuvel 	int subtype = get_cryptoalg_subtype(tfm);
1101ab677ff4SHariprasad Shenai 	int ret = 0;
1102324429d7SHariprasad Shenai 
1103b8fd1f41SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
11047cea6d3eSArd Biesheuvel 		ctr_add_iv(iv, req->iv, (reqctx->processed /
1105b8fd1f41SHarsh Jain 			   AES_BLOCK_SIZE));
1106b8fd1f41SHarsh Jain 	else if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_RFC3686)
1107b8fd1f41SHarsh Jain 		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
1108b8fd1f41SHarsh Jain 			CTR_RFC3686_IV_SIZE) = cpu_to_be32((reqctx->processed /
1109b8fd1f41SHarsh Jain 						AES_BLOCK_SIZE) + 1);
1110b8fd1f41SHarsh Jain 	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
1111209897d5SHarsh Jain 		ret = chcr_update_tweak(req, iv, 0);
1112b8fd1f41SHarsh Jain 	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
1113b8fd1f41SHarsh Jain 		if (reqctx->op)
11145fb78dbaSHarsh Jain 			/* Updated before sending last WR */
11157cea6d3eSArd Biesheuvel 			memcpy(iv, req->iv, AES_BLOCK_SIZE);
1116b8fd1f41SHarsh Jain 		else
1117b8fd1f41SHarsh Jain 			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
1118b8fd1f41SHarsh Jain 	}
1119b8fd1f41SHarsh Jain 
1120324429d7SHariprasad Shenai 	return ret;
1121b8fd1f41SHarsh Jain 
1122b8fd1f41SHarsh Jain }
1123b8fd1f41SHarsh Jain 
1124b8fd1f41SHarsh Jain /* We need a separate function for the final IV because in RFC 3686 the initial
1125b8fd1f41SHarsh Jain  * counter starts from 1 and the IV buffer is only 8 bytes, which remains
1126b8fd1f41SHarsh Jain  * constant for subsequent update requests
1127b8fd1f41SHarsh Jain  */
1128b8fd1f41SHarsh Jain 
11297cea6d3eSArd Biesheuvel static int chcr_final_cipher_iv(struct skcipher_request *req,
1130b8fd1f41SHarsh Jain 				   struct cpl_fw6_pld *fw6_pld, u8 *iv)
1131b8fd1f41SHarsh Jain {
11327cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
11337cea6d3eSArd Biesheuvel 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
11347cea6d3eSArd Biesheuvel 	int subtype = get_cryptoalg_subtype(tfm);
1135b8fd1f41SHarsh Jain 	int ret = 0;
1136b8fd1f41SHarsh Jain 
1137b8fd1f41SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
11387cea6d3eSArd Biesheuvel 		ctr_add_iv(iv, req->iv, DIV_ROUND_UP(reqctx->processed,
1139b8fd1f41SHarsh Jain 						       AES_BLOCK_SIZE));
1140bed44d0cSAyush Sawal 	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS) {
1141bed44d0cSAyush Sawal 		if (!reqctx->partial_req)
1142bed44d0cSAyush Sawal 			memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);
1143bed44d0cSAyush Sawal 		else
1144209897d5SHarsh Jain 			ret = chcr_update_tweak(req, iv, 1);
1145bed44d0cSAyush Sawal 	}
1146b8fd1f41SHarsh Jain 	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
11475fb78dbaSHarsh Jain 		/* Already updated for Decrypt */
11485fb78dbaSHarsh Jain 		if (!reqctx->op)
1149b8fd1f41SHarsh Jain 			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
1150b8fd1f41SHarsh Jain 
1151b8fd1f41SHarsh Jain 	}
1152b8fd1f41SHarsh Jain 	return ret;
1153b8fd1f41SHarsh Jain 
1154b8fd1f41SHarsh Jain }
1155b8fd1f41SHarsh Jain 
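/* Completion handler for a cipher work request: if the whole request has
 * been processed, finalise the IV and complete it; otherwise update the
 * IV and send the next work request for the remaining data, falling back
 * to the software cipher when no more progress can be made.
 */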
11567cea6d3eSArd Biesheuvel static int chcr_handle_cipher_resp(struct skcipher_request *req,
1157b8fd1f41SHarsh Jain 				   unsigned char *input, int err)
1158b8fd1f41SHarsh Jain {
11597cea6d3eSArd Biesheuvel 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
11606b363a28SDevulapally Shiva Krishna 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
11616b363a28SDevulapally Shiva Krishna 	struct cpl_fw6_pld *fw6_pld = (struct cpl_fw6_pld *)input;
11626b363a28SDevulapally Shiva Krishna 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
11636b363a28SDevulapally Shiva Krishna 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1164fef4912bSHarsh Jain 	struct chcr_dev *dev = c_ctx(tfm)->dev;
11656b363a28SDevulapally Shiva Krishna 	struct chcr_context *ctx = c_ctx(tfm);
11666b363a28SDevulapally Shiva Krishna 	struct adapter *adap = padap(ctx->dev);
11676b363a28SDevulapally Shiva Krishna 	struct cipher_wr_param wrparam;
11686b363a28SDevulapally Shiva Krishna 	struct sk_buff *skb;
1169b8fd1f41SHarsh Jain 	int bytes;
1170b8fd1f41SHarsh Jain 
1171b8fd1f41SHarsh Jain 	if (err)
11722f47d580SHarsh Jain 		goto unmap;
11737cea6d3eSArd Biesheuvel 	if (req->cryptlen == reqctx->processed) {
11742f47d580SHarsh Jain 		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
11752f47d580SHarsh Jain 				      req);
11767cea6d3eSArd Biesheuvel 		err = chcr_final_cipher_iv(req, fw6_pld, req->iv);
1177b8fd1f41SHarsh Jain 		goto complete;
1178b8fd1f41SHarsh Jain 	}
1179b8fd1f41SHarsh Jain 
11802f47d580SHarsh Jain 	if (!reqctx->imm) {
1181335bcc4aSHarsh Jain 		bytes = chcr_sg_ent_in_wr(reqctx->srcsg, reqctx->dstsg, 0,
11825110e655SHarsh Jain 					  CIP_SPACE_LEFT(ablkctx->enckey_len),
11832f47d580SHarsh Jain 					  reqctx->src_ofst, reqctx->dst_ofst);
11847cea6d3eSArd Biesheuvel 		if ((bytes + reqctx->processed) >= req->cryptlen)
11857cea6d3eSArd Biesheuvel 			bytes  = req->cryptlen - reqctx->processed;
1186b8fd1f41SHarsh Jain 		else
1187125d01caSHarsh Jain 			bytes = rounddown(bytes, 16);
11882f47d580SHarsh Jain 	} else {
11892f47d580SHarsh Jain 		/* CTR mode counter overflow */
11907cea6d3eSArd Biesheuvel 		bytes  = req->cryptlen - reqctx->processed;
11912f47d580SHarsh Jain 	}
1192b8fd1f41SHarsh Jain 	err = chcr_update_cipher_iv(req, fw6_pld, reqctx->iv);
1193b8fd1f41SHarsh Jain 	if (err)
11942f47d580SHarsh Jain 		goto unmap;
1195b8fd1f41SHarsh Jain 
1196b8fd1f41SHarsh Jain 	if (unlikely(bytes == 0)) {
11972f47d580SHarsh Jain 		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
11982f47d580SHarsh Jain 				      req);
11996b363a28SDevulapally Shiva Krishna 		memcpy(req->iv, reqctx->init_iv, IV);
12006b363a28SDevulapally Shiva Krishna 		atomic_inc(&adap->chcr_stats.fallback);
1201d8c6d188SArd Biesheuvel 		err = chcr_cipher_fallback(ablkctx->sw_cipher, req, req->iv,
1202b8fd1f41SHarsh Jain 					   reqctx->op);
1203b8fd1f41SHarsh Jain 		goto complete;
1204b8fd1f41SHarsh Jain 	}
1205b8fd1f41SHarsh Jain 
12067cea6d3eSArd Biesheuvel 	if (get_cryptoalg_subtype(tfm) ==
1207b8fd1f41SHarsh Jain 	    CRYPTO_ALG_SUB_TYPE_CTR)
1208b8fd1f41SHarsh Jain 		bytes = adjust_ctr_overflow(reqctx->iv, bytes);
1209567be3a5SAyush Sawal 	wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx];
1210b8fd1f41SHarsh Jain 	wrparam.req = req;
1211b8fd1f41SHarsh Jain 	wrparam.bytes = bytes;
1212b8fd1f41SHarsh Jain 	skb = create_cipher_wr(&wrparam);
1213b8fd1f41SHarsh Jain 	if (IS_ERR(skb)) {
12141b3eeb87SChristophe JAILLET 		pr_err("%s : Failed to form WR. No memory\n", __func__);
1215b8fd1f41SHarsh Jain 		err = PTR_ERR(skb);
12162f47d580SHarsh Jain 		goto unmap;
1217b8fd1f41SHarsh Jain 	}
1218b8fd1f41SHarsh Jain 	skb->dev = u_ctx->lldi.ports[0];
1219567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
1220b8fd1f41SHarsh Jain 	chcr_send_wr(skb);
12212f47d580SHarsh Jain 	reqctx->last_req_len = bytes;
12222f47d580SHarsh Jain 	reqctx->processed += bytes;
12231c502e2eSAyush Sawal 	if (get_cryptoalg_subtype(tfm) ==
12241c502e2eSAyush Sawal 		CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
12251c502e2eSAyush Sawal 			CRYPTO_TFM_REQ_MAY_SLEEP) {
12261c502e2eSAyush Sawal 		complete(&ctx->cbc_aes_aio_done);
12271c502e2eSAyush Sawal 	}
1228b8fd1f41SHarsh Jain 	return 0;
12292f47d580SHarsh Jain unmap:
12302f47d580SHarsh Jain 	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
1231b8fd1f41SHarsh Jain complete:
12321c502e2eSAyush Sawal 	if (get_cryptoalg_subtype(tfm) ==
12331c502e2eSAyush Sawal 		CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
12341c502e2eSAyush Sawal 			CRYPTO_TFM_REQ_MAY_SLEEP) {
12351c502e2eSAyush Sawal 		complete(&ctx->cbc_aes_aio_done);
12361c502e2eSAyush Sawal 	}
1237fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
123813c20754SHerbert Xu 	skcipher_request_complete(req, err);
1239b8fd1f41SHarsh Jain 	return err;
1240b8fd1f41SHarsh Jain }
1241b8fd1f41SHarsh Jain 
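/* Validate a cipher request, map it for DMA, set up the per-request IV
 * and build the first work request. Requests the hardware cannot handle
 * are diverted to the software fallback.
 */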
12427cea6d3eSArd Biesheuvel static int process_cipher(struct skcipher_request *req,
1243b8fd1f41SHarsh Jain 				  unsigned short qid,
1244b8fd1f41SHarsh Jain 				  struct sk_buff **skb,
1245b8fd1f41SHarsh Jain 				  unsigned short op_type)
1246b8fd1f41SHarsh Jain {
12476b363a28SDevulapally Shiva Krishna 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
12487cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
12497cea6d3eSArd Biesheuvel 	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
12502f47d580SHarsh Jain 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
12516b363a28SDevulapally Shiva Krishna 	struct adapter *adap = padap(c_ctx(tfm)->dev);
1252b8fd1f41SHarsh Jain 	struct	cipher_wr_param wrparam;
12532956f36cSHarsh Jain 	int bytes, err = -EINVAL;
12546b363a28SDevulapally Shiva Krishna 	int subtype;
1255b8fd1f41SHarsh Jain 
1256b8fd1f41SHarsh Jain 	reqctx->processed = 0;
1257bed44d0cSAyush Sawal 	reqctx->partial_req = 0;
12587cea6d3eSArd Biesheuvel 	if (!req->iv)
1259b8fd1f41SHarsh Jain 		goto error;
12606b363a28SDevulapally Shiva Krishna 	subtype = get_cryptoalg_subtype(tfm);
1261b8fd1f41SHarsh Jain 	if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
12627cea6d3eSArd Biesheuvel 	    (req->cryptlen == 0) ||
12637cea6d3eSArd Biesheuvel 	    (req->cryptlen % crypto_skcipher_blocksize(tfm))) {
12646b363a28SDevulapally Shiva Krishna 		if (req->cryptlen == 0 && subtype != CRYPTO_ALG_SUB_TYPE_XTS)
12656b363a28SDevulapally Shiva Krishna 			goto fallback;
12666b363a28SDevulapally Shiva Krishna 		else if (req->cryptlen % crypto_skcipher_blocksize(tfm) &&
12676b363a28SDevulapally Shiva Krishna 			 subtype == CRYPTO_ALG_SUB_TYPE_XTS)
12686b363a28SDevulapally Shiva Krishna 			goto fallback;
1269b8fd1f41SHarsh Jain 		pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
12707cea6d3eSArd Biesheuvel 		       ablkctx->enckey_len, req->cryptlen, ivsize);
1271b8fd1f41SHarsh Jain 		goto error;
1272b8fd1f41SHarsh Jain 	}
1273fef4912bSHarsh Jain 
1274fef4912bSHarsh Jain 	err = chcr_cipher_dma_map(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
1275fef4912bSHarsh Jain 	if (err)
1276fef4912bSHarsh Jain 		goto error;
12777cea6d3eSArd Biesheuvel 	if (req->cryptlen < (SGE_MAX_WR_LEN - (sizeof(struct chcr_wr) +
12782f47d580SHarsh Jain 					    AES_MIN_KEY_SIZE +
12792f47d580SHarsh Jain 					    sizeof(struct cpl_rx_phys_dsgl) +
12802f47d580SHarsh Jain 					/* Min dsgl size */
12812f47d580SHarsh Jain 					    32))) {
12822f47d580SHarsh Jain 		/* Can be sent as Imm */
12832f47d580SHarsh Jain 		unsigned int dnents = 0, transhdr_len, phys_dsgl, kctx_len;
12842f47d580SHarsh Jain 
12857cea6d3eSArd Biesheuvel 		dnents = sg_nents_xlen(req->dst, req->cryptlen,
12862f47d580SHarsh Jain 				       CHCR_DST_SG_SIZE, 0);
12872f47d580SHarsh Jain 		phys_dsgl = get_space_for_phys_dsgl(dnents);
1288125d01caSHarsh Jain 		kctx_len = roundup(ablkctx->enckey_len, 16);
12892f47d580SHarsh Jain 		transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
12907cea6d3eSArd Biesheuvel 		reqctx->imm = (transhdr_len + IV + req->cryptlen) <=
12912f47d580SHarsh Jain 			SGE_MAX_WR_LEN;
12927cea6d3eSArd Biesheuvel 		bytes = IV + req->cryptlen;
12932f47d580SHarsh Jain 
12942f47d580SHarsh Jain 	} else {
12952f47d580SHarsh Jain 		reqctx->imm = 0;
12962f47d580SHarsh Jain 	}
12972f47d580SHarsh Jain 
12982f47d580SHarsh Jain 	if (!reqctx->imm) {
1299335bcc4aSHarsh Jain 		bytes = chcr_sg_ent_in_wr(req->src, req->dst, 0,
13005110e655SHarsh Jain 					  CIP_SPACE_LEFT(ablkctx->enckey_len),
13012f47d580SHarsh Jain 					  0, 0);
13027cea6d3eSArd Biesheuvel 		if ((bytes + reqctx->processed) >= req->cryptlen)
13037cea6d3eSArd Biesheuvel 			bytes  = req->cryptlen - reqctx->processed;
1304b8fd1f41SHarsh Jain 		else
1305125d01caSHarsh Jain 			bytes = rounddown(bytes, 16);
13062f47d580SHarsh Jain 	} else {
13077cea6d3eSArd Biesheuvel 		bytes = req->cryptlen;
13082f47d580SHarsh Jain 	}
13096b363a28SDevulapally Shiva Krishna 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR) {
13107cea6d3eSArd Biesheuvel 		bytes = adjust_ctr_overflow(req->iv, bytes);
1311b8fd1f41SHarsh Jain 	}
13126b363a28SDevulapally Shiva Krishna 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_RFC3686) {
1313b8fd1f41SHarsh Jain 		memcpy(reqctx->iv, ablkctx->nonce, CTR_RFC3686_NONCE_SIZE);
13147cea6d3eSArd Biesheuvel 		memcpy(reqctx->iv + CTR_RFC3686_NONCE_SIZE, req->iv,
1315b8fd1f41SHarsh Jain 				CTR_RFC3686_IV_SIZE);
1316b8fd1f41SHarsh Jain 
1317b8fd1f41SHarsh Jain 		/* initialize counter portion of counter block */
1318b8fd1f41SHarsh Jain 		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
1319b8fd1f41SHarsh Jain 			CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
13206b363a28SDevulapally Shiva Krishna 		memcpy(reqctx->init_iv, reqctx->iv, IV);
1321b8fd1f41SHarsh Jain 
1322b8fd1f41SHarsh Jain 	} else {
1323b8fd1f41SHarsh Jain 
13247cea6d3eSArd Biesheuvel 		memcpy(reqctx->iv, req->iv, IV);
13256b363a28SDevulapally Shiva Krishna 		memcpy(reqctx->init_iv, req->iv, IV);
1326b8fd1f41SHarsh Jain 	}
1327b8fd1f41SHarsh Jain 	if (unlikely(bytes == 0)) {
13282f47d580SHarsh Jain 		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
13292f47d580SHarsh Jain 				      req);
13306b363a28SDevulapally Shiva Krishna fallback:       atomic_inc(&adap->chcr_stats.fallback);
1331d8c6d188SArd Biesheuvel 		err = chcr_cipher_fallback(ablkctx->sw_cipher, req,
13326b363a28SDevulapally Shiva Krishna 					   subtype ==
13336b363a28SDevulapally Shiva Krishna 					   CRYPTO_ALG_SUB_TYPE_CTR_RFC3686 ?
13346b363a28SDevulapally Shiva Krishna 					   reqctx->iv : req->iv,
1335b8fd1f41SHarsh Jain 					   op_type);
1336b8fd1f41SHarsh Jain 		goto error;
1337b8fd1f41SHarsh Jain 	}
1338b8fd1f41SHarsh Jain 	reqctx->op = op_type;
13392f47d580SHarsh Jain 	reqctx->srcsg = req->src;
13402f47d580SHarsh Jain 	reqctx->dstsg = req->dst;
13412f47d580SHarsh Jain 	reqctx->src_ofst = 0;
13422f47d580SHarsh Jain 	reqctx->dst_ofst = 0;
1343b8fd1f41SHarsh Jain 	wrparam.qid = qid;
1344b8fd1f41SHarsh Jain 	wrparam.req = req;
1345b8fd1f41SHarsh Jain 	wrparam.bytes = bytes;
1346b8fd1f41SHarsh Jain 	*skb = create_cipher_wr(&wrparam);
1347b8fd1f41SHarsh Jain 	if (IS_ERR(*skb)) {
1348b8fd1f41SHarsh Jain 		err = PTR_ERR(*skb);
13492f47d580SHarsh Jain 		goto unmap;
1350b8fd1f41SHarsh Jain 	}
13512f47d580SHarsh Jain 	reqctx->processed = bytes;
13522f47d580SHarsh Jain 	reqctx->last_req_len = bytes;
1353bed44d0cSAyush Sawal 	reqctx->partial_req = !!(req->cryptlen - reqctx->processed);
1354b8fd1f41SHarsh Jain 
1355b8fd1f41SHarsh Jain 	return 0;
13562f47d580SHarsh Jain unmap:
13572f47d580SHarsh Jain 	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
1358b8fd1f41SHarsh Jain error:
1359b8fd1f41SHarsh Jain 	return err;
1360324429d7SHariprasad Shenai }
1361324429d7SHariprasad Shenai 
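/* skcipher .encrypt entry point: pick the tx/rx queues for the current
 * CPU and submit the encrypt work request. For CBC requests flagged
 * MAY_SLEEP, wait for the AIO completion before returning.
 */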
13627cea6d3eSArd Biesheuvel static int chcr_aes_encrypt(struct skcipher_request *req)
1363324429d7SHariprasad Shenai {
13647cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
1365bed44d0cSAyush Sawal 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
1366fef4912bSHarsh Jain 	struct chcr_dev *dev = c_ctx(tfm)->dev;
1367b8fd1f41SHarsh Jain 	struct sk_buff *skb = NULL;
1368567be3a5SAyush Sawal 	int err;
13692f47d580SHarsh Jain 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1370567be3a5SAyush Sawal 	struct chcr_context *ctx = c_ctx(tfm);
1371567be3a5SAyush Sawal 	unsigned int cpu;
1372567be3a5SAyush Sawal 
1373567be3a5SAyush Sawal 	cpu = get_cpu();
1374567be3a5SAyush Sawal 	reqctx->txqidx = cpu % ctx->ntxq;
1375567be3a5SAyush Sawal 	reqctx->rxqidx = cpu % ctx->nrxq;
1376567be3a5SAyush Sawal 	put_cpu();
1377324429d7SHariprasad Shenai 
1378fef4912bSHarsh Jain 	err = chcr_inc_wrcount(dev);
1379fef4912bSHarsh Jain 	if (err)
1380fef4912bSHarsh Jain 		return -ENXIO;
1381324429d7SHariprasad Shenai 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1382567be3a5SAyush Sawal 						reqctx->txqidx) &&
1383567be3a5SAyush Sawal 		(!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
1384fef4912bSHarsh Jain 			err = -ENOSPC;
1385fef4912bSHarsh Jain 			goto error;
1386fef4912bSHarsh Jain 	}
1387324429d7SHariprasad Shenai 
1388567be3a5SAyush Sawal 	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
13892f47d580SHarsh Jain 			     &skb, CHCR_ENCRYPT_OP);
1390b8fd1f41SHarsh Jain 	if (err || !skb)
1391b8fd1f41SHarsh Jain 		return  err;
1392324429d7SHariprasad Shenai 	skb->dev = u_ctx->lldi.ports[0];
1393567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
1394324429d7SHariprasad Shenai 	chcr_send_wr(skb);
13951c502e2eSAyush Sawal 	if (get_cryptoalg_subtype(tfm) ==
13961c502e2eSAyush Sawal 		CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
13971c502e2eSAyush Sawal 			CRYPTO_TFM_REQ_MAY_SLEEP) {
1398bed44d0cSAyush Sawal 			reqctx->partial_req = 1;
13991c502e2eSAyush Sawal 			wait_for_completion(&ctx->cbc_aes_aio_done);
14001c502e2eSAyush Sawal 	}
1401567be3a5SAyush Sawal 	return -EINPROGRESS;
1402fef4912bSHarsh Jain error:
1403fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
1404fef4912bSHarsh Jain 	return err;
1405324429d7SHariprasad Shenai }
1406324429d7SHariprasad Shenai 
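/* skcipher .decrypt entry point: pick the tx/rx queues for the current
 * CPU and submit the decrypt work request.
 */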
14077cea6d3eSArd Biesheuvel static int chcr_aes_decrypt(struct skcipher_request *req)
1408324429d7SHariprasad Shenai {
14097cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
1410567be3a5SAyush Sawal 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
14112f47d580SHarsh Jain 	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
1412fef4912bSHarsh Jain 	struct chcr_dev *dev = c_ctx(tfm)->dev;
1413b8fd1f41SHarsh Jain 	struct sk_buff *skb = NULL;
1414567be3a5SAyush Sawal 	int err;
1415567be3a5SAyush Sawal 	struct chcr_context *ctx = c_ctx(tfm);
1416567be3a5SAyush Sawal 	unsigned int cpu;
1417567be3a5SAyush Sawal 
1418567be3a5SAyush Sawal 	cpu = get_cpu();
1419567be3a5SAyush Sawal 	reqctx->txqidx = cpu % ctx->ntxq;
1420567be3a5SAyush Sawal 	reqctx->rxqidx = cpu % ctx->nrxq;
1421567be3a5SAyush Sawal 	put_cpu();
1422324429d7SHariprasad Shenai 
1423fef4912bSHarsh Jain 	err = chcr_inc_wrcount(dev);
1424fef4912bSHarsh Jain 	if (err)
1425fef4912bSHarsh Jain 		return -ENXIO;
1426fef4912bSHarsh Jain 
1427324429d7SHariprasad Shenai 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1428567be3a5SAyush Sawal 						reqctx->txqidx) &&
1429567be3a5SAyush Sawal 		(!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))))
14306faa0f57SHarsh Jain 			return -ENOSPC;
1431567be3a5SAyush Sawal 	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
14322f47d580SHarsh Jain 			     &skb, CHCR_DECRYPT_OP);
1433b8fd1f41SHarsh Jain 	if (err || !skb)
1434b8fd1f41SHarsh Jain 		return err;
1435324429d7SHariprasad Shenai 	skb->dev = u_ctx->lldi.ports[0];
1436567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
1437324429d7SHariprasad Shenai 	chcr_send_wr(skb);
1438567be3a5SAyush Sawal 	return -EINPROGRESS;
1439324429d7SHariprasad Shenai }
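/* Bind the tfm context to a crypto device and cache the adapter's
 * queue geometry.
 */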
1440324429d7SHariprasad Shenai static int chcr_device_init(struct chcr_context *ctx)
1441324429d7SHariprasad Shenai {
144214c19b17SHarsh Jain 	struct uld_ctx *u_ctx = NULL;
1443567be3a5SAyush Sawal 	int txq_perchan, ntxq;
1444567be3a5SAyush Sawal 	int err = 0, rxq_perchan;
1445324429d7SHariprasad Shenai 
1446324429d7SHariprasad Shenai 	if (!ctx->dev) {
144714c19b17SHarsh Jain 		u_ctx = assign_chcr_device();
144814c19b17SHarsh Jain 		if (!u_ctx) {
1449055be686SAyush Sawal 			err = -ENXIO;
1450324429d7SHariprasad Shenai 			pr_err("chcr device assignment fails\n");
1451324429d7SHariprasad Shenai 			goto out;
1452324429d7SHariprasad Shenai 		}
1453fef4912bSHarsh Jain 		ctx->dev = &u_ctx->dev;
1454a1c6fd43SHarsh Jain 		ntxq = u_ctx->lldi.ntxq;
1455324429d7SHariprasad Shenai 		rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
145672a56ca9SHarsh Jain 		txq_perchan = ntxq / u_ctx->lldi.nchan;
1457567be3a5SAyush Sawal 		ctx->ntxq = ntxq;
1458567be3a5SAyush Sawal 		ctx->nrxq = u_ctx->lldi.nrxq;
1459567be3a5SAyush Sawal 		ctx->rxq_perchan = rxq_perchan;
1460567be3a5SAyush Sawal 		ctx->txq_perchan = txq_perchan;
1461324429d7SHariprasad Shenai 	}
1462324429d7SHariprasad Shenai out:
1463324429d7SHariprasad Shenai 	return err;
1464324429d7SHariprasad Shenai }
1465324429d7SHariprasad Shenai 
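/* skcipher .init_tfm: allocate the software fallback cipher, size the
 * request context accordingly and bind the context to a device.
 */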
14667cea6d3eSArd Biesheuvel static int chcr_init_tfm(struct crypto_skcipher *tfm)
1467324429d7SHariprasad Shenai {
14687cea6d3eSArd Biesheuvel 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
14697cea6d3eSArd Biesheuvel 	struct chcr_context *ctx = crypto_skcipher_ctx(tfm);
1470b8fd1f41SHarsh Jain 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
1471b8fd1f41SHarsh Jain 
1472d8c6d188SArd Biesheuvel 	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->base.cra_name, 0,
147328874f26SKees Cook 				CRYPTO_ALG_NEED_FALLBACK);
1474b8fd1f41SHarsh Jain 	if (IS_ERR(ablkctx->sw_cipher)) {
14757cea6d3eSArd Biesheuvel 		pr_err("failed to allocate fallback for %s\n", alg->base.cra_name);
1476b8fd1f41SHarsh Jain 		return PTR_ERR(ablkctx->sw_cipher);
1477b8fd1f41SHarsh Jain 	}
14781c502e2eSAyush Sawal 	init_completion(&ctx->cbc_aes_aio_done);
1479d8c6d188SArd Biesheuvel 	crypto_skcipher_set_reqsize(tfm, sizeof(struct chcr_skcipher_req_ctx) +
1480d8c6d188SArd Biesheuvel 					 crypto_skcipher_reqsize(ablkctx->sw_cipher));
14817cea6d3eSArd Biesheuvel 
14827cea6d3eSArd Biesheuvel 	return chcr_device_init(ctx);
1483324429d7SHariprasad Shenai }
1484324429d7SHariprasad Shenai 
14857cea6d3eSArd Biesheuvel static int chcr_rfc3686_init(struct crypto_skcipher *tfm)
1486b8fd1f41SHarsh Jain {
14877cea6d3eSArd Biesheuvel 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
14887cea6d3eSArd Biesheuvel 	struct chcr_context *ctx = crypto_skcipher_ctx(tfm);
1489b8fd1f41SHarsh Jain 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
1490b8fd1f41SHarsh Jain 
1491b8fd1f41SHarsh Jain 	/* RFC 3686 initialises the IV counter value to 1, so rfc3686(ctr(aes))
1492b8fd1f41SHarsh Jain 	 * cannot be used as the fallback in chcr_handle_cipher_resp
1493b8fd1f41SHarsh Jain 	 */
1494d8c6d188SArd Biesheuvel 	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
149528874f26SKees Cook 				CRYPTO_ALG_NEED_FALLBACK);
1496b8fd1f41SHarsh Jain 	if (IS_ERR(ablkctx->sw_cipher)) {
14977cea6d3eSArd Biesheuvel 		pr_err("failed to allocate fallback for %s\n", alg->base.cra_name);
1498b8fd1f41SHarsh Jain 		return PTR_ERR(ablkctx->sw_cipher);
1499b8fd1f41SHarsh Jain 	}
1500d8c6d188SArd Biesheuvel 	crypto_skcipher_set_reqsize(tfm, sizeof(struct chcr_skcipher_req_ctx) +
1501d8c6d188SArd Biesheuvel 				    crypto_skcipher_reqsize(ablkctx->sw_cipher));
15027cea6d3eSArd Biesheuvel 	return chcr_device_init(ctx);
1503b8fd1f41SHarsh Jain }
1504b8fd1f41SHarsh Jain 
1505b8fd1f41SHarsh Jain 
15067cea6d3eSArd Biesheuvel static void chcr_exit_tfm(struct crypto_skcipher *tfm)
1507b8fd1f41SHarsh Jain {
15087cea6d3eSArd Biesheuvel 	struct chcr_context *ctx = crypto_skcipher_ctx(tfm);
1509b8fd1f41SHarsh Jain 	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
1510b8fd1f41SHarsh Jain 
1511d8c6d188SArd Biesheuvel 	crypto_free_skcipher(ablkctx->sw_cipher);
1512b8fd1f41SHarsh Jain }
1513b8fd1f41SHarsh Jain 
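/* Fill @params with the key-context MAC key size, auth mode and result
 * size corresponding to the requested digest size.
 */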
1514324429d7SHariprasad Shenai static int get_alg_config(struct algo_param *params,
1515324429d7SHariprasad Shenai 			  unsigned int auth_size)
1516324429d7SHariprasad Shenai {
1517324429d7SHariprasad Shenai 	switch (auth_size) {
1518324429d7SHariprasad Shenai 	case SHA1_DIGEST_SIZE:
1519324429d7SHariprasad Shenai 		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
1520324429d7SHariprasad Shenai 		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
1521324429d7SHariprasad Shenai 		params->result_size = SHA1_DIGEST_SIZE;
1522324429d7SHariprasad Shenai 		break;
1523324429d7SHariprasad Shenai 	case SHA224_DIGEST_SIZE:
1524324429d7SHariprasad Shenai 		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
1525324429d7SHariprasad Shenai 		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
1526324429d7SHariprasad Shenai 		params->result_size = SHA256_DIGEST_SIZE;
1527324429d7SHariprasad Shenai 		break;
1528324429d7SHariprasad Shenai 	case SHA256_DIGEST_SIZE:
1529324429d7SHariprasad Shenai 		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
1530324429d7SHariprasad Shenai 		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
1531324429d7SHariprasad Shenai 		params->result_size = SHA256_DIGEST_SIZE;
1532324429d7SHariprasad Shenai 		break;
1533324429d7SHariprasad Shenai 	case SHA384_DIGEST_SIZE:
1534324429d7SHariprasad Shenai 		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
1535324429d7SHariprasad Shenai 		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
1536324429d7SHariprasad Shenai 		params->result_size = SHA512_DIGEST_SIZE;
1537324429d7SHariprasad Shenai 		break;
1538324429d7SHariprasad Shenai 	case SHA512_DIGEST_SIZE:
1539324429d7SHariprasad Shenai 		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
1540324429d7SHariprasad Shenai 		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
1541324429d7SHariprasad Shenai 		params->result_size = SHA512_DIGEST_SIZE;
1542324429d7SHariprasad Shenai 		break;
1543324429d7SHariprasad Shenai 	default:
15441b3eeb87SChristophe JAILLET 		pr_err("ERROR, unsupported digest size\n");
1545324429d7SHariprasad Shenai 		return -EINVAL;
1546324429d7SHariprasad Shenai 	}
1547324429d7SHariprasad Shenai 	return 0;
1548324429d7SHariprasad Shenai }
1549324429d7SHariprasad Shenai 
1550e7922729SHarsh Jain static inline void chcr_free_shash(struct crypto_shash *base_hash)
1551324429d7SHariprasad Shenai {
1552e7922729SHarsh Jain 		crypto_free_shash(base_hash);
1553324429d7SHariprasad Shenai }
1554324429d7SHariprasad Shenai 
1555324429d7SHariprasad Shenai /**
1556358961d1SHarsh Jain  *	create_hash_wr - Create hash work request
155729e5b878SLee Jones  *	@req: Hash request base
155829e5b878SLee Jones  *	@param: Container for create_hash_wr()'s parameters
1559324429d7SHariprasad Shenai  */
1560358961d1SHarsh Jain static struct sk_buff *create_hash_wr(struct ahash_request *req,
1561324429d7SHariprasad Shenai 				      struct hash_wr_param *param)
1562324429d7SHariprasad Shenai {
1563324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
1564324429d7SHariprasad Shenai 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1565567be3a5SAyush Sawal 	struct chcr_context *ctx = h_ctx(tfm);
1566567be3a5SAyush Sawal 	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
1567324429d7SHariprasad Shenai 	struct sk_buff *skb = NULL;
1568567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
1569358961d1SHarsh Jain 	struct chcr_wr *chcr_req;
15702f47d580SHarsh Jain 	struct ulptx_sgl *ulptx;
15715110e655SHarsh Jain 	unsigned int nents = 0, transhdr_len;
15725110e655SHarsh Jain 	unsigned int temp = 0;
1573358961d1SHarsh Jain 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1574358961d1SHarsh Jain 		GFP_ATOMIC;
15752f47d580SHarsh Jain 	struct adapter *adap = padap(h_ctx(tfm)->dev);
15762f47d580SHarsh Jain 	int error = 0;
1577567be3a5SAyush Sawal 	unsigned int rx_channel_id = req_ctx->rxqidx / ctx->rxq_perchan;
1578324429d7SHariprasad Shenai 
157916a9874fSAyush Sawal 	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
15805110e655SHarsh Jain 	transhdr_len = HASH_TRANSHDR_SIZE(param->kctx_len);
15815110e655SHarsh Jain 	req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len +
15825110e655SHarsh Jain 				param->sg_len) <= SGE_MAX_WR_LEN;
15835110e655SHarsh Jain 	nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len,
15845110e655SHarsh Jain 		      CHCR_SRC_SG_SIZE, req_ctx->hctx_wr.src_ofst);
15852f47d580SHarsh Jain 	nents += param->bfr_len ? 1 : 0;
15865110e655SHarsh Jain 	transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len +
15875110e655SHarsh Jain 				param->sg_len, 16) : (sgl_len(nents) * 8);
1588125d01caSHarsh Jain 	transhdr_len = roundup(transhdr_len, 16);
1589324429d7SHariprasad Shenai 
15905110e655SHarsh Jain 	skb = alloc_skb(transhdr_len, flags);
15912f47d580SHarsh Jain 	if (!skb)
15922f47d580SHarsh Jain 		return ERR_PTR(-ENOMEM);
1593de77b966Syuan linyu 	chcr_req = __skb_put_zero(skb, transhdr_len);
1594324429d7SHariprasad Shenai 
1595358961d1SHarsh Jain 	chcr_req->sec_cpl.op_ivinsrtofst =
1596567be3a5SAyush Sawal 		FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 0);
1597567be3a5SAyush Sawal 
1598358961d1SHarsh Jain 	chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);
1599324429d7SHariprasad Shenai 
1600358961d1SHarsh Jain 	chcr_req->sec_cpl.aadstart_cipherstop_hi =
1601324429d7SHariprasad Shenai 		FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
1602358961d1SHarsh Jain 	chcr_req->sec_cpl.cipherstop_lo_authinsert =
1603324429d7SHariprasad Shenai 		FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
1604358961d1SHarsh Jain 	chcr_req->sec_cpl.seqno_numivs =
1605324429d7SHariprasad Shenai 		FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
1606358961d1SHarsh Jain 					 param->opad_needed, 0);
1607324429d7SHariprasad Shenai 
1608358961d1SHarsh Jain 	chcr_req->sec_cpl.ivgen_hdrlen =
1609324429d7SHariprasad Shenai 		FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);
1610324429d7SHariprasad Shenai 
1611358961d1SHarsh Jain 	memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
1612358961d1SHarsh Jain 	       param->alg_prm.result_size);
1613324429d7SHariprasad Shenai 
1614324429d7SHariprasad Shenai 	if (param->opad_needed)
1615358961d1SHarsh Jain 		memcpy(chcr_req->key_ctx.key +
1616358961d1SHarsh Jain 		       ((param->alg_prm.result_size <= 32) ? 32 :
1617324429d7SHariprasad Shenai 			CHCR_HASH_MAX_DIGEST_SIZE),
1618324429d7SHariprasad Shenai 		       hmacctx->opad, param->alg_prm.result_size);
1619324429d7SHariprasad Shenai 
1620358961d1SHarsh Jain 	chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
1621324429d7SHariprasad Shenai 					    param->alg_prm.mk_size, 0,
1622324429d7SHariprasad Shenai 					    param->opad_needed,
16235110e655SHarsh Jain 					    ((param->kctx_len +
1624358961d1SHarsh Jain 					     sizeof(chcr_req->key_ctx)) >> 4));
1625358961d1SHarsh Jain 	chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);
16265110e655SHarsh Jain 	ulptx = (struct ulptx_sgl *)((u8 *)(chcr_req + 1) + param->kctx_len +
16272f47d580SHarsh Jain 				     DUMMY_BYTES);
16282f47d580SHarsh Jain 	if (param->bfr_len != 0) {
16295110e655SHarsh Jain 		req_ctx->hctx_wr.dma_addr =
16305110e655SHarsh Jain 			dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
16315110e655SHarsh Jain 				       param->bfr_len, DMA_TO_DEVICE);
16322f47d580SHarsh Jain 		if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
16335110e655SHarsh Jain 				       req_ctx->hctx_wr.dma_addr)) {
16342f47d580SHarsh Jain 			error = -ENOMEM;
16352f47d580SHarsh Jain 			goto err;
16362f47d580SHarsh Jain 		}
16375110e655SHarsh Jain 		req_ctx->hctx_wr.dma_len = param->bfr_len;
16382f47d580SHarsh Jain 	} else {
16395110e655SHarsh Jain 		req_ctx->hctx_wr.dma_addr = 0;
16402f47d580SHarsh Jain 	}
16412f47d580SHarsh Jain 	chcr_add_hash_src_ent(req, ulptx, param);
16422f47d580SHarsh Jain 	/* Request up to max wr size */
16435110e655SHarsh Jain 	temp = param->kctx_len + DUMMY_BYTES + (req_ctx->hctx_wr.imm ?
16445110e655SHarsh Jain 				(param->sg_len + param->bfr_len) : 0);
1645ee0863baSHarsh Jain 	atomic_inc(&adap->chcr_stats.digest_rqst);
16465110e655SHarsh Jain 	create_wreq(h_ctx(tfm), chcr_req, &req->base, req_ctx->hctx_wr.imm,
16475110e655SHarsh Jain 		    param->hash_size, transhdr_len,
16482f47d580SHarsh Jain 		    temp,  0);
16495110e655SHarsh Jain 	req_ctx->hctx_wr.skb = skb;
1650324429d7SHariprasad Shenai 	return skb;
16512f47d580SHarsh Jain err:
16522f47d580SHarsh Jain 	kfree_skb(skb);
16532f47d580SHarsh Jain 	return  ERR_PTR(error);
1654324429d7SHariprasad Shenai }
1655324429d7SHariprasad Shenai 
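/* ahash .update: buffer data smaller than one block; otherwise build and
 * send a partial-hash work request for the block-aligned portion and keep
 * the remainder in the request buffer.
 */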
1656324429d7SHariprasad Shenai static int chcr_ahash_update(struct ahash_request *req)
1657324429d7SHariprasad Shenai {
1658324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
1659324429d7SHariprasad Shenai 	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
1660567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1661567be3a5SAyush Sawal 	struct chcr_context *ctx = h_ctx(rtfm);
1662fef4912bSHarsh Jain 	struct chcr_dev *dev = h_ctx(rtfm)->dev;
1663324429d7SHariprasad Shenai 	struct sk_buff *skb;
1664324429d7SHariprasad Shenai 	u8 remainder = 0, bs;
1665324429d7SHariprasad Shenai 	unsigned int nbytes = req->nbytes;
1666324429d7SHariprasad Shenai 	struct hash_wr_param params;
1667567be3a5SAyush Sawal 	int error;
1668567be3a5SAyush Sawal 	unsigned int cpu;
1669567be3a5SAyush Sawal 
1670567be3a5SAyush Sawal 	cpu = get_cpu();
1671567be3a5SAyush Sawal 	req_ctx->txqidx = cpu % ctx->ntxq;
1672567be3a5SAyush Sawal 	req_ctx->rxqidx = cpu % ctx->nrxq;
1673567be3a5SAyush Sawal 	put_cpu();
1674324429d7SHariprasad Shenai 
1675324429d7SHariprasad Shenai 	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
1676324429d7SHariprasad Shenai 
167744fce12aSHarsh Jain 	if (nbytes + req_ctx->reqlen >= bs) {
167844fce12aSHarsh Jain 		remainder = (nbytes + req_ctx->reqlen) % bs;
167944fce12aSHarsh Jain 		nbytes = nbytes + req_ctx->reqlen - remainder;
1680324429d7SHariprasad Shenai 	} else {
168144fce12aSHarsh Jain 		sg_pcopy_to_buffer(req->src, sg_nents(req->src), req_ctx->reqbfr
168244fce12aSHarsh Jain 				   + req_ctx->reqlen, nbytes, 0);
168344fce12aSHarsh Jain 		req_ctx->reqlen += nbytes;
1684324429d7SHariprasad Shenai 		return 0;
1685324429d7SHariprasad Shenai 	}
1686fef4912bSHarsh Jain 	error = chcr_inc_wrcount(dev);
1687fef4912bSHarsh Jain 	if (error)
1688fef4912bSHarsh Jain 		return -ENXIO;
1689fef4912bSHarsh Jain 	/* Detach state for CHCR means lldi or padap is freed. Increasing
1690fef4912bSHarsh Jain 	 * the inflight count for dev guarantees that lldi and padap are valid
1691fef4912bSHarsh Jain 	 */
1692fef4912bSHarsh Jain 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1693567be3a5SAyush Sawal 						req_ctx->txqidx) &&
1694567be3a5SAyush Sawal 		(!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
1695fef4912bSHarsh Jain 			error = -ENOSPC;
1696fef4912bSHarsh Jain 			goto err;
1697fef4912bSHarsh Jain 	}
1698fef4912bSHarsh Jain 
16995110e655SHarsh Jain 	chcr_init_hctx_per_wr(req_ctx);
17002f47d580SHarsh Jain 	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1701fef4912bSHarsh Jain 	if (error) {
1702fef4912bSHarsh Jain 		error = -ENOMEM;
1703fef4912bSHarsh Jain 		goto err;
1704fef4912bSHarsh Jain 	}
17055110e655SHarsh Jain 	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
17065110e655SHarsh Jain 	params.kctx_len = roundup(params.alg_prm.result_size, 16);
17075110e655SHarsh Jain 	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
17085110e655SHarsh Jain 				     HASH_SPACE_LEFT(params.kctx_len), 0);
17095110e655SHarsh Jain 	if (params.sg_len > req->nbytes)
17105110e655SHarsh Jain 		params.sg_len = req->nbytes;
17115110e655SHarsh Jain 	params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs) -
17125110e655SHarsh Jain 			req_ctx->reqlen;
1713324429d7SHariprasad Shenai 	params.opad_needed = 0;
1714324429d7SHariprasad Shenai 	params.more = 1;
1715324429d7SHariprasad Shenai 	params.last = 0;
171644fce12aSHarsh Jain 	params.bfr_len = req_ctx->reqlen;
1717324429d7SHariprasad Shenai 	params.scmd1 = 0;
17185110e655SHarsh Jain 	req_ctx->hctx_wr.srcsg = req->src;
17195110e655SHarsh Jain 
17205110e655SHarsh Jain 	params.hash_size = params.alg_prm.result_size;
1721324429d7SHariprasad Shenai 	req_ctx->data_len += params.sg_len + params.bfr_len;
1722358961d1SHarsh Jain 	skb = create_hash_wr(req, &params);
17232f47d580SHarsh Jain 	if (IS_ERR(skb)) {
17242f47d580SHarsh Jain 		error = PTR_ERR(skb);
17252f47d580SHarsh Jain 		goto unmap;
17262f47d580SHarsh Jain 	}
1727324429d7SHariprasad Shenai 
17285110e655SHarsh Jain 	req_ctx->hctx_wr.processed += params.sg_len;
172944fce12aSHarsh Jain 	if (remainder) {
173044fce12aSHarsh Jain 		/* Swap buffers */
1731abfa2b37SGustavo A. R. Silva 		swap(req_ctx->reqbfr, req_ctx->skbfr);
1732324429d7SHariprasad Shenai 		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
173344fce12aSHarsh Jain 				   req_ctx->reqbfr, remainder, req->nbytes -
1734324429d7SHariprasad Shenai 				   remainder);
173544fce12aSHarsh Jain 	}
173644fce12aSHarsh Jain 	req_ctx->reqlen = remainder;
1737324429d7SHariprasad Shenai 	skb->dev = u_ctx->lldi.ports[0];
1738567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
1739324429d7SHariprasad Shenai 	chcr_send_wr(skb);
1740567be3a5SAyush Sawal 	return -EINPROGRESS;
17412f47d580SHarsh Jain unmap:
17422f47d580SHarsh Jain 	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1743fef4912bSHarsh Jain err:
1744fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
17452f47d580SHarsh Jain 	return error;
1746324429d7SHariprasad Shenai }
1747324429d7SHariprasad Shenai 
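/* Construct the final hash block in software: 0x80 padding followed by
 * the message bit length (scmd1 << 3) in the last 8 bytes.
 */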
1748324429d7SHariprasad Shenai static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
1749324429d7SHariprasad Shenai {
1750324429d7SHariprasad Shenai 	memset(bfr_ptr, 0, bs);
1751324429d7SHariprasad Shenai 	*bfr_ptr = 0x80;
1752324429d7SHariprasad Shenai 	if (bs == 64)
1753324429d7SHariprasad Shenai 		*(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1  << 3);
1754324429d7SHariprasad Shenai 	else
1755324429d7SHariprasad Shenai 		*(__be64 *)(bfr_ptr + 120) =  cpu_to_be64(scmd1  << 3);
1756324429d7SHariprasad Shenai }
1757324429d7SHariprasad Shenai 
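/* ahash .final: send the buffered data as the last work request,
 * constructing the padded final block in software when the buffer
 * is empty.
 */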
1758324429d7SHariprasad Shenai static int chcr_ahash_final(struct ahash_request *req)
1759324429d7SHariprasad Shenai {
1760324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
1761324429d7SHariprasad Shenai 	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
1762fef4912bSHarsh Jain 	struct chcr_dev *dev = h_ctx(rtfm)->dev;
1763324429d7SHariprasad Shenai 	struct hash_wr_param params;
1764324429d7SHariprasad Shenai 	struct sk_buff *skb;
1765567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1766567be3a5SAyush Sawal 	struct chcr_context *ctx = h_ctx(rtfm);
1767324429d7SHariprasad Shenai 	u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
17688a656a48SColin Ian King 	int error;
1769567be3a5SAyush Sawal 	unsigned int cpu;
1770567be3a5SAyush Sawal 
1771567be3a5SAyush Sawal 	cpu = get_cpu();
1772567be3a5SAyush Sawal 	req_ctx->txqidx = cpu % ctx->ntxq;
1773567be3a5SAyush Sawal 	req_ctx->rxqidx = cpu % ctx->nrxq;
1774567be3a5SAyush Sawal 	put_cpu();
1775fef4912bSHarsh Jain 
1776fef4912bSHarsh Jain 	error = chcr_inc_wrcount(dev);
1777fef4912bSHarsh Jain 	if (error)
1778fef4912bSHarsh Jain 		return -ENXIO;
1779324429d7SHariprasad Shenai 
17805110e655SHarsh Jain 	chcr_init_hctx_per_wr(req_ctx);
1781324429d7SHariprasad Shenai 	if (is_hmac(crypto_ahash_tfm(rtfm)))
1782324429d7SHariprasad Shenai 		params.opad_needed = 1;
1783324429d7SHariprasad Shenai 	else
1784324429d7SHariprasad Shenai 		params.opad_needed = 0;
1785324429d7SHariprasad Shenai 	params.sg_len = 0;
17865110e655SHarsh Jain 	req_ctx->hctx_wr.isfinal = 1;
1787324429d7SHariprasad Shenai 	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
17885110e655SHarsh Jain 	params.kctx_len = roundup(params.alg_prm.result_size, 16);
17895110e655SHarsh Jain 	if (is_hmac(crypto_ahash_tfm(rtfm))) {
17905110e655SHarsh Jain 		params.opad_needed = 1;
17915110e655SHarsh Jain 		params.kctx_len *= 2;
17925110e655SHarsh Jain 	} else {
17935110e655SHarsh Jain 		params.opad_needed = 0;
17945110e655SHarsh Jain 	}
17955110e655SHarsh Jain 
17965110e655SHarsh Jain 	req_ctx->hctx_wr.result = 1;
179744fce12aSHarsh Jain 	params.bfr_len = req_ctx->reqlen;
1798324429d7SHariprasad Shenai 	req_ctx->data_len += params.bfr_len + params.sg_len;
17995110e655SHarsh Jain 	req_ctx->hctx_wr.srcsg = req->src;
180044fce12aSHarsh Jain 	if (req_ctx->reqlen == 0) {
180144fce12aSHarsh Jain 		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
1802324429d7SHariprasad Shenai 		params.last = 0;
1803324429d7SHariprasad Shenai 		params.more = 1;
1804324429d7SHariprasad Shenai 		params.scmd1 = 0;
1805324429d7SHariprasad Shenai 		params.bfr_len = bs;
1806324429d7SHariprasad Shenai 
1807324429d7SHariprasad Shenai 	} else {
1808324429d7SHariprasad Shenai 		params.scmd1 = req_ctx->data_len;
1809324429d7SHariprasad Shenai 		params.last = 1;
1810324429d7SHariprasad Shenai 		params.more = 0;
1811324429d7SHariprasad Shenai 	}
18125110e655SHarsh Jain 	params.hash_size = crypto_ahash_digestsize(rtfm);
1813358961d1SHarsh Jain 	skb = create_hash_wr(req, &params);
1814fef4912bSHarsh Jain 	if (IS_ERR(skb)) {
1815fef4912bSHarsh Jain 		error = PTR_ERR(skb);
1816fef4912bSHarsh Jain 		goto err;
1817fef4912bSHarsh Jain 	}
18185110e655SHarsh Jain 	req_ctx->reqlen = 0;
1819324429d7SHariprasad Shenai 	skb->dev = u_ctx->lldi.ports[0];
1820567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
1821324429d7SHariprasad Shenai 	chcr_send_wr(skb);
1822324429d7SHariprasad Shenai 	return -EINPROGRESS;
1823fef4912bSHarsh Jain err:
1824fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
1825fef4912bSHarsh Jain 	return error;
1826324429d7SHariprasad Shenai }
1827324429d7SHariprasad Shenai 
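/* ahash .finup: hash the remaining source data together with any buffered
 * bytes. If everything fits in one work request it is sent as the final
 * one; otherwise an intermediate request is issued first.
 */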
1828324429d7SHariprasad Shenai static int chcr_ahash_finup(struct ahash_request *req)
1829324429d7SHariprasad Shenai {
1830324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
1831324429d7SHariprasad Shenai 	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
1832fef4912bSHarsh Jain 	struct chcr_dev *dev = h_ctx(rtfm)->dev;
1833567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1834567be3a5SAyush Sawal 	struct chcr_context *ctx = h_ctx(rtfm);
1835324429d7SHariprasad Shenai 	struct sk_buff *skb;
1836324429d7SHariprasad Shenai 	struct hash_wr_param params;
1837324429d7SHariprasad Shenai 	u8  bs;
1838567be3a5SAyush Sawal 	int error;
1839567be3a5SAyush Sawal 	unsigned int cpu;
1840567be3a5SAyush Sawal 
1841567be3a5SAyush Sawal 	cpu = get_cpu();
1842567be3a5SAyush Sawal 	req_ctx->txqidx = cpu % ctx->ntxq;
1843567be3a5SAyush Sawal 	req_ctx->rxqidx = cpu % ctx->nrxq;
1844567be3a5SAyush Sawal 	put_cpu();
1845324429d7SHariprasad Shenai 
1846324429d7SHariprasad Shenai 	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
1847fef4912bSHarsh Jain 	error = chcr_inc_wrcount(dev);
1848fef4912bSHarsh Jain 	if (error)
1849fef4912bSHarsh Jain 		return -ENXIO;
1850324429d7SHariprasad Shenai 
1851324429d7SHariprasad Shenai 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1852567be3a5SAyush Sawal 						req_ctx->txqidx) &&
1853567be3a5SAyush Sawal 		(!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
1854fef4912bSHarsh Jain 			error = -ENOSPC;
1855fef4912bSHarsh Jain 			goto err;
1856fef4912bSHarsh Jain 	}
18575110e655SHarsh Jain 	chcr_init_hctx_per_wr(req_ctx);
18585110e655SHarsh Jain 	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1859fef4912bSHarsh Jain 	if (error) {
1860fef4912bSHarsh Jain 		error = -ENOMEM;
1861fef4912bSHarsh Jain 		goto err;
1862fef4912bSHarsh Jain 	}
1863324429d7SHariprasad Shenai 
1864324429d7SHariprasad Shenai 	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
18655110e655SHarsh Jain 	params.kctx_len = roundup(params.alg_prm.result_size, 16);
18665110e655SHarsh Jain 	if (is_hmac(crypto_ahash_tfm(rtfm))) {
18675110e655SHarsh Jain 		params.kctx_len *= 2;
18685110e655SHarsh Jain 		params.opad_needed = 1;
18695110e655SHarsh Jain 	} else {
18705110e655SHarsh Jain 		params.opad_needed = 0;
18715110e655SHarsh Jain 	}
18725110e655SHarsh Jain 
18735110e655SHarsh Jain 	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
18745110e655SHarsh Jain 				    HASH_SPACE_LEFT(params.kctx_len), 0);
18755110e655SHarsh Jain 	if (params.sg_len < req->nbytes) {
18765110e655SHarsh Jain 		if (is_hmac(crypto_ahash_tfm(rtfm))) {
18775110e655SHarsh Jain 			params.kctx_len /= 2;
18785110e655SHarsh Jain 			params.opad_needed = 0;
18795110e655SHarsh Jain 		}
18805110e655SHarsh Jain 		params.last = 0;
18815110e655SHarsh Jain 		params.more = 1;
18825110e655SHarsh Jain 		params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs)
18835110e655SHarsh Jain 					- req_ctx->reqlen;
18845110e655SHarsh Jain 		params.hash_size = params.alg_prm.result_size;
18855110e655SHarsh Jain 		params.scmd1 = 0;
18865110e655SHarsh Jain 	} else {
18875110e655SHarsh Jain 		params.last = 1;
18885110e655SHarsh Jain 		params.more = 0;
18895110e655SHarsh Jain 		params.sg_len = req->nbytes;
18905110e655SHarsh Jain 		params.hash_size = crypto_ahash_digestsize(rtfm);
18915110e655SHarsh Jain 		params.scmd1 = req_ctx->data_len + req_ctx->reqlen +
18925110e655SHarsh Jain 				params.sg_len;
18935110e655SHarsh Jain 	}
18945110e655SHarsh Jain 	params.bfr_len = req_ctx->reqlen;
1895324429d7SHariprasad Shenai 	req_ctx->data_len += params.bfr_len + params.sg_len;
18965110e655SHarsh Jain 	req_ctx->hctx_wr.result = 1;
18975110e655SHarsh Jain 	req_ctx->hctx_wr.srcsg = req->src;
189844fce12aSHarsh Jain 	if ((req_ctx->reqlen + req->nbytes) == 0) {
189944fce12aSHarsh Jain 		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
1900324429d7SHariprasad Shenai 		params.last = 0;
1901324429d7SHariprasad Shenai 		params.more = 1;
1902324429d7SHariprasad Shenai 		params.scmd1 = 0;
1903324429d7SHariprasad Shenai 		params.bfr_len = bs;
1904324429d7SHariprasad Shenai 	}
1905358961d1SHarsh Jain 	skb = create_hash_wr(req, &params);
19062f47d580SHarsh Jain 	if (IS_ERR(skb)) {
19072f47d580SHarsh Jain 		error = PTR_ERR(skb);
19082f47d580SHarsh Jain 		goto unmap;
19092f47d580SHarsh Jain 	}
19105110e655SHarsh Jain 	req_ctx->reqlen = 0;
19115110e655SHarsh Jain 	req_ctx->hctx_wr.processed += params.sg_len;
1912324429d7SHariprasad Shenai 	skb->dev = u_ctx->lldi.ports[0];
1913567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
1914324429d7SHariprasad Shenai 	chcr_send_wr(skb);
1915567be3a5SAyush Sawal 	return -EINPROGRESS;
19162f47d580SHarsh Jain unmap:
19172f47d580SHarsh Jain 	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1918fef4912bSHarsh Jain err:
1919fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
19202f47d580SHarsh Jain 	return error;
1921324429d7SHariprasad Shenai }
1922324429d7SHariprasad Shenai 
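/* ahash .digest: init, update and final in one call, split into as many
 * work requests as the hardware limits require.
 */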
1923324429d7SHariprasad Shenai static int chcr_ahash_digest(struct ahash_request *req)
1924324429d7SHariprasad Shenai {
1925324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
1926324429d7SHariprasad Shenai 	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
1927fef4912bSHarsh Jain 	struct chcr_dev *dev = h_ctx(rtfm)->dev;
1928567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
1929567be3a5SAyush Sawal 	struct chcr_context *ctx = h_ctx(rtfm);
1930324429d7SHariprasad Shenai 	struct sk_buff *skb;
1931324429d7SHariprasad Shenai 	struct hash_wr_param params;
1932324429d7SHariprasad Shenai 	u8  bs;
1933567be3a5SAyush Sawal 	int error;
1934567be3a5SAyush Sawal 	unsigned int cpu;
1935567be3a5SAyush Sawal 
1936567be3a5SAyush Sawal 	cpu = get_cpu();
1937567be3a5SAyush Sawal 	req_ctx->txqidx = cpu % ctx->ntxq;
1938567be3a5SAyush Sawal 	req_ctx->rxqidx = cpu % ctx->nrxq;
1939567be3a5SAyush Sawal 	put_cpu();
1940324429d7SHariprasad Shenai 
1941324429d7SHariprasad Shenai 	rtfm->init(req);
1942324429d7SHariprasad Shenai 	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
1943fef4912bSHarsh Jain 	error = chcr_inc_wrcount(dev);
1944fef4912bSHarsh Jain 	if (error)
1945fef4912bSHarsh Jain 		return -ENXIO;
1946324429d7SHariprasad Shenai 
1947324429d7SHariprasad Shenai 	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
1948567be3a5SAyush Sawal 						req_ctx->txqidx) &&
1949567be3a5SAyush Sawal 		(!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
1950fef4912bSHarsh Jain 			error = -ENOSPC;
1951fef4912bSHarsh Jain 			goto err;
1952fef4912bSHarsh Jain 	}
1953324429d7SHariprasad Shenai 
19545110e655SHarsh Jain 	chcr_init_hctx_per_wr(req_ctx);
19552f47d580SHarsh Jain 	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
1956fef4912bSHarsh Jain 	if (error) {
1957fef4912bSHarsh Jain 		error = -ENOMEM;
1958fef4912bSHarsh Jain 		goto err;
1959fef4912bSHarsh Jain 	}
1960324429d7SHariprasad Shenai 
1961324429d7SHariprasad Shenai 	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
19625110e655SHarsh Jain 	params.kctx_len = roundup(params.alg_prm.result_size, 16);
19635110e655SHarsh Jain 	if (is_hmac(crypto_ahash_tfm(rtfm))) {
19645110e655SHarsh Jain 		params.kctx_len *= 2;
19655110e655SHarsh Jain 		params.opad_needed = 1;
19665110e655SHarsh Jain 	} else {
19675110e655SHarsh Jain 		params.opad_needed = 0;
19685110e655SHarsh Jain 	}
19695110e655SHarsh Jain 	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
19705110e655SHarsh Jain 				HASH_SPACE_LEFT(params.kctx_len), 0);
19715110e655SHarsh Jain 	if (params.sg_len < req->nbytes) {
19725110e655SHarsh Jain 		if (is_hmac(crypto_ahash_tfm(rtfm))) {
19735110e655SHarsh Jain 			params.kctx_len /= 2;
19745110e655SHarsh Jain 			params.opad_needed = 0;
19755110e655SHarsh Jain 		}
19765110e655SHarsh Jain 		params.last = 0;
19775110e655SHarsh Jain 		params.more = 1;
19785110e655SHarsh Jain 		params.scmd1 = 0;
19795110e655SHarsh Jain 		params.sg_len = rounddown(params.sg_len, bs);
19805110e655SHarsh Jain 		params.hash_size = params.alg_prm.result_size;
19815110e655SHarsh Jain 	} else {
19825110e655SHarsh Jain 		params.sg_len = req->nbytes;
19835110e655SHarsh Jain 		params.hash_size = crypto_ahash_digestsize(rtfm);
19845110e655SHarsh Jain 		params.last = 1;
19855110e655SHarsh Jain 		params.more = 0;
19865110e655SHarsh Jain 		params.scmd1 = req->nbytes + req_ctx->data_len;
19875110e655SHarsh Jain 
19885110e655SHarsh Jain 	}
19895110e655SHarsh Jain 	params.bfr_len = 0;
19905110e655SHarsh Jain 	req_ctx->hctx_wr.result = 1;
19915110e655SHarsh Jain 	req_ctx->hctx_wr.srcsg = req->src;
1992324429d7SHariprasad Shenai 	req_ctx->data_len += params.bfr_len + params.sg_len;
1993324429d7SHariprasad Shenai 
199444fce12aSHarsh Jain 	if (req->nbytes == 0) {
199502f58e5bSDevulapally Shiva Krishna 		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
1996324429d7SHariprasad Shenai 		params.more = 1;
1997324429d7SHariprasad Shenai 		params.bfr_len = bs;
1998324429d7SHariprasad Shenai 	}
1999324429d7SHariprasad Shenai 
2000358961d1SHarsh Jain 	skb = create_hash_wr(req, &params);
20012f47d580SHarsh Jain 	if (IS_ERR(skb)) {
20022f47d580SHarsh Jain 		error = PTR_ERR(skb);
20032f47d580SHarsh Jain 		goto unmap;
20042f47d580SHarsh Jain 	}
20055110e655SHarsh Jain 	req_ctx->hctx_wr.processed += params.sg_len;
2006324429d7SHariprasad Shenai 	skb->dev = u_ctx->lldi.ports[0];
2007567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
2008324429d7SHariprasad Shenai 	chcr_send_wr(skb);
2009567be3a5SAyush Sawal 	return -EINPROGRESS;
20102f47d580SHarsh Jain unmap:
20112f47d580SHarsh Jain 	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
2012fef4912bSHarsh Jain err:
2013fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
20142f47d580SHarsh Jain 	return error;
2015324429d7SHariprasad Shenai }
2016324429d7SHariprasad Shenai 
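/* Send the next work request for a hash request that could not be
 * completed in a single work request.
 */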
20176f76672bSHarsh Jain static int chcr_ahash_continue(struct ahash_request *req)
20186f76672bSHarsh Jain {
20196f76672bSHarsh Jain 	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
20206f76672bSHarsh Jain 	struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;
20216f76672bSHarsh Jain 	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
2022567be3a5SAyush Sawal 	struct chcr_context *ctx = h_ctx(rtfm);
2023567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
20246f76672bSHarsh Jain 	struct sk_buff *skb;
20256f76672bSHarsh Jain 	struct hash_wr_param params;
20266f76672bSHarsh Jain 	u8  bs;
20276f76672bSHarsh Jain 	int error;
2028567be3a5SAyush Sawal 	unsigned int cpu;
2029567be3a5SAyush Sawal 
2030567be3a5SAyush Sawal 	cpu = get_cpu();
2031567be3a5SAyush Sawal 	reqctx->txqidx = cpu % ctx->ntxq;
2032567be3a5SAyush Sawal 	reqctx->rxqidx = cpu % ctx->nrxq;
2033567be3a5SAyush Sawal 	put_cpu();
20346f76672bSHarsh Jain 
20356f76672bSHarsh Jain 	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
20366f76672bSHarsh Jain 	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
20376f76672bSHarsh Jain 	params.kctx_len = roundup(params.alg_prm.result_size, 16);
20386f76672bSHarsh Jain 	if (is_hmac(crypto_ahash_tfm(rtfm))) {
20396f76672bSHarsh Jain 		params.kctx_len *= 2;
20406f76672bSHarsh Jain 		params.opad_needed = 1;
20416f76672bSHarsh Jain 	} else {
20426f76672bSHarsh Jain 		params.opad_needed = 0;
20436f76672bSHarsh Jain 	}
20446f76672bSHarsh Jain 	params.sg_len = chcr_hash_ent_in_wr(hctx_wr->srcsg, 0,
20456f76672bSHarsh Jain 					    HASH_SPACE_LEFT(params.kctx_len),
20466f76672bSHarsh Jain 					    hctx_wr->src_ofst);
20476f76672bSHarsh Jain 	if ((params.sg_len + hctx_wr->processed) > req->nbytes)
20486f76672bSHarsh Jain 		params.sg_len = req->nbytes - hctx_wr->processed;
20496f76672bSHarsh Jain 	if (!hctx_wr->result ||
20506f76672bSHarsh Jain 	    ((params.sg_len + hctx_wr->processed) < req->nbytes)) {
20516f76672bSHarsh Jain 		if (is_hmac(crypto_ahash_tfm(rtfm))) {
20526f76672bSHarsh Jain 			params.kctx_len /= 2;
20536f76672bSHarsh Jain 			params.opad_needed = 0;
20546f76672bSHarsh Jain 		}
20556f76672bSHarsh Jain 		params.last = 0;
20566f76672bSHarsh Jain 		params.more = 1;
20576f76672bSHarsh Jain 		params.sg_len = rounddown(params.sg_len, bs);
20586f76672bSHarsh Jain 		params.hash_size = params.alg_prm.result_size;
20596f76672bSHarsh Jain 		params.scmd1 = 0;
20606f76672bSHarsh Jain 	} else {
20616f76672bSHarsh Jain 		params.last = 1;
20626f76672bSHarsh Jain 		params.more = 0;
20636f76672bSHarsh Jain 		params.hash_size = crypto_ahash_digestsize(rtfm);
20646f76672bSHarsh Jain 		params.scmd1 = reqctx->data_len + params.sg_len;
20656f76672bSHarsh Jain 	}
20666f76672bSHarsh Jain 	params.bfr_len = 0;
20676f76672bSHarsh Jain 	reqctx->data_len += params.sg_len;
20686f76672bSHarsh Jain 	skb = create_hash_wr(req, &params);
20696f76672bSHarsh Jain 	if (IS_ERR(skb)) {
20706f76672bSHarsh Jain 		error = PTR_ERR(skb);
20716f76672bSHarsh Jain 		goto err;
20726f76672bSHarsh Jain 	}
20736f76672bSHarsh Jain 	hctx_wr->processed += params.sg_len;
20746f76672bSHarsh Jain 	skb->dev = u_ctx->lldi.ports[0];
2075567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
20766f76672bSHarsh Jain 	chcr_send_wr(skb);
20776f76672bSHarsh Jain 	return 0;
20786f76672bSHarsh Jain err:
20796f76672bSHarsh Jain 	return error;
20806f76672bSHarsh Jain }
20816f76672bSHarsh Jain 
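/*
 * Completion handler for hash work requests: copy out the final digest (or
 * stash the partial hash) and, if source data is still pending, issue the
 * next work request via chcr_ahash_continue().
 */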
20826f76672bSHarsh Jain static inline void chcr_handle_ahash_resp(struct ahash_request *req,
20836f76672bSHarsh Jain 					  unsigned char *input,
20846f76672bSHarsh Jain 					  int err)
20856f76672bSHarsh Jain {
20866f76672bSHarsh Jain 	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
20876f76672bSHarsh Jain 	struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;
20886f76672bSHarsh Jain 	int digestsize, updated_digestsize;
20896f76672bSHarsh Jain 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
20906f76672bSHarsh Jain 	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
2091fef4912bSHarsh Jain 	struct chcr_dev *dev = h_ctx(tfm)->dev;
20926f76672bSHarsh Jain 
20936f76672bSHarsh Jain 	if (input == NULL)
20946f76672bSHarsh Jain 		goto out;
20956f76672bSHarsh Jain 	digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(req));
20966f76672bSHarsh Jain 	updated_digestsize = digestsize;
20976f76672bSHarsh Jain 	if (digestsize == SHA224_DIGEST_SIZE)
20986f76672bSHarsh Jain 		updated_digestsize = SHA256_DIGEST_SIZE;
20996f76672bSHarsh Jain 	else if (digestsize == SHA384_DIGEST_SIZE)
21006f76672bSHarsh Jain 		updated_digestsize = SHA512_DIGEST_SIZE;
21016f76672bSHarsh Jain 
21026f76672bSHarsh Jain 	if (hctx_wr->dma_addr) {
21036f76672bSHarsh Jain 		dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
21046f76672bSHarsh Jain 				 hctx_wr->dma_len, DMA_TO_DEVICE);
21056f76672bSHarsh Jain 		hctx_wr->dma_addr = 0;
21066f76672bSHarsh Jain 	}
21076f76672bSHarsh Jain 	if (hctx_wr->isfinal || ((hctx_wr->processed + reqctx->reqlen) ==
21086f76672bSHarsh Jain 				 req->nbytes)) {
21096f76672bSHarsh Jain 		if (hctx_wr->result == 1) {
21106f76672bSHarsh Jain 			hctx_wr->result = 0;
21116f76672bSHarsh Jain 			memcpy(req->result, input + sizeof(struct cpl_fw6_pld),
21126f76672bSHarsh Jain 			       digestsize);
21136f76672bSHarsh Jain 		} else {
21146f76672bSHarsh Jain 			memcpy(reqctx->partial_hash,
21156f76672bSHarsh Jain 			       input + sizeof(struct cpl_fw6_pld),
21166f76672bSHarsh Jain 			       updated_digestsize);
21176f76672bSHarsh Jain 
21186f76672bSHarsh Jain 		}
21196f76672bSHarsh Jain 		goto unmap;
21206f76672bSHarsh Jain 	}
21216f76672bSHarsh Jain 	memcpy(reqctx->partial_hash, input + sizeof(struct cpl_fw6_pld),
21226f76672bSHarsh Jain 	       updated_digestsize);
21236f76672bSHarsh Jain 
21246f76672bSHarsh Jain 	err = chcr_ahash_continue(req);
21256f76672bSHarsh Jain 	if (err)
21266f76672bSHarsh Jain 		goto unmap;
21276f76672bSHarsh Jain 	return;
21286f76672bSHarsh Jain unmap:
21296f76672bSHarsh Jain 	if (hctx_wr->is_sg_map)
21306f76672bSHarsh Jain 		chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
21316f76672bSHarsh Jain 
21326f76672bSHarsh Jain 
21336f76672bSHarsh Jain out:
2134fef4912bSHarsh Jain 	chcr_dec_wrcount(dev);
213513c20754SHerbert Xu 	ahash_request_complete(req, err);
21366f76672bSHarsh Jain }
21376f76672bSHarsh Jain 
21386f76672bSHarsh Jain /*
21396f76672bSHarsh Jain  *	chcr_handle_resp - Dispatch a completion to the AEAD, cipher or hash handler
21406f76672bSHarsh Jain  *	@req: crypto request
21416f76672bSHarsh Jain  */
21426f76672bSHarsh Jain int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
21436f76672bSHarsh Jain 			 int err)
21446f76672bSHarsh Jain {
21456f76672bSHarsh Jain 	struct crypto_tfm *tfm = req->tfm;
21466f76672bSHarsh Jain 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
21476f76672bSHarsh Jain 	struct adapter *adap = padap(ctx->dev);
21486f76672bSHarsh Jain 
21496f76672bSHarsh Jain 	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
21506f76672bSHarsh Jain 	case CRYPTO_ALG_TYPE_AEAD:
2151f31ba0f9SHarsh Jain 		err = chcr_handle_aead_resp(aead_request_cast(req), input, err);
21526f76672bSHarsh Jain 		break;
21536f76672bSHarsh Jain 
21547cea6d3eSArd Biesheuvel 	case CRYPTO_ALG_TYPE_SKCIPHER:
21557cea6d3eSArd Biesheuvel 		 chcr_handle_cipher_resp(skcipher_request_cast(req),
21566f76672bSHarsh Jain 					       input, err);
21576f76672bSHarsh Jain 		break;
21586f76672bSHarsh Jain 	case CRYPTO_ALG_TYPE_AHASH:
21596f76672bSHarsh Jain 		chcr_handle_ahash_resp(ahash_request_cast(req), input, err);
21606f76672bSHarsh Jain 		}
21616f76672bSHarsh Jain 	atomic_inc(&adap->chcr_stats.complete);
21626f76672bSHarsh Jain 	return err;
21636f76672bSHarsh Jain }
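/*
 * export/import save and restore the buffered data, running length and
 * partial hash so that an in-progress hash can be suspended and resumed.
 */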
2164324429d7SHariprasad Shenai static int chcr_ahash_export(struct ahash_request *areq, void *out)
2165324429d7SHariprasad Shenai {
2166324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2167324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *state = out;
2168324429d7SHariprasad Shenai 
216944fce12aSHarsh Jain 	state->reqlen = req_ctx->reqlen;
2170324429d7SHariprasad Shenai 	state->data_len = req_ctx->data_len;
217144fce12aSHarsh Jain 	memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
2172324429d7SHariprasad Shenai 	memcpy(state->partial_hash, req_ctx->partial_hash,
2173324429d7SHariprasad Shenai 	       CHCR_HASH_MAX_DIGEST_SIZE);
21745110e655SHarsh Jain 	chcr_init_hctx_per_wr(state);
2175324429d7SHariprasad Shenai 	return 0;
2176324429d7SHariprasad Shenai }
2177324429d7SHariprasad Shenai 
2178324429d7SHariprasad Shenai static int chcr_ahash_import(struct ahash_request *areq, const void *in)
2179324429d7SHariprasad Shenai {
2180324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2181324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;
2182324429d7SHariprasad Shenai 
218344fce12aSHarsh Jain 	req_ctx->reqlen = state->reqlen;
2184324429d7SHariprasad Shenai 	req_ctx->data_len = state->data_len;
218544fce12aSHarsh Jain 	req_ctx->reqbfr = req_ctx->bfr1;
218644fce12aSHarsh Jain 	req_ctx->skbfr = req_ctx->bfr2;
218744fce12aSHarsh Jain 	memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
2188324429d7SHariprasad Shenai 	memcpy(req_ctx->partial_hash, state->partial_hash,
2189324429d7SHariprasad Shenai 	       CHCR_HASH_MAX_DIGEST_SIZE);
21905110e655SHarsh Jain 	chcr_init_hctx_per_wr(req_ctx);
2191324429d7SHariprasad Shenai 	return 0;
2192324429d7SHariprasad Shenai }
2193324429d7SHariprasad Shenai 
2194324429d7SHariprasad Shenai static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2195324429d7SHariprasad Shenai 			     unsigned int keylen)
2196324429d7SHariprasad Shenai {
21972f47d580SHarsh Jain 	struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(tfm));
2198324429d7SHariprasad Shenai 	unsigned int digestsize = crypto_ahash_digestsize(tfm);
2199324429d7SHariprasad Shenai 	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2200324429d7SHariprasad Shenai 	unsigned int i, err = 0, updated_digestsize;
2201324429d7SHariprasad Shenai 
2202e7922729SHarsh Jain 	SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);
2203e7922729SHarsh Jain 
2204e7922729SHarsh Jain 	/* Use the key to calculate the ipad and opad. ipad will be sent with the
2205324429d7SHariprasad Shenai 	 * first request's data. opad will be sent with the final hash result.
2206324429d7SHariprasad Shenai 	 * ipad is stored in hmacctx->ipad and opad in hmacctx->opad.
2207324429d7SHariprasad Shenai 	 */
2208e7922729SHarsh Jain 	shash->tfm = hmacctx->base_hash;
2209324429d7SHariprasad Shenai 	if (keylen > bs) {
2210e7922729SHarsh Jain 		err = crypto_shash_digest(shash, key, keylen,
2211324429d7SHariprasad Shenai 					  hmacctx->ipad);
2212324429d7SHariprasad Shenai 		if (err)
2213324429d7SHariprasad Shenai 			goto out;
2214324429d7SHariprasad Shenai 		keylen = digestsize;
2215324429d7SHariprasad Shenai 	} else {
2216324429d7SHariprasad Shenai 		memcpy(hmacctx->ipad, key, keylen);
2217324429d7SHariprasad Shenai 	}
2218324429d7SHariprasad Shenai 	memset(hmacctx->ipad + keylen, 0, bs - keylen);
2219*f9fc1ec2SArnd Bergmann 	unsafe_memcpy(hmacctx->opad, hmacctx->ipad, bs,
2220*f9fc1ec2SArnd Bergmann 		      "fortified memcpy causes -Wrestrict warning");
2221324429d7SHariprasad Shenai 
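	/* XOR in the usual HMAC padding patterns (ipad 0x36, opad 0x5c,
	 * per RFC 2104), one int-sized word at a time.
	 */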
2222324429d7SHariprasad Shenai 	for (i = 0; i < bs / sizeof(int); i++) {
2223324429d7SHariprasad Shenai 		*((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
2224324429d7SHariprasad Shenai 		*((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
2225324429d7SHariprasad Shenai 	}
2226324429d7SHariprasad Shenai 
2227324429d7SHariprasad Shenai 	updated_digestsize = digestsize;
2228324429d7SHariprasad Shenai 	if (digestsize == SHA224_DIGEST_SIZE)
2229324429d7SHariprasad Shenai 		updated_digestsize = SHA256_DIGEST_SIZE;
2230324429d7SHariprasad Shenai 	else if (digestsize == SHA384_DIGEST_SIZE)
2231324429d7SHariprasad Shenai 		updated_digestsize = SHA512_DIGEST_SIZE;
2232e7922729SHarsh Jain 	err = chcr_compute_partial_hash(shash, hmacctx->ipad,
2233324429d7SHariprasad Shenai 					hmacctx->ipad, digestsize);
2234324429d7SHariprasad Shenai 	if (err)
2235324429d7SHariprasad Shenai 		goto out;
2236324429d7SHariprasad Shenai 	chcr_change_order(hmacctx->ipad, updated_digestsize);
2237324429d7SHariprasad Shenai 
2238e7922729SHarsh Jain 	err = chcr_compute_partial_hash(shash, hmacctx->opad,
2239324429d7SHariprasad Shenai 					hmacctx->opad, digestsize);
2240324429d7SHariprasad Shenai 	if (err)
2241324429d7SHariprasad Shenai 		goto out;
2242324429d7SHariprasad Shenai 	chcr_change_order(hmacctx->opad, updated_digestsize);
2243324429d7SHariprasad Shenai out:
2244324429d7SHariprasad Shenai 	return err;
2245324429d7SHariprasad Shenai }
2246324429d7SHariprasad Shenai 
22477cea6d3eSArd Biesheuvel static int chcr_aes_xts_setkey(struct crypto_skcipher *cipher, const u8 *key,
2248324429d7SHariprasad Shenai 			       unsigned int key_len)
2249324429d7SHariprasad Shenai {
22502f47d580SHarsh Jain 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
2251324429d7SHariprasad Shenai 	unsigned short context_size = 0;
2252b8fd1f41SHarsh Jain 	int err;
2253324429d7SHariprasad Shenai 
2254b8fd1f41SHarsh Jain 	err = chcr_cipher_fallback_setkey(cipher, key, key_len);
2255b8fd1f41SHarsh Jain 	if (err)
2256b8fd1f41SHarsh Jain 		goto badkey_err;
2257cc1b156dSHarsh Jain 
2258324429d7SHariprasad Shenai 	memcpy(ablkctx->key, key, key_len);
2259324429d7SHariprasad Shenai 	ablkctx->enckey_len = key_len;
2260cc1b156dSHarsh Jain 	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
2261324429d7SHariprasad Shenai 	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
2262ee91ac1bSDevulapally Shiva Krishna 	/* Both XTS keys must be aligned to a 16 byte boundary
2263ee91ac1bSDevulapally Shiva Krishna 	 * by padding with zeros, so each 24 byte key is padded with 8 zero bytes.
2264ee91ac1bSDevulapally Shiva Krishna 	 */
2265ee91ac1bSDevulapally Shiva Krishna 	if (key_len == 48) {
2266ee91ac1bSDevulapally Shiva Krishna 		context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len
2267ee91ac1bSDevulapally Shiva Krishna 				+ 16) >> 4;
2268ee91ac1bSDevulapally Shiva Krishna 		memmove(ablkctx->key + 32, ablkctx->key + 24, 24);
2269ee91ac1bSDevulapally Shiva Krishna 		memset(ablkctx->key + 24, 0, 8);
2270ee91ac1bSDevulapally Shiva Krishna 		memset(ablkctx->key + 56, 0, 8);
2271ee91ac1bSDevulapally Shiva Krishna 		ablkctx->enckey_len = 64;
2272ee91ac1bSDevulapally Shiva Krishna 		ablkctx->key_ctx_hdr =
2273ee91ac1bSDevulapally Shiva Krishna 			FILL_KEY_CTX_HDR(CHCR_KEYCTX_CIPHER_KEY_SIZE_192,
2274ee91ac1bSDevulapally Shiva Krishna 					 CHCR_KEYCTX_NO_KEY, 1,
2275ee91ac1bSDevulapally Shiva Krishna 					 0, context_size);
2276ee91ac1bSDevulapally Shiva Krishna 	} else {
2277324429d7SHariprasad Shenai 		ablkctx->key_ctx_hdr =
2278324429d7SHariprasad Shenai 		FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
2279324429d7SHariprasad Shenai 				 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
2280324429d7SHariprasad Shenai 				 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
2281324429d7SHariprasad Shenai 				 CHCR_KEYCTX_NO_KEY, 1,
2282324429d7SHariprasad Shenai 				 0, context_size);
2283ee91ac1bSDevulapally Shiva Krishna 	}
2284324429d7SHariprasad Shenai 	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
2285cc1b156dSHarsh Jain 	return 0;
2286b8fd1f41SHarsh Jain badkey_err:
2287b8fd1f41SHarsh Jain 	ablkctx->enckey_len = 0;
2288b8fd1f41SHarsh Jain 
2289b8fd1f41SHarsh Jain 	return err;
2290324429d7SHariprasad Shenai }
2291324429d7SHariprasad Shenai 
2292324429d7SHariprasad Shenai static int chcr_sha_init(struct ahash_request *areq)
2293324429d7SHariprasad Shenai {
2294324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2295324429d7SHariprasad Shenai 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2296324429d7SHariprasad Shenai 	int digestsize =  crypto_ahash_digestsize(tfm);
2297324429d7SHariprasad Shenai 
2298324429d7SHariprasad Shenai 	req_ctx->data_len = 0;
229944fce12aSHarsh Jain 	req_ctx->reqlen = 0;
230044fce12aSHarsh Jain 	req_ctx->reqbfr = req_ctx->bfr1;
230144fce12aSHarsh Jain 	req_ctx->skbfr = req_ctx->bfr2;
2302324429d7SHariprasad Shenai 	copy_hash_init_values(req_ctx->partial_hash, digestsize);
23035110e655SHarsh Jain 
2304324429d7SHariprasad Shenai 	return 0;
2305324429d7SHariprasad Shenai }
2306324429d7SHariprasad Shenai 
2307324429d7SHariprasad Shenai static int chcr_sha_cra_init(struct crypto_tfm *tfm)
2308324429d7SHariprasad Shenai {
2309324429d7SHariprasad Shenai 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
2310324429d7SHariprasad Shenai 				 sizeof(struct chcr_ahash_req_ctx));
2311324429d7SHariprasad Shenai 	return chcr_device_init(crypto_tfm_ctx(tfm));
2312324429d7SHariprasad Shenai }
2313324429d7SHariprasad Shenai 
2314324429d7SHariprasad Shenai static int chcr_hmac_init(struct ahash_request *areq)
2315324429d7SHariprasad Shenai {
2316324429d7SHariprasad Shenai 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2317324429d7SHariprasad Shenai 	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
23182f47d580SHarsh Jain 	struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(rtfm));
2319324429d7SHariprasad Shenai 	unsigned int digestsize = crypto_ahash_digestsize(rtfm);
2320324429d7SHariprasad Shenai 	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
2321324429d7SHariprasad Shenai 
2322324429d7SHariprasad Shenai 	chcr_sha_init(areq);
2323324429d7SHariprasad Shenai 	req_ctx->data_len = bs;
2324324429d7SHariprasad Shenai 	if (is_hmac(crypto_ahash_tfm(rtfm))) {
2325324429d7SHariprasad Shenai 		if (digestsize == SHA224_DIGEST_SIZE)
2326324429d7SHariprasad Shenai 			memcpy(req_ctx->partial_hash, hmacctx->ipad,
2327324429d7SHariprasad Shenai 			       SHA256_DIGEST_SIZE);
2328324429d7SHariprasad Shenai 		else if (digestsize == SHA384_DIGEST_SIZE)
2329324429d7SHariprasad Shenai 			memcpy(req_ctx->partial_hash, hmacctx->ipad,
2330324429d7SHariprasad Shenai 			       SHA512_DIGEST_SIZE);
2331324429d7SHariprasad Shenai 		else
2332324429d7SHariprasad Shenai 			memcpy(req_ctx->partial_hash, hmacctx->ipad,
2333324429d7SHariprasad Shenai 			       digestsize);
2334324429d7SHariprasad Shenai 	}
2335324429d7SHariprasad Shenai 	return 0;
2336324429d7SHariprasad Shenai }
2337324429d7SHariprasad Shenai 
2338324429d7SHariprasad Shenai static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
2339324429d7SHariprasad Shenai {
2340324429d7SHariprasad Shenai 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
2341324429d7SHariprasad Shenai 	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
2342324429d7SHariprasad Shenai 	unsigned int digestsize =
2343324429d7SHariprasad Shenai 		crypto_ahash_digestsize(__crypto_ahash_cast(tfm));
2344324429d7SHariprasad Shenai 
2345324429d7SHariprasad Shenai 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
2346324429d7SHariprasad Shenai 				 sizeof(struct chcr_ahash_req_ctx));
2347e7922729SHarsh Jain 	hmacctx->base_hash = chcr_alloc_shash(digestsize);
2348e7922729SHarsh Jain 	if (IS_ERR(hmacctx->base_hash))
2349e7922729SHarsh Jain 		return PTR_ERR(hmacctx->base_hash);
2350324429d7SHariprasad Shenai 	return chcr_device_init(crypto_tfm_ctx(tfm));
2351324429d7SHariprasad Shenai }
2352324429d7SHariprasad Shenai 
2353324429d7SHariprasad Shenai static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
2354324429d7SHariprasad Shenai {
2355324429d7SHariprasad Shenai 	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
2356324429d7SHariprasad Shenai 	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
2357324429d7SHariprasad Shenai 
2358e7922729SHarsh Jain 	if (hmacctx->base_hash) {
2359e7922729SHarsh Jain 		chcr_free_shash(hmacctx->base_hash);
2360e7922729SHarsh Jain 		hmacctx->base_hash = NULL;
2361324429d7SHariprasad Shenai 	}
2362324429d7SHariprasad Shenai }
2363324429d7SHariprasad Shenai 
23644262c98aSHarsh Jain inline void chcr_aead_common_exit(struct aead_request *req)
23654262c98aSHarsh Jain {
2366e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
23674262c98aSHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
23684262c98aSHarsh Jain 	struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
23694262c98aSHarsh Jain 
23704262c98aSHarsh Jain 	chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
23714262c98aSHarsh Jain }
23724262c98aSHarsh Jain 
23734262c98aSHarsh Jain static int chcr_aead_common_init(struct aead_request *req)
23742debd332SHarsh Jain {
23752f47d580SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
23762f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2377e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
23782f47d580SHarsh Jain 	unsigned int authsize = crypto_aead_authsize(tfm);
23794262c98aSHarsh Jain 	int error = -EINVAL;
23802debd332SHarsh Jain 
23812f47d580SHarsh Jain 	/* validate key size */
23822f47d580SHarsh Jain 	if (aeadctx->enckey_len == 0)
23832f47d580SHarsh Jain 		goto err;
23844262c98aSHarsh Jain 	if (reqctx->op && req->cryptlen < authsize)
23852f47d580SHarsh Jain 		goto err;
23864262c98aSHarsh Jain 	if (reqctx->b0_len)
23874262c98aSHarsh Jain 		reqctx->scratch_pad = reqctx->iv + IV;
23884262c98aSHarsh Jain 	else
23894262c98aSHarsh Jain 		reqctx->scratch_pad = NULL;
23904262c98aSHarsh Jain 
23912f47d580SHarsh Jain 	error = chcr_aead_dma_map(&ULD_CTX(a_ctx(tfm))->lldi.pdev->dev, req,
23924262c98aSHarsh Jain 				  reqctx->op);
23932f47d580SHarsh Jain 	if (error) {
23942f47d580SHarsh Jain 		error = -ENOMEM;
23952f47d580SHarsh Jain 		goto err;
23962debd332SHarsh Jain 	}
23971f479e4cSHarsh Jain 
23982f47d580SHarsh Jain 	return 0;
23992f47d580SHarsh Jain err:
24002f47d580SHarsh Jain 	return error;
24012f47d580SHarsh Jain }
24022f47d580SHarsh Jain 
24032f47d580SHarsh Jain static int chcr_aead_need_fallback(struct aead_request *req, int dst_nents,
24040e93708dSHarsh Jain 				   int aadmax, int wrlen,
24050e93708dSHarsh Jain 				   unsigned short op_type)
24060e93708dSHarsh Jain {
24070e93708dSHarsh Jain 	unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));
24082debd332SHarsh Jain 
24090e93708dSHarsh Jain 	if (((req->cryptlen - (op_type ? authsize : 0)) == 0) ||
24102f47d580SHarsh Jain 	    dst_nents > MAX_DSGL_ENT ||
24110e93708dSHarsh Jain 	    (req->assoclen > aadmax) ||
24122f47d580SHarsh Jain 	    (wrlen > SGE_MAX_WR_LEN))
24130e93708dSHarsh Jain 		return 1;
24140e93708dSHarsh Jain 	return 0;
24150e93708dSHarsh Jain }
24160e93708dSHarsh Jain 
24170e93708dSHarsh Jain static int chcr_aead_fallback(struct aead_request *req, unsigned short op_type)
24180e93708dSHarsh Jain {
24190e93708dSHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
24202f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2421e055bffaSHerbert Xu 	struct aead_request *subreq = aead_request_ctx_dma(req);
24220e93708dSHarsh Jain 
24230e93708dSHarsh Jain 	aead_request_set_tfm(subreq, aeadctx->sw_cipher);
24240e93708dSHarsh Jain 	aead_request_set_callback(subreq, req->base.flags,
24250e93708dSHarsh Jain 				  req->base.complete, req->base.data);
24260e93708dSHarsh Jain 	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
24270e93708dSHarsh Jain 				 req->iv);
24280e93708dSHarsh Jain 	aead_request_set_ad(subreq, req->assoclen);
24290e93708dSHarsh Jain 	return op_type ? crypto_aead_decrypt(subreq) :
24300e93708dSHarsh Jain 		crypto_aead_encrypt(subreq);
24310e93708dSHarsh Jain }
24322debd332SHarsh Jain 
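/*
 * Build the work request for AUTHENC (cipher + hash) requests: key context,
 * SEC_CPL, IV and the source/destination scatter-gather lists, falling back
 * to the software implementation when the request exceeds hardware limits.
 */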
24332debd332SHarsh Jain static struct sk_buff *create_authenc_wr(struct aead_request *req,
24342debd332SHarsh Jain 					 unsigned short qid,
24354262c98aSHarsh Jain 					 int size)
24362debd332SHarsh Jain {
24372debd332SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2438567be3a5SAyush Sawal 	struct chcr_context *ctx = a_ctx(tfm);
243916a9874fSAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
2440567be3a5SAyush Sawal 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
24412debd332SHarsh Jain 	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
2442e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
24432debd332SHarsh Jain 	struct sk_buff *skb = NULL;
24442debd332SHarsh Jain 	struct chcr_wr *chcr_req;
24452debd332SHarsh Jain 	struct cpl_rx_phys_dsgl *phys_cpl;
24462f47d580SHarsh Jain 	struct ulptx_sgl *ulptx;
24472f47d580SHarsh Jain 	unsigned int transhdr_len;
24483d64bd67SHarsh Jain 	unsigned int dst_size = 0, temp, subtype = get_aead_subtype(tfm);
24491f479e4cSHarsh Jain 	unsigned int   kctx_len = 0, dnents, snents;
24502debd332SHarsh Jain 	unsigned int  authsize = crypto_aead_authsize(tfm);
24512f47d580SHarsh Jain 	int error = -EINVAL;
24521f479e4cSHarsh Jain 	u8 *ivptr;
24532debd332SHarsh Jain 	int null = 0;
24542debd332SHarsh Jain 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
24552debd332SHarsh Jain 		GFP_ATOMIC;
2456567be3a5SAyush Sawal 	struct adapter *adap = padap(ctx->dev);
2457567be3a5SAyush Sawal 	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;
24582debd332SHarsh Jain 
245916a9874fSAyush Sawal 	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
24602f47d580SHarsh Jain 	if (req->cryptlen == 0)
24612f47d580SHarsh Jain 		return NULL;
24622debd332SHarsh Jain 
24634262c98aSHarsh Jain 	reqctx->b0_len = 0;
24644262c98aSHarsh Jain 	error = chcr_aead_common_init(req);
24654262c98aSHarsh Jain 	if (error)
24664262c98aSHarsh Jain 		return ERR_PTR(error);
24674262c98aSHarsh Jain 
24683d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL ||
24693d64bd67SHarsh Jain 		subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
24702debd332SHarsh Jain 		null = 1;
24712debd332SHarsh Jain 	}
24721f479e4cSHarsh Jain 	dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
24731f479e4cSHarsh Jain 		(reqctx->op ? -authsize : authsize), CHCR_DST_SG_SIZE, 0);
24742f47d580SHarsh Jain 	dnents += MIN_AUTH_SG; // For IV
24751f479e4cSHarsh Jain 	snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
24761f479e4cSHarsh Jain 			       CHCR_SRC_SG_SIZE, 0);
24772f47d580SHarsh Jain 	dst_size = get_space_for_phys_dsgl(dnents);
2478ff462ddfSAl Viro 	kctx_len = (KEY_CONTEXT_CTX_LEN_G(ntohl(aeadctx->key_ctx_hdr)) << 4)
24792debd332SHarsh Jain 		- sizeof(chcr_req->key_ctx);
24802debd332SHarsh Jain 	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
24811f479e4cSHarsh Jain 	reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <
24822f47d580SHarsh Jain 			SGE_MAX_WR_LEN;
24831f479e4cSHarsh Jain 	temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16)
24841f479e4cSHarsh Jain 			: (sgl_len(snents) * 8);
24852f47d580SHarsh Jain 	transhdr_len += temp;
2486125d01caSHarsh Jain 	transhdr_len = roundup(transhdr_len, 16);
24872f47d580SHarsh Jain 
24882f47d580SHarsh Jain 	if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE,
24894262c98aSHarsh Jain 				    transhdr_len, reqctx->op)) {
2490ee0863baSHarsh Jain 		atomic_inc(&adap->chcr_stats.fallback);
24914262c98aSHarsh Jain 		chcr_aead_common_exit(req);
24924262c98aSHarsh Jain 		return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
24930e93708dSHarsh Jain 	}
24941f479e4cSHarsh Jain 	skb = alloc_skb(transhdr_len, flags);
24955fe8c711SHarsh Jain 	if (!skb) {
24965fe8c711SHarsh Jain 		error = -ENOMEM;
24972debd332SHarsh Jain 		goto err;
24985fe8c711SHarsh Jain 	}
24992debd332SHarsh Jain 
2500de77b966Syuan linyu 	chcr_req = __skb_put_zero(skb, transhdr_len);
25012debd332SHarsh Jain 
25024262c98aSHarsh Jain 	temp  = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize;
25032debd332SHarsh Jain 
25042debd332SHarsh Jain 	/*
25052debd332SHarsh Jain 	 * Input order is AAD, IV and Payload, where the IV is included as
25062debd332SHarsh Jain 	 * part of the authdata. All other fields are filled according
25072debd332SHarsh Jain 	 * to the hardware spec.
25082debd332SHarsh Jain 	 */
25092debd332SHarsh Jain 	chcr_req->sec_cpl.op_ivinsrtofst =
2510567be3a5SAyush Sawal 				FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 1);
25111f479e4cSHarsh Jain 	chcr_req->sec_cpl.pldlen = htonl(req->assoclen + IV + req->cryptlen);
25122debd332SHarsh Jain 	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
25131f479e4cSHarsh Jain 					null ? 0 : 1 + IV,
25141f479e4cSHarsh Jain 					null ? 0 : IV + req->assoclen,
25151f479e4cSHarsh Jain 					req->assoclen + IV + 1,
25162f47d580SHarsh Jain 					(temp & 0x1F0) >> 4);
25172debd332SHarsh Jain 	chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
25182f47d580SHarsh Jain 					temp & 0xF,
25191f479e4cSHarsh Jain 					null ? 0 : req->assoclen + IV + 1,
25202f47d580SHarsh Jain 					temp, temp);
25213d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL ||
25223d64bd67SHarsh Jain 	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA)
25233d64bd67SHarsh Jain 		temp = CHCR_SCMD_CIPHER_MODE_AES_CTR;
25243d64bd67SHarsh Jain 	else
25253d64bd67SHarsh Jain 		temp = CHCR_SCMD_CIPHER_MODE_AES_CBC;
25264262c98aSHarsh Jain 	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op,
25274262c98aSHarsh Jain 					(reqctx->op == CHCR_ENCRYPT_OP) ? 1 : 0,
25283d64bd67SHarsh Jain 					temp,
25292debd332SHarsh Jain 					actx->auth_mode, aeadctx->hmac_ctrl,
25302f47d580SHarsh Jain 					IV >> 1);
25312debd332SHarsh Jain 	chcr_req->sec_cpl.ivgen_hdrlen =  FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
25322f47d580SHarsh Jain 					 0, 0, dst_size);
25332debd332SHarsh Jain 
25342debd332SHarsh Jain 	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
25354262c98aSHarsh Jain 	if (reqctx->op == CHCR_ENCRYPT_OP ||
25363d64bd67SHarsh Jain 		subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
25373d64bd67SHarsh Jain 		subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL)
25382debd332SHarsh Jain 		memcpy(chcr_req->key_ctx.key, aeadctx->key,
25392debd332SHarsh Jain 		       aeadctx->enckey_len);
25402debd332SHarsh Jain 	else
25412debd332SHarsh Jain 		memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
25422debd332SHarsh Jain 		       aeadctx->enckey_len);
25432debd332SHarsh Jain 
2544125d01caSHarsh Jain 	memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
2545125d01caSHarsh Jain 	       actx->h_iopad, kctx_len - roundup(aeadctx->enckey_len, 16));
25461f479e4cSHarsh Jain 	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
25471f479e4cSHarsh Jain 	ivptr = (u8 *)(phys_cpl + 1) + dst_size;
25481f479e4cSHarsh Jain 	ulptx = (struct ulptx_sgl *)(ivptr + IV);
25493d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
25503d64bd67SHarsh Jain 	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
25511f479e4cSHarsh Jain 		memcpy(ivptr, aeadctx->nonce, CTR_RFC3686_NONCE_SIZE);
25521f479e4cSHarsh Jain 		memcpy(ivptr + CTR_RFC3686_NONCE_SIZE, req->iv,
25533d64bd67SHarsh Jain 				CTR_RFC3686_IV_SIZE);
25541f479e4cSHarsh Jain 		*(__be32 *)(ivptr + CTR_RFC3686_NONCE_SIZE +
25553d64bd67SHarsh Jain 			CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
25563d64bd67SHarsh Jain 	} else {
25571f479e4cSHarsh Jain 		memcpy(ivptr, req->iv, IV);
25583d64bd67SHarsh Jain 	}
25591f479e4cSHarsh Jain 	chcr_add_aead_dst_ent(req, phys_cpl, qid);
25601f479e4cSHarsh Jain 	chcr_add_aead_src_ent(req, ulptx);
2561ee0863baSHarsh Jain 	atomic_inc(&adap->chcr_stats.cipher_rqst);
25621f479e4cSHarsh Jain 	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + IV +
25631f479e4cSHarsh Jain 		kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
25642f47d580SHarsh Jain 	create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
25652f47d580SHarsh Jain 		   transhdr_len, temp, 0);
25662debd332SHarsh Jain 	reqctx->skb = skb;
25672debd332SHarsh Jain 
25682debd332SHarsh Jain 	return skb;
25692debd332SHarsh Jain err:
25704262c98aSHarsh Jain 	chcr_aead_common_exit(req);
25712f47d580SHarsh Jain 
25725fe8c711SHarsh Jain 	return ERR_PTR(error);
25732debd332SHarsh Jain }
25742debd332SHarsh Jain 
25756dad4e8aSAtul Gupta int chcr_aead_dma_map(struct device *dev,
25762f47d580SHarsh Jain 		      struct aead_request *req,
25772f47d580SHarsh Jain 		      unsigned short op_type)
25782f47d580SHarsh Jain {
25792f47d580SHarsh Jain 	int error;
2580e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
25812f47d580SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
25822f47d580SHarsh Jain 	unsigned int authsize = crypto_aead_authsize(tfm);
2583fb90a1c8SAyush Sawal 	int src_len, dst_len;
25842f47d580SHarsh Jain 
2585fb90a1c8SAyush Sawal 	/* Calculate and handle the src and dst sg lengths separately
2586fb90a1c8SAyush Sawal 	 * for in-place and out-of-place operations.
2587fb90a1c8SAyush Sawal 	 */
2588fb90a1c8SAyush Sawal 	if (req->src == req->dst) {
2589fb90a1c8SAyush Sawal 		src_len = req->assoclen + req->cryptlen + (op_type ?
2590d91a3159SDevulapally Shiva Krishna 							0 : authsize);
2591fb90a1c8SAyush Sawal 		dst_len = src_len;
2592fb90a1c8SAyush Sawal 	} else {
2593fb90a1c8SAyush Sawal 		src_len = req->assoclen + req->cryptlen;
2594fb90a1c8SAyush Sawal 		dst_len = req->assoclen + req->cryptlen + (op_type ?
2595fb90a1c8SAyush Sawal 							-authsize : authsize);
2596fb90a1c8SAyush Sawal 	}
2597fb90a1c8SAyush Sawal 
2598fb90a1c8SAyush Sawal 	if (!req->cryptlen || !src_len || !dst_len)
25992f47d580SHarsh Jain 		return 0;
26004262c98aSHarsh Jain 	reqctx->iv_dma = dma_map_single(dev, reqctx->iv, (IV + reqctx->b0_len),
26012f47d580SHarsh Jain 					DMA_BIDIRECTIONAL);
26022f47d580SHarsh Jain 	if (dma_mapping_error(dev, reqctx->iv_dma))
26032f47d580SHarsh Jain 		return -ENOMEM;
26044262c98aSHarsh Jain 	if (reqctx->b0_len)
26054262c98aSHarsh Jain 		reqctx->b0_dma = reqctx->iv_dma + IV;
26064262c98aSHarsh Jain 	else
26074262c98aSHarsh Jain 		reqctx->b0_dma = 0;
26082f47d580SHarsh Jain 	if (req->src == req->dst) {
26099195189eSAyush Sawal 		error = dma_map_sg(dev, req->src,
2610fb90a1c8SAyush Sawal 				sg_nents_for_len(req->src, src_len),
26112f47d580SHarsh Jain 					DMA_BIDIRECTIONAL);
26122f47d580SHarsh Jain 		if (!error)
26132f47d580SHarsh Jain 			goto err;
26142f47d580SHarsh Jain 	} else {
2615fb90a1c8SAyush Sawal 		error = dma_map_sg(dev, req->src,
2616fb90a1c8SAyush Sawal 				   sg_nents_for_len(req->src, src_len),
26172f47d580SHarsh Jain 				   DMA_TO_DEVICE);
26182f47d580SHarsh Jain 		if (!error)
26192f47d580SHarsh Jain 			goto err;
2620fb90a1c8SAyush Sawal 		error = dma_map_sg(dev, req->dst,
2621fb90a1c8SAyush Sawal 				   sg_nents_for_len(req->dst, dst_len),
26222f47d580SHarsh Jain 				   DMA_FROM_DEVICE);
26232f47d580SHarsh Jain 		if (!error) {
2624fb90a1c8SAyush Sawal 			dma_unmap_sg(dev, req->src,
2625fb90a1c8SAyush Sawal 				     sg_nents_for_len(req->src, src_len),
26262f47d580SHarsh Jain 				     DMA_TO_DEVICE);
26272f47d580SHarsh Jain 			goto err;
26282f47d580SHarsh Jain 		}
26292f47d580SHarsh Jain 	}
26302f47d580SHarsh Jain 
26312f47d580SHarsh Jain 	return 0;
26322f47d580SHarsh Jain err:
26332f47d580SHarsh Jain 	dma_unmap_single(dev, reqctx->iv_dma, IV, DMA_BIDIRECTIONAL);
26342f47d580SHarsh Jain 	return -ENOMEM;
26352f47d580SHarsh Jain }
26362f47d580SHarsh Jain 
26376dad4e8aSAtul Gupta void chcr_aead_dma_unmap(struct device *dev,
26382f47d580SHarsh Jain 			 struct aead_request *req,
26392f47d580SHarsh Jain 			 unsigned short op_type)
26402f47d580SHarsh Jain {
2641e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
26422f47d580SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
26432f47d580SHarsh Jain 	unsigned int authsize = crypto_aead_authsize(tfm);
2644fb90a1c8SAyush Sawal 	int src_len, dst_len;
26452f47d580SHarsh Jain 
2646fb90a1c8SAyush Sawal 	/* Calculate and handle the src and dst sg lengths separately
2647fb90a1c8SAyush Sawal 	 * for in-place and out-of-place operations.
2648fb90a1c8SAyush Sawal 	 */
2649fb90a1c8SAyush Sawal 	if (req->src == req->dst) {
2650fb90a1c8SAyush Sawal 		src_len = req->assoclen + req->cryptlen + (op_type ?
2651d91a3159SDevulapally Shiva Krishna 							0 : authsize);
2652fb90a1c8SAyush Sawal 		dst_len = src_len;
2653fb90a1c8SAyush Sawal 	} else {
2654fb90a1c8SAyush Sawal 		src_len = req->assoclen + req->cryptlen;
2655fb90a1c8SAyush Sawal 		dst_len = req->assoclen + req->cryptlen + (op_type ?
2656fb90a1c8SAyush Sawal 						-authsize : authsize);
2657fb90a1c8SAyush Sawal 	}
2658fb90a1c8SAyush Sawal 
2659fb90a1c8SAyush Sawal 	if (!req->cryptlen || !src_len || !dst_len)
26602f47d580SHarsh Jain 		return;
26612f47d580SHarsh Jain 
26624262c98aSHarsh Jain 	dma_unmap_single(dev, reqctx->iv_dma, (IV + reqctx->b0_len),
26632f47d580SHarsh Jain 					DMA_BIDIRECTIONAL);
26642f47d580SHarsh Jain 	if (req->src == req->dst) {
2665d91a3159SDevulapally Shiva Krishna 		dma_unmap_sg(dev, req->src,
2666fb90a1c8SAyush Sawal 			     sg_nents_for_len(req->src, src_len),
26672f47d580SHarsh Jain 			     DMA_BIDIRECTIONAL);
26682f47d580SHarsh Jain 	} else {
2669fb90a1c8SAyush Sawal 		dma_unmap_sg(dev, req->src,
2670fb90a1c8SAyush Sawal 			     sg_nents_for_len(req->src, src_len),
26712f47d580SHarsh Jain 			     DMA_TO_DEVICE);
2672fb90a1c8SAyush Sawal 		dma_unmap_sg(dev, req->dst,
2673fb90a1c8SAyush Sawal 			     sg_nents_for_len(req->dst, dst_len),
26742f47d580SHarsh Jain 			     DMA_FROM_DEVICE);
26752f47d580SHarsh Jain 	}
26762f47d580SHarsh Jain }
26772f47d580SHarsh Jain 
26786dad4e8aSAtul Gupta void chcr_add_aead_src_ent(struct aead_request *req,
26791f479e4cSHarsh Jain 			   struct ulptx_sgl *ulptx)
26802f47d580SHarsh Jain {
26812f47d580SHarsh Jain 	struct ulptx_walk ulp_walk;
2682e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
26832f47d580SHarsh Jain 
26842f47d580SHarsh Jain 	if (reqctx->imm) {
26852f47d580SHarsh Jain 		u8 *buf = (u8 *)ulptx;
26862f47d580SHarsh Jain 
26874262c98aSHarsh Jain 		if (reqctx->b0_len) {
26882f47d580SHarsh Jain 			memcpy(buf, reqctx->scratch_pad, reqctx->b0_len);
26892f47d580SHarsh Jain 			buf += reqctx->b0_len;
26902f47d580SHarsh Jain 		}
26912f47d580SHarsh Jain 		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
26921f479e4cSHarsh Jain 				   buf, req->cryptlen + req->assoclen, 0);
26932f47d580SHarsh Jain 	} else {
26942f47d580SHarsh Jain 		ulptx_walk_init(&ulp_walk, ulptx);
26954262c98aSHarsh Jain 		if (reqctx->b0_len)
26962f47d580SHarsh Jain 			ulptx_walk_add_page(&ulp_walk, reqctx->b0_len,
2697c4f6d44dSHarsh Jain 					    reqctx->b0_dma);
26981f479e4cSHarsh Jain 		ulptx_walk_add_sg(&ulp_walk, req->src, req->cryptlen +
26991f479e4cSHarsh Jain 				  req->assoclen,  0);
27002f47d580SHarsh Jain 		ulptx_walk_end(&ulp_walk);
27012f47d580SHarsh Jain 	}
27022f47d580SHarsh Jain }
27032f47d580SHarsh Jain 
27046dad4e8aSAtul Gupta void chcr_add_aead_dst_ent(struct aead_request *req,
27052f47d580SHarsh Jain 			   struct cpl_rx_phys_dsgl *phys_cpl,
27062f47d580SHarsh Jain 			   unsigned short qid)
27072f47d580SHarsh Jain {
2708e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
27092f47d580SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
27102f47d580SHarsh Jain 	struct dsgl_walk dsgl_walk;
27112f47d580SHarsh Jain 	unsigned int authsize = crypto_aead_authsize(tfm);
2712add92a81SHarsh Jain 	struct chcr_context *ctx = a_ctx(tfm);
271316a9874fSAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
27142f47d580SHarsh Jain 	u32 temp;
2715567be3a5SAyush Sawal 	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;
27162f47d580SHarsh Jain 
271716a9874fSAyush Sawal 	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
27182f47d580SHarsh Jain 	dsgl_walk_init(&dsgl_walk, phys_cpl);
2719c4f6d44dSHarsh Jain 	dsgl_walk_add_page(&dsgl_walk, IV + reqctx->b0_len, reqctx->iv_dma);
27201f479e4cSHarsh Jain 	temp = req->assoclen + req->cryptlen +
27211f479e4cSHarsh Jain 		(reqctx->op ? -authsize : authsize);
27221f479e4cSHarsh Jain 	dsgl_walk_add_sg(&dsgl_walk, req->dst, temp, 0);
2723567be3a5SAyush Sawal 	dsgl_walk_end(&dsgl_walk, qid, rx_channel_id);
27242f47d580SHarsh Jain }
27252f47d580SHarsh Jain 
27267cea6d3eSArd Biesheuvel void chcr_add_cipher_src_ent(struct skcipher_request *req,
2727335bcc4aSHarsh Jain 			     void *ulptx,
27282f47d580SHarsh Jain 			     struct  cipher_wr_param *wrparam)
27292f47d580SHarsh Jain {
27302f47d580SHarsh Jain 	struct ulptx_walk ulp_walk;
27317cea6d3eSArd Biesheuvel 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
2732335bcc4aSHarsh Jain 	u8 *buf = ulptx;
27332f47d580SHarsh Jain 
27342f47d580SHarsh Jain 	memcpy(buf, reqctx->iv, IV);
27352f47d580SHarsh Jain 	buf += IV;
2736335bcc4aSHarsh Jain 	if (reqctx->imm) {
27372f47d580SHarsh Jain 		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
27382f47d580SHarsh Jain 				   buf, wrparam->bytes, reqctx->processed);
27392f47d580SHarsh Jain 	} else {
2740335bcc4aSHarsh Jain 		ulptx_walk_init(&ulp_walk, (struct ulptx_sgl *)buf);
27412f47d580SHarsh Jain 		ulptx_walk_add_sg(&ulp_walk, reqctx->srcsg, wrparam->bytes,
27422f47d580SHarsh Jain 				  reqctx->src_ofst);
27432f47d580SHarsh Jain 		reqctx->srcsg = ulp_walk.last_sg;
27442f47d580SHarsh Jain 		reqctx->src_ofst = ulp_walk.last_sg_len;
27452f47d580SHarsh Jain 		ulptx_walk_end(&ulp_walk);
27462f47d580SHarsh Jain 	}
27472f47d580SHarsh Jain }
27482f47d580SHarsh Jain 
27497cea6d3eSArd Biesheuvel void chcr_add_cipher_dst_ent(struct skcipher_request *req,
27502f47d580SHarsh Jain 			     struct cpl_rx_phys_dsgl *phys_cpl,
27512f47d580SHarsh Jain 			     struct  cipher_wr_param *wrparam,
27522f47d580SHarsh Jain 			     unsigned short qid)
27532f47d580SHarsh Jain {
27547cea6d3eSArd Biesheuvel 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
27557cea6d3eSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(wrparam->req);
2756add92a81SHarsh Jain 	struct chcr_context *ctx = c_ctx(tfm);
275716a9874fSAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
27582f47d580SHarsh Jain 	struct dsgl_walk dsgl_walk;
2759567be3a5SAyush Sawal 	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;
27602f47d580SHarsh Jain 
276116a9874fSAyush Sawal 	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
27622f47d580SHarsh Jain 	dsgl_walk_init(&dsgl_walk, phys_cpl);
27632f47d580SHarsh Jain 	dsgl_walk_add_sg(&dsgl_walk, reqctx->dstsg, wrparam->bytes,
27642f47d580SHarsh Jain 			 reqctx->dst_ofst);
27652f47d580SHarsh Jain 	reqctx->dstsg = dsgl_walk.last_sg;
27662f47d580SHarsh Jain 	reqctx->dst_ofst = dsgl_walk.last_sg_len;
2767567be3a5SAyush Sawal 	dsgl_walk_end(&dsgl_walk, qid, rx_channel_id);
27682f47d580SHarsh Jain }
27692f47d580SHarsh Jain 
27706dad4e8aSAtul Gupta void chcr_add_hash_src_ent(struct ahash_request *req,
27712f47d580SHarsh Jain 			   struct ulptx_sgl *ulptx,
27722f47d580SHarsh Jain 			   struct hash_wr_param *param)
27732f47d580SHarsh Jain {
27742f47d580SHarsh Jain 	struct ulptx_walk ulp_walk;
27752f47d580SHarsh Jain 	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
27762f47d580SHarsh Jain 
27775110e655SHarsh Jain 	if (reqctx->hctx_wr.imm) {
27782f47d580SHarsh Jain 		u8 *buf = (u8 *)ulptx;
27792f47d580SHarsh Jain 
27802f47d580SHarsh Jain 		if (param->bfr_len) {
27812f47d580SHarsh Jain 			memcpy(buf, reqctx->reqbfr, param->bfr_len);
27822f47d580SHarsh Jain 			buf += param->bfr_len;
27832f47d580SHarsh Jain 		}
27845110e655SHarsh Jain 
27855110e655SHarsh Jain 		sg_pcopy_to_buffer(reqctx->hctx_wr.srcsg,
27865110e655SHarsh Jain 				   sg_nents(reqctx->hctx_wr.srcsg), buf,
27875110e655SHarsh Jain 				   param->sg_len, 0);
27882f47d580SHarsh Jain 	} else {
27892f47d580SHarsh Jain 		ulptx_walk_init(&ulp_walk, ulptx);
27902f47d580SHarsh Jain 		if (param->bfr_len)
27912f47d580SHarsh Jain 			ulptx_walk_add_page(&ulp_walk, param->bfr_len,
2792c4f6d44dSHarsh Jain 					    reqctx->hctx_wr.dma_addr);
27935110e655SHarsh Jain 		ulptx_walk_add_sg(&ulp_walk, reqctx->hctx_wr.srcsg,
27945110e655SHarsh Jain 				  param->sg_len, reqctx->hctx_wr.src_ofst);
27955110e655SHarsh Jain 		reqctx->hctx_wr.srcsg = ulp_walk.last_sg;
27965110e655SHarsh Jain 		reqctx->hctx_wr.src_ofst = ulp_walk.last_sg_len;
27972f47d580SHarsh Jain 		ulptx_walk_end(&ulp_walk);
27982f47d580SHarsh Jain 	}
27992f47d580SHarsh Jain }
28002f47d580SHarsh Jain 
28016dad4e8aSAtul Gupta int chcr_hash_dma_map(struct device *dev,
28022f47d580SHarsh Jain 		      struct ahash_request *req)
28032f47d580SHarsh Jain {
28042f47d580SHarsh Jain 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
28052f47d580SHarsh Jain 	int error = 0;
28062f47d580SHarsh Jain 
28072f47d580SHarsh Jain 	if (!req->nbytes)
28082f47d580SHarsh Jain 		return 0;
28092f47d580SHarsh Jain 	error = dma_map_sg(dev, req->src, sg_nents(req->src),
28102f47d580SHarsh Jain 			   DMA_TO_DEVICE);
28112f47d580SHarsh Jain 	if (!error)
28127814f552SDan Carpenter 		return -ENOMEM;
28135110e655SHarsh Jain 	req_ctx->hctx_wr.is_sg_map = 1;
28142f47d580SHarsh Jain 	return 0;
28152f47d580SHarsh Jain }
28162f47d580SHarsh Jain 
28176dad4e8aSAtul Gupta void chcr_hash_dma_unmap(struct device *dev,
28182f47d580SHarsh Jain 			 struct ahash_request *req)
28192f47d580SHarsh Jain {
28202f47d580SHarsh Jain 	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
28212f47d580SHarsh Jain 
28222f47d580SHarsh Jain 	if (!req->nbytes)
28232f47d580SHarsh Jain 		return;
28242f47d580SHarsh Jain 
28252f47d580SHarsh Jain 	dma_unmap_sg(dev, req->src, sg_nents(req->src),
28262f47d580SHarsh Jain 			   DMA_TO_DEVICE);
28275110e655SHarsh Jain 	req_ctx->hctx_wr.is_sg_map = 0;
28282f47d580SHarsh Jain 
28292f47d580SHarsh Jain }
28302f47d580SHarsh Jain 
28316dad4e8aSAtul Gupta int chcr_cipher_dma_map(struct device *dev,
28327cea6d3eSArd Biesheuvel 			struct skcipher_request *req)
28332f47d580SHarsh Jain {
28342f47d580SHarsh Jain 	int error;
28352f47d580SHarsh Jain 
28362f47d580SHarsh Jain 	if (req->src == req->dst) {
28372f47d580SHarsh Jain 		error = dma_map_sg(dev, req->src, sg_nents(req->src),
28382f47d580SHarsh Jain 				   DMA_BIDIRECTIONAL);
28392f47d580SHarsh Jain 		if (!error)
28402f47d580SHarsh Jain 			goto err;
28412f47d580SHarsh Jain 	} else {
28422f47d580SHarsh Jain 		error = dma_map_sg(dev, req->src, sg_nents(req->src),
28432f47d580SHarsh Jain 				   DMA_TO_DEVICE);
28442f47d580SHarsh Jain 		if (!error)
28452f47d580SHarsh Jain 			goto err;
28462f47d580SHarsh Jain 		error = dma_map_sg(dev, req->dst, sg_nents(req->dst),
28472f47d580SHarsh Jain 				   DMA_FROM_DEVICE);
28482f47d580SHarsh Jain 		if (!error) {
28492f47d580SHarsh Jain 			dma_unmap_sg(dev, req->src, sg_nents(req->src),
28502f47d580SHarsh Jain 				   DMA_TO_DEVICE);
28512f47d580SHarsh Jain 			goto err;
28522f47d580SHarsh Jain 		}
28532f47d580SHarsh Jain 	}
28542f47d580SHarsh Jain 
28552f47d580SHarsh Jain 	return 0;
28562f47d580SHarsh Jain err:
28572f47d580SHarsh Jain 	return -ENOMEM;
28582f47d580SHarsh Jain }
28596dad4e8aSAtul Gupta 
28606dad4e8aSAtul Gupta void chcr_cipher_dma_unmap(struct device *dev,
28617cea6d3eSArd Biesheuvel 			   struct skcipher_request *req)
28622f47d580SHarsh Jain {
28632f47d580SHarsh Jain 	if (req->src == req->dst) {
28642f47d580SHarsh Jain 		dma_unmap_sg(dev, req->src, sg_nents(req->src),
28652f47d580SHarsh Jain 				   DMA_BIDIRECTIONAL);
28662f47d580SHarsh Jain 	} else {
28672f47d580SHarsh Jain 		dma_unmap_sg(dev, req->src, sg_nents(req->src),
28682f47d580SHarsh Jain 				   DMA_TO_DEVICE);
28692f47d580SHarsh Jain 		dma_unmap_sg(dev, req->dst, sg_nents(req->dst),
28702f47d580SHarsh Jain 				   DMA_FROM_DEVICE);
28712f47d580SHarsh Jain 	}
28722f47d580SHarsh Jain }
28732f47d580SHarsh Jain 
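/*
 * Encode msglen big-endian into the csize-byte CCM length field (zero-padded
 * on the left); the caller points 'block' at the tail of the B0 block.
 */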
28742debd332SHarsh Jain static int set_msg_len(u8 *block, unsigned int msglen, int csize)
28752debd332SHarsh Jain {
28762debd332SHarsh Jain 	__be32 data;
28772debd332SHarsh Jain 
28782debd332SHarsh Jain 	memset(block, 0, csize);
28792debd332SHarsh Jain 	block += csize;
28802debd332SHarsh Jain 
28812debd332SHarsh Jain 	if (csize >= 4)
28822debd332SHarsh Jain 		csize = 4;
28832debd332SHarsh Jain 	else if (msglen > (unsigned int)(1 << (8 * csize)))
28842debd332SHarsh Jain 		return -EOVERFLOW;
28852debd332SHarsh Jain 
28862debd332SHarsh Jain 	data = cpu_to_be32(msglen);
28872debd332SHarsh Jain 	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
28882debd332SHarsh Jain 
28892debd332SHarsh Jain 	return 0;
28902debd332SHarsh Jain }
28912debd332SHarsh Jain 
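/*
 * Build the CCM B0 block (RFC 3610): keep L' from the IV in bits 0-2 of the
 * flags byte, encode the tag length as (M - 2) / 2 in bits 3-5, set the
 * Adata bit when AAD is present and write the message length in the last
 * L bytes.
 */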
289266af86d9SYueHaibing static int generate_b0(struct aead_request *req, u8 *ivptr,
28932debd332SHarsh Jain 			unsigned short op_type)
28942debd332SHarsh Jain {
28952debd332SHarsh Jain 	unsigned int l, lp, m;
28962debd332SHarsh Jain 	int rc;
28972debd332SHarsh Jain 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2898e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
28992debd332SHarsh Jain 	u8 *b0 = reqctx->scratch_pad;
29002debd332SHarsh Jain 
29012debd332SHarsh Jain 	m = crypto_aead_authsize(aead);
29022debd332SHarsh Jain 
29031f479e4cSHarsh Jain 	memcpy(b0, ivptr, 16);
29042debd332SHarsh Jain 
29052debd332SHarsh Jain 	lp = b0[0];
29062debd332SHarsh Jain 	l = lp + 1;
29072debd332SHarsh Jain 
29082debd332SHarsh Jain 	/* set m, bits 3-5 */
29092debd332SHarsh Jain 	*b0 |= (8 * ((m - 2) / 2));
29102debd332SHarsh Jain 
29112debd332SHarsh Jain 	/* set adata, bit 6, if associated data is used */
29122debd332SHarsh Jain 	if (req->assoclen)
29132debd332SHarsh Jain 		*b0 |= 64;
29142debd332SHarsh Jain 	rc = set_msg_len(b0 + 16 - l,
29152debd332SHarsh Jain 			 (op_type == CHCR_DECRYPT_OP) ?
29162debd332SHarsh Jain 			 req->cryptlen - m : req->cryptlen, l);
291766af86d9SYueHaibing 
291866af86d9SYueHaibing 	return rc;
29192debd332SHarsh Jain }
29202debd332SHarsh Jain 
29212debd332SHarsh Jain static inline int crypto_ccm_check_iv(const u8 *iv)
29222debd332SHarsh Jain {
29232debd332SHarsh Jain 	/* 2 <= L <= 8, so 1 <= L' <= 7. */
29242debd332SHarsh Jain 	if (iv[0] < 1 || iv[0] > 7)
29252debd332SHarsh Jain 		return -EINVAL;
29262debd332SHarsh Jain 
29272debd332SHarsh Jain 	return 0;
29282debd332SHarsh Jain }
29292debd332SHarsh Jain 
29302debd332SHarsh Jain static int ccm_format_packet(struct aead_request *req,
29311f479e4cSHarsh Jain 			     u8 *ivptr,
29322debd332SHarsh Jain 			     unsigned int sub_type,
29334262c98aSHarsh Jain 			     unsigned short op_type,
29344262c98aSHarsh Jain 			     unsigned int assoclen)
29352debd332SHarsh Jain {
2936e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
29371f479e4cSHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
29381f479e4cSHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
29392debd332SHarsh Jain 	int rc = 0;
29402debd332SHarsh Jain 
29412debd332SHarsh Jain 	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
29421f479e4cSHarsh Jain 		ivptr[0] = 3;
29431f479e4cSHarsh Jain 		memcpy(ivptr + 1, &aeadctx->salt[0], 3);
29441f479e4cSHarsh Jain 		memcpy(ivptr + 4, req->iv, 8);
29451f479e4cSHarsh Jain 		memset(ivptr + 12, 0, 4);
29462debd332SHarsh Jain 	} else {
29471f479e4cSHarsh Jain 		memcpy(ivptr, req->iv, 16);
29482debd332SHarsh Jain 	}
29494262c98aSHarsh Jain 	if (assoclen)
2950f3b140adSAyush Sawal 		put_unaligned_be16(assoclen, &reqctx->scratch_pad[16]);
29514262c98aSHarsh Jain 
295266af86d9SYueHaibing 	rc = generate_b0(req, ivptr, op_type);
29532debd332SHarsh Jain 	/* zero the ctr value */
29541f479e4cSHarsh Jain 	memset(ivptr + 15 - ivptr[0], 0, ivptr[0] + 1);
29552debd332SHarsh Jain 	return rc;
29562debd332SHarsh Jain }
29572debd332SHarsh Jain 
29582debd332SHarsh Jain static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
29592debd332SHarsh Jain 				  unsigned int dst_size,
29602debd332SHarsh Jain 				  struct aead_request *req,
29612f47d580SHarsh Jain 				  unsigned short op_type)
29622debd332SHarsh Jain {
29632debd332SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2964567be3a5SAyush Sawal 	struct chcr_context *ctx = a_ctx(tfm);
296516a9874fSAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
2966567be3a5SAyush Sawal 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
2967e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
29682debd332SHarsh Jain 	unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
29692debd332SHarsh Jain 	unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
2970567be3a5SAyush Sawal 	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;
29712debd332SHarsh Jain 	unsigned int ccm_xtra;
297210b0c75dSDevulapally Shiva Krishna 	unsigned int tag_offset = 0, auth_offset = 0;
29732debd332SHarsh Jain 	unsigned int assoclen;
29742debd332SHarsh Jain 
297516a9874fSAyush Sawal 	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
297616a9874fSAyush Sawal 
29772debd332SHarsh Jain 	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
29782debd332SHarsh Jain 		assoclen = req->assoclen - 8;
29792debd332SHarsh Jain 	else
29802debd332SHarsh Jain 		assoclen = req->assoclen;
29812debd332SHarsh Jain 	ccm_xtra = CCM_B0_SIZE +
29822debd332SHarsh Jain 		((assoclen) ? CCM_AAD_FIELD_SIZE : 0);
29832debd332SHarsh Jain 
29842debd332SHarsh Jain 	auth_offset = req->cryptlen ?
29851f479e4cSHarsh Jain 		(req->assoclen + IV + 1 + ccm_xtra) : 0;
29862debd332SHarsh Jain 	if (op_type == CHCR_DECRYPT_OP) {
29872debd332SHarsh Jain 		if (crypto_aead_authsize(tfm) != req->cryptlen)
29882debd332SHarsh Jain 			tag_offset = crypto_aead_authsize(tfm);
29892debd332SHarsh Jain 		else
29902debd332SHarsh Jain 			auth_offset = 0;
29912debd332SHarsh Jain 	}
29922debd332SHarsh Jain 
2993567be3a5SAyush Sawal 	sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 1);
29942debd332SHarsh Jain 	sec_cpl->pldlen =
29951f479e4cSHarsh Jain 		htonl(req->assoclen + IV + req->cryptlen + ccm_xtra);
29962debd332SHarsh Jain 	/* For CCM there will always be a B0 block, so AAD start is always 1 */
29972debd332SHarsh Jain 	sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
29981f479e4cSHarsh Jain 				1 + IV,	IV + assoclen + ccm_xtra,
29991f479e4cSHarsh Jain 				req->assoclen + IV + 1 + ccm_xtra, 0);
30002debd332SHarsh Jain 
30012debd332SHarsh Jain 	sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
30022debd332SHarsh Jain 					auth_offset, tag_offset,
30032debd332SHarsh Jain 					(op_type == CHCR_ENCRYPT_OP) ? 0 :
30042debd332SHarsh Jain 					crypto_aead_authsize(tfm));
30052debd332SHarsh Jain 	sec_cpl->seqno_numivs =  FILL_SEC_CPL_SCMD0_SEQNO(op_type,
30062debd332SHarsh Jain 					(op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
30070a7bd30cSHarsh Jain 					cipher_mode, mac_mode,
30082f47d580SHarsh Jain 					aeadctx->hmac_ctrl, IV >> 1);
30092debd332SHarsh Jain 
30102debd332SHarsh Jain 	sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
30112f47d580SHarsh Jain 					0, dst_size);
30122debd332SHarsh Jain }
30132debd332SHarsh Jain 
30141efb892bSColin Ian King static int aead_ccm_validate_input(unsigned short op_type,
30152debd332SHarsh Jain 				   struct aead_request *req,
30162debd332SHarsh Jain 				   struct chcr_aead_ctx *aeadctx,
30172debd332SHarsh Jain 				   unsigned int sub_type)
30182debd332SHarsh Jain {
30192debd332SHarsh Jain 	if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
30202debd332SHarsh Jain 		if (crypto_ccm_check_iv(req->iv)) {
30212debd332SHarsh Jain 			pr_err("CCM: IV check fails\n");
30222debd332SHarsh Jain 			return -EINVAL;
30232debd332SHarsh Jain 		}
30242debd332SHarsh Jain 	} else {
30252debd332SHarsh Jain 		if (req->assoclen != 16 && req->assoclen != 20) {
30262debd332SHarsh Jain 			pr_err("RFC4309: Invalid AAD length %d\n",
30272debd332SHarsh Jain 			       req->assoclen);
30282debd332SHarsh Jain 			return -EINVAL;
30292debd332SHarsh Jain 		}
30302debd332SHarsh Jain 	}
30312debd332SHarsh Jain 	return 0;
30322debd332SHarsh Jain }
30332debd332SHarsh Jain 
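/*
 * Build the work request for CCM/RFC4309 requests; the formatted B0 block
 * (and AAD length field) travels ahead of the AAD and payload, so the SG
 * and length calculations below account for the extra b0_len bytes.
 */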
30342debd332SHarsh Jain static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
30352debd332SHarsh Jain 					  unsigned short qid,
30364262c98aSHarsh Jain 					  int size)
30372debd332SHarsh Jain {
30382debd332SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
30392f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3040e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
30412debd332SHarsh Jain 	struct sk_buff *skb = NULL;
30422debd332SHarsh Jain 	struct chcr_wr *chcr_req;
30432debd332SHarsh Jain 	struct cpl_rx_phys_dsgl *phys_cpl;
30442f47d580SHarsh Jain 	struct ulptx_sgl *ulptx;
30452f47d580SHarsh Jain 	unsigned int transhdr_len;
30461f479e4cSHarsh Jain 	unsigned int dst_size = 0, kctx_len, dnents, temp, snents;
30472f47d580SHarsh Jain 	unsigned int sub_type, assoclen = req->assoclen;
30482debd332SHarsh Jain 	unsigned int authsize = crypto_aead_authsize(tfm);
30492f47d580SHarsh Jain 	int error = -EINVAL;
30501f479e4cSHarsh Jain 	u8 *ivptr;
30512debd332SHarsh Jain 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
30522debd332SHarsh Jain 		GFP_ATOMIC;
30532f47d580SHarsh Jain 	struct adapter *adap = padap(a_ctx(tfm)->dev);
30542debd332SHarsh Jain 
30552debd332SHarsh Jain 	sub_type = get_aead_subtype(tfm);
30562f47d580SHarsh Jain 	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
30572f47d580SHarsh Jain 		assoclen -= 8;
30584262c98aSHarsh Jain 	reqctx->b0_len = CCM_B0_SIZE + (assoclen ? CCM_AAD_FIELD_SIZE : 0);
30594262c98aSHarsh Jain 	error = chcr_aead_common_init(req);
30602f47d580SHarsh Jain 	if (error)
30615fe8c711SHarsh Jain 		return ERR_PTR(error);
30622f47d580SHarsh Jain 
30634262c98aSHarsh Jain 	error = aead_ccm_validate_input(reqctx->op, req, aeadctx, sub_type);
30645fe8c711SHarsh Jain 	if (error)
30652debd332SHarsh Jain 		goto err;
30661f479e4cSHarsh Jain 	dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen
30674262c98aSHarsh Jain 			+ (reqctx->op ? -authsize : authsize),
30681f479e4cSHarsh Jain 			CHCR_DST_SG_SIZE, 0);
30692f47d580SHarsh Jain 	dnents += MIN_CCM_SG; // For IV and B0
30702f47d580SHarsh Jain 	dst_size = get_space_for_phys_dsgl(dnents);
30711f479e4cSHarsh Jain 	snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
30721f479e4cSHarsh Jain 			       CHCR_SRC_SG_SIZE, 0);
30731f479e4cSHarsh Jain 	snents += MIN_CCM_SG; //For B0
3074125d01caSHarsh Jain 	kctx_len = roundup(aeadctx->enckey_len, 16) * 2;
30752debd332SHarsh Jain 	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
30761f479e4cSHarsh Jain 	reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen +
30772f47d580SHarsh Jain 		       reqctx->b0_len) <= SGE_MAX_WR_LEN;
30781f479e4cSHarsh Jain 	temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen +
3079125d01caSHarsh Jain 				     reqctx->b0_len, 16) :
30801f479e4cSHarsh Jain 		(sgl_len(snents) *  8);
30812f47d580SHarsh Jain 	transhdr_len += temp;
3082125d01caSHarsh Jain 	transhdr_len = roundup(transhdr_len, 16);
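	/*
	 * Note (added for clarity): reqctx->imm selects between carrying the
	 * source data inline ("immediate") in the work request when the whole
	 * transfer fits in SGE_MAX_WR_LEN, and describing it with a ULPTX
	 * scatter/gather list otherwise; either way transhdr_len is padded to
	 * a 16-byte multiple.
	 */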
30832f47d580SHarsh Jain 
30842f47d580SHarsh Jain 	if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE -
30854262c98aSHarsh Jain 				reqctx->b0_len, transhdr_len, reqctx->op)) {
3086ee0863baSHarsh Jain 		atomic_inc(&adap->chcr_stats.fallback);
30874262c98aSHarsh Jain 		chcr_aead_common_exit(req);
30884262c98aSHarsh Jain 		return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
30890e93708dSHarsh Jain 	}
30901f479e4cSHarsh Jain 	skb = alloc_skb(transhdr_len,  flags);
30912debd332SHarsh Jain 
30925fe8c711SHarsh Jain 	if (!skb) {
30935fe8c711SHarsh Jain 		error = -ENOMEM;
30942debd332SHarsh Jain 		goto err;
30955fe8c711SHarsh Jain 	}
30962debd332SHarsh Jain 
30971f479e4cSHarsh Jain 	chcr_req = __skb_put_zero(skb, transhdr_len);
30982debd332SHarsh Jain 
30994262c98aSHarsh Jain 	fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, reqctx->op);
31002debd332SHarsh Jain 
31012debd332SHarsh Jain 	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
31022debd332SHarsh Jain 	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
3103125d01caSHarsh Jain 	memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
3104125d01caSHarsh Jain 			aeadctx->key, aeadctx->enckey_len);
31052debd332SHarsh Jain 
31062debd332SHarsh Jain 	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
31071f479e4cSHarsh Jain 	ivptr = (u8 *)(phys_cpl + 1) + dst_size;
31081f479e4cSHarsh Jain 	ulptx = (struct ulptx_sgl *)(ivptr + IV);
31091f479e4cSHarsh Jain 	error = ccm_format_packet(req, ivptr, sub_type, reqctx->op, assoclen);
31105fe8c711SHarsh Jain 	if (error)
31112debd332SHarsh Jain 		goto dstmap_fail;
31121f479e4cSHarsh Jain 	chcr_add_aead_dst_ent(req, phys_cpl, qid);
31131f479e4cSHarsh Jain 	chcr_add_aead_src_ent(req, ulptx);
31142f47d580SHarsh Jain 
3115ee0863baSHarsh Jain 	atomic_inc(&adap->chcr_stats.aead_rqst);
31161f479e4cSHarsh Jain 	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + IV +
31171f479e4cSHarsh Jain 		kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen +
31182f47d580SHarsh Jain 		reqctx->b0_len) : 0);
31192f47d580SHarsh Jain 	create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0,
31202f47d580SHarsh Jain 		    transhdr_len, temp, 0);
31212debd332SHarsh Jain 	reqctx->skb = skb;
31222f47d580SHarsh Jain 
31232debd332SHarsh Jain 	return skb;
31242debd332SHarsh Jain dstmap_fail:
31252debd332SHarsh Jain 	kfree_skb(skb);
31262debd332SHarsh Jain err:
31274262c98aSHarsh Jain 	chcr_aead_common_exit(req);
31285fe8c711SHarsh Jain 	return ERR_PTR(error);
31292debd332SHarsh Jain }
31302debd332SHarsh Jain 
31312debd332SHarsh Jain static struct sk_buff *create_gcm_wr(struct aead_request *req,
31322debd332SHarsh Jain 				     unsigned short qid,
31334262c98aSHarsh Jain 				     int size)
31342debd332SHarsh Jain {
31352debd332SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3136567be3a5SAyush Sawal 	struct chcr_context *ctx = a_ctx(tfm);
313716a9874fSAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
3138567be3a5SAyush Sawal 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
3139e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
31402debd332SHarsh Jain 	struct sk_buff *skb = NULL;
31412debd332SHarsh Jain 	struct chcr_wr *chcr_req;
31422debd332SHarsh Jain 	struct cpl_rx_phys_dsgl *phys_cpl;
31432f47d580SHarsh Jain 	struct ulptx_sgl *ulptx;
31441f479e4cSHarsh Jain 	unsigned int transhdr_len, dnents = 0, snents;
31452f47d580SHarsh Jain 	unsigned int dst_size = 0, temp = 0, kctx_len, assoclen = req->assoclen;
31462debd332SHarsh Jain 	unsigned int authsize = crypto_aead_authsize(tfm);
31472f47d580SHarsh Jain 	int error = -EINVAL;
31481f479e4cSHarsh Jain 	u8 *ivptr;
31492debd332SHarsh Jain 	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
31502debd332SHarsh Jain 		GFP_ATOMIC;
3151567be3a5SAyush Sawal 	struct adapter *adap = padap(ctx->dev);
3152567be3a5SAyush Sawal 	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;
31532debd332SHarsh Jain 
315416a9874fSAyush Sawal 	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
31552f47d580SHarsh Jain 	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
31562f47d580SHarsh Jain 		assoclen = req->assoclen - 8;
31572debd332SHarsh Jain 
31584262c98aSHarsh Jain 	reqctx->b0_len = 0;
31594262c98aSHarsh Jain 	error = chcr_aead_common_init(req);
31605fe8c711SHarsh Jain 	if (error)
31615fe8c711SHarsh Jain 		return ERR_PTR(error);
31621f479e4cSHarsh Jain 	dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
31634262c98aSHarsh Jain 				(reqctx->op ? -authsize : authsize),
31641f479e4cSHarsh Jain 				CHCR_DST_SG_SIZE, 0);
31651f479e4cSHarsh Jain 	snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
31661f479e4cSHarsh Jain 			       CHCR_SRC_SG_SIZE, 0);
31672f47d580SHarsh Jain 	dnents += MIN_GCM_SG; // For IV
31682f47d580SHarsh Jain 	dst_size = get_space_for_phys_dsgl(dnents);
3169125d01caSHarsh Jain 	kctx_len = roundup(aeadctx->enckey_len, 16) + AEAD_H_SIZE;
31702debd332SHarsh Jain 	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
31711f479e4cSHarsh Jain 	reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <=
31722f47d580SHarsh Jain 			SGE_MAX_WR_LEN;
31731f479e4cSHarsh Jain 	temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) :
31741f479e4cSHarsh Jain 		(sgl_len(snents) * 8);
31752f47d580SHarsh Jain 	transhdr_len += temp;
3176125d01caSHarsh Jain 	transhdr_len = roundup(transhdr_len, 16);
31772f47d580SHarsh Jain 	if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE,
31784262c98aSHarsh Jain 			    transhdr_len, reqctx->op)) {
31794262c98aSHarsh Jain 
3180ee0863baSHarsh Jain 		atomic_inc(&adap->chcr_stats.fallback);
31814262c98aSHarsh Jain 		chcr_aead_common_exit(req);
31824262c98aSHarsh Jain 		return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
31830e93708dSHarsh Jain 	}
31841f479e4cSHarsh Jain 	skb = alloc_skb(transhdr_len, flags);
31855fe8c711SHarsh Jain 	if (!skb) {
31865fe8c711SHarsh Jain 		error = -ENOMEM;
31872debd332SHarsh Jain 		goto err;
31885fe8c711SHarsh Jain 	}
31892debd332SHarsh Jain 
3190de77b966Syuan linyu 	chcr_req = __skb_put_zero(skb, transhdr_len);
31912debd332SHarsh Jain 
31922f47d580SHarsh Jain 	//Offset of tag from end
31934262c98aSHarsh Jain 	temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize;
31942debd332SHarsh Jain 	chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
3195567be3a5SAyush Sawal 						rx_channel_id, 2, 1);
31960e93708dSHarsh Jain 	chcr_req->sec_cpl.pldlen =
31971f479e4cSHarsh Jain 		htonl(req->assoclen + IV + req->cryptlen);
31982debd332SHarsh Jain 	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
31991f479e4cSHarsh Jain 					assoclen ? 1 + IV : 0,
32001f479e4cSHarsh Jain 					assoclen ? IV + assoclen : 0,
32011f479e4cSHarsh Jain 					req->assoclen + IV + 1, 0);
32022debd332SHarsh Jain 	chcr_req->sec_cpl.cipherstop_lo_authinsert =
32031f479e4cSHarsh Jain 			FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + IV + 1,
32042f47d580SHarsh Jain 						temp, temp);
32052debd332SHarsh Jain 	chcr_req->sec_cpl.seqno_numivs =
32064262c98aSHarsh Jain 			FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, (reqctx->op ==
32072debd332SHarsh Jain 					CHCR_ENCRYPT_OP) ? 1 : 0,
32082debd332SHarsh Jain 					CHCR_SCMD_CIPHER_MODE_AES_GCM,
32090a7bd30cSHarsh Jain 					CHCR_SCMD_AUTH_MODE_GHASH,
32102f47d580SHarsh Jain 					aeadctx->hmac_ctrl, IV >> 1);
32112debd332SHarsh Jain 	chcr_req->sec_cpl.ivgen_hdrlen =  FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
32122f47d580SHarsh Jain 					0, 0, dst_size);
32132debd332SHarsh Jain 	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
32142debd332SHarsh Jain 	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
3215125d01caSHarsh Jain 	memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
3216125d01caSHarsh Jain 	       GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);
32172debd332SHarsh Jain 
32181f479e4cSHarsh Jain 	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
32191f479e4cSHarsh Jain 	ivptr = (u8 *)(phys_cpl + 1) + dst_size;
32202debd332SHarsh Jain 	/* prepare a 16 byte iv */
32212debd332SHarsh Jain 	/* S   A   L  T |  IV | 0x00000001 */
32222debd332SHarsh Jain 	if (get_aead_subtype(tfm) ==
32232debd332SHarsh Jain 	    CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
32241f479e4cSHarsh Jain 		memcpy(ivptr, aeadctx->salt, 4);
32251f479e4cSHarsh Jain 		memcpy(ivptr + 4, req->iv, GCM_RFC4106_IV_SIZE);
32262debd332SHarsh Jain 	} else {
32271f479e4cSHarsh Jain 		memcpy(ivptr, req->iv, GCM_AES_IV_SIZE);
32282debd332SHarsh Jain 	}
3229f3b140adSAyush Sawal 	put_unaligned_be32(0x01, &ivptr[12]);
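	/*
	 * Note (added for clarity): the resulting 16-byte block follows the
	 * GCM J0 layout for a 96-bit IV: bytes 0-3 hold the salt (rfc4106) or
	 * the start of the caller's IV (plain gcm), the remaining IV bytes
	 * run up to byte 11, and bytes 12-15 carry the initial 32-bit counter
	 * value 0x00000001.
	 */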
32301f479e4cSHarsh Jain 	ulptx = (struct ulptx_sgl *)(ivptr + 16);
32312debd332SHarsh Jain 
32321f479e4cSHarsh Jain 	chcr_add_aead_dst_ent(req, phys_cpl, qid);
32331f479e4cSHarsh Jain 	chcr_add_aead_src_ent(req, ulptx);
3234ee0863baSHarsh Jain 	atomic_inc(&adap->chcr_stats.aead_rqst);
32351f479e4cSHarsh Jain 	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + IV +
32361f479e4cSHarsh Jain 		kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
32372f47d580SHarsh Jain 	create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
32382f47d580SHarsh Jain 		    transhdr_len, temp, reqctx->verify);
32392debd332SHarsh Jain 	reqctx->skb = skb;
32402debd332SHarsh Jain 	return skb;
32412debd332SHarsh Jain 
32422debd332SHarsh Jain err:
32434262c98aSHarsh Jain 	chcr_aead_common_exit(req);
32445fe8c711SHarsh Jain 	return ERR_PTR(error);
32452debd332SHarsh Jain }
32462debd332SHarsh Jain 
32472debd332SHarsh Jain 
32482debd332SHarsh Jain 
32492debd332SHarsh Jain static int chcr_aead_cra_init(struct crypto_aead *tfm)
32502debd332SHarsh Jain {
32512f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
32520e93708dSHarsh Jain 	struct aead_alg *alg = crypto_aead_alg(tfm);
32532debd332SHarsh Jain 
32540e93708dSHarsh Jain 	aeadctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0,
32555fe8c711SHarsh Jain 					       CRYPTO_ALG_NEED_FALLBACK |
32565fe8c711SHarsh Jain 					       CRYPTO_ALG_ASYNC);
32570e93708dSHarsh Jain 	if  (IS_ERR(aeadctx->sw_cipher))
32580e93708dSHarsh Jain 		return PTR_ERR(aeadctx->sw_cipher);
3259e055bffaSHerbert Xu 	crypto_aead_set_reqsize_dma(
3260e055bffaSHerbert Xu 		tfm, max(sizeof(struct chcr_aead_reqctx),
32610e93708dSHarsh Jain 			 sizeof(struct aead_request) +
32620e93708dSHarsh Jain 			 crypto_aead_reqsize(aeadctx->sw_cipher)));
32632f47d580SHarsh Jain 	return chcr_device_init(a_ctx(tfm));
32642debd332SHarsh Jain }
32652debd332SHarsh Jain 
32662debd332SHarsh Jain static void chcr_aead_cra_exit(struct crypto_aead *tfm)
32672debd332SHarsh Jain {
32682f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
32690e93708dSHarsh Jain 
32700e93708dSHarsh Jain 	crypto_free_aead(aeadctx->sw_cipher);
32712debd332SHarsh Jain }
32722debd332SHarsh Jain 
32732debd332SHarsh Jain static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
32742debd332SHarsh Jain 					unsigned int authsize)
32752debd332SHarsh Jain {
32762f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
32772debd332SHarsh Jain 
32782debd332SHarsh Jain 	aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
32792debd332SHarsh Jain 	aeadctx->mayverify = VERIFY_HW;
32800e93708dSHarsh Jain 	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
32812debd332SHarsh Jain }
32822debd332SHarsh Jain static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
32832debd332SHarsh Jain 				    unsigned int authsize)
32842debd332SHarsh Jain {
32852f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
32862debd332SHarsh Jain 	u32 maxauth = crypto_aead_maxauthsize(tfm);
32872debd332SHarsh Jain 
32882debd332SHarsh Jain 	/* SHA1 authsize in IPsec is 12 instead of 10, i.e. authsize ==
32892debd332SHarsh Jain 	 * maxauthsize / 2 does not hold for sha1, so the authsize == 12
32902debd332SHarsh Jain 	 * check must come before the authsize == (maxauth >> 1) check.
32912debd332SHarsh Jain 	 */
32922debd332SHarsh Jain 	if (authsize == ICV_4) {
32932debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
32942debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
32952debd332SHarsh Jain 	} else if (authsize == ICV_6) {
32962debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
32972debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
32982debd332SHarsh Jain 	} else if (authsize == ICV_10) {
32992debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
33002debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33012debd332SHarsh Jain 	} else if (authsize == ICV_12) {
33022debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
33032debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33042debd332SHarsh Jain 	} else if (authsize == ICV_14) {
33052debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
33062debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33072debd332SHarsh Jain 	} else if (authsize == (maxauth >> 1)) {
33082debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
33092debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33102debd332SHarsh Jain 	} else if (authsize == maxauth) {
33112debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
33122debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33132debd332SHarsh Jain 	} else {
33142debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
33152debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_SW;
33162debd332SHarsh Jain 	}
33170e93708dSHarsh Jain 	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
33182debd332SHarsh Jain }
33192debd332SHarsh Jain 
33202debd332SHarsh Jain 
33212debd332SHarsh Jain static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
33222debd332SHarsh Jain {
33232f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
33242debd332SHarsh Jain 
33252debd332SHarsh Jain 	switch (authsize) {
33262debd332SHarsh Jain 	case ICV_4:
33272debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
33282debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33292debd332SHarsh Jain 		break;
33302debd332SHarsh Jain 	case ICV_8:
33312debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
33322debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33332debd332SHarsh Jain 		break;
33342debd332SHarsh Jain 	case ICV_12:
33352debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
33362debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33372debd332SHarsh Jain 		break;
33382debd332SHarsh Jain 	case ICV_14:
33392debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
33402debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33412debd332SHarsh Jain 		break;
33422debd332SHarsh Jain 	case ICV_16:
33432debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
33442debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33452debd332SHarsh Jain 		break;
33462debd332SHarsh Jain 	case ICV_13:
33472debd332SHarsh Jain 	case ICV_15:
33482debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
33492debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_SW;
33502debd332SHarsh Jain 		break;
33512debd332SHarsh Jain 	default:
33522debd332SHarsh Jain 		return -EINVAL;
33532debd332SHarsh Jain 	}
33540e93708dSHarsh Jain 	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
33552debd332SHarsh Jain }
33562debd332SHarsh Jain 
33572debd332SHarsh Jain static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
33582debd332SHarsh Jain 					  unsigned int authsize)
33592debd332SHarsh Jain {
33602f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
33612debd332SHarsh Jain 
33622debd332SHarsh Jain 	switch (authsize) {
33632debd332SHarsh Jain 	case ICV_8:
33642debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
33652debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33662debd332SHarsh Jain 		break;
33672debd332SHarsh Jain 	case ICV_12:
33682debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
33692debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33702debd332SHarsh Jain 		break;
33712debd332SHarsh Jain 	case ICV_16:
33722debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
33732debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33742debd332SHarsh Jain 		break;
33752debd332SHarsh Jain 	default:
33762debd332SHarsh Jain 		return -EINVAL;
33772debd332SHarsh Jain 	}
33780e93708dSHarsh Jain 	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
33792debd332SHarsh Jain }
33802debd332SHarsh Jain 
33812debd332SHarsh Jain static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
33822debd332SHarsh Jain 				unsigned int authsize)
33832debd332SHarsh Jain {
33842f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
33852debd332SHarsh Jain 
33862debd332SHarsh Jain 	switch (authsize) {
33872debd332SHarsh Jain 	case ICV_4:
33882debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
33892debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33902debd332SHarsh Jain 		break;
33912debd332SHarsh Jain 	case ICV_6:
33922debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
33932debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33942debd332SHarsh Jain 		break;
33952debd332SHarsh Jain 	case ICV_8:
33962debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
33972debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
33982debd332SHarsh Jain 		break;
33992debd332SHarsh Jain 	case ICV_10:
34002debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
34012debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
34022debd332SHarsh Jain 		break;
34032debd332SHarsh Jain 	case ICV_12:
34042debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
34052debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
34062debd332SHarsh Jain 		break;
34072debd332SHarsh Jain 	case ICV_14:
34082debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
34092debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
34102debd332SHarsh Jain 		break;
34112debd332SHarsh Jain 	case ICV_16:
34122debd332SHarsh Jain 		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
34132debd332SHarsh Jain 		aeadctx->mayverify = VERIFY_HW;
34142debd332SHarsh Jain 		break;
34152debd332SHarsh Jain 	default:
34162debd332SHarsh Jain 		return -EINVAL;
34172debd332SHarsh Jain 	}
34180e93708dSHarsh Jain 	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
34192debd332SHarsh Jain }
34202debd332SHarsh Jain 
34210e93708dSHarsh Jain static int chcr_ccm_common_setkey(struct crypto_aead *aead,
34222debd332SHarsh Jain 				const u8 *key,
34232debd332SHarsh Jain 				unsigned int keylen)
34242debd332SHarsh Jain {
34252f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
34262debd332SHarsh Jain 	unsigned char ck_size, mk_size;
34272debd332SHarsh Jain 	int key_ctx_size = 0;
34282debd332SHarsh Jain 
3429125d01caSHarsh Jain 	key_ctx_size = sizeof(struct _key_ctx) + roundup(keylen, 16) * 2;
34302debd332SHarsh Jain 	if (keylen == AES_KEYSIZE_128) {
34312debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
3432125d01caSHarsh Jain 		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_128;
34332debd332SHarsh Jain 	} else if (keylen == AES_KEYSIZE_192) {
34342debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
34352debd332SHarsh Jain 		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
34362debd332SHarsh Jain 	} else if (keylen == AES_KEYSIZE_256) {
34372debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
34382debd332SHarsh Jain 		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
34392debd332SHarsh Jain 	} else {
34402debd332SHarsh Jain 		aeadctx->enckey_len = 0;
34412debd332SHarsh Jain 		return	-EINVAL;
34422debd332SHarsh Jain 	}
34432debd332SHarsh Jain 	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
34442debd332SHarsh Jain 						key_ctx_size >> 4);
34450e93708dSHarsh Jain 	memcpy(aeadctx->key, key, keylen);
34460e93708dSHarsh Jain 	aeadctx->enckey_len = keylen;
34470e93708dSHarsh Jain 
34482debd332SHarsh Jain 	return 0;
34492debd332SHarsh Jain }
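/*
 * Illustrative example (added, not from the original source): for an
 * AES-128 CCM key (keylen == 16) the computation above gives
 * key_ctx_size = sizeof(struct _key_ctx) + roundup(16, 16) * 2
 *              = sizeof(struct _key_ctx) + 32,
 * i.e. room for the two copies of the key that create_aead_ccm_wr()
 * places in the key context; the header encodes the size in 16-byte
 * units (key_ctx_size >> 4).
 */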
34502debd332SHarsh Jain 
34510e93708dSHarsh Jain static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
34520e93708dSHarsh Jain 				const u8 *key,
34530e93708dSHarsh Jain 				unsigned int keylen)
34540e93708dSHarsh Jain {
34552f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
34560e93708dSHarsh Jain 	int error;
34570e93708dSHarsh Jain 
34580e93708dSHarsh Jain 	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
34590e93708dSHarsh Jain 	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
34600e93708dSHarsh Jain 			      CRYPTO_TFM_REQ_MASK);
34610e93708dSHarsh Jain 	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
34620e93708dSHarsh Jain 	if (error)
34630e93708dSHarsh Jain 		return error;
34640e93708dSHarsh Jain 	return chcr_ccm_common_setkey(aead, key, keylen);
34650e93708dSHarsh Jain }
34660e93708dSHarsh Jain 
34672debd332SHarsh Jain static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
34682debd332SHarsh Jain 				    unsigned int keylen)
34692debd332SHarsh Jain {
34702f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
34714dbeae42SHarsh Jain 	int error;
34722debd332SHarsh Jain 
34732debd332SHarsh Jain 	if (keylen < 3) {
34742debd332SHarsh Jain 		aeadctx->enckey_len = 0;
34752debd332SHarsh Jain 		return	-EINVAL;
34762debd332SHarsh Jain 	}
34774dbeae42SHarsh Jain 	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
34784dbeae42SHarsh Jain 	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
34794dbeae42SHarsh Jain 			      CRYPTO_TFM_REQ_MASK);
34804dbeae42SHarsh Jain 	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
34814dbeae42SHarsh Jain 	if (error)
34824dbeae42SHarsh Jain 		return error;
34832debd332SHarsh Jain 	keylen -= 3;
34842debd332SHarsh Jain 	memcpy(aeadctx->salt, key + keylen, 3);
34850e93708dSHarsh Jain 	return chcr_ccm_common_setkey(aead, key, keylen);
34862debd332SHarsh Jain }
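/*
 * Illustrative example (added, not from the original source): for rfc4309
 * the key material passed to setkey is the AES key followed by a 3-byte
 * nonce/salt, e.g. a 19-byte blob = 16-byte AES-128 key + 3 salt bytes;
 * the salt is saved in aeadctx->salt above and the remaining bytes are
 * handled like a normal CCM key.
 */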
34872debd332SHarsh Jain 
34882debd332SHarsh Jain static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
34892debd332SHarsh Jain 			   unsigned int keylen)
34902debd332SHarsh Jain {
34912f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
34922debd332SHarsh Jain 	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
34932debd332SHarsh Jain 	unsigned int ck_size;
34942debd332SHarsh Jain 	int ret = 0, key_ctx_size = 0;
3495571c47abSArd Biesheuvel 	struct crypto_aes_ctx aes;
34962debd332SHarsh Jain 
34970e93708dSHarsh Jain 	aeadctx->enckey_len = 0;
34980e93708dSHarsh Jain 	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
34990e93708dSHarsh Jain 	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead)
35000e93708dSHarsh Jain 			      & CRYPTO_TFM_REQ_MASK);
35010e93708dSHarsh Jain 	ret = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
35020e93708dSHarsh Jain 	if (ret)
35030e93708dSHarsh Jain 		goto out;
35040e93708dSHarsh Jain 
35057c2cf1c4SHarsh Jain 	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
35067c2cf1c4SHarsh Jain 	    keylen > 3) {
35072debd332SHarsh Jain 		keylen -= 4;  /* nonce/salt is present in the last 4 bytes */
35082debd332SHarsh Jain 		memcpy(aeadctx->salt, key + keylen, 4);
35092debd332SHarsh Jain 	}
35102debd332SHarsh Jain 	if (keylen == AES_KEYSIZE_128) {
35112debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
35122debd332SHarsh Jain 	} else if (keylen == AES_KEYSIZE_192) {
35132debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
35142debd332SHarsh Jain 	} else if (keylen == AES_KEYSIZE_256) {
35152debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
35162debd332SHarsh Jain 	} else {
35170e93708dSHarsh Jain 		pr_err("GCM: Invalid key length %d\n", keylen);
35182debd332SHarsh Jain 		ret = -EINVAL;
35192debd332SHarsh Jain 		goto out;
35202debd332SHarsh Jain 	}
35212debd332SHarsh Jain 
35222debd332SHarsh Jain 	memcpy(aeadctx->key, key, keylen);
35232debd332SHarsh Jain 	aeadctx->enckey_len = keylen;
3524125d01caSHarsh Jain 	key_ctx_size = sizeof(struct _key_ctx) + roundup(keylen, 16) +
35252debd332SHarsh Jain 		AEAD_H_SIZE;
35262debd332SHarsh Jain 	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
35272debd332SHarsh Jain 						CHCR_KEYCTX_MAC_KEY_SIZE_128,
35282debd332SHarsh Jain 						0, 0,
35292debd332SHarsh Jain 						key_ctx_size >> 4);
35308356ea51SHarsh Jain 	/* Calculate H = CIPH(K, 0 repeated 16 times).
35318356ea51SHarsh Jain 	 * It will go into the key context.
35322debd332SHarsh Jain 	 */
3533571c47abSArd Biesheuvel 	ret = aes_expandkey(&aes, key, keylen);
35342debd332SHarsh Jain 	if (ret) {
35352debd332SHarsh Jain 		aeadctx->enckey_len = 0;
3536571c47abSArd Biesheuvel 		goto out;
35372debd332SHarsh Jain 	}
35382debd332SHarsh Jain 	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
3539571c47abSArd Biesheuvel 	aes_encrypt(&aes, gctx->ghash_h, gctx->ghash_h);
3540571c47abSArd Biesheuvel 	memzero_explicit(&aes, sizeof(aes));
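	/*
	 * Note (added for clarity): gctx->ghash_h now holds H = AES_K(0^128),
	 * the GHASH hash subkey from the GCM specification; create_gcm_wr()
	 * copies it into the key context so the hardware can perform GHASH
	 * itself.
	 */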
35412debd332SHarsh Jain 
35422debd332SHarsh Jain out:
35432debd332SHarsh Jain 	return ret;
35442debd332SHarsh Jain }
35452debd332SHarsh Jain 
35462debd332SHarsh Jain static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
35472debd332SHarsh Jain 				   unsigned int keylen)
35482debd332SHarsh Jain {
35492f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
35502debd332SHarsh Jain 	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
35512debd332SHarsh Jain 	/* it contains both the auth and cipher keys */
35522debd332SHarsh Jain 	struct crypto_authenc_keys keys;
35533d64bd67SHarsh Jain 	unsigned int bs, subtype;
35542debd332SHarsh Jain 	unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
35552debd332SHarsh Jain 	int err = 0, i, key_ctx_len = 0;
35562debd332SHarsh Jain 	unsigned char ck_size = 0;
35572debd332SHarsh Jain 	unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
3558ec1bca94SChristophe Jaillet 	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);
35592debd332SHarsh Jain 	struct algo_param param;
35602debd332SHarsh Jain 	int align;
35612debd332SHarsh Jain 	u8 *o_ptr = NULL;
35622debd332SHarsh Jain 
35630e93708dSHarsh Jain 	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
35640e93708dSHarsh Jain 	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
35650e93708dSHarsh Jain 			      & CRYPTO_TFM_REQ_MASK);
35660e93708dSHarsh Jain 	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
35670e93708dSHarsh Jain 	if (err)
35680e93708dSHarsh Jain 		goto out;
35690e93708dSHarsh Jain 
3570674f368aSEric Biggers 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
35712debd332SHarsh Jain 		goto out;
35722debd332SHarsh Jain 
35732debd332SHarsh Jain 	if (get_alg_config(&param, max_authsize)) {
35741b3eeb87SChristophe JAILLET 		pr_err("Unsupported digest size\n");
35752debd332SHarsh Jain 		goto out;
35762debd332SHarsh Jain 	}
35773d64bd67SHarsh Jain 	subtype = get_aead_subtype(authenc);
35783d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
35793d64bd67SHarsh Jain 		subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
35803d64bd67SHarsh Jain 		if (keys.enckeylen < CTR_RFC3686_NONCE_SIZE)
35813d64bd67SHarsh Jain 			goto out;
35823d64bd67SHarsh Jain 		memcpy(aeadctx->nonce, keys.enckey + (keys.enckeylen
35833d64bd67SHarsh Jain 		- CTR_RFC3686_NONCE_SIZE), CTR_RFC3686_NONCE_SIZE);
35843d64bd67SHarsh Jain 		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
35853d64bd67SHarsh Jain 	}
35862debd332SHarsh Jain 	if (keys.enckeylen == AES_KEYSIZE_128) {
35872debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
35882debd332SHarsh Jain 	} else if (keys.enckeylen == AES_KEYSIZE_192) {
35892debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
35902debd332SHarsh Jain 	} else if (keys.enckeylen == AES_KEYSIZE_256) {
35912debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
35922debd332SHarsh Jain 	} else {
35931b3eeb87SChristophe JAILLET 		pr_err("Unsupported cipher key\n");
35942debd332SHarsh Jain 		goto out;
35952debd332SHarsh Jain 	}
35962debd332SHarsh Jain 
35972debd332SHarsh Jain 	/* Copy only the encryption key. The authkey is used to generate
35982debd332SHarsh Jain 	 * h(ipad) and h(opad), so it is not needed again afterwards;
35992debd332SHarsh Jain 	 * authkeylen equals the hash digest size.
36002debd332SHarsh Jain 	 */
36012debd332SHarsh Jain 	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
36022debd332SHarsh Jain 	aeadctx->enckey_len = keys.enckeylen;
36033d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_SHA ||
36043d64bd67SHarsh Jain 		subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL) {
36053d64bd67SHarsh Jain 
36062debd332SHarsh Jain 		get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
36072debd332SHarsh Jain 			    aeadctx->enckey_len << 3);
36083d64bd67SHarsh Jain 	}
36092debd332SHarsh Jain 	base_hash  = chcr_alloc_shash(max_authsize);
36102debd332SHarsh Jain 	if (IS_ERR(base_hash)) {
36111b3eeb87SChristophe JAILLET 		pr_err("Base driver cannot be loaded\n");
3612d110cf0aSChristophe JAILLET 		goto out;
36132debd332SHarsh Jain 	}
36142debd332SHarsh Jain 	{
36152debd332SHarsh Jain 		SHASH_DESC_ON_STACK(shash, base_hash);
36166faa0f57SHarsh Jain 
36172debd332SHarsh Jain 		shash->tfm = base_hash;
36182debd332SHarsh Jain 		bs = crypto_shash_blocksize(base_hash);
36192debd332SHarsh Jain 		align = KEYCTX_ALIGN_PAD(max_authsize);
36202debd332SHarsh Jain 		o_ptr =  actx->h_iopad + param.result_size + align;
36212debd332SHarsh Jain 
36222debd332SHarsh Jain 		if (keys.authkeylen > bs) {
36232debd332SHarsh Jain 			err = crypto_shash_digest(shash, keys.authkey,
36242debd332SHarsh Jain 						  keys.authkeylen,
36252debd332SHarsh Jain 						  o_ptr);
36262debd332SHarsh Jain 			if (err) {
36271b3eeb87SChristophe JAILLET 				pr_err("Base driver cannot be loaded\n");
36282debd332SHarsh Jain 				goto out;
36292debd332SHarsh Jain 			}
36302debd332SHarsh Jain 			keys.authkeylen = max_authsize;
36312debd332SHarsh Jain 		} else
36322debd332SHarsh Jain 			memcpy(o_ptr, keys.authkey, keys.authkeylen);
36332debd332SHarsh Jain 
36342debd332SHarsh Jain 		/* Compute the ipad-digest*/
36352debd332SHarsh Jain 		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
36362debd332SHarsh Jain 		memcpy(pad, o_ptr, keys.authkeylen);
36372debd332SHarsh Jain 		for (i = 0; i < bs >> 2; i++)
36382debd332SHarsh Jain 			*((unsigned int *)pad + i) ^= IPAD_DATA;
36392debd332SHarsh Jain 
36402debd332SHarsh Jain 		if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
36412debd332SHarsh Jain 					      max_authsize))
36422debd332SHarsh Jain 			goto out;
36432debd332SHarsh Jain 		/* Compute the opad-digest */
36442debd332SHarsh Jain 		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
36452debd332SHarsh Jain 		memcpy(pad, o_ptr, keys.authkeylen);
36462debd332SHarsh Jain 		for (i = 0; i < bs >> 2; i++)
36472debd332SHarsh Jain 			*((unsigned int *)pad + i) ^= OPAD_DATA;
36482debd332SHarsh Jain 
36492debd332SHarsh Jain 		if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
36502debd332SHarsh Jain 			goto out;
36512debd332SHarsh Jain 
36522debd332SHarsh Jain 		/* convert the ipad and opad digest to network order */
36532debd332SHarsh Jain 		chcr_change_order(actx->h_iopad, param.result_size);
36542debd332SHarsh Jain 		chcr_change_order(o_ptr, param.result_size);
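		/*
		 * Note (added for clarity): actx->h_iopad now holds the
		 * partial hashes of (key XOR ipad) and (key XOR opad) in
		 * network byte order, the usual HMAC precomputation, so
		 * per-request hashing can start from these saved states
		 * instead of re-processing the padded key.
		 */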
36552debd332SHarsh Jain 		key_ctx_len = sizeof(struct _key_ctx) +
3656125d01caSHarsh Jain 			roundup(keys.enckeylen, 16) +
36572debd332SHarsh Jain 			(param.result_size + align) * 2;
36582debd332SHarsh Jain 		aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
36592debd332SHarsh Jain 						0, 1, key_ctx_len >> 4);
36602debd332SHarsh Jain 		actx->auth_mode = param.auth_mode;
36612debd332SHarsh Jain 		chcr_free_shash(base_hash);
36622debd332SHarsh Jain 
3663eb526531STudor-Dan Ambarus 		memzero_explicit(&keys, sizeof(keys));
36642debd332SHarsh Jain 		return 0;
36652debd332SHarsh Jain 	}
36662debd332SHarsh Jain out:
36672debd332SHarsh Jain 	aeadctx->enckey_len = 0;
3668eb526531STudor-Dan Ambarus 	memzero_explicit(&keys, sizeof(keys));
3669ec1bca94SChristophe Jaillet 	if (!IS_ERR(base_hash))
36702debd332SHarsh Jain 		chcr_free_shash(base_hash);
36712debd332SHarsh Jain 	return -EINVAL;
36722debd332SHarsh Jain }
36732debd332SHarsh Jain 
36742debd332SHarsh Jain static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
36752debd332SHarsh Jain 					const u8 *key, unsigned int keylen)
36762debd332SHarsh Jain {
36772f47d580SHarsh Jain 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
36782debd332SHarsh Jain 	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
36792debd332SHarsh Jain 	struct crypto_authenc_keys keys;
36800e93708dSHarsh Jain 	int err;
36812debd332SHarsh Jain 	/* it contains both the auth and cipher keys */
36823d64bd67SHarsh Jain 	unsigned int subtype;
36832debd332SHarsh Jain 	int key_ctx_len = 0;
36842debd332SHarsh Jain 	unsigned char ck_size = 0;
36852debd332SHarsh Jain 
36860e93708dSHarsh Jain 	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
36870e93708dSHarsh Jain 	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
36880e93708dSHarsh Jain 			      & CRYPTO_TFM_REQ_MASK);
36890e93708dSHarsh Jain 	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
36900e93708dSHarsh Jain 	if (err)
36910e93708dSHarsh Jain 		goto out;
36920e93708dSHarsh Jain 
3693674f368aSEric Biggers 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
36942debd332SHarsh Jain 		goto out;
3695674f368aSEric Biggers 
36963d64bd67SHarsh Jain 	subtype = get_aead_subtype(authenc);
36973d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
36983d64bd67SHarsh Jain 	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
36993d64bd67SHarsh Jain 		if (keys.enckeylen < CTR_RFC3686_NONCE_SIZE)
37003d64bd67SHarsh Jain 			goto out;
37013d64bd67SHarsh Jain 		memcpy(aeadctx->nonce, keys.enckey + (keys.enckeylen
37023d64bd67SHarsh Jain 			- CTR_RFC3686_NONCE_SIZE), CTR_RFC3686_NONCE_SIZE);
37033d64bd67SHarsh Jain 		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
37043d64bd67SHarsh Jain 	}
37052debd332SHarsh Jain 	if (keys.enckeylen == AES_KEYSIZE_128) {
37062debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
37072debd332SHarsh Jain 	} else if (keys.enckeylen == AES_KEYSIZE_192) {
37082debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
37092debd332SHarsh Jain 	} else if (keys.enckeylen == AES_KEYSIZE_256) {
37102debd332SHarsh Jain 		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
37112debd332SHarsh Jain 	} else {
37121b3eeb87SChristophe JAILLET 		pr_err("Unsupported cipher key %d\n", keys.enckeylen);
37132debd332SHarsh Jain 		goto out;
37142debd332SHarsh Jain 	}
37152debd332SHarsh Jain 	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
37162debd332SHarsh Jain 	aeadctx->enckey_len = keys.enckeylen;
37173d64bd67SHarsh Jain 	if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_SHA ||
37183d64bd67SHarsh Jain 	    subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL) {
37192debd332SHarsh Jain 		get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
37202debd332SHarsh Jain 				aeadctx->enckey_len << 3);
37213d64bd67SHarsh Jain 	}
3722125d01caSHarsh Jain 	key_ctx_len =  sizeof(struct _key_ctx) + roundup(keys.enckeylen, 16);
37232debd332SHarsh Jain 
37242debd332SHarsh Jain 	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
37252debd332SHarsh Jain 						0, key_ctx_len >> 4);
37262debd332SHarsh Jain 	actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
3727eb526531STudor-Dan Ambarus 	memzero_explicit(&keys, sizeof(keys));
37282debd332SHarsh Jain 	return 0;
37292debd332SHarsh Jain out:
37302debd332SHarsh Jain 	aeadctx->enckey_len = 0;
3731eb526531STudor-Dan Ambarus 	memzero_explicit(&keys, sizeof(keys));
37322debd332SHarsh Jain 	return -EINVAL;
37332debd332SHarsh Jain }
37346dad4e8aSAtul Gupta 
37356dad4e8aSAtul Gupta static int chcr_aead_op(struct aead_request *req,
37366dad4e8aSAtul Gupta 			int size,
37376dad4e8aSAtul Gupta 			create_wr_t create_wr_fn)
37386dad4e8aSAtul Gupta {
37396dad4e8aSAtul Gupta 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3740e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
3741567be3a5SAyush Sawal 	struct chcr_context *ctx = a_ctx(tfm);
3742567be3a5SAyush Sawal 	struct uld_ctx *u_ctx = ULD_CTX(ctx);
37436dad4e8aSAtul Gupta 	struct sk_buff *skb;
3744fef4912bSHarsh Jain 	struct chcr_dev *cdev;
37456dad4e8aSAtul Gupta 
3746fef4912bSHarsh Jain 	cdev = a_ctx(tfm)->dev;
3747fef4912bSHarsh Jain 	if (!cdev) {
37481b3eeb87SChristophe JAILLET 		pr_err("%s : No crypto device.\n", __func__);
37496dad4e8aSAtul Gupta 		return -ENXIO;
37506dad4e8aSAtul Gupta 	}
3751fef4912bSHarsh Jain 
3752fef4912bSHarsh Jain 	if (chcr_inc_wrcount(cdev)) {
3753fef4912bSHarsh Jain 	/* The CHCR detach state means the lldi or padap has been freed;
3754fef4912bSHarsh Jain 	 * we cannot increment the fallback counter here.
3755fef4912bSHarsh Jain 	 */
3756fef4912bSHarsh Jain 		return chcr_aead_fallback(req, reqctx->op);
3757fef4912bSHarsh Jain 	}
3758fef4912bSHarsh Jain 
37596dad4e8aSAtul Gupta 	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
3760567be3a5SAyush Sawal 					reqctx->txqidx) &&
3761567be3a5SAyush Sawal 		(!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))) {
3762fef4912bSHarsh Jain 			chcr_dec_wrcount(cdev);
37636faa0f57SHarsh Jain 			return -ENOSPC;
37646dad4e8aSAtul Gupta 	}
37656dad4e8aSAtul Gupta 
3766d91a3159SDevulapally Shiva Krishna 	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
3767d91a3159SDevulapally Shiva Krishna 	    crypto_ipsec_check_assoclen(req->assoclen) != 0) {
3768d91a3159SDevulapally Shiva Krishna 		pr_err("RFC4106: Invalid value of assoclen %d\n",
3769d91a3159SDevulapally Shiva Krishna 		       req->assoclen);
3770d91a3159SDevulapally Shiva Krishna 		return -EINVAL;
3771d91a3159SDevulapally Shiva Krishna 	}
3772d91a3159SDevulapally Shiva Krishna 
37736dad4e8aSAtul Gupta 	/* Form a WR from req */
3774567be3a5SAyush Sawal 	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
37756dad4e8aSAtul Gupta 
3776b04a27caSYueHaibing 	if (IS_ERR_OR_NULL(skb)) {
3777fef4912bSHarsh Jain 		chcr_dec_wrcount(cdev);
3778b04a27caSYueHaibing 		return PTR_ERR_OR_ZERO(skb);
3779fef4912bSHarsh Jain 	}
37806dad4e8aSAtul Gupta 
37816dad4e8aSAtul Gupta 	skb->dev = u_ctx->lldi.ports[0];
3782567be3a5SAyush Sawal 	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
37836dad4e8aSAtul Gupta 	chcr_send_wr(skb);
3784567be3a5SAyush Sawal 	return -EINPROGRESS;
37856dad4e8aSAtul Gupta }
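/*
 * Note (added for clarity): chcr_aead_op() follows the usual async crypto
 * API conventions: -EINPROGRESS means the work request was handed to the
 * hardware and completion is reported through the request's callback,
 * while -ENOSPC is returned when the transmit queue is full and the
 * request was submitted without CRYPTO_TFM_REQ_MAY_BACKLOG.
 */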
37866dad4e8aSAtul Gupta 
37872debd332SHarsh Jain static int chcr_aead_encrypt(struct aead_request *req)
37882debd332SHarsh Jain {
37892debd332SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3790e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
3791567be3a5SAyush Sawal 	struct chcr_context *ctx = a_ctx(tfm);
3792567be3a5SAyush Sawal 	unsigned int cpu;
3793567be3a5SAyush Sawal 
3794567be3a5SAyush Sawal 	cpu = get_cpu();
3795567be3a5SAyush Sawal 	reqctx->txqidx = cpu % ctx->ntxq;
3796567be3a5SAyush Sawal 	reqctx->rxqidx = cpu % ctx->nrxq;
3797567be3a5SAyush Sawal 	put_cpu();
37982debd332SHarsh Jain 
37992debd332SHarsh Jain 	reqctx->verify = VERIFY_HW;
38004262c98aSHarsh Jain 	reqctx->op = CHCR_ENCRYPT_OP;
38012debd332SHarsh Jain 
38022debd332SHarsh Jain 	switch (get_aead_subtype(tfm)) {
38033d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CTR_SHA:
38043d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CBC_SHA:
38053d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CBC_NULL:
38063d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CTR_NULL:
38074262c98aSHarsh Jain 		return chcr_aead_op(req, 0, create_authenc_wr);
38082debd332SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
38092debd332SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
38104262c98aSHarsh Jain 		return chcr_aead_op(req, 0, create_aead_ccm_wr);
38112debd332SHarsh Jain 	default:
38124262c98aSHarsh Jain 		return chcr_aead_op(req, 0, create_gcm_wr);
38132debd332SHarsh Jain 	}
38142debd332SHarsh Jain }
38152debd332SHarsh Jain 
38162debd332SHarsh Jain static int chcr_aead_decrypt(struct aead_request *req)
38172debd332SHarsh Jain {
38182debd332SHarsh Jain 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3819567be3a5SAyush Sawal 	struct chcr_context *ctx = a_ctx(tfm);
3820567be3a5SAyush Sawal 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
3821e055bffaSHerbert Xu 	struct chcr_aead_reqctx *reqctx = aead_request_ctx_dma(req);
38222debd332SHarsh Jain 	int size;
3823567be3a5SAyush Sawal 	unsigned int cpu;
3824567be3a5SAyush Sawal 
3825567be3a5SAyush Sawal 	cpu = get_cpu();
3826567be3a5SAyush Sawal 	reqctx->txqidx = cpu % ctx->ntxq;
3827567be3a5SAyush Sawal 	reqctx->rxqidx = cpu % ctx->nrxq;
3828567be3a5SAyush Sawal 	put_cpu();
38292debd332SHarsh Jain 
38302debd332SHarsh Jain 	if (aeadctx->mayverify == VERIFY_SW) {
38312debd332SHarsh Jain 		size = crypto_aead_maxauthsize(tfm);
38322debd332SHarsh Jain 		reqctx->verify = VERIFY_SW;
38332debd332SHarsh Jain 	} else {
38342debd332SHarsh Jain 		size = 0;
38352debd332SHarsh Jain 		reqctx->verify = VERIFY_HW;
38362debd332SHarsh Jain 	}
38374262c98aSHarsh Jain 	reqctx->op = CHCR_DECRYPT_OP;
38382debd332SHarsh Jain 	switch (get_aead_subtype(tfm)) {
38393d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CBC_SHA:
38403d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CTR_SHA:
38413d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CBC_NULL:
38423d64bd67SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_CTR_NULL:
38434262c98aSHarsh Jain 		return chcr_aead_op(req, size, create_authenc_wr);
38442debd332SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
38452debd332SHarsh Jain 	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
38464262c98aSHarsh Jain 		return chcr_aead_op(req, size, create_aead_ccm_wr);
38472debd332SHarsh Jain 	default:
38484262c98aSHarsh Jain 		return chcr_aead_op(req, size, create_gcm_wr);
38492debd332SHarsh Jain 	}
38502debd332SHarsh Jain }
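/*
 * Illustrative usage sketch (added, not part of the driver): once the
 * algorithms below are registered, kernel callers reach the encrypt and
 * decrypt entry points above only indirectly, through the generic AEAD
 * API. A minimal, hedged example (error handling omitted; "key", "iv",
 * "sg", "assoclen" and "cryptlen" are placeholders):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req;
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg, sg, cryptlen, iv);
 *	ret = crypto_aead_encrypt(req);	/* may return -EINPROGRESS */
 *
 * Whether "gcm-aes-chcr" or a software implementation backs the tfm
 * depends on algorithm priority and device availability.
 */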
38512debd332SHarsh Jain 
3852324429d7SHariprasad Shenai static struct chcr_alg_template driver_algs[] = {
3853324429d7SHariprasad Shenai 	/* AES-CBC */
3854324429d7SHariprasad Shenai 	{
38557cea6d3eSArd Biesheuvel 		.type = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_SUB_TYPE_CBC,
3856324429d7SHariprasad Shenai 		.is_registered = 0,
38577cea6d3eSArd Biesheuvel 		.alg.skcipher = {
38587cea6d3eSArd Biesheuvel 			.base.cra_name		= "cbc(aes)",
38597cea6d3eSArd Biesheuvel 			.base.cra_driver_name	= "cbc-aes-chcr",
38607cea6d3eSArd Biesheuvel 			.base.cra_blocksize	= AES_BLOCK_SIZE,
38617cea6d3eSArd Biesheuvel 
38627cea6d3eSArd Biesheuvel 			.init			= chcr_init_tfm,
38637cea6d3eSArd Biesheuvel 			.exit			= chcr_exit_tfm,
3864324429d7SHariprasad Shenai 			.min_keysize		= AES_MIN_KEY_SIZE,
3865324429d7SHariprasad Shenai 			.max_keysize		= AES_MAX_KEY_SIZE,
3866324429d7SHariprasad Shenai 			.ivsize			= AES_BLOCK_SIZE,
3867324429d7SHariprasad Shenai 			.setkey			= chcr_aes_cbc_setkey,
3868324429d7SHariprasad Shenai 			.encrypt		= chcr_aes_encrypt,
3869324429d7SHariprasad Shenai 			.decrypt		= chcr_aes_decrypt,
3870324429d7SHariprasad Shenai 			}
3871324429d7SHariprasad Shenai 	},
3872324429d7SHariprasad Shenai 	{
38737cea6d3eSArd Biesheuvel 		.type = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_SUB_TYPE_XTS,
3874324429d7SHariprasad Shenai 		.is_registered = 0,
38757cea6d3eSArd Biesheuvel 		.alg.skcipher = {
38767cea6d3eSArd Biesheuvel 			.base.cra_name		= "xts(aes)",
38777cea6d3eSArd Biesheuvel 			.base.cra_driver_name	= "xts-aes-chcr",
38787cea6d3eSArd Biesheuvel 			.base.cra_blocksize	= AES_BLOCK_SIZE,
38797cea6d3eSArd Biesheuvel 
38807cea6d3eSArd Biesheuvel 			.init			= chcr_init_tfm,
38817cea6d3eSArd Biesheuvel 			.exit			= chcr_exit_tfm,
3882324429d7SHariprasad Shenai 			.min_keysize		= 2 * AES_MIN_KEY_SIZE,
3883324429d7SHariprasad Shenai 			.max_keysize		= 2 * AES_MAX_KEY_SIZE,
3884324429d7SHariprasad Shenai 			.ivsize			= AES_BLOCK_SIZE,
3885324429d7SHariprasad Shenai 			.setkey			= chcr_aes_xts_setkey,
3886324429d7SHariprasad Shenai 			.encrypt		= chcr_aes_encrypt,
3887324429d7SHariprasad Shenai 			.decrypt		= chcr_aes_decrypt,
3888324429d7SHariprasad Shenai 			}
3889b8fd1f41SHarsh Jain 	},
3890b8fd1f41SHarsh Jain 	{
38917cea6d3eSArd Biesheuvel 		.type = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_SUB_TYPE_CTR,
3892b8fd1f41SHarsh Jain 		.is_registered = 0,
38937cea6d3eSArd Biesheuvel 		.alg.skcipher = {
38947cea6d3eSArd Biesheuvel 			.base.cra_name		= "ctr(aes)",
38957cea6d3eSArd Biesheuvel 			.base.cra_driver_name	= "ctr-aes-chcr",
38967cea6d3eSArd Biesheuvel 			.base.cra_blocksize	= 1,
38977cea6d3eSArd Biesheuvel 
38987cea6d3eSArd Biesheuvel 			.init			= chcr_init_tfm,
38997cea6d3eSArd Biesheuvel 			.exit			= chcr_exit_tfm,
3900b8fd1f41SHarsh Jain 			.min_keysize		= AES_MIN_KEY_SIZE,
3901b8fd1f41SHarsh Jain 			.max_keysize		= AES_MAX_KEY_SIZE,
3902b8fd1f41SHarsh Jain 			.ivsize			= AES_BLOCK_SIZE,
3903b8fd1f41SHarsh Jain 			.setkey			= chcr_aes_ctr_setkey,
3904b8fd1f41SHarsh Jain 			.encrypt		= chcr_aes_encrypt,
3905b8fd1f41SHarsh Jain 			.decrypt		= chcr_aes_decrypt,
3906b8fd1f41SHarsh Jain 		}
3907b8fd1f41SHarsh Jain 	},
3908b8fd1f41SHarsh Jain 	{
39097cea6d3eSArd Biesheuvel 		.type = CRYPTO_ALG_TYPE_SKCIPHER |
3910b8fd1f41SHarsh Jain 			CRYPTO_ALG_SUB_TYPE_CTR_RFC3686,
3911b8fd1f41SHarsh Jain 		.is_registered = 0,
39127cea6d3eSArd Biesheuvel 		.alg.skcipher = {
39137cea6d3eSArd Biesheuvel 			.base.cra_name		= "rfc3686(ctr(aes))",
39147cea6d3eSArd Biesheuvel 			.base.cra_driver_name	= "rfc3686-ctr-aes-chcr",
39157cea6d3eSArd Biesheuvel 			.base.cra_blocksize	= 1,
39167cea6d3eSArd Biesheuvel 
39177cea6d3eSArd Biesheuvel 			.init			= chcr_rfc3686_init,
39187cea6d3eSArd Biesheuvel 			.exit			= chcr_exit_tfm,
39197cea6d3eSArd Biesheuvel 			.min_keysize		= AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
39207cea6d3eSArd Biesheuvel 			.max_keysize		= AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3921b8fd1f41SHarsh Jain 			.ivsize			= CTR_RFC3686_IV_SIZE,
3922b8fd1f41SHarsh Jain 			.setkey			= chcr_aes_rfc3686_setkey,
3923b8fd1f41SHarsh Jain 			.encrypt		= chcr_aes_encrypt,
3924b8fd1f41SHarsh Jain 			.decrypt		= chcr_aes_decrypt,
3925b8fd1f41SHarsh Jain 		}
3926324429d7SHariprasad Shenai 	},
3927324429d7SHariprasad Shenai 	/* SHA */
3928324429d7SHariprasad Shenai 	{
3929324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_AHASH,
3930324429d7SHariprasad Shenai 		.is_registered = 0,
3931324429d7SHariprasad Shenai 		.alg.hash = {
3932324429d7SHariprasad Shenai 			.halg.digestsize = SHA1_DIGEST_SIZE,
3933324429d7SHariprasad Shenai 			.halg.base = {
3934324429d7SHariprasad Shenai 				.cra_name = "sha1",
3935324429d7SHariprasad Shenai 				.cra_driver_name = "sha1-chcr",
3936324429d7SHariprasad Shenai 				.cra_blocksize = SHA1_BLOCK_SIZE,
3937324429d7SHariprasad Shenai 			}
3938324429d7SHariprasad Shenai 		}
3939324429d7SHariprasad Shenai 	},
3940324429d7SHariprasad Shenai 	{
3941324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_AHASH,
3942324429d7SHariprasad Shenai 		.is_registered = 0,
3943324429d7SHariprasad Shenai 		.alg.hash = {
3944324429d7SHariprasad Shenai 			.halg.digestsize = SHA256_DIGEST_SIZE,
3945324429d7SHariprasad Shenai 			.halg.base = {
3946324429d7SHariprasad Shenai 				.cra_name = "sha256",
3947324429d7SHariprasad Shenai 				.cra_driver_name = "sha256-chcr",
3948324429d7SHariprasad Shenai 				.cra_blocksize = SHA256_BLOCK_SIZE,
3949324429d7SHariprasad Shenai 			}
3950324429d7SHariprasad Shenai 		}
3951324429d7SHariprasad Shenai 	},
3952324429d7SHariprasad Shenai 	{
3953324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_AHASH,
3954324429d7SHariprasad Shenai 		.is_registered = 0,
3955324429d7SHariprasad Shenai 		.alg.hash = {
3956324429d7SHariprasad Shenai 			.halg.digestsize = SHA224_DIGEST_SIZE,
3957324429d7SHariprasad Shenai 			.halg.base = {
3958324429d7SHariprasad Shenai 				.cra_name = "sha224",
3959324429d7SHariprasad Shenai 				.cra_driver_name = "sha224-chcr",
3960324429d7SHariprasad Shenai 				.cra_blocksize = SHA224_BLOCK_SIZE,
3961324429d7SHariprasad Shenai 			}
3962324429d7SHariprasad Shenai 		}
3963324429d7SHariprasad Shenai 	},
3964324429d7SHariprasad Shenai 	{
3965324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_AHASH,
3966324429d7SHariprasad Shenai 		.is_registered = 0,
3967324429d7SHariprasad Shenai 		.alg.hash = {
3968324429d7SHariprasad Shenai 			.halg.digestsize = SHA384_DIGEST_SIZE,
3969324429d7SHariprasad Shenai 			.halg.base = {
3970324429d7SHariprasad Shenai 				.cra_name = "sha384",
3971324429d7SHariprasad Shenai 				.cra_driver_name = "sha384-chcr",
3972324429d7SHariprasad Shenai 				.cra_blocksize = SHA384_BLOCK_SIZE,
3973324429d7SHariprasad Shenai 			}
3974324429d7SHariprasad Shenai 		}
3975324429d7SHariprasad Shenai 	},
3976324429d7SHariprasad Shenai 	{
3977324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_AHASH,
3978324429d7SHariprasad Shenai 		.is_registered = 0,
3979324429d7SHariprasad Shenai 		.alg.hash = {
3980324429d7SHariprasad Shenai 			.halg.digestsize = SHA512_DIGEST_SIZE,
3981324429d7SHariprasad Shenai 			.halg.base = {
3982324429d7SHariprasad Shenai 				.cra_name = "sha512",
3983324429d7SHariprasad Shenai 				.cra_driver_name = "sha512-chcr",
3984324429d7SHariprasad Shenai 				.cra_blocksize = SHA512_BLOCK_SIZE,
3985324429d7SHariprasad Shenai 			}
3986324429d7SHariprasad Shenai 		}
3987324429d7SHariprasad Shenai 	},
3988324429d7SHariprasad Shenai 	/* HMAC */
3989324429d7SHariprasad Shenai 	{
3990324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_HMAC,
3991324429d7SHariprasad Shenai 		.is_registered = 0,
3992324429d7SHariprasad Shenai 		.alg.hash = {
3993324429d7SHariprasad Shenai 			.halg.digestsize = SHA1_DIGEST_SIZE,
3994324429d7SHariprasad Shenai 			.halg.base = {
3995324429d7SHariprasad Shenai 				.cra_name = "hmac(sha1)",
39962debd332SHarsh Jain 				.cra_driver_name = "hmac-sha1-chcr",
3997324429d7SHariprasad Shenai 				.cra_blocksize = SHA1_BLOCK_SIZE,
3998324429d7SHariprasad Shenai 			}
3999324429d7SHariprasad Shenai 		}
4000324429d7SHariprasad Shenai 	},
4001324429d7SHariprasad Shenai 	{
4002324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_HMAC,
4003324429d7SHariprasad Shenai 		.is_registered = 0,
4004324429d7SHariprasad Shenai 		.alg.hash = {
4005324429d7SHariprasad Shenai 			.halg.digestsize = SHA224_DIGEST_SIZE,
4006324429d7SHariprasad Shenai 			.halg.base = {
4007324429d7SHariprasad Shenai 				.cra_name = "hmac(sha224)",
40082debd332SHarsh Jain 				.cra_driver_name = "hmac-sha224-chcr",
4009324429d7SHariprasad Shenai 				.cra_blocksize = SHA224_BLOCK_SIZE,
4010324429d7SHariprasad Shenai 			}
4011324429d7SHariprasad Shenai 		}
4012324429d7SHariprasad Shenai 	},
4013324429d7SHariprasad Shenai 	{
4014324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_HMAC,
4015324429d7SHariprasad Shenai 		.is_registered = 0,
4016324429d7SHariprasad Shenai 		.alg.hash = {
4017324429d7SHariprasad Shenai 			.halg.digestsize = SHA256_DIGEST_SIZE,
4018324429d7SHariprasad Shenai 			.halg.base = {
4019324429d7SHariprasad Shenai 				.cra_name = "hmac(sha256)",
40202debd332SHarsh Jain 				.cra_driver_name = "hmac-sha256-chcr",
4021324429d7SHariprasad Shenai 				.cra_blocksize = SHA256_BLOCK_SIZE,
4022324429d7SHariprasad Shenai 			}
4023324429d7SHariprasad Shenai 		}
4024324429d7SHariprasad Shenai 	},
4025324429d7SHariprasad Shenai 	{
4026324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_HMAC,
4027324429d7SHariprasad Shenai 		.is_registered = 0,
4028324429d7SHariprasad Shenai 		.alg.hash = {
4029324429d7SHariprasad Shenai 			.halg.digestsize = SHA384_DIGEST_SIZE,
4030324429d7SHariprasad Shenai 			.halg.base = {
4031324429d7SHariprasad Shenai 				.cra_name = "hmac(sha384)",
40322debd332SHarsh Jain 				.cra_driver_name = "hmac-sha384-chcr",
4033324429d7SHariprasad Shenai 				.cra_blocksize = SHA384_BLOCK_SIZE,
4034324429d7SHariprasad Shenai 			}
4035324429d7SHariprasad Shenai 		}
4036324429d7SHariprasad Shenai 	},
4037324429d7SHariprasad Shenai 	{
4038324429d7SHariprasad Shenai 		.type = CRYPTO_ALG_TYPE_HMAC,
4039324429d7SHariprasad Shenai 		.is_registered = 0,
4040324429d7SHariprasad Shenai 		.alg.hash = {
4041324429d7SHariprasad Shenai 			.halg.digestsize = SHA512_DIGEST_SIZE,
4042324429d7SHariprasad Shenai 			.halg.base = {
4043324429d7SHariprasad Shenai 				.cra_name = "hmac(sha512)",
40442debd332SHarsh Jain 				.cra_driver_name = "hmac-sha512-chcr",
4045324429d7SHariprasad Shenai 				.cra_blocksize = SHA512_BLOCK_SIZE,
4046324429d7SHariprasad Shenai 			}
4047324429d7SHariprasad Shenai 		}
4048324429d7SHariprasad Shenai 	},
	/* Add AEAD Algorithms */
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize = chcr_ccm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
				.cra_driver_name =
				"authenc-hmac-sha1-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
				.cra_driver_name =
				"authenc-hmac-sha256-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
				.cra_driver_name =
				"authenc-hmac-sha224-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
				.cra_driver_name =
				"authenc-hmac-sha384-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
				.cra_driver_name =
				"authenc-hmac-sha512-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,rfc3686(ctr(aes)))",
				.cra_driver_name =
				"authenc-digest_null-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
};

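/*
 * Usage sketch (illustrative only, not part of this driver): a kernel
 * consumer normally requests one of the algorithms above by its generic
 * cra_name and lets the crypto core pick the implementation by priority;
 * passing the cra_driver_name (e.g. "gcm-aes-chcr") instead pins this
 * driver's implementation.  The variable names below are hypothetical.
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, GHASH_DIGEST_SIZE);
 *	...
 *	crypto_free_aead(tfm);
 */
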
/*
 *	chcr_unregister_alg - Deregister the driver's crypto algorithms
 *	from the kernel crypto framework.
 */
static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			if (driver_algs[i].is_registered && refcount_read(
			    &driver_algs[i].alg.skcipher.base.cra_refcnt)
			    == 1) {
				crypto_unregister_skcipher(
						&driver_algs[i].alg.skcipher);
				driver_algs[i].is_registered = 0;
			}
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered && refcount_read(
			    &driver_algs[i].alg.aead.base.cra_refcnt) == 1) {
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
				driver_algs[i].is_registered = 0;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered && refcount_read(
			    &driver_algs[i].alg.hash.halg.base.cra_refcnt)
			    == 1) {
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
				driver_algs[i].is_registered = 0;
			}
			break;
		}
	}
	return 0;
}
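
/*
 * Note: crypto_register_*() leaves an algorithm's cra_refcnt at 1 and each
 * allocated tfm takes an additional reference, so the refcount_read() == 1
 * checks above are meant to unregister an algorithm only when no user still
 * holds a transform backed by it.
 */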

#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)

/*
 *	chcr_register_alg - Register crypto algorithms with the kernel framework.
 */
static int chcr_register_alg(void)
{
	struct crypto_alg ai;
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			driver_algs[i].alg.skcipher.base.cra_priority =
				CHCR_CRA_PRIORITY;
			driver_algs[i].alg.skcipher.base.cra_module = THIS_MODULE;
			driver_algs[i].alg.skcipher.base.cra_flags =
				CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.skcipher.base.cra_ctxsize =
				sizeof(struct chcr_context) +
				sizeof(struct ablk_ctx);
			driver_algs[i].alg.skcipher.base.cra_alignmask = 0;

			err = crypto_register_skcipher(&driver_algs[i].alg.skcipher);
			name = driver_algs[i].alg.skcipher.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |
				CRYPTO_ALG_ALLOCATES_MEMORY;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags =
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			ai = driver_algs[i].alg.hash.halg.base;
			name = ai.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("%s : Algorithm registration failed\n", name);
			goto register_err;
		} else {
			driver_algs[i].is_registered = 1;
		}
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}

/*
 *	start_crypto - Register the crypto algorithms.
 *	This should be called once, when the first device comes up; after that
 *	the kernel will start calling the driver APIs for crypto operations.
 */
int start_crypto(void)
{
	return chcr_register_alg();
}

/*
 *	stop_crypto - Deregister all the crypto algorithms from the kernel.
 *	This should be called once, when the last device goes down; after that
 *	the kernel will no longer call the driver APIs for crypto operations.
 */
int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}
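
/*
 * Pairing sketch (hypothetical caller, illustrative names only): the code
 * that tracks Chelsio crypto devices is expected to call start_crypto() for
 * the first device and stop_crypto() after the last one goes away, e.g.:
 *
 *	static atomic_t chcr_example_dev_count = ATOMIC_INIT(0);
 *
 *	static void chcr_example_dev_add(void)
 *	{
 *		if (atomic_inc_return(&chcr_example_dev_count) == 1)
 *			start_crypto();
 *	}
 *
 *	static void chcr_example_dev_remove(void)
 *	{
 *		if (atomic_dec_and_test(&chcr_example_dev_count))
 *			stop_crypto();
 *	}
 */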