// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha1.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

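/*
 * Helpers that pack the two 32-bit command words of a dynamic
 * Security Association (SA).  Note the split encodings below:
 * pad_type is spread across the 2-bit pad_type field and extend_pad,
 * and the crypto mode cm across crypto_mode9_8 (low 2 bits) and
 * crypto_mode31.
 */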
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt,
				  bool check_blocksize)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
		return -EINVAL;

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

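/*
 * Thin entry points that fix the (ivlen, decrypt, check_blocksize)
 * combination for crypto4xx_crypt(): the "noiv_block" pair serves
 * IV-less block modes such as ECB, "iv_block" the IV-carrying block
 * modes (e.g. CBC), and "iv_stream" the stream-like modes (e.g. CTR)
 * where partial blocks are acceptable.
 */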
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}

/*
 * AES Functions
 */
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int    rc;

	if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128)
		return -EINVAL;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT.
	 * It's the DIR_(IN|OUT)BOUND that matters.
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}

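/*
 * Mode-specific setkey wrappers: each fixes the crypto mode (cm) and
 * feedback mode (fb) that crypto4xx_setkey_aes() programs into
 * sa_command_1.  Only CFB and OFB use a hardware feedback mode; ECB,
 * CBC and CTR run with no feedback.
 */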
int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

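/*
 * RFC 3686 CTR: the last 4 bytes of the supplied key are not key
 * material but the per-association nonce (key layout per RFC 3686:
 * AES key || 32-bit nonce).  The nonce is stashed in the context and
 * later becomes word 0 of each counter block.
 */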
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

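/*
 * Each request assembles the 16-byte RFC 3686 counter block directly:
 * nonce (from setkey) || 64-bit per-request IV || 32-bit block counter
 * initialised to 1.
 */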
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32 bits as the counter, while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter.  So fall back if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
			NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
			req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			: crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv_stream(req)
		       : crypto4xx_decrypt_iv_stream(req);
}

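/*
 * Key the software sync skcipher fallback with the same key and
 * request flags as the hardware tfm, so that crypto4xx_ctr_crypt()
 * can hand off requests the engine cannot process (32-bit counter
 * rollover) transparently.
 */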
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
			    crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}

/*
 * AES-CCM Functions
 */

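/*
 * The engine builds CCM from CBC-MAC authentication plus CTR
 * encryption.  Two SAs are kept: the inbound one runs
 * SA_OPCODE_HASH_DECRYPT, the outbound one SA_OPCODE_ENCRYPT_HASH.
 * req->iv[0] carries the CCM flags byte holding L - 1, where L is the
 * counter field length in bytes, so the values 1 and 3 correspond to
 * the 2- and 4-byte counters the hardware supports.
 */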
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx  = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

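/*
 * The GHASH hash subkey H = AES_K(0^128) is computed once in software
 * at setkey time and stored in the SA's inner digest, so the engine
 * does not have to derive it per request.
 */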
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_aes_ctx ctx;
	uint8_t src[16] = { 0 };
	int rc;

	rc = aes_expandkey(&ctx, key, keylen);
	if (rc) {
		pr_err("aes_expandkey() failed: %d\n", rc);
		return rc;
	}

	aes_encrypt(&ctx, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int    rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
		return -EINVAL;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa  = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

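/*
 * With GCM's 96-bit IV the initial counter block is J0 = IV || 0x1,
 * which is exactly what gets assembled into iv[] below before the
 * request is handed to the packet engine.
 */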
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/*
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev   = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

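/*
 * Note: update() below issues a one-shot hash of req->src straight
 * into req->result and final() is a no-op, so this ahash behaves as a
 * digest-style operation; init() only primes the digest length and
 * tells the engine to load its hash state from the SA.
 */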
int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}