1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * Shared descriptors for aead, skcipher algorithms
4  *
5  * Copyright 2016-2018 NXP
6  */
7 
8 #include "compat.h"
9 #include "desc_constr.h"
10 #include "caamalg_desc.h"
11 
12 /*
 * For aead functions, read the payload from req->src and write the
 * processed payload to req->dst
15  */
16 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
17 {
18 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
19 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
20 			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
21 }
22 
/*
 * Set the DK (Decrypt Key) bit in the class 1 operation if the descriptor
 * is shared
 */
24 static inline void append_dec_op1(u32 *desc, u32 type)
25 {
26 	u32 *jump_cmd, *uncond_jump_cmd;
27 
28 	/* DK bit is valid only for AES */
29 	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
30 		append_operation(desc, type | OP_ALG_AS_INITFINAL |
31 				 OP_ALG_DECRYPT);
32 		return;
33 	}
34 
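	/*
	 * If the SHRD condition is met, the key left in the class 1 key
	 * register by the previous job is reused and the operation runs
	 * with the DK (Decrypt Key) bit set; otherwise the plain decrypt
	 * operation is used. The two jumps select the path at run time.
	 */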
35 	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
36 	append_operation(desc, type | OP_ALG_AS_INITFINAL |
37 			 OP_ALG_DECRYPT);
38 	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
39 	set_jump_tgt_here(desc, jump_cmd);
40 	append_operation(desc, type | OP_ALG_AS_INITFINAL |
41 			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
42 	set_jump_tgt_here(desc, uncond_jump_cmd);
43 }
44 
45 /**
46  * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
47  *                               (non-protocol) with no (null) encryption.
48  * @desc: pointer to buffer used for descriptor construction
49  * @adata: pointer to authentication transform definitions.
50  *         A split key is required for SEC Era < 6; the size of the split key
51  *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
53  *         with OP_ALG_AAI_HMAC_PRECOMP.
54  * @icvsize: integrity check value (ICV) size (truncated or full)
55  * @era: SEC Era
56  */
57 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
58 				 unsigned int icvsize, int era)
59 {
60 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
61 
62 	init_sh_desc(desc, HDR_SHARE_SERIAL);
63 
64 	/* Skip if already shared */
65 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
66 				   JUMP_COND_SHRD);
67 	if (era < 6) {
68 		if (adata->key_inline)
69 			append_key_as_imm(desc, adata->key_virt,
70 					  adata->keylen_pad, adata->keylen,
71 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
72 					  KEY_ENC);
73 		else
74 			append_key(desc, adata->key_dma, adata->keylen,
75 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
76 	} else {
77 		append_proto_dkp(desc, adata);
78 	}
79 	set_jump_tgt_here(desc, key_jump_cmd);
80 
81 	/* assoclen + cryptlen = seqinlen */
82 	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
83 
84 	/* Prepare to read and write cryptlen + assoclen bytes */
85 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
86 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
87 
	/*
	 * The MOVE_LEN opcode is not available in all SEC HW revisions, so
	 * the descriptor patches its own buffer at run time instead.
	 */
93 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
94 				    MOVE_DEST_MATH3 |
95 				    (0x6 << MOVE_LEN_SHIFT));
96 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
97 				     MOVE_DEST_DESCBUF |
98 				     MOVE_WAITCOMP |
99 				     (0x8 << MOVE_LEN_SHIFT));
100 
101 	/* Class 2 operation */
102 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
103 			 OP_ALG_ENCRYPT);
104 
105 	/* Read and write cryptlen bytes */
106 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
107 
108 	set_move_tgt_here(desc, read_move_cmd);
109 	set_move_tgt_here(desc, write_move_cmd);
110 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
111 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
112 		    MOVE_AUX_LS);
113 
114 	/* Write ICV */
115 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
116 			 LDST_SRCDST_BYTE_CONTEXT);
117 
118 #ifdef DEBUG
119 	print_hex_dump(KERN_ERR,
120 		       "aead null enc shdesc@" __stringify(__LINE__)": ",
121 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
122 #endif
123 }
124 EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
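
/*
 * Illustrative use (a sketch only; "ctx" and "ctrlpriv" below are
 * caller-side assumptions, not defined in this file - the split key fields
 * of adata are presumed to have been set up beforehand for SEC Era < 6):
 *
 *	ctx->adata.algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
 *	cnstr_shdsc_aead_null_encap(ctx->sh_desc_enc, &ctx->adata,
 *				    ctx->authsize, ctrlpriv->era);
 */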
125 
126 /**
127  * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
128  *                               (non-protocol) with no (null) decryption.
129  * @desc: pointer to buffer used for descriptor construction
130  * @adata: pointer to authentication transform definitions.
131  *         A split key is required for SEC Era < 6; the size of the split key
132  *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
134  *         with OP_ALG_AAI_HMAC_PRECOMP.
135  * @icvsize: integrity check value (ICV) size (truncated or full)
136  * @era: SEC Era
137  */
138 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
139 				 unsigned int icvsize, int era)
140 {
141 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
142 
143 	init_sh_desc(desc, HDR_SHARE_SERIAL);
144 
145 	/* Skip if already shared */
146 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
147 				   JUMP_COND_SHRD);
148 	if (era < 6) {
149 		if (adata->key_inline)
150 			append_key_as_imm(desc, adata->key_virt,
151 					  adata->keylen_pad, adata->keylen,
152 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
153 					  KEY_ENC);
154 		else
155 			append_key(desc, adata->key_dma, adata->keylen,
156 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
157 	} else {
158 		append_proto_dkp(desc, adata);
159 	}
160 	set_jump_tgt_here(desc, key_jump_cmd);
161 
162 	/* Class 2 operation */
163 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
164 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
165 
166 	/* assoclen + cryptlen = seqoutlen */
167 	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
168 
169 	/* Prepare to read and write cryptlen + assoclen bytes */
170 	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
171 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
172 
	/*
	 * The MOVE_LEN opcode is not available in all SEC HW revisions, so
	 * the descriptor patches its own buffer at run time instead.
	 */
178 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
179 				    MOVE_DEST_MATH2 |
180 				    (0x6 << MOVE_LEN_SHIFT));
181 	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
182 				     MOVE_DEST_DESCBUF |
183 				     MOVE_WAITCOMP |
184 				     (0x8 << MOVE_LEN_SHIFT));
185 
186 	/* Read and write cryptlen bytes */
187 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
188 
	/*
	 * Insert a NOP (an unconditional jump to the next command) here,
	 * since at least 4 instructions are needed between the code that
	 * patches the descriptor buffer and the location being patched.
	 */
193 	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
194 	set_jump_tgt_here(desc, jump_cmd);
195 
196 	set_move_tgt_here(desc, read_move_cmd);
197 	set_move_tgt_here(desc, write_move_cmd);
198 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
199 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
200 		    MOVE_AUX_LS);
201 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
202 
203 	/* Load ICV */
204 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
205 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
206 
207 #ifdef DEBUG
208 	print_hex_dump(KERN_ERR,
209 		       "aead null dec shdesc@" __stringify(__LINE__)": ",
210 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
211 #endif
212 }
213 EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
214 
215 static void init_sh_desc_key_aead(u32 * const desc,
216 				  struct alginfo * const cdata,
217 				  struct alginfo * const adata,
218 				  const bool is_rfc3686, u32 *nonce, int era)
219 {
220 	u32 *key_jump_cmd;
221 	unsigned int enckeylen = cdata->keylen;
222 
223 	/* Note: Context registers are saved. */
224 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
225 
226 	/* Skip if already shared */
227 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
228 				   JUMP_COND_SHRD);
229 
230 	/*
231 	 * RFC3686 specific:
232 	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
233 	 *	| enckeylen = encryption key size + nonce size
234 	 */
235 	if (is_rfc3686)
236 		enckeylen -= CTR_RFC3686_NONCE_SIZE;
237 
238 	if (era < 6) {
239 		if (adata->key_inline)
240 			append_key_as_imm(desc, adata->key_virt,
241 					  adata->keylen_pad, adata->keylen,
242 					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
243 					  KEY_ENC);
244 		else
245 			append_key(desc, adata->key_dma, adata->keylen,
246 				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
247 	} else {
248 		append_proto_dkp(desc, adata);
249 	}
250 
251 	if (cdata->key_inline)
252 		append_key_as_imm(desc, cdata->key_virt, enckeylen,
253 				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
254 	else
255 		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
256 			   KEY_DEST_CLASS_REG);
257 
	/* Load the RFC3686 nonce into the CONTEXT1 reg */
259 	if (is_rfc3686) {
260 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
261 				   LDST_CLASS_IND_CCB |
262 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
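		/*
		 * The nonce is staged through the output FIFO and then
		 * moved to byte offset 16 of the CONTEXT1 register, where
		 * the RFC3686 counter block {NONCE, IV, COUNTER} is kept.
		 */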
263 		append_move(desc,
264 			    MOVE_SRC_OUTFIFO |
265 			    MOVE_DEST_CLASS1CTX |
266 			    (16 << MOVE_OFFSET_SHIFT) |
267 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
268 	}
269 
270 	set_jump_tgt_here(desc, key_jump_cmd);
271 }
272 
273 /**
274  * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
275  *                          (non-protocol).
276  * @desc: pointer to buffer used for descriptor construction
277  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
279  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
280  * @adata: pointer to authentication transform definitions.
281  *         A split key is required for SEC Era < 6; the size of the split key
282  *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
284  *         with OP_ALG_AAI_HMAC_PRECOMP.
285  * @ivsize: initialization vector size
286  * @icvsize: integrity check value (ICV) size (truncated or full)
287  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
288  * @nonce: pointer to rfc3686 nonce
289  * @ctx1_iv_off: IV offset in CONTEXT1 register
290  * @is_qi: true when called from caam/qi
291  * @era: SEC Era
292  */
293 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
294 			    struct alginfo *adata, unsigned int ivsize,
295 			    unsigned int icvsize, const bool is_rfc3686,
296 			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
297 			    int era)
298 {
299 	/* Note: Context registers are saved. */
300 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
301 
302 	/* Class 2 operation */
303 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
304 			 OP_ALG_ENCRYPT);
305 
306 	if (is_qi) {
307 		u32 *wait_load_cmd;
308 
309 		/* REG3 = assoclen */
310 		append_seq_load(desc, 4, LDST_CLASS_DECO |
311 				LDST_SRCDST_WORD_DECO_MATH3 |
312 				(4 << LDST_OFFSET_SHIFT));
313 
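		/*
		 * Wait until the LOAD above has completed (everything calm,
		 * no data pending) before MATH3 and the IV are used below.
		 */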
314 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
315 					    JUMP_COND_CALM | JUMP_COND_NCP |
316 					    JUMP_COND_NOP | JUMP_COND_NIP |
317 					    JUMP_COND_NIFP);
318 		set_jump_tgt_here(desc, wait_load_cmd);
319 
320 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
321 				LDST_SRCDST_BYTE_CONTEXT |
322 				(ctx1_iv_off << LDST_OFFSET_SHIFT));
323 	}
324 
325 	/* Read and write assoclen bytes */
326 	if (is_qi || era < 3) {
327 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
328 		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
329 	} else {
330 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
331 		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
332 	}
333 
334 	/* Skip assoc data */
335 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
336 
337 	/* read assoc before reading payload */
338 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
339 				      FIFOLDST_VLF);
340 
341 	/* Load Counter into CONTEXT1 reg */
342 	if (is_rfc3686)
343 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
344 				     LDST_SRCDST_BYTE_CONTEXT |
345 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
346 				      LDST_OFFSET_SHIFT));
347 
348 	/* Class 1 operation */
349 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
350 			 OP_ALG_ENCRYPT);
351 
352 	/* Read and write cryptlen bytes */
353 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
354 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
355 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
356 
357 	/* Write ICV */
358 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
359 			 LDST_SRCDST_BYTE_CONTEXT);
360 
361 #ifdef DEBUG
362 	print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
363 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
364 #endif
365 }
366 EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
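
/*
 * Illustrative call for e.g. authenc(hmac(sha1),cbc(aes)), without RFC3686
 * and outside caam/qi (a sketch; "ctx" and "ctrlpriv" are caller-side
 * assumptions):
 *
 *	cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
 *			       AES_BLOCK_SIZE, ctx->authsize, false, NULL, 0,
 *			       false, ctrlpriv->era);
 */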
367 
368 /**
369  * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
370  *                          (non-protocol).
371  * @desc: pointer to buffer used for descriptor construction
372  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
374  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
375  * @adata: pointer to authentication transform definitions.
376  *         A split key is required for SEC Era < 6; the size of the split key
377  *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
379  *         with OP_ALG_AAI_HMAC_PRECOMP.
380  * @ivsize: initialization vector size
381  * @icvsize: integrity check value (ICV) size (truncated or full)
382  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
383  * @nonce: pointer to rfc3686 nonce
384  * @ctx1_iv_off: IV offset in CONTEXT1 register
385  * @is_qi: true when called from caam/qi
386  * @era: SEC Era
387  */
388 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
389 			    struct alginfo *adata, unsigned int ivsize,
390 			    unsigned int icvsize, const bool geniv,
391 			    const bool is_rfc3686, u32 *nonce,
392 			    const u32 ctx1_iv_off, const bool is_qi, int era)
393 {
394 	/* Note: Context registers are saved. */
395 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
396 
397 	/* Class 2 operation */
398 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
399 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
400 
401 	if (is_qi) {
402 		u32 *wait_load_cmd;
403 
404 		/* REG3 = assoclen */
405 		append_seq_load(desc, 4, LDST_CLASS_DECO |
406 				LDST_SRCDST_WORD_DECO_MATH3 |
407 				(4 << LDST_OFFSET_SHIFT));
408 
409 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
410 					    JUMP_COND_CALM | JUMP_COND_NCP |
411 					    JUMP_COND_NOP | JUMP_COND_NIP |
412 					    JUMP_COND_NIFP);
413 		set_jump_tgt_here(desc, wait_load_cmd);
414 
415 		if (!geniv)
416 			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
417 					LDST_SRCDST_BYTE_CONTEXT |
418 					(ctx1_iv_off << LDST_OFFSET_SHIFT));
419 	}
420 
421 	/* Read and write assoclen bytes */
422 	if (is_qi || era < 3) {
423 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
424 		if (geniv)
425 			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
426 						ivsize);
427 		else
428 			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
429 					CAAM_CMD_SZ);
430 	} else {
431 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
432 		if (geniv)
433 			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
434 						ivsize);
435 		else
436 			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
437 					CAAM_CMD_SZ);
438 	}
439 
440 	/* Skip assoc data */
441 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
442 
443 	/* read assoc before reading payload */
444 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
445 			     KEY_VLF);
446 
447 	if (geniv) {
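		/*
		 * With a HW-generated IV the IV precedes the ciphertext and
		 * is covered by the ICV: load it into the class 1 context
		 * for decryption and also feed a copy to the class 2
		 * (authentication) input FIFO.
		 */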
448 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
449 				LDST_SRCDST_BYTE_CONTEXT |
450 				(ctx1_iv_off << LDST_OFFSET_SHIFT));
451 		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
452 			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
453 	}
454 
455 	/* Load Counter into CONTEXT1 reg */
456 	if (is_rfc3686)
457 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
458 				     LDST_SRCDST_BYTE_CONTEXT |
459 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
460 				      LDST_OFFSET_SHIFT));
461 
462 	/* Choose operation */
463 	if (ctx1_iv_off)
464 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
465 				 OP_ALG_DECRYPT);
466 	else
467 		append_dec_op1(desc, cdata->algtype);
468 
469 	/* Read and write cryptlen bytes */
470 	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
471 	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
472 	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
473 
474 	/* Load ICV */
475 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
476 			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
477 
478 #ifdef DEBUG
479 	print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
480 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
481 #endif
482 }
483 EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
484 
485 /**
486  * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
487  *                             (non-protocol) with HW-generated initialization
488  *                             vector.
489  * @desc: pointer to buffer used for descriptor construction
490  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
492  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
493  * @adata: pointer to authentication transform definitions.
494  *         A split key is required for SEC Era < 6; the size of the split key
495  *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ORed
497  *         with OP_ALG_AAI_HMAC_PRECOMP.
498  * @ivsize: initialization vector size
499  * @icvsize: integrity check value (ICV) size (truncated or full)
500  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
501  * @nonce: pointer to rfc3686 nonce
502  * @ctx1_iv_off: IV offset in CONTEXT1 register
503  * @is_qi: true when called from caam/qi
504  * @era: SEC Era
505  */
506 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
507 			       struct alginfo *adata, unsigned int ivsize,
508 			       unsigned int icvsize, const bool is_rfc3686,
509 			       u32 *nonce, const u32 ctx1_iv_off,
510 			       const bool is_qi, int era)
511 {
512 	u32 geniv, moveiv;
513 
514 	/* Note: Context registers are saved. */
515 	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
516 
517 	if (is_qi) {
518 		u32 *wait_load_cmd;
519 
520 		/* REG3 = assoclen */
521 		append_seq_load(desc, 4, LDST_CLASS_DECO |
522 				LDST_SRCDST_WORD_DECO_MATH3 |
523 				(4 << LDST_OFFSET_SHIFT));
524 
525 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
526 					    JUMP_COND_CALM | JUMP_COND_NCP |
527 					    JUMP_COND_NOP | JUMP_COND_NIP |
528 					    JUMP_COND_NIFP);
529 		set_jump_tgt_here(desc, wait_load_cmd);
530 	}
531 
532 	if (is_rfc3686) {
533 		if (is_qi)
534 			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
535 					LDST_SRCDST_BYTE_CONTEXT |
536 					(ctx1_iv_off << LDST_OFFSET_SHIFT));
537 
538 		goto copy_iv;
539 	}
540 
541 	/* Generate IV */
542 	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
543 		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
544 		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
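	/*
	 * The pad-type info FIFO entry built above and loaded just below
	 * asks the RNG (PTYPE_RND) for ivsize bytes; with the automatic
	 * info FIFO disabled, the MOVE then pulls them from the input FIFO
	 * into the class 1 context register.
	 */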
545 	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
546 			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
547 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
548 	append_move(desc, MOVE_WAITCOMP |
549 		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
550 		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
551 		    (ivsize << MOVE_LEN_SHIFT));
552 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
553 
554 copy_iv:
	/* Copy the IV from the class 1 context to the OFIFO */
556 	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
557 		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
558 		    (ivsize << MOVE_LEN_SHIFT));
559 
	/* Class 2 operation */
561 	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
562 			 OP_ALG_ENCRYPT);
563 
564 	/* Read and write assoclen bytes */
565 	if (is_qi || era < 3) {
566 		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
567 		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
568 	} else {
569 		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
570 		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
571 	}
572 
573 	/* Skip assoc data */
574 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
575 
576 	/* read assoc before reading payload */
577 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
578 			     KEY_VLF);
579 
	/* Send the IV from the OFIFO to the class 2 (authentication) fifo */
581 	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
582 		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
583 	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
584 			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
585 	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
586 			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
587 
588 	/* Load Counter into CONTEXT1 reg */
589 	if (is_rfc3686)
590 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
591 				     LDST_SRCDST_BYTE_CONTEXT |
592 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
593 				      LDST_OFFSET_SHIFT));
594 
595 	/* Class 1 operation */
596 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
597 			 OP_ALG_ENCRYPT);
598 
599 	/* Will write ivsize + cryptlen */
600 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
601 
	/* No need to reload the IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
605 
606 	/* Will read cryptlen */
607 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
608 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
609 			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
610 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
611 
612 	/* Write ICV */
613 	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
614 			 LDST_SRCDST_BYTE_CONTEXT);
615 
616 #ifdef DEBUG
617 	print_hex_dump(KERN_ERR,
618 		       "aead givenc shdesc@" __stringify(__LINE__)": ",
619 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
620 #endif
621 }
622 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
623 
624 /**
625  * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
626  * @desc: pointer to buffer used for descriptor construction
627  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
629  * @ivsize: initialization vector size
630  * @icvsize: integrity check value (ICV) size (truncated or full)
631  * @is_qi: true when called from caam/qi
632  */
633 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
634 			   unsigned int ivsize, unsigned int icvsize,
635 			   const bool is_qi)
636 {
637 	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
638 	    *zero_assoc_jump_cmd2;
639 
640 	init_sh_desc(desc, HDR_SHARE_SERIAL);
641 
	/* Skip key loading if the key is already loaded due to sharing */
643 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
644 				   JUMP_COND_SHRD);
645 	if (cdata->key_inline)
646 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
647 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
648 	else
649 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
650 			   KEY_DEST_CLASS_REG);
651 	set_jump_tgt_here(desc, key_jump_cmd);
652 
653 	/* class 1 operation */
654 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
655 			 OP_ALG_ENCRYPT);
656 
657 	if (is_qi) {
658 		u32 *wait_load_cmd;
659 
660 		/* REG3 = assoclen */
661 		append_seq_load(desc, 4, LDST_CLASS_DECO |
662 				LDST_SRCDST_WORD_DECO_MATH3 |
663 				(4 << LDST_OFFSET_SHIFT));
664 
665 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
666 					    JUMP_COND_CALM | JUMP_COND_NCP |
667 					    JUMP_COND_NOP | JUMP_COND_NIP |
668 					    JUMP_COND_NIFP);
669 		set_jump_tgt_here(desc, wait_load_cmd);
670 
671 		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
672 					ivsize);
673 	} else {
674 		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
675 				CAAM_CMD_SZ);
676 	}
677 
678 	/* if assoclen + cryptlen is ZERO, skip to ICV write */
679 	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
680 						 JUMP_COND_MATH_Z);
681 
682 	if (is_qi)
683 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
684 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
685 
686 	/* if assoclen is ZERO, skip reading the assoc data */
687 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
688 	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
689 					   JUMP_COND_MATH_Z);
690 
691 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
692 
693 	/* skip assoc data */
694 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
695 
696 	/* cryptlen = seqinlen - assoclen */
697 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
698 
699 	/* if cryptlen is ZERO jump to zero-payload commands */
700 	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
701 					    JUMP_COND_MATH_Z);
702 
703 	/* read assoc data */
704 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
705 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
706 	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
707 
708 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
709 
710 	/* write encrypted data */
711 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
712 
713 	/* read payload data */
714 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
715 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
716 
717 	/* jump to ICV writing */
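	/*
	 * The jump offset is in 32-bit command words; the QI variant has
	 * two extra words (a jump and an IV FIFO LOAD) between here and
	 * the ICV store, hence 4 instead of 2.
	 */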
718 	if (is_qi)
719 		append_jump(desc, JUMP_TEST_ALL | 4);
720 	else
721 		append_jump(desc, JUMP_TEST_ALL | 2);
722 
723 	/* zero-payload commands */
724 	set_jump_tgt_here(desc, zero_payload_jump_cmd);
725 
726 	/* read assoc data */
727 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
728 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
729 	if (is_qi)
730 		/* jump to ICV writing */
731 		append_jump(desc, JUMP_TEST_ALL | 2);
732 
733 	/* There is no input data */
734 	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
735 
736 	if (is_qi)
737 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
738 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
739 				     FIFOLD_TYPE_LAST1);
740 
741 	/* write ICV */
742 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
743 			 LDST_SRCDST_BYTE_CONTEXT);
744 
745 #ifdef DEBUG
746 	print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
747 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
748 #endif
749 }
750 EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
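
/*
 * Illustrative call (a sketch; "ctx" is a caller-side assumption):
 *
 *	cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, GCM_AES_IV_SIZE,
 *			      ctx->authsize, false);
 */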
751 
752 /**
753  * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
754  * @desc: pointer to buffer used for descriptor construction
755  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
757  * @ivsize: initialization vector size
758  * @icvsize: integrity check value (ICV) size (truncated or full)
759  * @is_qi: true when called from caam/qi
760  */
761 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
762 			   unsigned int ivsize, unsigned int icvsize,
763 			   const bool is_qi)
764 {
765 	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
766 
767 	init_sh_desc(desc, HDR_SHARE_SERIAL);
768 
	/* Skip key loading if the key is already loaded due to sharing */
770 	key_jump_cmd = append_jump(desc, JUMP_JSL |
771 				   JUMP_TEST_ALL | JUMP_COND_SHRD);
772 	if (cdata->key_inline)
773 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
774 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
775 	else
776 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
777 			   KEY_DEST_CLASS_REG);
778 	set_jump_tgt_here(desc, key_jump_cmd);
779 
780 	/* class 1 operation */
781 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
782 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
783 
784 	if (is_qi) {
785 		u32 *wait_load_cmd;
786 
787 		/* REG3 = assoclen */
788 		append_seq_load(desc, 4, LDST_CLASS_DECO |
789 				LDST_SRCDST_WORD_DECO_MATH3 |
790 				(4 << LDST_OFFSET_SHIFT));
791 
792 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
793 					    JUMP_COND_CALM | JUMP_COND_NCP |
794 					    JUMP_COND_NOP | JUMP_COND_NIP |
795 					    JUMP_COND_NIFP);
796 		set_jump_tgt_here(desc, wait_load_cmd);
797 
798 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
799 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
800 	}
801 
802 	/* if assoclen is ZERO, skip reading the assoc data */
803 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
804 	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
805 						 JUMP_COND_MATH_Z);
806 
807 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
808 
809 	/* skip assoc data */
810 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
811 
812 	/* read assoc data */
813 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
814 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
815 
816 	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
817 
818 	/* cryptlen = seqoutlen - assoclen */
819 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
820 
821 	/* jump to zero-payload command if cryptlen is zero */
822 	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
823 					    JUMP_COND_MATH_Z);
824 
825 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
826 
827 	/* store encrypted data */
828 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
829 
830 	/* read payload data */
831 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
832 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
833 
834 	/* zero-payload command */
835 	set_jump_tgt_here(desc, zero_payload_jump_cmd);
836 
837 	/* read ICV */
838 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
839 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
840 
841 #ifdef DEBUG
842 	print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
843 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
844 #endif
845 }
846 EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
847 
848 /**
849  * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
850  *                             (non-protocol).
851  * @desc: pointer to buffer used for descriptor construction
852  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
854  * @ivsize: initialization vector size
855  * @icvsize: integrity check value (ICV) size (truncated or full)
856  * @is_qi: true when called from caam/qi
857  */
858 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
859 			       unsigned int ivsize, unsigned int icvsize,
860 			       const bool is_qi)
861 {
862 	u32 *key_jump_cmd;
863 
864 	init_sh_desc(desc, HDR_SHARE_SERIAL);
865 
866 	/* Skip key loading if it is loaded due to sharing */
867 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
868 				   JUMP_COND_SHRD);
869 	if (cdata->key_inline)
870 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
871 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
872 	else
873 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
874 			   KEY_DEST_CLASS_REG);
875 	set_jump_tgt_here(desc, key_jump_cmd);
876 
877 	/* Class 1 operation */
878 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
879 			 OP_ALG_ENCRYPT);
880 
881 	if (is_qi) {
882 		u32 *wait_load_cmd;
883 
884 		/* REG3 = assoclen */
885 		append_seq_load(desc, 4, LDST_CLASS_DECO |
886 				LDST_SRCDST_WORD_DECO_MATH3 |
887 				(4 << LDST_OFFSET_SHIFT));
888 
889 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
890 					    JUMP_COND_CALM | JUMP_COND_NCP |
891 					    JUMP_COND_NOP | JUMP_COND_NIP |
892 					    JUMP_COND_NIFP);
893 		set_jump_tgt_here(desc, wait_load_cmd);
894 
895 		/* Read salt and IV */
896 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
897 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
898 					FIFOLD_TYPE_IV);
899 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
900 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
901 	}
902 
903 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
904 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
905 
906 	/* Read assoc data */
907 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
908 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
909 
910 	/* Skip IV */
911 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
912 
913 	/* Will read cryptlen bytes */
914 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
915 
916 	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
917 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
918 
919 	/* Skip assoc data */
920 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
921 
	/* Will write cryptlen bytes */
923 	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
924 
925 	/* Write encrypted data */
926 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
927 
928 	/* Read payload data */
929 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
930 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
931 
932 	/* Write ICV */
933 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
934 			 LDST_SRCDST_BYTE_CONTEXT);
935 
936 #ifdef DEBUG
937 	print_hex_dump(KERN_ERR,
938 		       "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
939 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
940 #endif
941 }
942 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
943 
944 /**
945  * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
946  *                             (non-protocol).
947  * @desc: pointer to buffer used for descriptor construction
948  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
950  * @ivsize: initialization vector size
951  * @icvsize: integrity check value (ICV) size (truncated or full)
952  * @is_qi: true when called from caam/qi
953  */
954 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
955 			       unsigned int ivsize, unsigned int icvsize,
956 			       const bool is_qi)
957 {
958 	u32 *key_jump_cmd;
959 
960 	init_sh_desc(desc, HDR_SHARE_SERIAL);
961 
962 	/* Skip key loading if it is loaded due to sharing */
963 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
964 				   JUMP_COND_SHRD);
965 	if (cdata->key_inline)
966 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
967 				  cdata->keylen, CLASS_1 |
968 				  KEY_DEST_CLASS_REG);
969 	else
970 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
971 			   KEY_DEST_CLASS_REG);
972 	set_jump_tgt_here(desc, key_jump_cmd);
973 
974 	/* Class 1 operation */
975 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
976 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
977 
978 	if (is_qi) {
979 		u32 *wait_load_cmd;
980 
981 		/* REG3 = assoclen */
982 		append_seq_load(desc, 4, LDST_CLASS_DECO |
983 				LDST_SRCDST_WORD_DECO_MATH3 |
984 				(4 << LDST_OFFSET_SHIFT));
985 
986 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
987 					    JUMP_COND_CALM | JUMP_COND_NCP |
988 					    JUMP_COND_NOP | JUMP_COND_NIP |
989 					    JUMP_COND_NIFP);
990 		set_jump_tgt_here(desc, wait_load_cmd);
991 
992 		/* Read salt and IV */
993 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
994 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
995 					FIFOLD_TYPE_IV);
996 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
997 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
998 	}
999 
1000 	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
1001 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1002 
1003 	/* Read assoc data */
1004 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1005 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1006 
1007 	/* Skip IV */
1008 	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
1009 
1010 	/* Will read cryptlen bytes */
1011 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1012 
1013 	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1014 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1015 
1016 	/* Skip assoc data */
1017 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1018 
1019 	/* Will write cryptlen bytes */
1020 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1021 
1022 	/* Store payload data */
1023 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1024 
1025 	/* Read encrypted data */
1026 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1027 			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1028 
1029 	/* Read ICV */
1030 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1031 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1032 
1033 #ifdef DEBUG
1034 	print_hex_dump(KERN_ERR,
1035 		       "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
1036 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1037 #endif
1038 }
1039 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
1040 
1041 /**
1042  * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
1043  *                             (non-protocol).
1044  * @desc: pointer to buffer used for descriptor construction
1045  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
1047  * @ivsize: initialization vector size
1048  * @icvsize: integrity check value (ICV) size (truncated or full)
1049  * @is_qi: true when called from caam/qi
1050  */
1051 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
1052 			       unsigned int ivsize, unsigned int icvsize,
1053 			       const bool is_qi)
1054 {
1055 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1056 
1057 	init_sh_desc(desc, HDR_SHARE_SERIAL);
1058 
1059 	/* Skip key loading if it is loaded due to sharing */
1060 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1061 				   JUMP_COND_SHRD);
1062 	if (cdata->key_inline)
1063 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1064 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1065 	else
1066 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1067 			   KEY_DEST_CLASS_REG);
1068 	set_jump_tgt_here(desc, key_jump_cmd);
1069 
1070 	/* Class 1 operation */
1071 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1072 			 OP_ALG_ENCRYPT);
1073 
1074 	if (is_qi) {
1075 		/* assoclen is not needed, skip it */
1076 		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1077 
1078 		/* Read salt and IV */
1079 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1080 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1081 					FIFOLD_TYPE_IV);
1082 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1083 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1084 	}
1085 
1086 	/* assoclen + cryptlen = seqinlen */
1087 	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1088 
	/*
	 * The MOVE_LEN opcode is not available in all SEC HW revisions, so
	 * the descriptor patches its own buffer at run time instead.
	 */
1094 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1095 				    (0x6 << MOVE_LEN_SHIFT));
1096 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1097 				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1098 
1099 	/* Will read assoclen + cryptlen bytes */
1100 	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1101 
1102 	/* Will write assoclen + cryptlen bytes */
1103 	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1104 
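	/*
	 * rfc4543 (GMAC) authenticates everything and encrypts nothing:
	 * the whole assoclen + cryptlen input is fed to the GCM engine as
	 * AAD, while the payload is passed through unchanged to the OFIFO
	 * by the MOVE further below.
	 */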
1105 	/* Read and write assoclen + cryptlen bytes */
1106 	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1107 
1108 	set_move_tgt_here(desc, read_move_cmd);
1109 	set_move_tgt_here(desc, write_move_cmd);
1110 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1111 	/* Move payload data to OFIFO */
1112 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1113 
1114 	/* Write ICV */
1115 	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
1116 			 LDST_SRCDST_BYTE_CONTEXT);
1117 
1118 #ifdef DEBUG
1119 	print_hex_dump(KERN_ERR,
1120 		       "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
1121 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1122 #endif
1123 }
1124 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
1125 
1126 /**
1127  * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
1128  *                             (non-protocol).
1129  * @desc: pointer to buffer used for descriptor construction
1130  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_GCM.
1132  * @ivsize: initialization vector size
1133  * @icvsize: integrity check value (ICV) size (truncated or full)
1134  * @is_qi: true when called from caam/qi
1135  */
1136 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
1137 			       unsigned int ivsize, unsigned int icvsize,
1138 			       const bool is_qi)
1139 {
1140 	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1141 
1142 	init_sh_desc(desc, HDR_SHARE_SERIAL);
1143 
1144 	/* Skip key loading if it is loaded due to sharing */
1145 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1146 				   JUMP_COND_SHRD);
1147 	if (cdata->key_inline)
1148 		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1149 				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1150 	else
1151 		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1152 			   KEY_DEST_CLASS_REG);
1153 	set_jump_tgt_here(desc, key_jump_cmd);
1154 
1155 	/* Class 1 operation */
1156 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1157 			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1158 
1159 	if (is_qi) {
1160 		/* assoclen is not needed, skip it */
1161 		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1162 
1163 		/* Read salt and IV */
1164 		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1165 					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1166 					FIFOLD_TYPE_IV);
1167 		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1168 				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1169 	}
1170 
1171 	/* assoclen + cryptlen = seqoutlen */
1172 	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1173 
	/*
	 * The MOVE_LEN opcode is not available in all SEC HW revisions, so
	 * the descriptor patches its own buffer at run time instead.
	 */
1179 	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1180 				    (0x6 << MOVE_LEN_SHIFT));
1181 	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1182 				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1183 
1184 	/* Will read assoclen + cryptlen bytes */
1185 	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1186 
1187 	/* Will write assoclen + cryptlen bytes */
1188 	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1189 
1190 	/* Store payload data */
1191 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1192 
1193 	/* In-snoop assoclen + cryptlen data */
1194 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1195 			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1196 
1197 	set_move_tgt_here(desc, read_move_cmd);
1198 	set_move_tgt_here(desc, write_move_cmd);
1199 	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1200 	/* Move payload data to OFIFO */
1201 	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1202 	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1203 
1204 	/* Read ICV */
1205 	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1206 			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1207 
1208 #ifdef DEBUG
1209 	print_hex_dump(KERN_ERR,
1210 		       "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
1211 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1212 #endif
1213 }
1214 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
1215 
1216 /**
1217  * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
1218  *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
1219  *                          descriptor (non-protocol).
1220  * @desc: pointer to buffer used for descriptor construction
1221  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ORed with
1223  *         OP_ALG_AAI_AEAD.
1224  * @adata: pointer to authentication transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ORed with
1226  *         OP_ALG_AAI_AEAD.
1227  * @ivsize: initialization vector size
1228  * @icvsize: integrity check value (ICV) size (truncated or full)
1229  * @encap: true if encapsulation, false if decapsulation
1230  * @is_qi: true when called from caam/qi
1231  */
1232 void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
1233 			    struct alginfo *adata, unsigned int ivsize,
1234 			    unsigned int icvsize, const bool encap,
1235 			    const bool is_qi)
1236 {
1237 	u32 *key_jump_cmd, *wait_cmd;
1238 	u32 nfifo;
1239 	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
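	/*
	 * rfc7634 (IPsec ESP) uses an 8-byte per-request IV plus a 4-byte
	 * salt taken from the end of the key material, while plain rfc7539
	 * provides the full 12-byte nonce directly.
	 */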
1240 
1241 	/* Note: Context registers are saved. */
1242 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1243 
	/* Skip key loading if the key is already loaded due to sharing */
1245 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1246 				   JUMP_COND_SHRD);
1247 
1248 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
1249 			  CLASS_1 | KEY_DEST_CLASS_REG);
1250 
	/* For IPsec, load the salt from the key material into the context register */
1252 	if (is_ipsec)
1253 		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
1254 				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
1255 				   4 << LDST_OFFSET_SHIFT);
1256 
1257 	set_jump_tgt_here(desc, key_jump_cmd);
1258 
1259 	/* Class 2 and 1 operations: Poly & ChaCha */
1260 	if (encap) {
1261 		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1262 				 OP_ALG_ENCRYPT);
1263 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1264 				 OP_ALG_ENCRYPT);
1265 	} else {
1266 		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1267 				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1268 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1269 				 OP_ALG_DECRYPT);
1270 	}
1271 
1272 	if (is_qi) {
1273 		u32 *wait_load_cmd;
1274 		u32 ctx1_iv_off = is_ipsec ? 8 : 4;
1275 
1276 		/* REG3 = assoclen */
1277 		append_seq_load(desc, 4, LDST_CLASS_DECO |
1278 				LDST_SRCDST_WORD_DECO_MATH3 |
1279 				4 << LDST_OFFSET_SHIFT);
1280 
1281 		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1282 					    JUMP_COND_CALM | JUMP_COND_NCP |
1283 					    JUMP_COND_NOP | JUMP_COND_NIP |
1284 					    JUMP_COND_NIFP);
1285 		set_jump_tgt_here(desc, wait_load_cmd);
1286 
1287 		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
1288 				LDST_SRCDST_BYTE_CONTEXT |
1289 				ctx1_iv_off << LDST_OFFSET_SHIFT);
1290 	}
1291 
	/*
	 * NFIFO trick: read the associated data from the input and send it
	 * to both the class 1 and class 2 alignment blocks. From class 1
	 * the data is sent on to the output FIFO and then written to
	 * memory, since the AD does not need to be encrypted.
	 */
1298 	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
1299 		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
1300 	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
1301 			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
1302 
1303 	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
1304 	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1305 	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
1306 			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
1307 	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
1308 			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
1309 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
1310 
	/* IPsec - copy the IV to the output */
1312 	if (is_ipsec)
1313 		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
1314 				      0x2 << 25);
1315 
1316 	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
1317 			       JUMP_COND_NOP | JUMP_TEST_ALL);
1318 	set_jump_tgt_here(desc, wait_cmd);
1319 
1320 	if (encap) {
1321 		/* Read and write cryptlen bytes */
1322 		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1323 		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
1324 				CAAM_CMD_SZ);
1325 		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
1326 
1327 		/* Write ICV */
1328 		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
1329 				 LDST_SRCDST_BYTE_CONTEXT);
1330 	} else {
1331 		/* Read and write cryptlen bytes */
1332 		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
1333 				CAAM_CMD_SZ);
1334 		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
1335 				CAAM_CMD_SZ);
1336 		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
1337 
1338 		/* Load ICV for verification */
1339 		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
1340 				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
1341 	}
1342 
1343 	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
1344 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1345 			     1);
1346 }
1347 EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
1348 
1349 /* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
1350 static inline void skcipher_append_src_dst(u32 *desc)
1351 {
1352 	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1353 	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1354 	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
1355 			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1356 	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
1357 }
1358 
1359 /**
1360  * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
1361  * @desc: pointer to buffer used for descriptor construction
1362  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
1364  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1365  *                                - OP_ALG_ALGSEL_CHACHA20
1366  * @ivsize: initialization vector size
1367  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1368  * @ctx1_iv_off: IV offset in CONTEXT1 register
1369  */
1370 void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
1371 				unsigned int ivsize, const bool is_rfc3686,
1372 				const u32 ctx1_iv_off)
1373 {
1374 	u32 *key_jump_cmd;
1375 
1376 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1377 	/* Skip if already shared */
1378 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1379 				   JUMP_COND_SHRD);
1380 
1381 	/* Load class1 key only */
1382 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1383 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1384 
1385 	/* Load nonce into CONTEXT1 reg */
1386 	if (is_rfc3686) {
1387 		const u8 *nonce = cdata->key_virt + cdata->keylen;
1388 
1389 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1390 				   LDST_CLASS_IND_CCB |
1391 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1392 		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1393 			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1394 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1395 	}
1396 
1397 	set_jump_tgt_here(desc, key_jump_cmd);
1398 
	/* Load IV */
1400 	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1401 			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1402 
1403 	/* Load counter into CONTEXT1 reg */
1404 	if (is_rfc3686)
1405 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1406 				     LDST_SRCDST_BYTE_CONTEXT |
1407 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1408 				      LDST_OFFSET_SHIFT));
1409 
1410 	/* Load operation */
1411 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1412 			 OP_ALG_ENCRYPT);
1413 
1414 	/* Perform operation */
1415 	skcipher_append_src_dst(desc);
1416 
1417 #ifdef DEBUG
1418 	print_hex_dump(KERN_ERR,
1419 		       "skcipher enc shdesc@" __stringify(__LINE__)": ",
1420 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1421 #endif
1422 }
1423 EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
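
/*
 * Illustrative call for cbc(aes) (a sketch; "ctx" and its fields are
 * caller-side assumptions):
 *
 *	ctx->cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *	ctx->cdata.key_virt = ctx->key;
 *	ctx->cdata.keylen = keylen;
 *	ctx->cdata.key_inline = true;
 *	cnstr_shdsc_skcipher_encap(ctx->sh_desc_enc, &ctx->cdata,
 *				   AES_BLOCK_SIZE, false, 0);
 */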
1424 
1425 /**
1426  * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
1427  * @desc: pointer to buffer used for descriptor construction
1428  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ORed
1430  *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1431  *                                - OP_ALG_ALGSEL_CHACHA20
1432  * @ivsize: initialization vector size
1433  * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1434  * @ctx1_iv_off: IV offset in CONTEXT1 register
1435  */
1436 void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
1437 				unsigned int ivsize, const bool is_rfc3686,
1438 				const u32 ctx1_iv_off)
1439 {
1440 	u32 *key_jump_cmd;
1441 
1442 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1443 	/* Skip if already shared */
1444 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1445 				   JUMP_COND_SHRD);
1446 
1447 	/* Load class1 key only */
1448 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1449 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1450 
1451 	/* Load nonce into CONTEXT1 reg */
1452 	if (is_rfc3686) {
1453 		const u8 *nonce = cdata->key_virt + cdata->keylen;
1454 
1455 		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1456 				   LDST_CLASS_IND_CCB |
1457 				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1458 		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1459 			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1460 			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1461 	}
1462 
1463 	set_jump_tgt_here(desc, key_jump_cmd);
1464 
	/* Load IV */
1466 	append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1467 			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1468 
1469 	/* Load counter into CONTEXT1 reg */
1470 	if (is_rfc3686)
1471 		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1472 				     LDST_SRCDST_BYTE_CONTEXT |
1473 				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1474 				      LDST_OFFSET_SHIFT));
1475 
1476 	/* Choose operation */
1477 	if (ctx1_iv_off)
1478 		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1479 				 OP_ALG_DECRYPT);
1480 	else
1481 		append_dec_op1(desc, cdata->algtype);
1482 
1483 	/* Perform operation */
1484 	skcipher_append_src_dst(desc);
1485 
1486 #ifdef DEBUG
1487 	print_hex_dump(KERN_ERR,
1488 		       "skcipher dec shdesc@" __stringify(__LINE__)": ",
1489 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1490 #endif
1491 }
1492 EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
1493 
1494 /**
1495  * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
1496  * @desc: pointer to buffer used for descriptor construction
1497  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_XTS.
1499  */
1500 void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
1501 {
1502 	__be64 sector_size = cpu_to_be64(512);
1503 	u32 *key_jump_cmd;
1504 
1505 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1506 	/* Skip if already shared */
1507 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1508 				   JUMP_COND_SHRD);
1509 
1510 	/* Load class1 keys only */
1511 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1512 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1513 
1514 	/* Load sector size with index 40 bytes (0x28) */
1515 	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1516 			   LDST_SRCDST_BYTE_CONTEXT |
1517 			   (0x28 << LDST_OFFSET_SHIFT));
1518 
1519 	set_jump_tgt_here(desc, key_jump_cmd);
1520 
1521 	/*
1522 	 * create sequence for loading the sector index
1523 	 * Upper 8B of IV - will be used as sector index
1524 	 * Lower 8B of IV - will be discarded
1525 	 */
1526 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1527 			(0x20 << LDST_OFFSET_SHIFT));
1528 	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1529 
1530 	/* Load operation */
1531 	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1532 			 OP_ALG_ENCRYPT);
1533 
1534 	/* Perform operation */
1535 	skcipher_append_src_dst(desc);
1536 
1537 #ifdef DEBUG
1538 	print_hex_dump(KERN_ERR,
1539 		       "xts skcipher enc shdesc@" __stringify(__LINE__) ": ",
1540 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1541 #endif
1542 }
1543 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
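
/*
 * Note: the XTS tweak is computed from a sector index, with the sector size
 * hard-coded above to 512 bytes (loaded at context offset 0x28); a caller
 * needing a different sector size would have to adjust the descriptor.
 */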
1544 
1545 /**
1546  * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
1547  * @desc: pointer to buffer used for descriptor construction
1548  * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ORed with OP_ALG_AAI_XTS.
1550  */
1551 void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
1552 {
1553 	__be64 sector_size = cpu_to_be64(512);
1554 	u32 *key_jump_cmd;
1555 
1556 	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1557 	/* Skip if already shared */
1558 	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1559 				   JUMP_COND_SHRD);
1560 
1561 	/* Load class1 key only */
1562 	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1563 			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1564 
1565 	/* Load sector size with index 40 bytes (0x28) */
1566 	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1567 			   LDST_SRCDST_BYTE_CONTEXT |
1568 			   (0x28 << LDST_OFFSET_SHIFT));
1569 
1570 	set_jump_tgt_here(desc, key_jump_cmd);
1571 
1572 	/*
1573 	 * create sequence for loading the sector index
1574 	 * Upper 8B of IV - will be used as sector index
1575 	 * Lower 8B of IV - will be discarded
1576 	 */
1577 	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1578 			(0x20 << LDST_OFFSET_SHIFT));
1579 	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1580 
1581 	/* Load operation */
1582 	append_dec_op1(desc, cdata->algtype);
1583 
1584 	/* Perform operation */
1585 	skcipher_append_src_dst(desc);
1586 
1587 #ifdef DEBUG
1588 	print_hex_dump(KERN_ERR,
1589 		       "xts skcipher dec shdesc@" __stringify(__LINE__) ": ",
1590 		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1591 #endif
1592 }
1593 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
1594 
1595 MODULE_LICENSE("GPL");
1596 MODULE_DESCRIPTION("FSL CAAM descriptor support");
1597 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
1598