// SPDX-License-Identifier: GPL-2.0+
/*
 * Shared descriptors for aead, skcipher algorithms
 *
 * Copyright 2016-2019 NXP
 */

#include "compat.h"
#include "desc_constr.h"
#include "caamalg_desc.h"

/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
			 OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}

/**
 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
 *                               (non-protocol) with no (null) encryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);

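/*
 * Illustrative usage sketch (not part of the driver build): a caller is
 * expected to describe the authentication algorithm in a struct alginfo
 * and hand a descriptor buffer to the constructor. The buffer size, key
 * fields and the "authsize"/"era" values below are hypothetical and only
 * show the parameter flow of cnstr_shdsc_aead_null_encap():
 *
 *	u32 shdesc[64];
 *	struct alginfo adata = { };
 *
 *	adata.algtype = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC_PRECOMP;
 *	adata.key_virt = split_key;		(split key buffer, era < 6)
 *	adata.keylen = split_key_len;
 *	adata.keylen_pad = split_key_pad_len;
 *	adata.key_inline = true;
 *
 *	cnstr_shdsc_aead_null_encap(shdesc, &adata, authsize, era);
 */
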
/**
 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
 *                               (non-protocol) with no (null) decryption.
 * @desc: pointer to buffer used for descriptor construction
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
 */
void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
				 unsigned int icvsize, int era)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);

static void init_sh_desc_key_aead(u32 * const desc,
				  struct alginfo * const cdata,
				  struct alginfo * const adata,
				  const bool is_rfc3686, u32 *nonce, int era)
{
	u32 *key_jump_cmd;
	unsigned int enckeylen = cdata->keylen;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/*
	 * RFC3686 specific:
	 *	| key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (era < 6) {
		if (adata->key_inline)
			append_key_as_imm(desc, adata->key_virt,
					  adata->keylen_pad, adata->keylen,
					  CLASS_2 | KEY_DEST_MDHA_SPLIT |
					  KEY_ENC);
		else
			append_key(desc, adata->key_dma, adata->keylen,
				   CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
	} else {
		append_proto_dkp(desc, adata);
	}

	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);
}

/**
 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool is_rfc3686,
			    u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
			    int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_encap);

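/*
 * Illustrative usage sketch (not part of the driver build): for an
 * authenc()-style transform the caller fills one alginfo for the block
 * cipher and one for the HMAC, then builds the encapsulation descriptor
 * into its own buffer. The names and sizes below are hypothetical; only
 * the parameter flow of cnstr_shdsc_aead_encap() is meant to be accurate:
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *	adata.algtype = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC_PRECOMP;
 *
 *	cnstr_shdsc_aead_encap(enc_shdesc, &cdata, &adata, ivsize, authsize,
 *			       false, NULL, 0, false, era);
 *
 * When ctr(aes) is wrapped by the rfc3686 template, is_rfc3686 is true,
 * @nonce points at the nonce that follows the encryption key (see
 * init_sh_desc_key_aead() above) and @ctx1_iv_off is non-zero.
 */
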
/**
 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
 *                          (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool geniv,
			    const bool is_rfc3686, u32 *nonce,
			    const u32 ctx1_iv_off, const bool is_qi, int era)
{
	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	/* Class 2 operation */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		if (!geniv)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));
	}

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
					CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		if (geniv)
			append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
						ivsize);
		else
			append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
					CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_decap);

/**
 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
 *                             (non-protocol) with HW-generated initialization
 *                             vector.
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
 * @adata: pointer to authentication transform definitions.
 *         A split key is required for SEC Era < 6; the size of the split key
 *         is specified in this case. Valid algorithm values - one of
 *         OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
 *         with OP_ALG_AAI_HMAC_PRECOMP.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @nonce: pointer to rfc3686 nonce
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 * @is_qi: true when called from caam/qi
 * @era: SEC Era
 */
void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
			       struct alginfo *adata, unsigned int ivsize,
			       unsigned int icvsize, const bool is_rfc3686,
			       u32 *nonce, const u32 ctx1_iv_off,
			       const bool is_qi, int era)
{
	u32 geniv, moveiv;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);
	}

	if (is_rfc3686) {
		if (is_qi)
			append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
					LDST_SRCDST_BYTE_CONTEXT |
					(ctx1_iv_off << LDST_OFFSET_SHIFT));

		goto copy_iv;
	}

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	if (is_qi || era < 3) {
		append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	} else {
		append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
	}

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload IV */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);

/**
 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
	    *zero_assoc_jump_cmd2;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
					ivsize);
	} else {
		append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
	}

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump to ICV writing */
	if (is_qi)
		append_jump(desc, JUMP_TEST_ALL | 4);
	else
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
	if (is_qi)
		/* jump to ICV writing */
		append_jump(desc, JUMP_TEST_ALL | 2);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	if (is_qi)
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
				     FIFOLD_TYPE_LAST1);

	/* write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);

/**
 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
			   unsigned int ivsize, unsigned int icvsize,
			   const bool is_qi)
{
	u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);

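/*
 * Illustrative usage sketch (not part of the driver build): for gcm(aes) a
 * single cipher alginfo is enough, since authentication is part of the GCM
 * operation itself. The 12-byte IV and 16-byte ICV below are the usual
 * gcm(aes) values and are shown only as an example of the parameter flow:
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM;
 *	cnstr_shdsc_gcm_encap(enc_shdesc, &cdata, 12, 16, false);
 *	cnstr_shdsc_gcm_decap(dec_shdesc, &cdata, 12, 16, false);
 */
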
/**
 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);

/**
 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 |
				  KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		u32 *wait_load_cmd;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				(4 << LDST_OFFSET_SHIFT));

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);

/**
 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);

/**
 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
 *                             (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
			       unsigned int ivsize, unsigned int icvsize,
			       const bool is_qi)
{
	u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (cdata->key_inline)
		append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
				  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
			   KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	if (is_qi) {
		/* assoclen is not needed, skip it */
		append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);

		/* Read salt and IV */
		append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
					cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
					FIFOLD_TYPE_IV);
		append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
				     FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
	}

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);

/**
 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
 *                          IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
 *                          descriptor (non-protocol).
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
 *         OP_ALG_AAI_AEAD.
 * @adata: pointer to authentication transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
 *         OP_ALG_AAI_AEAD.
 * @ivsize: initialization vector size
 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @encap: true if encapsulation, false if decapsulation
 * @is_qi: true when called from caam/qi
 */
void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
			    struct alginfo *adata, unsigned int ivsize,
			    unsigned int icvsize, const bool encap,
			    const bool is_qi)
{
	u32 *key_jump_cmd, *wait_cmd;
	u32 nfifo;
	const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
			  CLASS_1 | KEY_DEST_CLASS_REG);

	/* For IPsec load the salt from keymat in the context register */
	if (is_ipsec)
		append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
				   LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
				   4 << LDST_OFFSET_SHIFT);

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 and 1 operations: Poly & ChaCha */
	if (encap) {
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_ENCRYPT);
	} else {
		append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
		append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
	}

	if (is_qi) {
		u32 *wait_load_cmd;
		u32 ctx1_iv_off = is_ipsec ? 8 : 4;

		/* REG3 = assoclen */
		append_seq_load(desc, 4, LDST_CLASS_DECO |
				LDST_SRCDST_WORD_DECO_MATH3 |
				4 << LDST_OFFSET_SHIFT);

		wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
					    JUMP_COND_CALM | JUMP_COND_NCP |
					    JUMP_COND_NOP | JUMP_COND_NIP |
					    JUMP_COND_NIFP);
		set_jump_tgt_here(desc, wait_load_cmd);

		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				ctx1_iv_off << LDST_OFFSET_SHIFT);
	}

	/*
	 * MAGIC with NFIFO
	 * Read associated data from the input and send them to class1 and
	 * class2 alignment blocks. From class1 send data to output fifo and
	 * then write it to memory since we don't need to encrypt AD.
	 */
	nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
		NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
	append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);

	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
			     FIFOLD_CLASS_CLASS1 | LDST_VLF);
	append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
			MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);

	/* IPsec - copy IV at the output */
	if (is_ipsec)
		append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
				      0x2 << 25);

	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
			       JUMP_COND_NOP | JUMP_TEST_ALL);
	set_jump_tgt_here(desc, wait_cmd);

	if (encap) {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

		/* Write ICV */
		append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
				 LDST_SRCDST_BYTE_CONTEXT);
	} else {
		/* Read and write cryptlen bytes */
		append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
				CAAM_CMD_SZ);
		aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

		/* Load ICV for verification */
		append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
				     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
	}

	print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_chachapoly);

/* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
static inline void skcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}

/**
 * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
 *                                - OP_ALG_ALGSEL_CHACHA20
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
				unsigned int ivsize, const bool is_rfc3686,
				const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load IV, if there is one */
	if (ivsize)
		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				LDST_CLASS_1_CCB | (ctx1_iv_off <<
				LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store IV */
	if (ivsize)
		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
				 LDST_OFFSET_SHIFT));

	print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);

/**
 * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
 *         with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
 *                                - OP_ALG_ALGSEL_CHACHA20
 * @ivsize: initialization vector size
 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
 * @ctx1_iv_off: IV offset in CONTEXT1 register
 */
void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
				unsigned int ivsize, const bool is_rfc3686,
				const u32 ctx1_iv_off)
{
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		const u8 *nonce = cdata->key_virt + cdata->keylen;

		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load IV, if there is one */
	if (ivsize)
		append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				LDST_CLASS_1_CCB | (ctx1_iv_off <<
				LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctx1_iv_off)
		append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
				 OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store IV */
	if (ivsize)
		append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
				 LDST_CLASS_1_CCB | (ctx1_iv_off <<
				 LDST_OFFSET_SHIFT));

	print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);
}
EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);

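/*
 * Illustrative usage sketch (not part of the driver build): for a plain
 * cbc(aes) skcipher both descriptors are built from the same cdata. The
 * variable names and the assumption that the 16-byte CBC IV lives at
 * offset 0 of CONTEXT1 are examples only:
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *	cnstr_shdsc_skcipher_encap(enc_shdesc, &cdata, 16, false, 0);
 *	cnstr_shdsc_skcipher_decap(dec_shdesc, &cdata, 16, false, 0);
 *
 * For rfc3686(ctr(aes)), is_rfc3686 is true, the nonce is expected right
 * after the key at cdata.key_virt + cdata.keylen (see the nonce load in
 * the constructors above) and ctx1_iv_off is non-zero.
 */
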
/**
 * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store upper 8B of IV */
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x20 << LDST_OFFSET_SHIFT));

	print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
			     ": ", DUMP_PREFIX_ADDRESS, 16, 4,
			     desc, desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);

/**
 * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
 * @desc: pointer to buffer used for descriptor construction
 * @cdata: pointer to block cipher transform definitions
 *         Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
 */
void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
{
	__be64 sector_size = cpu_to_be64(512);
	u32 *key_jump_cmd;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
			  cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   (0x28 << LDST_OFFSET_SHIFT));

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			(0x20 << LDST_OFFSET_SHIFT));
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, cdata->algtype);

	/* Perform operation */
	skcipher_append_src_dst(desc);

	/* Store upper 8B of IV */
	append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (0x20 << LDST_OFFSET_SHIFT));

	print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
			     ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
			     desc_bytes(desc), 1);
}
EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);

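/*
 * Illustrative usage sketch (not part of the driver build): xts(aes) takes
 * no extra parameters beyond the cipher definition; the 512-byte sector
 * size is hardcoded above and the upper 8 bytes of the IV supply the
 * sector index:
 *
 *	cdata.algtype = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS;
 *	cnstr_shdsc_xts_skcipher_encap(enc_shdesc, &cdata);
 *	cnstr_shdsc_xts_skcipher_decap(dec_shdesc, &cdata);
 */
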
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM descriptor support");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");