/*
 * caam descriptor construction helper functions
 *
 * Copyright 2008-2012 Freescale Semiconductor, Inc.
 */

#include "desc.h"

#define IMMEDIATE (1 << 23)
#define CAAM_CMD_SZ sizeof(u32)
#define CAAM_PTR_SZ sizeof(dma_addr_t)
#define CAAM_DESC_BYTES_MAX (CAAM_CMD_SZ * MAX_CAAM_DESCSIZE)
#define DESC_JOB_IO_LEN (CAAM_CMD_SZ * 5 + CAAM_PTR_SZ * 3)

#ifdef DEBUG
#define PRINT_POS do { printk(KERN_DEBUG "%02d: %s\n", desc_len(desc),\
			      &__func__[sizeof("append")]); } while (0)
#else
#define PRINT_POS
#endif

#define SET_OK_NO_PROP_ERRORS (IMMEDIATE | LDST_CLASS_DECO | \
			       LDST_SRCDST_WORD_DECOCTRL | \
			       (LDOFF_CHG_SHARE_OK_NO_PROP << \
				LDST_OFFSET_SHIFT))
#define DISABLE_AUTO_INFO_FIFO (IMMEDIATE | LDST_CLASS_DECO | \
				LDST_SRCDST_WORD_DECOCTRL | \
				(LDOFF_DISABLE_AUTO_NFIFO << LDST_OFFSET_SHIFT))
#define ENABLE_AUTO_INFO_FIFO (IMMEDIATE | LDST_CLASS_DECO | \
			       LDST_SRCDST_WORD_DECOCTRL | \
			       (LDOFF_ENABLE_AUTO_NFIFO << LDST_OFFSET_SHIFT))

static inline int desc_len(u32 *desc)
{
	return *desc & HDR_DESCLEN_MASK;
}

static inline int desc_bytes(void *desc)
{
	return desc_len(desc) * CAAM_CMD_SZ;
}

static inline u32 *desc_end(u32 *desc)
{
	return desc + desc_len(desc);
}

static inline void *sh_desc_pdb(u32 *desc)
{
	return desc + 1;
}

static inline void init_desc(u32 *desc, u32 options)
{
	*desc = (options | HDR_ONE) + 1;
}

static inline void init_sh_desc(u32 *desc, u32 options)
{
	PRINT_POS;
	init_desc(desc, CMD_SHARED_DESC_HDR | options);
}

static inline void init_sh_desc_pdb(u32 *desc, u32 options, size_t pdb_bytes)
{
	u32 pdb_len = (pdb_bytes + CAAM_CMD_SZ - 1) / CAAM_CMD_SZ;

	init_sh_desc(desc, (((pdb_len + 1) << HDR_START_IDX_SHIFT) + pdb_len) |
		     options);
}

static inline void init_job_desc(u32 *desc, u32 options)
{
	init_desc(desc, CMD_DESC_HDR | options);
}

static inline void append_ptr(u32 *desc, dma_addr_t ptr)
{
	dma_addr_t *offset = (dma_addr_t *)desc_end(desc);

	*offset = ptr;

	(*desc) += CAAM_PTR_SZ / CAAM_CMD_SZ;
}

static inline void init_job_desc_shared(u32 *desc, dma_addr_t ptr, int len,
					u32 options)
{
	PRINT_POS;
	init_job_desc(desc, HDR_SHARED | options |
		      (len << HDR_START_IDX_SHIFT));
	append_ptr(desc, ptr);
}

static inline void append_data(u32 *desc, void *data, int len)
{
	u32 *offset = desc_end(desc);

	if (len) /* avoid sparse warning: memcpy with byte count of 0 */
		memcpy(offset, data, len);

	(*desc) += (len + CAAM_CMD_SZ - 1) / CAAM_CMD_SZ;
}

static inline void append_cmd(u32 *desc, u32 command)
{
	u32 *cmd = desc_end(desc);

	*cmd = command;

	(*desc)++;
}

#define append_u32 append_cmd

static inline void append_u64(u32 *desc, u64 data)
{
	u32 *offset = desc_end(desc);

	*offset = upper_32_bits(data);
	*(++offset) = lower_32_bits(data);

	(*desc) += 2;
}

/* Write command without affecting header, and return pointer to next word */
static inline u32 *write_cmd(u32 *desc, u32 command)
{
	*desc = command;

	return desc + 1;
}

static inline void append_cmd_ptr(u32 *desc, dma_addr_t ptr, int len,
				  u32 command)
{
	append_cmd(desc, command | len);
	append_ptr(desc, ptr);
}

/* Write length after pointer, rather than inside command */
static inline void append_cmd_ptr_extlen(u32 *desc, dma_addr_t ptr,
					 unsigned int len, u32 command)
{
	append_cmd(desc, command);
	if (!(command & (SQIN_RTO | SQIN_PRE)))
		append_ptr(desc, ptr);
	append_cmd(desc, len);
}
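/*
 * Usage sketch for the primitives above (illustrative only; "some_command"
 * and "some_dma_addr" are placeholders, not real driver values): every
 * append_* helper writes at desc_end() and bumps the length field kept in
 * the header word, so a descriptor can be built incrementally and measured
 * at any point:
 *
 *	u32 desc[MAX_CAAM_DESCSIZE];
 *
 *	init_job_desc(desc, 0);           header word, desc_len(desc) == 1
 *	append_cmd(desc, some_command);   desc_len(desc) == 2
 *	append_ptr(desc, some_dma_addr);  adds CAAM_PTR_SZ / CAAM_CMD_SZ words
 *
 * desc_bytes(desc) then gives the byte count to hand to the DMA API.
 */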
static inline void append_cmd_data(u32 *desc, void *data, int len,
				   u32 command)
{
	append_cmd(desc, command | IMMEDIATE | len);
	append_data(desc, data, len);
}

#define APPEND_CMD_RET(cmd, op) \
static inline u32 *append_##cmd(u32 *desc, u32 options) \
{ \
	u32 *cmd = desc_end(desc); \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | options); \
	return cmd; \
}
APPEND_CMD_RET(jump, JUMP)
APPEND_CMD_RET(move, MOVE)

static inline void set_jump_tgt_here(u32 *desc, u32 *jump_cmd)
{
	*jump_cmd = *jump_cmd | (desc_len(desc) - (jump_cmd - desc));
}

static inline void set_move_tgt_here(u32 *desc, u32 *move_cmd)
{
	*move_cmd &= ~MOVE_OFFSET_MASK;
	*move_cmd = *move_cmd | ((desc_len(desc) << (MOVE_OFFSET_SHIFT + 2)) &
				 MOVE_OFFSET_MASK);
}

#define APPEND_CMD(cmd, op) \
static inline void append_##cmd(u32 *desc, u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | options); \
}
APPEND_CMD(operation, OPERATION)

#define APPEND_CMD_LEN(cmd, op) \
static inline void append_##cmd(u32 *desc, unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | len | options); \
}

APPEND_CMD_LEN(seq_load, SEQ_LOAD)
APPEND_CMD_LEN(seq_store, SEQ_STORE)
APPEND_CMD_LEN(seq_fifo_load, SEQ_FIFO_LOAD)
APPEND_CMD_LEN(seq_fifo_store, SEQ_FIFO_STORE)

#define APPEND_CMD_PTR(cmd, op) \
static inline void append_##cmd(u32 *desc, dma_addr_t ptr, unsigned int len, \
				u32 options) \
{ \
	PRINT_POS; \
	append_cmd_ptr(desc, ptr, len, CMD_##op | options); \
}
APPEND_CMD_PTR(key, KEY)
APPEND_CMD_PTR(load, LOAD)
APPEND_CMD_PTR(fifo_load, FIFO_LOAD)
APPEND_CMD_PTR(fifo_store, FIFO_STORE)

static inline void append_store(u32 *desc, dma_addr_t ptr, unsigned int len,
				u32 options)
{
	u32 cmd_src;

	cmd_src = options & LDST_SRCDST_MASK;

	append_cmd(desc, CMD_STORE | options | len);

	/* The following options do not require a pointer */
	if (!(cmd_src == LDST_SRCDST_WORD_DESCBUF_SHARED ||
	      cmd_src == LDST_SRCDST_WORD_DESCBUF_JOB ||
	      cmd_src == LDST_SRCDST_WORD_DESCBUF_JOB_WE ||
	      cmd_src == LDST_SRCDST_WORD_DESCBUF_SHARED_WE))
		append_ptr(desc, ptr);
}

#define APPEND_SEQ_PTR_INTLEN(cmd, op) \
static inline void append_seq_##cmd##_ptr_intlen(u32 *desc, dma_addr_t ptr, \
						 unsigned int len, \
						 u32 options) \
{ \
	PRINT_POS; \
	if (options & (SQIN_RTO | SQIN_PRE)) \
		append_cmd(desc, CMD_SEQ_##op##_PTR | len | options); \
	else \
		append_cmd_ptr(desc, ptr, len, CMD_SEQ_##op##_PTR | options); \
}
APPEND_SEQ_PTR_INTLEN(in, IN)
APPEND_SEQ_PTR_INTLEN(out, OUT)

#define APPEND_CMD_PTR_TO_IMM(cmd, op) \
static inline void append_##cmd##_as_imm(u32 *desc, void *data, \
					 unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd_data(desc, data, len, CMD_##op | options); \
}
APPEND_CMD_PTR_TO_IMM(load, LOAD);
APPEND_CMD_PTR_TO_IMM(fifo_load, FIFO_LOAD);

#define APPEND_CMD_PTR_EXTLEN(cmd, op) \
static inline void append_##cmd##_extlen(u32 *desc, dma_addr_t ptr, \
					 unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd_ptr_extlen(desc, ptr, len, CMD_##op | SQIN_EXT | options); \
}
APPEND_CMD_PTR_EXTLEN(seq_in_ptr, SEQ_IN_PTR)
APPEND_CMD_PTR_EXTLEN(seq_out_ptr, SEQ_OUT_PTR)
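/*
 * Usage sketch for the sequence-pointer helpers (illustrative only;
 * sh_desc_dma, src_dma, dst_dma, the lengths and hdr_options are caller
 * placeholders): a job descriptor that runs an already-built shared
 * descriptor over one input and one output buffer is typically laid out as
 *
 *	init_job_desc_shared(desc, sh_desc_dma, sh_desc_len, hdr_options);
 *	append_seq_in_ptr_extlen(desc, src_dma, src_len, 0);
 *	append_seq_out_ptr_extlen(desc, dst_dma, dst_len, 0);
 *
 * append_seq_in_ptr()/append_seq_out_ptr(), generated further below, pick
 * between the _intlen and _extlen forms based on the size of the length
 * argument's type.
 */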
/*
 * Determine whether to store length internally or externally depending on
 * the size of its type
 */
#define APPEND_CMD_PTR_LEN(cmd, op, type) \
static inline void append_##cmd(u32 *desc, dma_addr_t ptr, \
				type len, u32 options) \
{ \
	PRINT_POS; \
	if (sizeof(type) > sizeof(u16)) \
		append_##cmd##_extlen(desc, ptr, len, options); \
	else \
		append_##cmd##_intlen(desc, ptr, len, options); \
}
APPEND_CMD_PTR_LEN(seq_in_ptr, SEQ_IN_PTR, u32)
APPEND_CMD_PTR_LEN(seq_out_ptr, SEQ_OUT_PTR, u32)

/*
 * 2nd variant for commands whose specified immediate length differs
 * from the length of the immediate data provided, e.g., split keys
 */
#define APPEND_CMD_PTR_TO_IMM2(cmd, op) \
static inline void append_##cmd##_as_imm(u32 *desc, void *data, \
					 unsigned int data_len, \
					 unsigned int len, u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | IMMEDIATE | len | options); \
	append_data(desc, data, data_len); \
}
APPEND_CMD_PTR_TO_IMM2(key, KEY);

#define APPEND_CMD_RAW_IMM(cmd, op, type) \
static inline void append_##cmd##_imm_##type(u32 *desc, type immediate, \
					     u32 options) \
{ \
	PRINT_POS; \
	append_cmd(desc, CMD_##op | IMMEDIATE | options | sizeof(type)); \
	append_cmd(desc, immediate); \
}
APPEND_CMD_RAW_IMM(load, LOAD, u32);

/*
 * Append a math command. Only the last part of the destination and source
 * names needs to be specified
 */
#define APPEND_MATH(op, desc, dest, src_0, src_1, len) \
append_cmd(desc, CMD_MATH | MATH_FUN_##op | MATH_DEST_##dest | \
	   MATH_SRC0_##src_0 | MATH_SRC1_##src_1 | (u32)len);

#define append_math_add(desc, dest, src0, src1, len) \
	APPEND_MATH(ADD, desc, dest, src0, src1, len)
#define append_math_sub(desc, dest, src0, src1, len) \
	APPEND_MATH(SUB, desc, dest, src0, src1, len)
#define append_math_add_c(desc, dest, src0, src1, len) \
	APPEND_MATH(ADDC, desc, dest, src0, src1, len)
#define append_math_sub_b(desc, dest, src0, src1, len) \
	APPEND_MATH(SUBB, desc, dest, src0, src1, len)
#define append_math_and(desc, dest, src0, src1, len) \
	APPEND_MATH(AND, desc, dest, src0, src1, len)
#define append_math_or(desc, dest, src0, src1, len) \
	APPEND_MATH(OR, desc, dest, src0, src1, len)
#define append_math_xor(desc, dest, src0, src1, len) \
	APPEND_MATH(XOR, desc, dest, src0, src1, len)
#define append_math_lshift(desc, dest, src0, src1, len) \
	APPEND_MATH(LSHIFT, desc, dest, src0, src1, len)
#define append_math_rshift(desc, dest, src0, src1, len) \
	APPEND_MATH(RSHIFT, desc, dest, src0, src1, len)
#define append_math_ldshift(desc, dest, src0, src1, len) \
	APPEND_MATH(SHLD, desc, dest, src0, src1, len)
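/*
 * Usage sketch for the math helpers (illustrative only): callers pass just
 * the final component of the MATH_DEST_/MATH_SRC0_/MATH_SRC1_ names from
 * desc.h, so subtracting register 0 from the sequence input length would be
 * written as
 *
 *	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
 *
 * which emits a single CMD_MATH word operating on CAAM_CMD_SZ bytes.
 */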
/* Exactly one source is IMM. Data is passed in as u32 value */
#define APPEND_MATH_IMM_u32(op, desc, dest, src_0, src_1, data) \
do { \
	APPEND_MATH(op, desc, dest, src_0, src_1, CAAM_CMD_SZ); \
	append_cmd(desc, data); \
} while (0)

#define append_math_add_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(ADD, desc, dest, src0, src1, data)
#define append_math_sub_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(SUB, desc, dest, src0, src1, data)
#define append_math_add_c_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(ADDC, desc, dest, src0, src1, data)
#define append_math_sub_b_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(SUBB, desc, dest, src0, src1, data)
#define append_math_and_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(AND, desc, dest, src0, src1, data)
#define append_math_or_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(OR, desc, dest, src0, src1, data)
#define append_math_xor_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(XOR, desc, dest, src0, src1, data)
#define append_math_lshift_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(LSHIFT, desc, dest, src0, src1, data)
#define append_math_rshift_imm_u32(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u32(RSHIFT, desc, dest, src0, src1, data)

/* Exactly one source is IMM. Data is passed in as u64 value */
#define APPEND_MATH_IMM_u64(op, desc, dest, src_0, src_1, data) \
do { \
	u32 upper = (data >> 16) >> 16; \
	APPEND_MATH(op, desc, dest, src_0, src_1, CAAM_CMD_SZ * 2 | \
		    (upper ? 0 : MATH_IFB)); \
	if (upper) \
		append_u64(desc, data); \
	else \
		append_u32(desc, data); \
} while (0)

#define append_math_add_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(ADD, desc, dest, src0, src1, data)
#define append_math_sub_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(SUB, desc, dest, src0, src1, data)
#define append_math_add_c_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(ADDC, desc, dest, src0, src1, data)
#define append_math_sub_b_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(SUBB, desc, dest, src0, src1, data)
#define append_math_and_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(AND, desc, dest, src0, src1, data)
#define append_math_or_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(OR, desc, dest, src0, src1, data)
#define append_math_xor_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(XOR, desc, dest, src0, src1, data)
#define append_math_lshift_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(LSHIFT, desc, dest, src0, src1, data)
#define append_math_rshift_imm_u64(desc, dest, src0, src1, data) \
	APPEND_MATH_IMM_u64(RSHIFT, desc, dest, src0, src1, data)
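/*
 * Note on the u64 immediates above (illustrative example; REG0 and IMM are
 * the usual desc.h destination/source suffixes): when the upper 32 bits of
 * the immediate are zero, MATH_IFB is set and only one immediate word is
 * appended; otherwise the full 64-bit value follows the command word:
 *
 *	append_math_add_imm_u64(desc, REG0, REG0, IMM, 16);
 *		-> CMD_MATH word (MATH_IFB set) + one immediate word
 *	append_math_add_imm_u64(desc, REG0, REG0, IMM, 0x100000000ULL);
 *		-> CMD_MATH word + two immediate words
 */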