Lines matching refs: walk

100 	struct skcipher_walk walk;  in __ecb_crypt()  local
103 err = skcipher_walk_virt(&walk, req, false); in __ecb_crypt()
105 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
106 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
108 if (walk.nbytes < walk.total) in __ecb_crypt()
110 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
113 fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk, in __ecb_crypt()
116 err = skcipher_walk_done(&walk, in __ecb_crypt()
117 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
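
Taken together, the hits above trace the kernel's standard skcipher walk pattern (the function names match the arm64 NEON bit-sliced AES glue, aes-neonbs-glue.c): skcipher_walk_virt() maps the next virtually contiguous chunk of the request, the loop consumes as many whole blocks as that chunk holds, rounding down to the walk stride while more data is still to come, and skcipher_walk_done() hands back whatever was not consumed. A minimal sketch of the ECB loop, with a hypothetical do_blocks() standing in for the NEON routine (the real driver brackets the call with kernel_neon_begin()/kernel_neon_end(), elided here):

#include <linux/kernel.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

/* Hypothetical stand-in for the NEON block processor. */
static void do_blocks(u8 *dst, const u8 *src, unsigned int blocks);

static int walk_ecb(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* Keep non-final chunks stride-aligned so block batches
		 * never straddle a chunk boundary. */
		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		do_blocks(walk.dst.virt.addr, walk.src.virt.addr, blocks);

		/* Return the unprocessed remainder to the walker. */
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
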
160 struct skcipher_walk walk; in cbc_encrypt() local
163 err = skcipher_walk_virt(&walk, req, false); in cbc_encrypt()
165 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
166 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
170 neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_encrypt()
172 walk.iv); in cbc_encrypt()
174 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
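
CBC encryption is inherently serial, each ciphertext block feeding the next, so the loop above needs no stride round-down: it encrypts every whole block in the chunk and returns only the sub-block tail, walk.nbytes % AES_BLOCK_SIZE, while walk.iv carries the running chaining value from one chunk to the next. A sketch under the same assumptions and includes as the previous one, with a hypothetical cbc_enc_blocks():

/* Hypothetical serial CBC core; updates iv with the last ciphertext block. */
static void cbc_enc_blocks(u8 *dst, const u8 *src, unsigned int blocks, u8 *iv);

static int walk_cbc_enc(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* Serial chaining: no batching, so no stride rounding. */
		cbc_enc_blocks(walk.dst.virt.addr, walk.src.virt.addr,
			       blocks, walk.iv);

		/* Only a sub-block tail can remain. */
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
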
183 struct skcipher_walk walk; in cbc_decrypt() local
186 err = skcipher_walk_virt(&walk, req, false); in cbc_decrypt()
188 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
189 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
191 if (walk.nbytes < walk.total) in cbc_decrypt()
193 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
196 aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, in cbc_decrypt()
198 walk.iv); in cbc_decrypt()
200 err = skcipher_walk_done(&walk, in cbc_decrypt()
201 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
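
CBC decryption, by contrast, is parallel (each plaintext block depends only on two adjacent ciphertext blocks), so this loop batches like the ECB path and rounds blocks down to the walk stride on non-final chunks; walk.iv again threads the previous ciphertext block across chunk boundaries. Sketch, same assumptions:

/* Hypothetical batched CBC-decrypt core; consumes and updates iv. */
static void cbc_dec_blocks(u8 *dst, const u8 *src, unsigned int blocks, u8 *iv);

static int walk_cbc_dec(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* Batched like ECB: keep non-final chunks stride-aligned. */
		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		cbc_dec_blocks(walk.dst.virt.addr, walk.src.virt.addr,
			       blocks, walk.iv);

		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
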
211 struct skcipher_walk walk; in ctr_encrypt() local
214 err = skcipher_walk_virt(&walk, req, false); in ctr_encrypt()
216 while (walk.nbytes > 0) { in ctr_encrypt()
217 int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7; in ctr_encrypt()
218 int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE); in ctr_encrypt()
219 const u8 *src = walk.src.virt.addr; in ctr_encrypt()
220 u8 *dst = walk.dst.virt.addr; in ctr_encrypt()
225 blocks, walk.iv); in ctr_encrypt()
229 if (nbytes && walk.nbytes == walk.total) { in ctr_encrypt()
238 nbytes, walk.iv); in ctr_encrypt()
246 err = skcipher_walk_done(&walk, nbytes); in ctr_encrypt()
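
CTR is a stream mode, so the loop runs while walk.nbytes > 0 rather than >= AES_BLOCK_SIZE. The '& ~7' rounds the block count down to a multiple of eight because the bit-sliced core only works in 8-block batches; the leftover nbytes is either handed back to the walker or, on the final chunk (walk.nbytes == walk.total), finished through a byte-granular path (the real code bounces a sub-block tail through a stack buffer, elided here). Sketch with hypothetical ctr_blocks()/ctr_bytes() helpers:

/* Hypothetical 8-block-batch CTR core and byte-granular tail helper;
 * both advance the counter held in iv. */
static void ctr_blocks(u8 *dst, const u8 *src, int blocks, u8 *iv);
static void ctr_bytes(u8 *dst, const u8 *src, int nbytes, u8 *iv);

static int walk_ctr(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		int nbytes = walk.nbytes % (8 * AES_BLOCK_SIZE);
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		if (blocks) {
			ctr_blocks(dst, src, blocks, walk.iv);
			dst += blocks * AES_BLOCK_SIZE;
			src += blocks * AES_BLOCK_SIZE;
		}

		/* Only the very last chunk may finish a sub-batch tail;
		 * otherwise give it back and let the walker refill. */
		if (nbytes && walk.nbytes == walk.total) {
			ctr_bytes(dst, src, nbytes, walk.iv);
			nbytes = 0;
		}

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
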
286 struct skcipher_walk walk; in __xts_crypt() local
311 err = skcipher_walk_virt(&walk, req, false); in __xts_crypt()
315 while (walk.nbytes >= AES_BLOCK_SIZE) { in __xts_crypt()
316 int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7; in __xts_crypt()
317 out = walk.dst.virt.addr; in __xts_crypt()
318 in = walk.src.virt.addr; in __xts_crypt()
319 nbytes = walk.nbytes; in __xts_crypt()
324 neon_aes_ecb_encrypt(walk.iv, walk.iv, in __xts_crypt()
330 walk.iv); in __xts_crypt()
336 if (walk.nbytes == walk.total && nbytes > 0) { in __xts_crypt()
340 ctx->twkey, walk.iv, first); in __xts_crypt()
344 ctx->twkey, walk.iv, first); in __xts_crypt()
348 err = skcipher_walk_done(&walk, nbytes); in __xts_crypt()
362 err = skcipher_walk_virt(&walk, req, false); in __xts_crypt()
366 out = walk.dst.virt.addr; in __xts_crypt()
367 in = walk.src.virt.addr; in __xts_crypt()
368 nbytes = walk.nbytes; in __xts_crypt()
373 nbytes, ctx->twkey, walk.iv, first); in __xts_crypt()
376 nbytes, ctx->twkey, walk.iv, first); in __xts_crypt()
379 return skcipher_walk_done(&walk, 0); in __xts_crypt()
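
The XTS path runs the same batched loop with two extras visible in the hits: on the first pass the tweak is derived by ECB-encrypting the IV in place with the dedicated tweak key (the neon_aes_ecb_encrypt(walk.iv, walk.iv, ...) hit), and when the request length is not block-aligned the last two blocks are withheld and pushed through a second skcipher_walk_virt() pass that performs ciphertext stealing, closed with skcipher_walk_done(&walk, 0). A sketch of that two-phase shape, with hypothetical xts_blocks()/xts_tail() helpers; the scatterlist surgery that re-targets the request at the tail between the phases is elided:

/* Hypothetical batched XTS core and byte-granular CTS tail helper;
 * both consume and update the tweak. */
static void xts_blocks(u8 *dst, const u8 *src, int blocks, u8 *tweak);
static void xts_tail(u8 *dst, const u8 *src, int nbytes, u8 *tweak);

/* 'tail' is the sub-block remainder; assumes the request was trimmed so
 * that phase 1 excludes the final AES_BLOCK_SIZE + tail bytes. */
static int walk_xts(struct skcipher_request *req, int tail)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		int blocks = (walk.nbytes / AES_BLOCK_SIZE) & ~7;
		u8 *out = walk.dst.virt.addr;
		const u8 *in = walk.src.virt.addr;
		int nbytes = walk.nbytes;

		if (blocks) {
			xts_blocks(out, in, blocks, walk.iv);
			out += blocks * AES_BLOCK_SIZE;
			in += blocks * AES_BLOCK_SIZE;
			nbytes -= blocks * AES_BLOCK_SIZE;
		}

		/* Whole-block leftovers of the final chunk take the
		 * byte-granular path. */
		if (walk.nbytes == walk.total && nbytes > 0) {
			xts_tail(out, in, nbytes, walk.iv);
			nbytes = 0;
		}

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (err || !tail)
		return err;

	/* Phase 2: walk only the re-targeted final AES_BLOCK_SIZE + tail
	 * bytes so ciphertext stealing sees them as one chunk. */
	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	xts_tail(walk.dst.virt.addr, walk.src.virt.addr, walk.nbytes, walk.iv);

	return skcipher_walk_done(&walk, 0);
}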