Lines matching references to "walk" in the SM4 CCM glue code:

97 struct scatter_walk walk; in ccm_calculate_auth_mac() local
112 scatterwalk_start(&walk, req->src); in ccm_calculate_auth_mac()
115 u32 n = scatterwalk_clamp(&walk, assoclen); in ccm_calculate_auth_mac()
119 scatterwalk_start(&walk, sg_next(walk.sg)); in ccm_calculate_auth_mac()
120 n = scatterwalk_clamp(&walk, assoclen); in ccm_calculate_auth_mac()
123 p = ptr = scatterwalk_map(&walk); in ccm_calculate_auth_mac()
125 scatterwalk_advance(&walk, n); in ccm_calculate_auth_mac()
158 scatterwalk_done(&walk, 0, assoclen); in ccm_calculate_auth_mac()
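
The references above (lines 97-158) are the associated-data pass: ccm_calculate_auth_mac() starts a scatter_walk on req->src, clamps each step to the bytes that are contiguous in the current scatterlist entry, maps and folds that chunk into the CBC-MAC, advances, and moves on to the next entry until assoclen is consumed. Below is a minimal userspace sketch of the same clamp/process/advance shape; struct segment and toy_mac_update() are hypothetical stand-ins for the scatterlist and the real MAC update, not kernel APIs.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for one scatterlist entry: a contiguous segment. */
struct segment {
	const uint8_t *buf;
	size_t len;
};

/* Hypothetical running MAC: XOR-folds bytes, so chunking cannot change it. */
struct toy_mac {
	uint8_t state[16];
	size_t pos;
};

static void toy_mac_update(struct toy_mac *mac, const uint8_t *p, size_t n)
{
	for (size_t i = 0; i < n; i++)
		mac->state[mac->pos++ % 16] ^= p[i];
}

/*
 * Feed assoclen bytes spread over segs[] to the MAC, one contiguous chunk at
 * a time -- the same clamp/process/advance shape as the scatter_walk loop in
 * ccm_calculate_auth_mac().
 */
static void mac_over_segments(struct toy_mac *mac, const struct segment *segs,
			      size_t nsegs, size_t assoclen)
{
	size_t i = 0, off = 0;

	while (assoclen && i < nsegs) {
		size_t n = segs[i].len - off;	/* clamp to the current segment */

		if (n > assoclen)
			n = assoclen;
		if (n == 0) {			/* segment exhausted: advance */
			i++;
			off = 0;
			continue;
		}
		toy_mac_update(mac, segs[i].buf + off, n);
		off += n;
		assoclen -= n;
	}
}

int main(void)
{
	uint8_t a[] = "associated ", b[] = "data";
	struct segment segs[] = { { a, sizeof(a) - 1 }, { b, sizeof(b) - 1 } };
	struct toy_mac mac = { { 0 }, 0 };

	mac_over_segments(&mac, segs, 2, sizeof(a) + sizeof(b) - 2);
	printf("%02x %02x ...\n", mac.state[0], mac.state[1]);
	return 0;
}
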
162 static int ccm_crypt(struct aead_request *req, struct skcipher_walk *walk, in ccm_crypt() argument
172 memcpy(ctr0, walk->iv, SM4_BLOCK_SIZE); in ccm_crypt()
173 crypto_inc(walk->iv, SM4_BLOCK_SIZE); in ccm_crypt()
180 while (walk->nbytes && walk->nbytes != walk->total) { in ccm_crypt()
181 unsigned int tail = walk->nbytes % SM4_BLOCK_SIZE; in ccm_crypt()
183 sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr, in ccm_crypt()
184 walk->src.virt.addr, walk->iv, in ccm_crypt()
185 walk->nbytes - tail, mac); in ccm_crypt()
189 err = skcipher_walk_done(walk, tail); in ccm_crypt()
194 if (walk->nbytes) { in ccm_crypt()
195 sm4_ce_ccm_crypt(rkey_enc, walk->dst.virt.addr, in ccm_crypt()
196 walk->src.virt.addr, walk->iv, in ccm_crypt()
197 walk->nbytes, mac); in ccm_crypt()
203 err = skcipher_walk_done(walk, 0); in ccm_crypt()
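
Lines 162-203 are the payload pass. ccm_crypt() saves the initial counter block (ctr0) from walk->iv before bumping the IV with crypto_inc(), then lets every intermediate walk step process only its full SM4 blocks (skcipher_walk_done(walk, tail) pushes the partial tail back into the walk), and the final step, where walk->nbytes equals walk->total, takes the tail as well. The sketch below models that split under the assumption that intermediate steps are at least one block; ctr_inc() mirrors crypto_inc()'s big-endian carry, and ccm_crypt_chunk() is a hypothetical placeholder for the sm4_ce_ccm_crypt() assembly helper (it XORs a fake keystream and is not SM4).

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define BLOCK_SIZE 16	/* stands in for SM4_BLOCK_SIZE */

/* Big-endian counter increment with carry, like the kernel's crypto_inc(). */
static void ctr_inc(uint8_t ctr[BLOCK_SIZE])
{
	for (int i = BLOCK_SIZE - 1; i >= 0; i--)
		if (++ctr[i] != 0)
			break;
}

/* Hypothetical placeholder for sm4_ce_ccm_crypt(): XORs a fake keystream. */
static void ccm_crypt_chunk(uint8_t *dst, const uint8_t *src, uint8_t *iv,
			    size_t nbytes)
{
	for (size_t done = 0; done < nbytes; done += BLOCK_SIZE) {
		size_t n = nbytes - done;

		if (n > BLOCK_SIZE)
			n = BLOCK_SIZE;
		for (size_t i = 0; i < n; i++)
			dst[done + i] = src[done + i] ^ iv[i];	/* not SM4 */
		ctr_inc(iv);
	}
}

/*
 * Same shape as ccm_crypt(): save counter block 0 before touching the IV
 * (CCM later encrypts the CBC-MAC with it to form the tag), bump the IV to
 * counter 1, give every intermediate walk step only its full blocks, and let
 * the final step (nbytes == total) take the partial tail too.  Assumes
 * step >= BLOCK_SIZE for intermediate steps.
 */
static void ccm_crypt_sketch(uint8_t ctr0[BLOCK_SIZE], uint8_t *dst,
			     const uint8_t *src, uint8_t *iv,
			     size_t step, size_t total)
{
	size_t done = 0;

	memcpy(ctr0, iv, BLOCK_SIZE);
	ctr_inc(iv);

	while (total - done > step) {		/* intermediate walk steps */
		size_t tail = step % BLOCK_SIZE;

		ccm_crypt_chunk(dst + done, src + done, iv, step - tail);
		done += step - tail;
	}
	ccm_crypt_chunk(dst + done, src + done, iv, total - done);	/* final */
}

int main(void)
{
	uint8_t iv[BLOCK_SIZE] = { 0 }, ctr0[BLOCK_SIZE];
	uint8_t src[40] = { 0 }, dst[40];

	/* 40-byte payload walked in 32-byte steps: 32 full-block bytes first,
	 * then the final step handles the remaining 8-byte tail. */
	ccm_crypt_sketch(ctr0, dst, src, iv, 32, sizeof(src));
	return 0;
}
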
218 struct skcipher_walk walk; in ccm_encrypt() local
225 err = skcipher_walk_aead_encrypt(&walk, req, false); in ccm_encrypt()
229 err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_enc); in ccm_encrypt()
247 struct skcipher_walk walk; in ccm_decrypt() local
254 err = skcipher_walk_aead_decrypt(&walk, req, false); in ccm_decrypt()
258 err = ccm_crypt(req, &walk, ctx->rkey_enc, mac, sm4_ce_ccm_dec); in ccm_decrypt()
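
Lines 218-258 show that ccm_encrypt() and ccm_decrypt() differ only in the walk initializer (skcipher_walk_aead_encrypt() vs skcipher_walk_aead_decrypt()) and in the low-level callback passed to the shared ccm_crypt() driver; both hand over ctx->rkey_enc, since CCM's counter mode only ever runs the block cipher in the forward direction. The sketch below models that callback dispatch with hypothetical names (ccm_op_t, ccm_pass, toy_enc, toy_dec); the toy routines only illustrate why one driver serves both directions: encryption folds the plaintext into the MAC before the keystream XOR, decryption folds it in after.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

/*
 * Callback shape modelled on ccm_crypt()'s last parameter; in the driver the
 * real callbacks are the sm4_ce_ccm_enc()/sm4_ce_ccm_dec() assembly routines.
 */
typedef void (*ccm_op_t)(const uint8_t *rkey_enc, uint8_t *dst,
			 const uint8_t *src, uint8_t *iv,
			 size_t nbytes, uint8_t mac[16]);

/* Toy "encrypt": MAC the plaintext, then XOR a fake keystream (the IV). */
static void toy_enc(const uint8_t *rkey_enc, uint8_t *dst, const uint8_t *src,
		    uint8_t *iv, size_t nbytes, uint8_t mac[16])
{
	(void)rkey_enc;
	for (size_t i = 0; i < nbytes; i++) {
		mac[i % 16] ^= src[i];		/* MAC over plaintext */
		dst[i] = src[i] ^ iv[i % 16];
	}
}

/* Toy "decrypt": undo the XOR first, then MAC the recovered plaintext. */
static void toy_dec(const uint8_t *rkey_enc, uint8_t *dst, const uint8_t *src,
		    uint8_t *iv, size_t nbytes, uint8_t mac[16])
{
	(void)rkey_enc;
	for (size_t i = 0; i < nbytes; i++) {
		dst[i] = src[i] ^ iv[i % 16];
		mac[i % 16] ^= dst[i];		/* MAC over plaintext again */
	}
}

/* Shared driver: one code path, the direction is just a callback. */
static void ccm_pass(const uint8_t *rkey_enc, uint8_t *dst, const uint8_t *src,
		     uint8_t *iv, size_t nbytes, uint8_t mac[16], ccm_op_t op)
{
	op(rkey_enc, dst, src, iv, nbytes, mac);
}

int main(void)
{
	uint8_t rkey_enc[32] = { 0 };
	uint8_t iv[16] = { 0xa5, 0x5a, 0x3c, 0xc3 };	/* arbitrary counter */
	uint8_t pt[16] = "attack at dawn!";
	uint8_t ct[16], rt[16];
	uint8_t mac_e[16] = { 0 }, mac_d[16] = { 0 };

	/* Both directions use the same driver and the *encryption* round
	 * keys; counter mode never runs the cipher backwards. */
	ccm_pass(rkey_enc, ct, pt, iv, sizeof(pt), mac_e, toy_enc);
	ccm_pass(rkey_enc, rt, ct, iv, sizeof(ct), mac_d, toy_dec);

	/* Round-trip and both MACs (each over the plaintext) must agree. */
	return memcmp(pt, rt, sizeof(pt)) || memcmp(mac_e, mac_d, 16);
}
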