Lines Matching refs:sgs
28 struct scatterlist *sgs, *sgd; in rk_cipher_need_fallback() local
36 sgs = req->src; in rk_cipher_need_fallback()
38 while (sgs && sgd) { in rk_cipher_need_fallback()
39 if (!IS_ALIGNED(sgs->offset, sizeof(u32))) { in rk_cipher_need_fallback()
47 stodo = min(len, sgs->length); in rk_cipher_need_fallback()
62 sgs = sg_next(sgs); in rk_cipher_need_fallback()
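
Taken together, the rk_cipher_need_fallback() matches above (lines 28-62) describe a paired walk over the source and destination scatterlists that rejects buffers the engine cannot handle. A minimal sketch of that walk follows; it is reconstructed from the excerpts only, so the destination-side check, the remaining-length bookkeeping and the helper name are assumptions, not the driver's exact code.

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

/* Sketch: walk the src/dst scatterlists and report whether the request must
 * fall back to the software implementation (assumption: the hardware needs
 * 32-bit aligned buffer offsets). */
static bool need_fallback_sketch(struct skcipher_request *req)
{
	struct scatterlist *sgs = req->src;
	struct scatterlist *sgd = req->dst;
	unsigned int len = req->cryptlen;
	unsigned int stodo;

	while (sgs && sgd) {
		if (!IS_ALIGNED(sgs->offset, sizeof(u32)))
			return true;	/* unaligned source entry */
		if (!IS_ALIGNED(sgd->offset, sizeof(u32)))
			return true;	/* unaligned destination entry (assumed symmetric check) */

		stodo = min(len, sgs->length);	/* bytes this source entry contributes */
		len -= stodo;

		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}
	return false;
}
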
289 struct scatterlist *sgs, in crypto_dma_start() argument
292 CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs)); in crypto_dma_start()
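
The crypto_dma_start() matches (lines 289-292) show the mapped source address being written to the engine's RK_CRYPTO_BRDMAS register. A rough sketch of what such a helper typically programs is below; only the sgs argument and the BRDMAS write come from the excerpt, while the other register names and the word-count/destination writes are assumptions about the rest of the function.

/* Sketch: program the block-cipher DMA registers before the transfer is
 * started. Only the RK_CRYPTO_BRDMAS write is taken from the listing; the
 * remaining writes are assumed. */
static void crypto_dma_start_sketch(struct rk_crypto_info *dev,
				    struct scatterlist *sgs,
				    struct scatterlist *sgd,
				    unsigned int todo)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs)); /* DMA source address */
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);                 /* length in 32-bit words (assumed) */
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd)); /* DMA destination address (assumed) */
	/* the real function then sets the start bit in the control register */
}
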
304 struct scatterlist *sgs, *sgd; in rk_cipher_run() local
333 sgs = areq->src; in rk_cipher_run()
336 while (sgs && sgd && len) { in rk_cipher_run()
337 if (!sgs->length) { in rk_cipher_run()
338 sgs = sg_next(sgs); in rk_cipher_run()
344 offset = sgs->length - ivsize; in rk_cipher_run()
345 scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0); in rk_cipher_run()
347 if (sgs == sgd) { in rk_cipher_run()
348 err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL); in rk_cipher_run()
354 err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE); in rk_cipher_run()
376 todo = min(sg_dma_len(sgs), len); in rk_cipher_run()
378 crypto_dma_start(rkc, sgs, sgd, todo / 4); in rk_cipher_run()
386 if (sgs == sgd) { in rk_cipher_run()
387 dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL); in rk_cipher_run()
389 dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE); in rk_cipher_run()
400 sgs = sg_next(sgs); in rk_cipher_run()
424 if (sgs == sgd) { in rk_cipher_run()
425 dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL); in rk_cipher_run()
427 dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE); in rk_cipher_run()
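
The rk_cipher_run() matches (lines 304-427) outline the main per-scatterlist loop: skip empty entries, save the last ciphertext block as the next IV when decrypting, map each src/dst pair for DMA (bidirectionally when the request is in-place, i.e. sgs == sgd), start the engine with the length expressed in 32-bit words, then unmap and advance. The sketch below stitches those fragments together; the completion wait, the mode/IV plumbing, the error labels and the engine_run_one_block() helper are simplifications or assumptions, not the driver's code, and 'dev' stands in for rkc->dev in the excerpts.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/aes.h>
#include <crypto/skcipher.h>

/* Hypothetical stand-in for crypto_dma_start() plus the wait for the
 * engine's completion interrupt. */
static int engine_run_one_block(struct device *dev, struct scatterlist *sgs,
				struct scatterlist *sgd, unsigned int words)
{
	/* would write sg_dma_address(sgs)/sg_dma_address(sgd) and 'words'
	 * into the engine's DMA registers, then wait for completion */
	return 0;
}

static int cipher_run_sketch(struct device *dev, struct skcipher_request *areq,
			     unsigned int ivsize, bool decrypting)
{
	struct scatterlist *sgs = areq->src;
	struct scatterlist *sgd = areq->dst;
	unsigned int len = areq->cryptlen;
	unsigned int todo, offset;
	u8 biv[AES_BLOCK_SIZE];
	int err;

	while (sgs && sgd && len) {
		if (!sgs->length) {		/* skip zero-length entries */
			sgs = sg_next(sgs);
			sgd = sg_next(sgd);
			continue;
		}

		if (decrypting) {
			/* back up the last ciphertext block of this entry:
			 * it is the chaining IV for the next chunk */
			offset = sgs->length - ivsize;
			scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0);
		}

		if (sgs == sgd) {
			/* in-place request: one bidirectional mapping */
			err = dma_map_sg(dev, sgs, 1, DMA_BIDIRECTIONAL);
			if (err <= 0)
				return -EINVAL;
		} else {
			err = dma_map_sg(dev, sgs, 1, DMA_TO_DEVICE);
			if (err <= 0)
				return -EINVAL;
			err = dma_map_sg(dev, sgd, 1, DMA_FROM_DEVICE);
			if (err <= 0) {
				dma_unmap_sg(dev, sgs, 1, DMA_TO_DEVICE);
				return -EINVAL;
			}
		}

		todo = min(sg_dma_len(sgs), len);
		len -= todo;
		err = engine_run_one_block(dev, sgs, sgd, todo / 4);

		if (sgs == sgd) {
			dma_unmap_sg(dev, sgs, 1, DMA_BIDIRECTIONAL);
		} else {
			dma_unmap_sg(dev, sgs, 1, DMA_TO_DEVICE);
			dma_unmap_sg(dev, sgd, 1, DMA_FROM_DEVICE);
		}
		if (err)
			return err;

		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}
	return 0;
}
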