
Searched refs:in_sg (Results 1 – 25 of 63) sorted by relevance

/openbmc/linux/drivers/crypto/nx/
nx-aes-xcbc.c
81 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, in nx_xcbc_empty()
93 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
108 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, in nx_xcbc_empty()
121 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
195 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
230 in_sg = nx_build_sg_list(nx_ctx->in_sg, in nx_xcbc_update()
241 in_sg = nx_build_sg_list(in_sg, in nx_xcbc_update()
251 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * in nx_xcbc_update()
280 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
324 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer, in nx_xcbc_final()
[all …]
nx-sha256.c
109 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha256_update() local
113 in_sg = nx_build_sg_list(in_sg, in nx_sha256_update()
122 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha256_update()
137 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha256_update()
140 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_update()
186 struct nx_sg *in_sg, *out_sg; in nx_sha256_final() local
215 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, in nx_sha256_final()
231 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_final()
nx-sha512.c
109 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha512_update() local
113 in_sg = nx_build_sg_list(in_sg, in nx_sha512_update()
121 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha512_update()
136 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha512_update()
139 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_update()
189 struct nx_sg *in_sg, *out_sg; in nx_sha512_final() local
224 in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len, in nx_sha512_final()
236 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_final()
nx-aes-gcm.c
107 struct nx_sg *nx_sg = nx_ctx->in_sg; in nx_gca()
137 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in nx_gca()
145 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) in nx_gca()
204 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in gmac()
212 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg) in gmac()
249 struct nx_sg *in_sg, *out_sg; in gcm_empty() local
269 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv, in gcm_empty()
282 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in gcm_empty()
nx.c
269 struct nx_sg *nx_insg = nx_ctx->in_sg; in nx_build_sg_lists()
294 nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes); in nx_build_sg_lists()
314 nx_ctx->op.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
322 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
677 nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE); in nx_crypto_ctx_init()
678 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); in nx_crypto_ctx_init()
753 nx_ctx->in_sg = NULL; in nx_crypto_ctx_exit()
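
The nx hits above all follow the same recipe: nx_build_sg_list() appends scatter-gather entries describing a flat buffer and returns the cursor past the last entry it wrote, and the caller turns that cursor arithmetic into the byte length of the descriptor list it programs into op.inlen. Below is a minimal, self-contained sketch of that pattern; the types and helper are simplified stand-ins, not the kernel's struct nx_sg or the real nx_build_sg_list() signature.

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct sg_entry {                       /* simplified stand-in for struct nx_sg */
	uint64_t addr;
	uint32_t len;
};

/* Describe up to max page-sized chunks of buf; return the next free slot. */
static struct sg_entry *build_sg_list(struct sg_entry *sg, uint8_t *buf,
                                      unsigned int len, unsigned int max)
{
	const unsigned int chunk = 4096;    /* assumed page-sized pieces */

	while (len && max--) {
		unsigned int n = len < chunk ? len : chunk;

		sg->addr = (uint64_t)(uintptr_t)buf;
		sg->len = n;
		sg++;
		buf += n;
		len -= n;
	}
	return sg;
}

int main(void)
{
	static uint8_t data[10000];
	struct sg_entry sgl[16];
	struct sg_entry *end = build_sg_list(sgl, data, sizeof(data), 16);

	/* size in bytes of the descriptor list handed to the coprocessor */
	size_t inlen = (size_t)(end - sgl) * sizeof(struct sg_entry);

	printf("%td entries, %zu descriptor bytes\n", end - sgl, inlen);
	return 0;
}
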
/openbmc/linux/lib/
sg_split.c
80 struct scatterlist *in_sg, *out_sg; in sg_split_phys() local
84 in_sg = split->in_sg0; in sg_split_phys()
87 *out_sg = *in_sg; in sg_split_phys()
96 in_sg = sg_next(in_sg); in sg_split_phys()
106 struct scatterlist *in_sg, *out_sg; in sg_split_mapped() local
110 in_sg = split->in_sg0; in sg_split_mapped()
113 sg_dma_address(out_sg) = sg_dma_address(in_sg); in sg_split_mapped()
114 sg_dma_len(out_sg) = sg_dma_len(in_sg); in sg_split_mapped()
119 in_sg = sg_next(in_sg); in sg_split_mapped()
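
The sg_split hits copy descriptors from the input scatterlist into per-split output lists, entry by entry: sg_split_phys() copies the entries themselves, while sg_split_mapped() copies the DMA address and length. A toy, standalone version of that copy loop, using flat arrays instead of the kernel's chained struct scatterlist:

#include <stdint.h>
#include <stdio.h>

struct seg {                            /* stand-in for one scatterlist entry */
	uint64_t dma_address;
	uint32_t dma_length;
};

/* Copy nents descriptors starting at in into out (cf. *out_sg = *in_sg). */
static void split_copy(const struct seg *in, struct seg *out, int nents)
{
	for (int i = 0; i < nents; i++)
		out[i] = in[i];
}

int main(void)
{
	struct seg in[4] = {
		{ 0x1000, 512 }, { 0x2000, 512 }, { 0x3000, 256 }, { 0x4000, 128 },
	};
	struct seg first[2], rest[2];

	split_copy(&in[0], first, 2);       /* first split: entries 0..1 */
	split_copy(&in[2], rest, 2);        /* second split: entries 2..3 */

	printf("%u %u\n", first[0].dma_length, rest[1].dma_length);
	return 0;
}
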
/openbmc/qemu/fsdev/
9p-iov-marshal.c
66 ssize_t v9fs_pack(struct iovec *in_sg, int in_num, size_t offset, in v9fs_pack() argument
69 return v9fs_packunpack((void *)src, in_sg, in_num, offset, size, 1); in v9fs_pack()
195 ssize_t v9fs_iov_vmarshal(struct iovec *in_sg, int in_num, size_t offset, in v9fs_iov_vmarshal() argument
206 copied = v9fs_pack(in_sg, in_num, offset, &val, sizeof(val)); in v9fs_iov_vmarshal()
214 copied = v9fs_pack(in_sg, in_num, offset, &val, sizeof(val)); in v9fs_iov_vmarshal()
222 copied = v9fs_pack(in_sg, in_num, offset, &val, sizeof(val)); in v9fs_iov_vmarshal()
235 copied = v9fs_iov_marshal(in_sg, in_num, offset, bswap, in v9fs_iov_vmarshal()
252 copied = v9fs_iov_marshal(in_sg, in_num, offset, bswap, in v9fs_iov_vmarshal()
265 copied = v9fs_iov_marshal(in_sg, in_num, offset, bswap, in v9fs_iov_vmarshal()
295 ssize_t v9fs_iov_marshal(struct iovec *in_sg, int in_num, size_t offset, in v9fs_iov_marshal() argument
[all …]
9p-iov-marshal.h
7 ssize_t v9fs_pack(struct iovec *in_sg, int in_num, size_t offset,
11 ssize_t v9fs_iov_marshal(struct iovec *in_sg, int in_num, size_t offset,
16 ssize_t v9fs_iov_vmarshal(struct iovec *in_sg, int in_num, size_t offset,
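
The 9p marshalling hits treat in_sg as an iovec array to be filled at a byte offset: v9fs_pack() copies a flat value into the scatter-gather reply, and v9fs_iov_vmarshal() calls it once per format element. A hedged, standalone sketch of such a pack operation follows; the names and layout are assumptions for illustration, not QEMU's helpers.

#include <sys/uio.h>
#include <string.h>
#include <stdio.h>

/* Copy size bytes of src into the iovec array, starting offset bytes in. */
static ssize_t pack_into_iov(struct iovec *sg, int num, size_t offset,
                             const void *src, size_t size)
{
	const char *p = src;
	size_t done = 0;

	for (int i = 0; i < num && done < size; i++) {
		size_t len = sg[i].iov_len;

		if (offset >= len) {        /* skip segments before the offset */
			offset -= len;
			continue;
		}
		size_t n = len - offset;
		if (n > size - done)
			n = size - done;
		memcpy((char *)sg[i].iov_base + offset, p + done, n);
		done += n;
		offset = 0;
	}
	return done == size ? (ssize_t)done : -1;   /* -1: not enough room */
}

int main(void)
{
	char a[4], b[8];
	struct iovec sg[2] = { { a, sizeof(a) }, { b, sizeof(b) } };
	const char msg[] = "9p-reply";

	printf("packed %zd bytes\n", pack_into_iov(sg, 2, 2, msg, sizeof(msg)));
	return 0;
}
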
/openbmc/linux/drivers/crypto/
omap-des.c
147 struct scatterlist *in_sg; member
382 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_des_crypt_dma()
533 dd->in_sg = req->src; in omap_des_prepare_req()
837 BUG_ON(!dd->in_sg); in omap_des_irq()
839 BUG_ON(_calc_walked(in) > dd->in_sg->length); in omap_des_irq()
841 src = sg_virt(dd->in_sg) + _calc_walked(in); in omap_des_irq()
847 if (dd->in_sg->length == _calc_walked(in)) { in omap_des_irq()
848 dd->in_sg = sg_next(dd->in_sg); in omap_des_irq()
849 if (dd->in_sg) { in omap_des_irq()
851 dd->in_sg); in omap_des_irq()
[all …]
omap-aes.c
266 struct scatterlist *in_sg, in omap_aes_crypt_dma() argument
275 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_aes_crypt_dma()
440 dd->in_sg = req->src; in omap_aes_prepare_req()
872 BUG_ON(!dd->in_sg); in omap_aes_irq()
874 BUG_ON(_calc_walked(in) > dd->in_sg->length); in omap_aes_irq()
876 src = sg_virt(dd->in_sg) + _calc_walked(in); in omap_aes_irq()
882 if (dd->in_sg->length == _calc_walked(in)) { in omap_aes_irq()
883 dd->in_sg = sg_next(dd->in_sg); in omap_aes_irq()
884 if (dd->in_sg) { in omap_aes_irq()
886 dd->in_sg); in omap_aes_irq()
[all …]
sahara.c
182 struct scatterlist *in_sg; member
220 struct scatterlist *in_sg; member
498 sg = dev->in_sg; in sahara_hw_descriptor_create()
542 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
579 dev->in_sg = req->src; in sahara_aes_process()
613 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_aes_process()
783 dev->in_sg = rctx->in_sg; in sahara_sha_hw_links_create()
796 sg = dev->in_sg; in sahara_sha_hw_links_create()
931 rctx->in_sg = rctx->in_sg_chain; in sahara_sha_prepare_request()
934 rctx->in_sg = rctx->in_sg_chain; in sahara_sha_prepare_request()
[all …]
atmel-tdes.c
115 struct scatterlist *in_sg; member
305 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_pdc_stop()
497 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
502 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_tdes_crypt_start()
521 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_tdes_crypt_start()
526 addr_in = sg_dma_address(dd->in_sg); in atmel_tdes_crypt_start()
550 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
625 dd->in_sg = req->src; in atmel_tdes_handle_queue()
656 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_dma_stop()
1058 dd->in_sg = sg_next(dd->in_sg); in atmel_tdes_done_task()
[all …]
omap-aes-gcm.c
36 dd->in_sg = NULL; in omap_aes_gcm_finish_req()
59 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_gcm_done_task()
148 dd->in_sg = dd->in_sgl; in omap_aes_gcm_copy_buffers()
171 dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen); in omap_aes_gcm_copy_buffers()
/openbmc/qemu/hw/9pfs/
xen-9p-backend.c
72 struct iovec *in_sg, in xen_9pfs_in_sg() argument
86 in_sg[0].iov_base = ring->ring.in + masked_prod; in xen_9pfs_in_sg()
87 in_sg[0].iov_len = masked_cons - masked_prod; in xen_9pfs_in_sg()
90 in_sg[0].iov_base = ring->ring.in + masked_prod; in xen_9pfs_in_sg()
91 in_sg[0].iov_len = XEN_FLEX_RING_SIZE(ring->ring_order) - masked_prod; in xen_9pfs_in_sg()
92 in_sg[1].iov_base = ring->ring.in; in xen_9pfs_in_sg()
93 in_sg[1].iov_len = masked_cons; in xen_9pfs_in_sg()
140 struct iovec in_sg[2]; in xen_9pfs_pdu_vmarshal() local
145 in_sg, &num, pdu->idx, ROUND_UP(offset + 128, 512)); in xen_9pfs_pdu_vmarshal()
147 ret = v9fs_iov_vmarshal(in_sg, num, offset, 0, fmt, ap); in xen_9pfs_pdu_vmarshal()
9p-proxy.h
28 #define proxy_unmarshal(in_sg, offset, fmt, args...) \ argument
29 v9fs_iov_unmarshal(in_sg, 1, offset, 0, fmt, ##args)
virtio-9p-device.c
62 if (iov_size(elem->in_sg, elem->in_num) < 7) { in handle_9p_output()
128 ret = v9fs_iov_vmarshal(elem->in_sg, elem->in_num, offset, 1, fmt, ap); in virtio_pdu_vmarshal()
161 size_t buf_size = iov_size(elem->in_sg, elem->in_num); in virtio_init_in_iov_from_pdu()
171 *piov = elem->in_sg; in virtio_init_in_iov_from_pdu()
/openbmc/linux/drivers/crypto/gemini/
sl3516-ce-cipher.c
30 struct scatterlist *in_sg; in sl3516_ce_need_fallback() local
87 in_sg = areq->src; in sl3516_ce_need_fallback()
89 while (in_sg && out_sg) { in sl3516_ce_need_fallback()
90 if (in_sg->length != out_sg->length) { in sl3516_ce_need_fallback()
94 in_sg = sg_next(in_sg); in sl3516_ce_need_fallback()
97 if (in_sg || out_sg) in sl3516_ce_need_fallback()
/openbmc/linux/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss-cipher.c
28 struct scatterlist *in_sg = areq->src; in sun8i_ss_need_fallback() local
76 in_sg = areq->src; in sun8i_ss_need_fallback()
78 while (in_sg && out_sg) { in sun8i_ss_need_fallback()
79 if (in_sg->length != out_sg->length) in sun8i_ss_need_fallback()
81 in_sg = sg_next(in_sg); in sun8i_ss_need_fallback()
84 if (in_sg || out_sg) in sun8i_ss_need_fallback()
/openbmc/qemu/contrib/vhost-user-blk/
vhost-user-blk.c
264 if (elem->in_sg[in_num - 1].iov_len < sizeof(struct virtio_blk_inhdr)) { in vub_virtio_process_req()
268 req->in = (struct virtio_blk_inhdr *)elem->in_sg[in_num - 1].iov_base; in vub_virtio_process_req()
281 ret = vub_readv(req, &elem->in_sg[0], in_num); in vub_virtio_process_req()
297 size_t size = MIN(vub_iov_size(&elem->in_sg[0], in_num), in vub_virtio_process_req()
299 snprintf(elem->in_sg[0].iov_base, size, "%s", "vhost_user_blk"); in vub_virtio_process_req()
301 req->size = elem->in_sg[0].iov_len; in vub_virtio_process_req()
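
The vhost-user-blk hits rely on the virtio-blk request layout: data is returned through the leading in_sg iovecs, and the final in_sg iovec must be large enough to hold the status header, which the check on line 264 enforces. A simplified, self-contained sketch of that completion path; the struct and constant below are stand-ins for the virtio-blk definitions, not the real headers.

#include <sys/uio.h>
#include <stdio.h>

struct blk_inhdr {                      /* stand-in for struct virtio_blk_inhdr */
	unsigned char status;
};

#define BLK_S_OK 0

static int complete_request(struct iovec *in_sg, int in_num)
{
	/* The last device-writable iovec must hold the status header. */
	if (in_num < 1 ||
	    in_sg[in_num - 1].iov_len < sizeof(struct blk_inhdr))
		return -1;                  /* malformed request */

	struct blk_inhdr *hdr = in_sg[in_num - 1].iov_base;

	/* ... payload would be read into in_sg[0 .. in_num-2] here ... */

	hdr->status = BLK_S_OK;             /* report success to the guest */
	return 0;
}

int main(void)
{
	char payload[512];
	struct blk_inhdr hdr;
	struct iovec in_sg[2] = {
		{ payload, sizeof(payload) },
		{ &hdr, sizeof(hdr) },
	};

	printf("%d\n", complete_request(in_sg, 2));
	return 0;
}
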
/openbmc/linux/drivers/crypto/allwinner/sun4i-ss/
sun4i-ss-hash.c
198 struct scatterlist *in_sg = areq->src; in sun4i_hash() local
263 while (in_sg && i == 1) { in sun4i_hash()
264 if (in_sg->length % 4) in sun4i_hash()
266 in_sg = sg_next(in_sg); in sun4i_hash()
sun4i-ss-cipher.c
182 struct scatterlist *in_sg = areq->src; in sun4i_ss_cipher_poll() local
225 while (in_sg && no_chunk == 1) { in sun4i_ss_cipher_poll()
226 if ((in_sg->length | in_sg->offset) & 3u) in sun4i_ss_cipher_poll()
228 in_sg = sg_next(in_sg); in sun4i_ss_cipher_poll()
/openbmc/qemu/hw/audio/
virtio-snd.c
187 if (iov_size(cmd->elem->in_sg, cmd->elem->in_num) < in virtio_snd_handle_pcm_info()
193 iov_size(cmd->elem->in_sg, cmd->elem->in_num), in virtio_snd_handle_pcm_info()
225 iov_from_buf(cmd->elem->in_sg, in virtio_snd_handle_pcm_info()
723 iov_from_buf(cmd->elem->in_sg, in process_cmd()
826 iov_from_buf(buffer->elem->in_sg, in empty_invalid_queue()
976 size = iov_size(elem->in_sg, elem->in_num) - in virtio_snd_handle_rx_xfer()
1123 iov_from_buf(buffer->elem->in_sg, in return_tx_buffer()
1211 iov_from_buf(buffer->elem->in_sg, in return_rx_buffer()
1216 iov_from_buf(buffer->elem->in_sg, in return_rx_buffer()
1257 max_size = iov_size(buffer->elem->in_sg, buffer->elem->in_num); in virtio_snd_pcm_in_cb()
/openbmc/qemu/hw/virtio/
vhost-shadow-virtqueue.c
177 const struct iovec *in_sg, size_t in_num, in vhost_svq_add_split() argument
200 ok = vhost_svq_vring_write_descs(svq, sgs, in_sg, in_num, false, true); in vhost_svq_add_split()
250 size_t out_num, const struct iovec *in_sg, size_t in_num, in vhost_svq_add() argument
261 ok = vhost_svq_add_split(svq, out_sg, out_num, in_sg, in_num, &qemu_head); in vhost_svq_add()
277 return vhost_svq_add(svq, elem->out_sg, elem->out_num, elem->in_sg, in vhost_svq_add_element()
/openbmc/linux/crypto/
rsa-pkcs1pad.c
102 struct scatterlist in_sg[2], out_sg[1]; member
256 pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf, in pkcs1pad_encrypt()
264 akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg, in pkcs1pad_encrypt()
413 pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf, in pkcs1pad_sign()
421 akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg, in pkcs1pad_sign()
/openbmc/linux/drivers/crypto/starfive/
jh7110-hash.c
96 dma_unmap_sg(cryp->dev, rctx->in_sg, rctx->in_sg_len, DMA_TO_DEVICE); in starfive_hash_start()
131 sg_dma_len(rctx->in_sg) = total_len; in starfive_hash_xmit_dma()
139 ret = dma_map_sg(cryp->dev, rctx->in_sg, rctx->in_sg_len, DMA_TO_DEVICE); in starfive_hash_xmit_dma()
152 in_desc = dmaengine_prep_slave_sg(cryp->tx, rctx->in_sg, in starfive_hash_xmit_dma()
366 rctx->in_sg = req->src; in starfive_hash_digest()
369 rctx->in_sg_len = sg_nents_for_len(rctx->in_sg, rctx->total); in starfive_hash_digest()
372 if (starfive_hash_check_aligned(rctx->in_sg, rctx->total, rctx->blksize)) in starfive_hash_digest()
