Searched refs:chunk_end (Results 1 – 12 of 12) sorted by relevance
/openbmc/linux/net/sctp/

inqueue.c
  104  ch = (struct sctp_chunkhdr *)chunk->chunk_end;  in sctp_inq_peek()
  145  ch = (struct sctp_chunkhdr *)chunk->chunk_end;  in sctp_inq_pop()
  147  skb_pull(chunk->skb, chunk->chunk_end - chunk->skb->data);  in sctp_inq_pop()
  207  chunk->chunk_end = ((__u8 *)ch) + SCTP_PAD4(ntohs(ch->length));  in sctp_inq_pop()
  211  if (chunk->chunk_end + sizeof(*ch) <= skb_tail_pointer(chunk->skb)) {  in sctp_inq_pop()
  214  } else if (chunk->chunk_end > skb_tail_pointer(chunk->skb)) {  in sctp_inq_pop()
  217  chunk->chunk_end = skb_tail_pointer(chunk->skb);  in sctp_inq_pop()
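Taken together, the inqueue.c hits show the receive-side walk these lines implement: chunk_end is set just past the current chunk (header plus length, rounded up to 4 bytes as SCTP requires) and clamped to the end of the skb before the next chunk is peeled off. A minimal userspace sketch of that walk over a plain byte buffer, assuming a simplified TLV header in place of struct sctp_chunkhdr:

    #include <arpa/inet.h>   /* ntohs */
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the kernel's struct sctp_chunkhdr. */
    struct tlv_hdr {
        uint8_t  type;
        uint8_t  flags;
        uint16_t length;   /* network byte order, includes this header */
    } __attribute__((packed));

    #define PAD4(len) (((len) + 3U) & ~3U)   /* same rounding as SCTP_PAD4 */

    /* Walk every chunk in buf[0..len): stop when a full header no longer
     * fits, and clamp a declared length that runs past the buffer end. */
    static void walk_chunks(const uint8_t *buf, size_t len)
    {
        const uint8_t *pos = buf, *end = buf + len;

        while (pos + sizeof(struct tlv_hdr) <= end) {
            const struct tlv_hdr *ch = (const struct tlv_hdr *)pos;
            uint16_t chunk_len = ntohs(ch->length);

            if (chunk_len < sizeof(*ch))
                break;                          /* malformed: too short */

            const uint8_t *chunk_end = pos + PAD4(chunk_len);
            if (chunk_end > end)
                chunk_end = end;                /* clamp, like line 217 above */

            printf("type %u, %u bytes\n", (unsigned)ch->type, (unsigned)chunk_len);
            pos = chunk_end;                    /* next chunk starts 4-byte aligned */
        }
    }

    int main(void)
    {
        /* One 6-byte chunk (padded to 8) followed by a 4-byte chunk. */
        uint8_t buf[] = { 1, 0, 0, 6, 0xaa, 0xbb, 0, 0,
                          2, 0, 0, 4 };
        walk_chunks(buf, sizeof(buf));
        return 0;
    }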
sm_make_chunk.c
  1067  chunk->chunk_end = skb_tail_pointer(chunk->skb);  in sctp_addto_param()
  1245  retval->chunk_end = skb_tail_pointer(retval->skb);  in sctp_make_pad()
  1355  retval->chunk_end = skb_tail_pointer(retval->skb);  in sctp_make_auth()
  1461  retval->chunk_end = ((__u8 *)chunk_hdr) + sizeof(*chunk_hdr);  in _sctp_make_chunk()
  1547  chunk->chunk_end = skb_tail_pointer(chunk->skb);  in sctp_addto_chunk()
  1571  chunk->chunk_end = skb_tail_pointer(chunk->skb);  in sctp_user_addto_chunk()
  2321  if (param.v != (void *)chunk->chunk_end)  in sctp_verify_init()
  3256  if (param.v != chunk->chunk_end)  in sctp_verify_asconf()
ulpevent.c
  728  skb_trim(skb, chunk->chunk_end - padding - skb->data);  in sctp_ulpevent_make_rcvmsg()
sm_sideeffect.c
  972  while (chunk->chunk_end > chunk->skb->data) {  in sctp_cmd_process_operr()
sm_statefuns.c
  193   return (void *)err == (void *)chunk->chunk_end;  in sctp_err_chunk_valid()
  3586  if ((void *)err != (void *)chunk->chunk_end)  in sctp_sf_operr_notify()
/openbmc/linux/tools/perf/tests/

code-reading.c
  83  unsigned char *chunk_end = chunk_start + bytes_read - 1;  in read_objdump_chunk() (local)
  86  while (chunk_start < chunk_end) {  in read_objdump_chunk()
  88  *chunk_start = *chunk_end;  in read_objdump_chunk()
  89  *chunk_end = tmp;  in read_objdump_chunk()
  91  chunk_end--;  in read_objdump_chunk()
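Here chunk_end is simply a pointer to the last byte of a chunk read back from objdump output, and the loop swaps bytes from both ends to reverse the chunk in place. A small self-contained sketch of that two-pointer reversal (the objdump plumbing around it is omitted):

    #include <stddef.h>

    /* Reverse buf[0..len) in place by swapping from both ends, mirroring
     * the chunk_start/chunk_end walk in read_objdump_chunk(). */
    static void reverse_bytes(unsigned char *buf, size_t len)
    {
        unsigned char *chunk_start = buf;
        unsigned char *chunk_end;

        if (len == 0)
            return;
        chunk_end = buf + len - 1;

        while (chunk_start < chunk_end) {
            unsigned char tmp = *chunk_start;
            *chunk_start = *chunk_end;
            *chunk_end = tmp;
            chunk_start++;
            chunk_end--;
        }
    }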
/openbmc/qemu/accel/tcg/

translate-all.c
  470  size_t chunk_end = tcg_ctx->gen_insn_end_off[insn];  in tb_gen_code() (local)
  471  if (chunk_end > chunk_start) {  in tb_gen_code()
  475  chunk_end - chunk_start);  in tb_gen_code()
  476  chunk_start = chunk_end;  in tb_gen_code()
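These hits come from tb_gen_code() walking the generated host code one guest instruction at a time: gen_insn_end_off[] records where each instruction's output ends, so consecutive entries bound each chunk. A hedged sketch of the same slicing pattern; the function and parameter names below are illustrative, not QEMU's API:

    #include <stddef.h>
    #include <stdio.h>

    /* Dump a code buffer in per-instruction chunks, given an array of end
     * offsets (one per instruction, assumed non-decreasing), in the spirit
     * of the gen_insn_end_off[] walk above. */
    static void dump_by_end_offsets(const unsigned char *code,
                                    const size_t *end_off, size_t n_insns)
    {
        size_t chunk_start = 0;

        for (size_t insn = 0; insn < n_insns; insn++) {
            size_t chunk_end = end_off[insn];

            if (chunk_end > chunk_start) {
                printf("insn %zu (%zu bytes):", insn, chunk_end - chunk_start);
                for (size_t i = chunk_start; i < chunk_end; i++)
                    printf(" %02x", code[i]);
                printf("\n");
                chunk_start = chunk_end;
            }
        }
    }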
/openbmc/qemu/migration/

rdma.c
  1264  uint8_t *chunk_start, uint8_t *chunk_end)  in qemu_rdma_register_and_get_keys() (argument)
  1287  uint64_t len = chunk_end - chunk_start;  in qemu_rdma_register_and_get_keys()
  2003  uint8_t *chunk_start, *chunk_end;  in qemu_rdma_write_one() (local)
  2040  chunk_end = ram_chunk_end(block, chunk + chunks);  in qemu_rdma_write_one()
  2128  chunk_start, chunk_end)) {  in qemu_rdma_write_one()
  2262  uint8_t *chunk_end;  in qemu_rdma_buffer_mergeable() (local)
  2274  chunk_end = ram_chunk_end(block, rdma->current_chunk);  in qemu_rdma_buffer_mergeable()
  2295  if ((host_addr + len) > chunk_end) {  in qemu_rdma_buffer_mergeable()
  3670  uint8_t *chunk_start, *chunk_end;  in rdma_registration_handle() (local)
  3712  chunk_end = ram_chunk_end(block, chunk + reg->chunks);  in rdma_registration_handle()
  [all …]
ram.c
  770  unsigned long chunk_end = QEMU_ALIGN_UP(start + npages, chunk_pages);  in migration_clear_memory_region_dirty_bitmap_range() (local)
  776  for (i = chunk_start; i < chunk_end; i += chunk_pages) {  in migration_clear_memory_region_dirty_bitmap_range()
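In ram.c the dirty-bitmap clear is performed in fixed-size chunks: the requested page range is widened to chunk boundaries and then walked one chunk at a time. A sketch of that rounding and loop, assuming the start is aligned down the same way the end is aligned up (the QEMU_ALIGN_UP-style macros below are local stand-ins):

    #include <stdio.h>

    #define ALIGN_DOWN(n, m) ((n) / (m) * (m))
    #define ALIGN_UP(n, m)   ALIGN_DOWN((n) + (m) - 1, (m))   /* like QEMU_ALIGN_UP */

    /* Visit every chunk_pages-sized chunk overlapping [start, start + npages).
     * Aligning the start down is an assumption about the elided context. */
    static void clear_dirty_chunks(unsigned long start, unsigned long npages,
                                   unsigned long chunk_pages)
    {
        unsigned long chunk_start = ALIGN_DOWN(start, chunk_pages);
        unsigned long chunk_end = ALIGN_UP(start + npages, chunk_pages);

        for (unsigned long i = chunk_start; i < chunk_end; i += chunk_pages)
            printf("clear pages [%lu, %lu)\n", i, i + chunk_pages);
    }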
/openbmc/linux/drivers/usb/cdns3/

cdns3-gadget.c
  485  int chunk_end;  in cdns3_wa2_descmiss_copy_data() (local)
  495  chunk_end = descmiss_priv_req->flags & REQUEST_INTERNAL_CH;  in cdns3_wa2_descmiss_copy_data()
  503  if (!chunk_end)  in cdns3_wa2_descmiss_copy_data()
/openbmc/linux/fs/btrfs/

tree-checker.c
  801  u64 chunk_end;  in btrfs_check_chunk_valid() (local)
  856  if (unlikely(check_add_overflow(logical, length, &chunk_end))) {  in btrfs_check_chunk_valid()
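tree-checker.c computes chunk_end = logical + length but rejects the chunk if the addition wraps; check_add_overflow() is the kernel's wrapper around the compiler overflow builtins. A userspace sketch of the same guard using __builtin_add_overflow directly:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Mirror the btrfs_check_chunk_valid() guard: refuse a chunk whose
     * [logical, logical + length) end cannot be represented in 64 bits. */
    static bool chunk_end_valid(uint64_t logical, uint64_t length,
                                uint64_t *chunk_end)
    {
        if (__builtin_add_overflow(logical, length, chunk_end)) {
            fprintf(stderr, "invalid chunk: start %llu len %llu overflows\n",
                    (unsigned long long)logical, (unsigned long long)length);
            return false;
        }
        return true;   /* *chunk_end is the exclusive end of the range */
    }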
/openbmc/linux/include/net/sctp/

structs.h
  609  __u8 *chunk_end;  (member)
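This is the struct sctp_chunk member the SCTP hits above operate on: chunk_end points just past the padded end of the current chunk inside the chunk's skb. A purely illustrative cursor struct (not the kernel's definition) showing what the pointers delimit:

    #include <stdint.h>

    /* Illustrative only -- in the kernel these pointers live in
     * struct sctp_chunk and reference the skb data area. */
    struct chunk_cursor {
        uint8_t *skb_data;    /* start of the received packet payload      */
        uint8_t *skb_tail;    /* one past the last valid byte              */
        uint8_t *chunk_hdr;   /* header of the chunk currently being read  */
        uint8_t *chunk_end;   /* chunk_hdr + PAD4(length), clamped to tail */
    };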