Searched refs:chunk (Results 1 – 25 of 445) sorted by relevance

/openbmc/linux/net/sctp/
inqueue.c
124 if (chunk->head_skb == chunk->skb) { in sctp_inq_pop()
125 chunk->skb = skb_shinfo(chunk->skb)->frag_list; in sctp_inq_pop()
129 chunk->skb = chunk->skb->next; in sctp_inq_pop()
134 chunk->skb = chunk->head_skb; in sctp_inq_pop()
141 skb_pull(chunk->skb, chunk->chunk_end - chunk->skb->data); in sctp_inq_pop()
163 chunk->head_skb = chunk->skb; in sctp_inq_pop()
166 if (chunk->head_skb && chunk->skb->data_len == chunk->skb->len) in sctp_inq_pop()
167 chunk->skb = skb_shinfo(chunk->skb)->frag_list; in sctp_inq_pop()
195 cb->chunk = head_cb->chunk; in sctp_inq_pop()
211 chunk->chunk_end = skb_tail_pointer(chunk->skb); in sctp_inq_pop()
[all …]
chunk.c
60 struct sctp_chunk *chunk; in sctp_datamsg_free() local
66 sctp_chunk_free(chunk); in sctp_datamsg_free()
86 sctp_chunk_put(chunk); in sctp_datamsg_destroy()
90 asoc = chunk->asoc; in sctp_datamsg_destroy()
111 sctp_chunk_put(chunk); in sctp_datamsg_destroy()
135 chunk->msg = msg; in sctp_datamsg_assign()
264 if (!chunk) { in sctp_datamsg_from_user()
273 chunk->shkey = shkey; in sctp_datamsg_from_user()
276 __skb_pull(chunk->skb, (__u8 *)chunk->chunk_hdr - in sctp_datamsg_from_user()
308 if (!chunk->has_tsn && in sctp_chunk_abandoned()
[all …]
output.c
124 if (chunk) in sctp_packet_config()
187 packet, packet->size, chunk, chunk->skb ? chunk->skb->len : -1); in sctp_packet_transmit_chunk()
203 chunk); in sctp_packet_transmit_chunk()
261 if (!chunk->auth) in sctp_packet_bundle_auth()
362 if (chunk->asoc) in __sctp_packet_append_chunk()
492 padding = SCTP_PAD4(chunk->skb->len) - chunk->skb->len; in sctp_packet_pack()
500 skb_put_data(nskb, chunk->skb->data, chunk->skb->len); in sctp_packet_pack()
503 chunk, in sctp_packet_pack()
506 chunk->has_tsn ? ntohl(chunk->subh.data_hdr->tsn) : 0, in sctp_packet_pack()
507 ntohs(chunk->chunk_hdr->length), chunk->skb->len, in sctp_packet_pack()
[all …]
outqueue.c
230 sctp_chunk_free(chunk); in __sctp_outq_teardown()
239 sctp_chunk_free(chunk); in __sctp_outq_teardown()
248 sctp_chunk_free(chunk); in __sctp_outq_teardown()
257 sctp_chunk_free(chunk); in __sctp_outq_teardown()
263 sctp_chunk_free(chunk); in __sctp_outq_teardown()
286 chunk && chunk->chunk_hdr ? in sctp_outq_tail()
295 __func__, q, chunk, chunk && chunk->chunk_hdr ? in sctp_outq_tail()
499 if (chunk->transport) in sctp_retransmit_mark()
1104 __func__, ctx->q, chunk, chunk && chunk->chunk_hdr ? in sctp_outq_flush_data()
1107 chunk->skb ? chunk->skb->head : NULL, chunk->skb ? in sctp_outq_flush_data()
[all …]
sm_statefuns.c
375 (struct sctp_init_chunk *)chunk->chunk_hdr, chunk, in sctp_sf_do_5_1B_init()
404 chunk->subh.init_hdr = (struct sctp_inithdr *)chunk->skb->data; in sctp_sf_do_5_1B_init()
425 if (!sctp_process_init(new_asoc, chunk, sctp_source(chunk), in sctp_sf_do_5_1B_init()
543 chunk->subh.init_hdr = (struct sctp_inithdr *)chunk->skb->data; in sctp_sf_do_5_1C_ack()
548 (struct sctp_init_chunk *)chunk->chunk_hdr, chunk, in sctp_sf_do_5_1C_ack()
748 if (!pskb_pull(chunk->skb, ntohs(chunk->chunk_hdr->length) - in sctp_sf_do_5_1D_ce()
934 chunk->head_skb ?: chunk->skb)) in sctp_sf_do_5_1E_ca()
1559 (struct sctp_init_chunk *)chunk->chunk_hdr, chunk, in sctp_sf_do_unexpected_init()
1611 if (!sctp_process_init(new_asoc, chunk, sctp_source(chunk), in sctp_sf_do_unexpected_init()
2226 if (!pskb_pull(chunk->skb, ntohs(chunk->chunk_hdr->length) - in sctp_sf_do_5_2_4_dupcook()
[all …]
sm_make_chunk.c
594 if (chunk) in sctp_make_cookie_echo()
635 if (retval && chunk && chunk->transport) in sctp_make_cookie_ack()
693 if (chunk) in sctp_make_cwr()
872 if (chunk) in sctp_make_shutdown()
946 if (chunk && chunk->chunk_hdr && in sctp_make_abort()
1000 if (chunk) in sctp_make_abort_no_data()
1067 chunk->chunk_end = skb_tail_pointer(chunk->skb); in sctp_addto_param()
1217 if (chunk) in sctp_make_heartbeat_ack()
1274 if (chunk) in sctp_make_op_error_space()
1547 chunk->chunk_end = skb_tail_pointer(chunk->skb); in sctp_addto_chunk()
[all …]
/openbmc/linux/net/sunrpc/xprtrdma/
svc_rdma_pcl.c
24 kfree(chunk); in pcl_free()
32 chunk = kmalloc(struct_size(chunk, ch_segments, segcount), GFP_KERNEL); in pcl_alloc_chunk()
33 if (!chunk) in pcl_alloc_chunk()
40 return chunk; in pcl_alloc_chunk()
74 segment = &chunk->ch_segments[chunk->ch_segcount]; in pcl_set_read_segment()
121 if (!chunk) in pcl_alloc_call()
174 if (!chunk) { in pcl_alloc_read()
176 if (!chunk) in pcl_alloc_read()
214 if (!chunk) in pcl_alloc_write()
277 if (!chunk || !chunk->ch_payload_length) in pcl_process_nonpayloads()
[all …]
/openbmc/linux/mm/
percpu-vm.c
17 WARN_ON(chunk->immutable); in pcpu_chunk_page()
229 chunk); in pcpu_map_pages()
339 if (!chunk) in pcpu_create_chunk()
349 chunk->data = vms; in pcpu_create_chunk()
355 return chunk; in pcpu_create_chunk()
360 if (!chunk) in pcpu_destroy_chunk()
366 if (chunk->data) in pcpu_destroy_chunk()
368 pcpu_free_chunk(chunk); in pcpu_destroy_chunk()
397 if (chunk == pcpu_first_chunk || chunk == pcpu_reserved_chunk) in pcpu_should_reclaim_chunk()
406 return ((chunk->isolated && chunk->nr_empty_pop_pages) || in pcpu_should_reclaim_chunk()
[all …]
percpu.c
219 if (!chunk) in pcpu_addr_in_chunk()
222 start_addr = chunk->base_addr + chunk->start_offset; in pcpu_addr_in_chunk()
601 if (chunk != pcpu_reserved_chunk && !chunk->isolated) in pcpu_update_empty_pages()
1363 if (!chunk) in pcpu_alloc_first_chunk()
1403 bitmap_fill(chunk->populated, chunk->nr_pages); in pcpu_alloc_first_chunk()
1404 chunk->nr_populated = chunk->nr_pages; in pcpu_alloc_first_chunk()
1405 chunk->nr_empty_pop_pages = chunk->nr_pages; in pcpu_alloc_first_chunk()
1444 if (!chunk) in pcpu_alloc_chunk()
1479 chunk->free_bytes = chunk->nr_pages * PAGE_SIZE; in pcpu_alloc_chunk()
1651 if (likely(chunk && chunk->obj_cgroups)) { in pcpu_memcg_post_alloc_hook()
[all …]
percpu-km.c
56 struct pcpu_chunk *chunk; in pcpu_create_chunk() local
61 chunk = pcpu_alloc_chunk(gfp); in pcpu_create_chunk()
62 if (!chunk) in pcpu_create_chunk()
67 pcpu_free_chunk(chunk); in pcpu_create_chunk()
74 chunk->data = pages; in pcpu_create_chunk()
75 chunk->base_addr = page_address(pages); in pcpu_create_chunk()
78 pcpu_chunk_populated(chunk, 0, nr_pages); in pcpu_create_chunk()
84 return chunk; in pcpu_create_chunk()
91 if (!chunk) in pcpu_destroy_chunk()
97 if (chunk->data) in pcpu_destroy_chunk()
[all …]
percpu-stats.c
35 struct pcpu_chunk *chunk; in find_max_nr_alloc() local
55 struct pcpu_block_md *chunk_md = &chunk->chunk_md; in chunk_map_stats()
69 last_alloc = find_last_bit(chunk->alloc_map, in chunk_map_stats()
70 pcpu_chunk_map_bits(chunk) - in chunk_map_stats()
88 if (test_bit(start, chunk->alloc_map)) { in chunk_map_stats()
121 P("nr_alloc", chunk->nr_alloc); in chunk_map_stats()
122 P("max_alloc_size", chunk->max_alloc_size); in chunk_map_stats()
125 P("free_bytes", chunk->free_bytes); in chunk_map_stats()
137 struct pcpu_chunk *chunk; in percpu_stats_show() local
207 if (chunk == pcpu_first_chunk) in percpu_stats_show()
[all …]
/openbmc/linux/drivers/s390/cio/
itcw.c
183 void *chunk; in itcw_init() local
197 return chunk; in itcw_init()
198 itcw = chunk; in itcw_init()
213 return chunk; in itcw_init()
221 return chunk; in itcw_init()
231 return chunk; in itcw_init()
239 return chunk; in itcw_init()
245 return chunk; in itcw_init()
252 return chunk; in itcw_init()
259 return chunk; in itcw_init()
[all …]
/openbmc/linux/kernel/trace/
pid_list.c
25 chunk->next = NULL; in get_lower_chunk()
33 return chunk; in get_lower_chunk()
57 return chunk; in get_upper_chunk()
357 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in pid_list_refill_irq()
358 if (!chunk) in pid_list_refill_irq()
368 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in pid_list_refill_irq()
369 if (!chunk) in pid_list_refill_irq()
430 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in trace_pid_list_alloc()
431 if (!chunk) in trace_pid_list_alloc()
441 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in trace_pid_list_alloc()
[all …]
/openbmc/linux/drivers/net/ethernet/mellanox/mlx4/
icm.c
92 kfree(chunk); in mlx4_free_icm()
160 if (!chunk) { in mlx4_alloc_icm()
161 chunk = kzalloc_node(sizeof(*chunk), in mlx4_alloc_icm()
165 if (!chunk) { in mlx4_alloc_icm()
166 chunk = kzalloc(sizeof(*chunk), in mlx4_alloc_icm()
169 if (!chunk) in mlx4_alloc_icm()
188 &chunk->buf[chunk->npages], in mlx4_alloc_icm()
191 ret = mlx4_alloc_icm_pages(&chunk->sg[chunk->npages], in mlx4_alloc_icm()
205 ++chunk->nsg; in mlx4_alloc_icm()
208 chunk->sg, chunk->npages, in mlx4_alloc_icm()
[all …]
icm.h
74 struct mlx4_icm_chunk *chunk; member
100 iter->chunk = list_empty(&icm->chunk_list) ? in mlx4_icm_first()
108 return !iter->chunk; in mlx4_icm_last()
113 if (++iter->page_idx >= iter->chunk->nsg) { in mlx4_icm_next()
115 iter->chunk = NULL; in mlx4_icm_next()
119 iter->chunk = list_entry(iter->chunk->list.next, in mlx4_icm_next()
127 if (iter->chunk->coherent) in mlx4_icm_addr()
128 return iter->chunk->buf[iter->page_idx].dma_addr; in mlx4_icm_addr()
135 if (iter->chunk->coherent) in mlx4_icm_size()
136 return iter->chunk->buf[iter->page_idx].size; in mlx4_icm_size()
[all …]
/openbmc/linux/kernel/
audit_tree.c
136 kfree(chunk); in free_chunk()
194 if (!chunk) in alloc_chunk()
205 return chunk; in alloc_chunk()
286 audit_mark(mark)->chunk = chunk; in replace_mark_chunk()
287 if (chunk) in replace_mark_chunk()
402 if (!chunk) { in create_chunk()
410 kfree(chunk); in create_chunk()
417 kfree(chunk); in create_chunk()
489 if (!chunk) { in tag_chunk()
503 p = &chunk->owners[chunk->count - 1]; in tag_chunk()
[all …]
/openbmc/linux/lib/
genalloc.c
42 return chunk->end_addr - chunk->start_addr + 1; in chunk_size()
196 chunk->phys_addr = phys; in gen_pool_add_owner()
197 chunk->start_addr = virt; in gen_pool_add_owner()
199 chunk->owner = owner; in gen_pool_add_owner()
224 if (addr >= chunk->start_addr && addr <= chunk->end_addr) { in gen_pool_virt_to_phys()
225 paddr = chunk->phys_addr + (addr - chunk->start_addr); in gen_pool_virt_to_phys()
257 vfree(chunk); in gen_pool_destroy()
320 *owner = chunk->owner; in gen_pool_alloc_algo_owner()
504 if (addr >= chunk->start_addr && addr <= chunk->end_addr) { in gen_pool_free_owner()
512 *owner = chunk->owner; in gen_pool_free_owner()
[all …]
/openbmc/u-boot/fs/yaffs2/
yaffs_bitmap.c
35 chunk < 0 || chunk >= dev->param.chunks_per_block) { in yaffs_verify_chunk_bit_id()
38 blk, chunk); in yaffs_verify_chunk_bit_id()
50 void yaffs_clear_chunk_bit(struct yaffs_dev *dev, int blk, int chunk) in yaffs_clear_chunk_bit() argument
54 yaffs_verify_chunk_bit_id(dev, blk, chunk); in yaffs_clear_chunk_bit()
55 blk_bits[chunk / 8] &= ~(1 << (chunk & 7)); in yaffs_clear_chunk_bit()
58 void yaffs_set_chunk_bit(struct yaffs_dev *dev, int blk, int chunk) in yaffs_set_chunk_bit() argument
62 yaffs_verify_chunk_bit_id(dev, blk, chunk); in yaffs_set_chunk_bit()
63 blk_bits[chunk / 8] |= (1 << (chunk & 7)); in yaffs_set_chunk_bit()
66 int yaffs_check_chunk_bit(struct yaffs_dev *dev, int blk, int chunk) in yaffs_check_chunk_bit() argument
70 yaffs_verify_chunk_bit_id(dev, blk, chunk); in yaffs_check_chunk_bit()
[all …]
/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_ring_mux.c
104 if (chunk->sync_seq > last_seq && chunk->sync_seq <= seq) { in amdgpu_mux_resubmit_chunks()
106 chunk->sync_seq, in amdgpu_mux_resubmit_chunks()
108 if (chunk->sync_seq == in amdgpu_mux_resubmit_chunks()
119 chunk->start, in amdgpu_mux_resubmit_chunks()
120 chunk->end); in amdgpu_mux_resubmit_chunks()
185 list_del(&chunk->entry); in amdgpu_ring_mux_fini()
450 if (!chunk) { in amdgpu_ring_mux_start_ib()
455 chunk->start = ring->wptr; in amdgpu_ring_mux_start_ib()
499 if (!chunk) { in amdgpu_ring_mux_ib_mark_offset()
532 if (!chunk) { in amdgpu_ring_mux_end_ib()
[all …]
/openbmc/linux/drivers/infiniband/hw/irdma/
pble.c
18 struct irdma_chunk *chunk; in irdma_destroy_pble_prm() local
23 list_del(&chunk->list); in irdma_destroy_pble_prm()
27 kfree(chunk->chunkmem.va); in irdma_destroy_pble_prm()
90 struct irdma_chunk *chunk = info->chunk; in add_sd_direct() local
112 chunk->size, chunk->size, chunk->vaddr, chunk->fpm_addr); in add_sd_direct()
145 struct irdma_chunk *chunk = info->chunk; in add_bp_pages() local
160 addr = chunk->vaddr; in add_bp_pages()
230 chunk = chunkmem.va; in add_pble_prm()
233 chunk->dev = dev; in add_pble_prm()
240 info.chunk = chunk; in add_pble_prm()
[all …]
/openbmc/linux/drivers/gpu/drm/nouveau/
nouveau_dmem.c
98 return chunk->drm; in page_to_drm()
120 chunk->callocated--; in nouveau_dmem_page_free()
236 chunk = kzalloc(sizeof(*chunk), GFP_KERNEL); in nouveau_dmem_chunk_alloc()
237 if (chunk == NULL) { in nouveau_dmem_chunk_alloc()
250 chunk->drm = drm; in nouveau_dmem_chunk_alloc()
260 &chunk->bo); in nouveau_dmem_chunk_alloc()
286 chunk->callocated++; in nouveau_dmem_chunk_alloc()
301 kfree(chunk); in nouveau_dmem_chunk_alloc()
318 chunk->callocated++; in nouveau_dmem_page_alloc_locked()
431 list_del(&chunk->list); in nouveau_dmem_fini()
[all …]
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_icm_pool.c
84 return (u64)offset * chunk->seg; in mlx5dr_icm_pool_get_chunk_mr_addr()
96 return (u64)chunk->buddy_mem->icm_mr->icm_start_addr + size * chunk->seg; in mlx5dr_icm_pool_get_chunk_icm_addr()
102 chunk->buddy_mem->pool->icm_type); in mlx5dr_icm_pool_get_chunk_byte_size()
221 memset(chunk->ste_arr, 0, in dr_icm_chunk_ste_init()
329 chunk->seg = seg; in dr_icm_chunk_init()
330 chunk->size = chunk_size; in dr_icm_chunk_init()
331 chunk->buddy_mem = buddy_mem_pool; in dr_icm_chunk_init()
453 if (!chunk) in mlx5dr_icm_alloc_chunk()
464 return chunk; in mlx5dr_icm_alloc_chunk()
483 hot_chunk->seg = chunk->seg; in mlx5dr_icm_free_chunk()
[all …]
/openbmc/linux/include/net/sctp/
sm.h
177 const struct sctp_chunk *chunk);
179 const struct sctp_chunk *chunk);
182 const struct sctp_chunk *chunk);
200 const struct sctp_chunk *chunk);
212 const struct sctp_chunk *chunk,
217 const struct sctp_chunk *chunk,
221 const struct sctp_chunk *chunk);
224 const struct sctp_chunk *chunk);
271 struct sctp_chunk *chunk,
321 struct sctp_chunk *chunk,
[all …]
/openbmc/linux/drivers/infiniband/hw/mthca/
mthca_memfree.c
69 dma_unmap_sg(&dev->pdev->dev, chunk->mem, chunk->npages, in mthca_free_icm_pages()
101 kfree(chunk); in mthca_free_icm()
158 if (!chunk) { in mthca_alloc_icm()
159 chunk = kmalloc(sizeof *chunk, in mthca_alloc_icm()
161 if (!chunk) in mthca_alloc_icm()
175 &chunk->mem[chunk->npages], in mthca_alloc_icm()
178 ret = mthca_alloc_icm_pages(&chunk->mem[chunk->npages], in mthca_alloc_icm()
185 ++chunk->nsg; in mthca_alloc_icm()
187 chunk->nsg = in mthca_alloc_icm()
197 chunk = NULL; in mthca_alloc_icm()
[all …]
/openbmc/linux/drivers/gpu/drm/panel/
panel-samsung-s6e63m0-dsi.c
44 int chunk; in s6e63m0_dsi_dcs_write() local
54 chunk = remain; in s6e63m0_dsi_dcs_write()
57 if (chunk > S6E63M0_DSI_MAX_CHUNK) in s6e63m0_dsi_dcs_write()
58 chunk = S6E63M0_DSI_MAX_CHUNK; in s6e63m0_dsi_dcs_write()
64 cmdwritten += chunk; in s6e63m0_dsi_dcs_write()
65 seqp += chunk; in s6e63m0_dsi_dcs_write()
68 chunk = remain - cmdwritten; in s6e63m0_dsi_dcs_write()
69 if (chunk > S6E63M0_DSI_MAX_CHUNK) in s6e63m0_dsi_dcs_write()
70 chunk = S6E63M0_DSI_MAX_CHUNK; in s6e63m0_dsi_dcs_write()
82 cmdwritten += chunk; in s6e63m0_dsi_dcs_write()
[all …]