Searched refs:seqno (Results 1 – 25 of 224) sorted by relevance


/openbmc/linux/drivers/gpu/drm/v3d/
v3d_trace.h
39 uint64_t seqno,
54 __entry->seqno = seqno;
62 __entry->seqno,
69 uint64_t seqno),
79 __entry->seqno = seqno;
84 __entry->seqno)
99 __entry->seqno = seqno;
119 __entry->seqno = seqno;
139 __entry->seqno = seqno;
178 __entry->seqno = seqno;
[all …]
/openbmc/linux/drivers/gpu/drm/vc4/
vc4_trace.h
23 __field(u64, seqno)
29 __entry->seqno = seqno;
39 TP_ARGS(dev, seqno),
48 __entry->seqno = seqno;
82 uint64_t seqno,
97 __entry->seqno = seqno;
105 __entry->seqno,
112 uint64_t seqno),
122 __entry->seqno = seqno;
127 __entry->seqno)
[all …]
vc4_gem.c
395 if (vc4->finished_seqno >= seqno) in vc4_wait_for_seqno()
562 bo->seqno = seqno; in vc4_update_bo_seqnos()
569 bo->seqno = seqno; in vc4_update_bo_seqnos()
574 bo->write_seqno = seqno; in vc4_update_bo_seqnos()
687 uint64_t seqno; in vc4_queue_submit() local
698 seqno = ++vc4->emit_seqno; in vc4_queue_submit()
699 exec->seqno = seqno; in vc4_queue_submit()
703 fence->seqno = exec->seqno; in vc4_queue_submit()
1020 cb->seqno = seqno; in vc4_queue_seqno_cb()
1045 uint64_t seqno, in vc4_wait_for_seqno_ioctl_helper() argument
[all …]
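
The vc4_gem.c hits above show a common GPU-driver pattern: each submitted job takes the next value of a monotonically increasing emit_seqno, and completion is detected by comparing the driver's finished_seqno against the job's number. A minimal userspace sketch of that pattern; struct gpu_state, emit_job() and job_done() are hypothetical names, not the vc4 API:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the driver state: one counter for jobs that
 * have been emitted and one for jobs the "hardware" has finished. */
struct gpu_state {
	uint64_t emit_seqno;     /* last seqno handed out */
	uint64_t finished_seqno; /* last seqno completed */
};

/* Emit a job: assign it the next sequence number, as vc4_queue_submit()
 * does with ++vc4->emit_seqno. */
static uint64_t emit_job(struct gpu_state *s)
{
	return ++s->emit_seqno;
}

/* A job is done once finished_seqno has caught up with its seqno, the
 * check vc4_wait_for_seqno() makes before deciding to sleep. */
static bool job_done(const struct gpu_state *s, uint64_t seqno)
{
	return s->finished_seqno >= seqno;
}

int main(void)
{
	struct gpu_state s = { 0 };
	uint64_t a = emit_job(&s), b = emit_job(&s);

	s.finished_seqno = a;    /* pretend the hardware retired job a */
	printf("a done: %d, b done: %d\n", job_done(&s, a), job_done(&s, b));
	return 0;
}
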
/openbmc/linux/drivers/media/pci/saa7164/
saa7164-cmd.c
22 ret = dev->cmds[i].seqno; in saa7164_cmd_alloc_seqno()
35 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_free_seqno()
47 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_timeout_seqno()
59 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_timeout_get()
258 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_wait()
269 __func__, seqno, dev->cmds[seqno].signalled); in saa7164_cmd_wait()
287 __func__, seqno, r, in saa7164_cmd_wait()
293 __func__, seqno); in saa7164_cmd_wait()
308 dev->cmds[i].seqno, in saa7164_cmd_signal()
434 if (presponse_t->seqno != pcommand_t->seqno) { in saa7164_cmd_send()
[all …]
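
In saa7164-cmd.c the seqno doubles as an index into a fixed table of command slots, and each lookup re-checks dev->cmds[seqno].seqno == seqno to confirm the slot still belongs to that command. A standalone sketch of that slot-validation idea; NUM_CMDS and the struct layout here are illustrative, not the driver's:

#include <stdbool.h>
#include <stdint.h>

#define NUM_CMDS 16

/* Illustrative command slot: a slot is valid when its stored seqno
 * matches its own index, mirroring the dev->cmds[seqno].seqno == seqno
 * checks in saa7164-cmd.c. */
struct cmd_slot {
	uint8_t seqno;
	bool inuse;
};

static struct cmd_slot cmds[NUM_CMDS];

/* Allocate a free slot and return its seqno, or -1 if none are free. */
static int cmd_alloc_seqno(void)
{
	for (int i = 0; i < NUM_CMDS; i++) {
		if (!cmds[i].inuse) {
			cmds[i].inuse = true;
			cmds[i].seqno = (uint8_t)i;
			return i;
		}
	}
	return -1;
}

/* Release a slot only if the seqno still identifies it. */
static bool cmd_free_seqno(int seqno)
{
	if (seqno < 0 || seqno >= NUM_CMDS)
		return false;
	if (!cmds[seqno].inuse || cmds[seqno].seqno != seqno)
		return false;
	cmds[seqno].inuse = false;
	return true;
}

int main(void)
{
	int s = cmd_alloc_seqno();

	return cmd_free_seqno(s) ? 0 : 1;
}
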
/openbmc/linux/drivers/gpu/drm/virtio/
virtgpu_trace.h
12 TP_PROTO(struct virtqueue *vq, struct virtio_gpu_ctrl_hdr *hdr, u32 seqno),
13 TP_ARGS(vq, hdr, seqno),
23 __field(u32, seqno)
34 __entry->seqno = seqno;
39 __entry->ctx_id, __entry->num_free, __entry->seqno)
43 TP_PROTO(struct virtqueue *vq, struct virtio_gpu_ctrl_hdr *hdr, u32 seqno),
44 TP_ARGS(vq, hdr, seqno)
48 TP_PROTO(struct virtqueue *vq, struct virtio_gpu_ctrl_hdr *hdr, u32 seqno),
49 TP_ARGS(vq, hdr, seqno)
/openbmc/linux/drivers/dma-buf/
dma-fence-chain.c
90 int dma_fence_chain_find_seqno(struct dma_fence **pfence, uint64_t seqno) in dma_fence_chain_find_seqno() argument
94 if (!seqno) in dma_fence_chain_find_seqno()
98 if (!chain || chain->base.seqno < seqno) in dma_fence_chain_find_seqno()
103 to_dma_fence_chain(*pfence)->prev_seqno < seqno) in dma_fence_chain_find_seqno()
244 uint64_t seqno) in dma_fence_chain_init() argument
255 if (prev_chain && __dma_fence_is_later(seqno, prev->seqno, prev->ops)) { in dma_fence_chain_init()
257 chain->prev_seqno = prev->seqno; in dma_fence_chain_init()
262 seqno = max(prev->seqno, seqno); in dma_fence_chain_init()
266 &chain->lock, context, seqno); in dma_fence_chain_init()
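
The dma-fence-chain matches show the two halves of the chain API: dma_fence_chain_init() keeps seqno non-decreasing along the chain (seqno = max(prev->seqno, seqno)) and records prev_seqno, while dma_fence_chain_find_seqno() walks back until it reaches the node whose (prev_seqno, seqno] range covers the request. A simplified userspace walk over such a chain, using a hypothetical node type rather than struct dma_fence_chain:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical chain node: each node covers the range (prev_seqno, seqno]. */
struct chain_node {
	uint64_t seqno;       /* seqno this node signals */
	uint64_t prev_seqno;  /* seqno of the previous node (0 for the first) */
	struct chain_node *prev;
};

/* Return the oldest node whose range still covers the requested seqno,
 * or NULL if the request is newer than the whole chain — the same walk
 * dma_fence_chain_find_seqno() performs over dma_fence_chain links. */
static struct chain_node *chain_find_seqno(struct chain_node *head, uint64_t seqno)
{
	if (!head || head->seqno < seqno)
		return NULL;
	while (head->prev && head->prev_seqno >= seqno)
		head = head->prev;
	return head;
}

int main(void)
{
	struct chain_node n1 = { .seqno = 2, .prev_seqno = 0, .prev = NULL };
	struct chain_node n2 = { .seqno = 5, .prev_seqno = 2, .prev = &n1 };
	struct chain_node *hit = chain_find_seqno(&n2, 2);

	printf("seqno 2 is covered by the node signalling %llu\n",
	       hit ? (unsigned long long)hit->seqno : 0ULL);
	return 0;
}
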
st-dma-fence-chain.c
63 u64 seqno) in mock_chain() argument
72 seqno); in mock_chain()
281 fence->seqno); in find_signaled()
326 fence ? fence->seqno : 0); in find_out_of_order()
363 fence->seqno, in find_gap()
401 int seqno; in __find_race() local
405 err = dma_fence_chain_find_seqno(&fence, seqno); in __find_race()
408 seqno); in __find_race()
419 if (fence->seqno == seqno) { in __find_race()
423 seqno); in __find_race()
[all …]
/openbmc/linux/drivers/gpu/drm/i915/selftests/
i915_syncmap.c
154 if (__sync_seqno(leaf)[idx] != seqno) { in check_seqno()
167 err = i915_syncmap_set(sync, context, seqno); in check_one()
195 context, seqno); in check_one()
242 err = i915_syncmap_set(sync, context, seqno); in check_leaf()
264 context, seqno); in check_leaf()
548 u32 seqno; in igt_syncmap_random() local
570 seqno = 0; in igt_syncmap_random()
575 u32 last_seqno = seqno; in igt_syncmap_random()
578 seqno = prandom_u32_state(&prng); in igt_syncmap_random()
579 expect = seqno_later(last_seqno, seqno); in igt_syncmap_random()
[all …]
igt_spinner.c
89 if (!spin->seqno) { in igt_spinner_pin()
94 spin->seqno = memset(vaddr, 0xff, PAGE_SIZE); in igt_spinner_pin()
178 *batch++ = rq->fence.seqno; in igt_spinner_create_request()
219 u32 *seqno = spin->seqno + seqno_offset(rq->fence.context); in hws_seqno() local
221 return READ_ONCE(*seqno); in hws_seqno()
243 if (spin->seqno) { in igt_spinner_fini()
256 rq->fence.seqno), in igt_wait_for_spinner()
259 rq->fence.seqno), in igt_wait_for_spinner()
/openbmc/linux/drivers/gpu/drm/radeon/
radeon_trace.h
127 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
129 TP_ARGS(dev, ring, seqno),
134 __field(u32, seqno)
140 __entry->seqno = seqno;
144 __entry->dev, __entry->ring, __entry->seqno)
149 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
151 TP_ARGS(dev, ring, seqno)
156 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
158 TP_ARGS(dev, ring, seqno)
163 TP_PROTO(struct drm_device *dev, int ring, u32 seqno),
[all …]
/openbmc/linux/drivers/gpu/drm/i915/gt/
intel_timeline.h
46 u64 context, u32 seqno) in __intel_timeline_sync_set() argument
48 return i915_syncmap_set(&tl->sync, context, seqno); in __intel_timeline_sync_set()
54 return __intel_timeline_sync_set(tl, fence->context, fence->seqno); in intel_timeline_sync_set()
58 u64 context, u32 seqno) in __intel_timeline_sync_is_later() argument
60 return i915_syncmap_is_later(&tl->sync, context, seqno); in __intel_timeline_sync_is_later()
66 return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno); in intel_timeline_sync_is_later()
74 u32 *seqno);
selftest_timeline.c
199 u32 seqno; member
697 u32 seqno[2]; in live_hwsp_wrap() local
708 tl->seqno = -4u; in live_hwsp_wrap()
745 GEM_BUG_ON(seqno[1] >= seqno[0]); in live_hwsp_wrap()
760 seqno[0], seqno[1]); in live_hwsp_wrap()
792 *cs++ = seqno; in emit_read_hwsp()
960 u32 seqno = rq->fence.seqno; in wrap_timeline() local
962 while (tl->seqno >= seqno) { /* Cause a wrap */ in wrap_timeline()
1204 tl->seqno = -2u; in live_hwsp_rollover_kernel()
1227 GEM_BUG_ON(rq[2]->fence.seqno > rq[0]->fence.seqno); in live_hwsp_rollover_kernel()
[all …]
intel_tlb.c
112 static bool tlb_seqno_passed(const struct intel_gt *gt, u32 seqno) in tlb_seqno_passed() argument
117 return (s32)(cur - ALIGN(seqno, 2)) > 0; in tlb_seqno_passed()
120 void intel_gt_invalidate_tlb_full(struct intel_gt *gt, u32 seqno) in intel_gt_invalidate_tlb_full() argument
130 if (tlb_seqno_passed(gt, seqno)) in intel_gt_invalidate_tlb_full()
135 if (tlb_seqno_passed(gt, seqno)) in intel_gt_invalidate_tlb_full()
140 write_seqcount_invalidate(&gt->tlb.seqno); in intel_gt_invalidate_tlb_full()
149 seqcount_mutex_init(&gt->tlb.seqno, &gt->tlb.invalidate_lock); in intel_gt_init_tlb()
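
tlb_seqno_passed() above, like the VMW_FENCE_WRAP test in vmwgfx_irq.c further down, relies on the usual trick for sequence numbers that wrap: subtract in the unsigned type and reinterpret the difference as signed, so values less than half the range apart still compare correctly across the wrap point. A self-contained illustration, assuming plain 32-bit seqnos (the real helper also aligns the seqno before comparing):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Wrap-safe "current is at or after target": the unsigned subtraction
 * wraps modulo 2^32, and the signed reinterpretation makes differences
 * smaller than half the range compare correctly — the same idea as the
 * (s32)(cur - seqno) > 0 test in tlb_seqno_passed(). */
static bool seqno_passed(uint32_t current, uint32_t target)
{
	return (int32_t)(current - target) >= 0;
}

int main(void)
{
	/* Near the wrap point: 3 really is "after" 0xfffffffe. */
	printf("%d\n", seqno_passed(3u, 0xfffffffeu));   /* 1 */
	printf("%d\n", seqno_passed(0xfffffffeu, 3u));   /* 0 */
	return 0;
}
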
/openbmc/linux/net/dccp/
ackvec.c
59 avr->avr_ack_seqno = seqno; in dccp_ackvec_update_records()
132 u64 seqno, enum dccp_ackvec_states state) in dccp_ackvec_update_old() argument
163 (unsigned long long)seqno, state); in dccp_ackvec_update_old()
197 u64 seqno, enum dccp_ackvec_states state) in dccp_ackvec_add_new() argument
237 av->av_buf_ackno = seqno; in dccp_ackvec_add_new()
250 u64 seqno = DCCP_SKB_CB(skb)->dccpd_seq; in dccp_ackvec_input() local
254 dccp_ackvec_add_new(av, 1, seqno, state); in dccp_ackvec_input()
255 av->av_tail_ackno = seqno; in dccp_ackvec_input()
266 av->av_buf_ackno = seqno; in dccp_ackvec_input()
269 dccp_ackvec_add_new(av, num_packets, seqno, state); in dccp_ackvec_input()
[all …]
/openbmc/linux/include/trace/events/
rpcgss.h
322 __field(u32, seqno)
403 __field(u32, seqno)
434 __field(u32, seqno)
492 u32 seqno
495 TP_ARGS(rqstp, seqno),
499 __field(u32, seqno)
504 __entry->seqno = seqno;
515 u32 seqno \
525 u32 seqno,
534 __field(u32, seqno)
[all …]
dma_fence.h
22 __field(unsigned int, seqno)
29 __entry->seqno = fence->seqno;
34 __entry->seqno)
/openbmc/linux/drivers/net/wireless/mediatek/mt76/
agg-rx.c
84 mt76_rx_aggr_release_frames(tid, frames, status->seqno); in mt76_rx_aggr_check_release()
126 u16 seqno; in mt76_rx_aggr_check_ctl() local
135 seqno = IEEE80211_SEQ_TO_SN(le16_to_cpu(bar->start_seq_num)); in mt76_rx_aggr_check_ctl()
142 mt76_rx_aggr_release_frames(tid, frames, seqno); in mt76_rx_aggr_check_ctl()
155 u16 seqno, head, size, idx; in mt76_rx_aggr_reorder() local
187 seqno = status->seqno; in mt76_rx_aggr_reorder()
189 sn_less = ieee80211_sn_less(seqno, head); in mt76_rx_aggr_reorder()
204 if (seqno == head) { in mt76_rx_aggr_reorder()
217 if (!ieee80211_sn_less(seqno, head + size)) { in mt76_rx_aggr_reorder()
218 head = ieee80211_sn_inc(ieee80211_sn_sub(seqno, size)); in mt76_rx_aggr_reorder()
[all …]
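
The mt76 reorder code keeps an aggregation window of `size` frames starting at `head` and compares 12-bit 802.11 sequence numbers with ieee80211_sn_less() before placing a frame into a slot. A small sketch of that windowing arithmetic; the comparison mirrors the mac80211-style helper, while the slot calculation and buffer size here are illustrative assumptions:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SN_MASK   0xfffu          /* 802.11 sequence numbers are 12 bits */
#define SN_MODULO (SN_MASK + 1)

/* Modulo-4096 "a comes before b", in the style of ieee80211_sn_less(),
 * used to decide whether a frame is older than the window head. */
static bool sn_less(uint16_t a, uint16_t b)
{
	return ((a - b) & SN_MASK) > (SN_MODULO >> 1);
}

int main(void)
{
	uint16_t head = 4090, size = 64;   /* illustrative window */
	uint16_t seqno = 3;                /* wrapped past 4095 */

	/* Frames older than the head are not buffered; this one is newer. */
	printf("older than head: %d\n", sn_less(seqno, head));   /* 0 */

	/* An in-window frame lands in a slot derived from its seqno. */
	printf("slot: %u\n", (unsigned)(seqno % size));
	return 0;
}
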
/openbmc/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_irq.c
120 static bool vmw_fifo_idle(struct vmw_private *dev_priv, uint32_t seqno) in vmw_fifo_idle() argument
128 uint32_t seqno = vmw_fence_read(dev_priv); in vmw_update_seqno() local
130 if (dev_priv->last_read_seqno != seqno) { in vmw_update_seqno()
131 dev_priv->last_read_seqno = seqno; in vmw_update_seqno()
137 uint32_t seqno) in vmw_seqno_passed() argument
141 if (likely(dev_priv->last_read_seqno - seqno < VMW_FENCE_WRAP)) in vmw_seqno_passed()
145 if (likely(dev_priv->last_read_seqno - seqno < VMW_FENCE_WRAP)) in vmw_seqno_passed()
148 if (!vmw_has_fences(dev_priv) && vmw_fifo_idle(dev_priv, seqno)) in vmw_seqno_passed()
156 ret = ((atomic_read(&dev_priv->marker_seq) - seqno) in vmw_seqno_passed()
165 uint32_t seqno, in vmw_fallback_wait() argument
[all …]
/openbmc/linux/net/dccp/ccids/lib/
packet_history.h
38 u64 seqno; member
43 tfrc_tx_hist_find_entry(struct tfrc_tx_hist_entry *head, u64 seqno) in tfrc_tx_hist_find_entry() argument
45 while (head != NULL && head->seqno != seqno) in tfrc_tx_hist_find_entry()
50 int tfrc_tx_hist_add(struct tfrc_tx_hist_entry **headp, u64 seqno);
/openbmc/linux/drivers/gpu/drm/i915/
i915_syncmap.c
154 bool i915_syncmap_is_later(struct i915_syncmap **root, u64 id, u32 seqno) in i915_syncmap_is_later() argument
195 return seqno_later(__sync_seqno(p)[idx], seqno); in i915_syncmap_is_later()
214 static inline void __sync_set_seqno(struct i915_syncmap *p, u64 id, u32 seqno) in __sync_set_seqno() argument
219 __sync_seqno(p)[idx] = seqno; in __sync_set_seqno()
230 static noinline int __sync_set(struct i915_syncmap **root, u64 id, u32 seqno) in __sync_set() argument
335 __sync_set_seqno(p, id, seqno); in __sync_set()
353 int i915_syncmap_set(struct i915_syncmap **root, u64 id, u32 seqno) in i915_syncmap_set() argument
362 __sync_set_seqno(p, id, seqno); in i915_syncmap_set()
366 return __sync_set(root, id, seqno); in i915_syncmap_set()
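
i915_syncmap_set() records, per 64-bit context id, the most recent 32-bit seqno seen, and i915_syncmap_is_later() reports whether a queried seqno is already covered, using the same wrap-safe comparison sketched above for intel_tlb.c. A minimal sketch of that idea with a flat array in place of i915's compact tree layout; MAP_SLOTS and the linear scan are assumptions for illustration:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define MAP_SLOTS 64   /* toy capacity, not i915's layout */

struct sync_entry {
	bool     used;
	uint64_t context;
	uint32_t seqno;
};

static struct sync_entry map[MAP_SLOTS];

/* Wrap-safe "a is at or after b" over u32 seqnos, like seqno_later(). */
static bool seqno_later(uint32_t a, uint32_t b)
{
	return (int32_t)(a - b) >= 0;
}

/* Remember the newest seqno seen for a context (cf. i915_syncmap_set()). */
static int syncmap_set(uint64_t context, uint32_t seqno)
{
	struct sync_entry *free_slot = NULL;

	for (int i = 0; i < MAP_SLOTS; i++) {
		if (map[i].used && map[i].context == context) {
			map[i].seqno = seqno;
			return 0;
		}
		if (!map[i].used && !free_slot)
			free_slot = &map[i];
	}
	if (!free_slot)
		return -1;
	free_slot->used = true;
	free_slot->context = context;
	free_slot->seqno = seqno;
	return 0;
}

/* Has this context already reached at least `seqno`? (cf. is_later) */
static bool syncmap_is_later(uint64_t context, uint32_t seqno)
{
	for (int i = 0; i < MAP_SLOTS; i++)
		if (map[i].used && map[i].context == context)
			return seqno_later(map[i].seqno, seqno);
	return false;
}

int main(void)
{
	syncmap_set(0x1234, 10);
	printf("%d %d\n", syncmap_is_later(0x1234, 5),
	       syncmap_is_later(0x1234, 20));   /* 1 0 */
	return 0;
}
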
i915_trace.h
275 __field(u32, seqno)
284 __entry->seqno = rq->fence.seqno;
302 __field(u32, seqno)
311 __entry->seqno = rq->fence.seqno;
350 __field(u32, seqno)
360 __entry->seqno = rq->fence.seqno;
367 __entry->ctx, __entry->seqno,
380 __field(u32, seqno)
389 __entry->seqno = rq->fence.seqno;
615 __field(u32, seqno)
[all …]
/openbmc/linux/drivers/gpu/drm/lima/
lima_trace.h
19 __field(unsigned int, seqno)
26 __entry->seqno = task->base.s_fence->finished.seqno;
31 __entry->task_id, __entry->context, __entry->seqno,
/openbmc/linux/include/uapi/linux/
batadv_packet.h
215 __be32 seqno; member
241 __be32 seqno; member
261 __be32 seqno; member
314 __be16 seqno; member
344 __be32 seqno; member
382 __be16 seqno; member
465 __be16 seqno; member
483 __be32 seqno; member
/openbmc/linux/drivers/gpu/drm/msm/
msm_gpu_trace.h
40 __field(u32, seqno)
47 __entry->seqno = submit->seqno;
51 __entry->id, __entry->pid, __entry->ringid, __entry->seqno,
64 __field(u32, seqno)
74 __entry->seqno = submit->seqno;
81 __entry->id, __entry->pid, __entry->ringid, __entry->seqno,
/openbmc/linux/net/batman-adv/
fragmentation.c
108 u16 seqno) in batadv_frag_init_chain() argument
112 if (chain->seqno == seqno) in batadv_frag_init_chain()
119 chain->seqno = seqno; in batadv_frag_init_chain()
146 u16 seqno, hdr_size = sizeof(struct batadv_frag_packet); in batadv_frag_insert_packet() local
157 seqno = ntohs(frag_packet->seqno); in batadv_frag_insert_packet()
158 bucket = seqno % BATADV_FRAG_BUFFER_COUNT; in batadv_frag_insert_packet()
173 if (batadv_frag_init_chain(chain, seqno)) { in batadv_frag_insert_packet()
493 frag_header.seqno = htons(atomic_inc_return(&bat_priv->frag_seqno)); in batadv_frag_send_packet()
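
batadv_packet.h declares its on-wire seqno fields as __be16/__be32, and fragmentation.c converts with ntohs()/htons() before using the value, e.g. bucketing reassembly chains with seqno % BATADV_FRAG_BUFFER_COUNT. A small userspace sketch of handling such a field; the packet layout and buffer count here are illustrative, not the real batman-adv definitions:

#include <arpa/inet.h>   /* ntohs(), htons() */
#include <stdint.h>
#include <stdio.h>

#define FRAG_BUFFER_COUNT 8   /* stand-in for BATADV_FRAG_BUFFER_COUNT */

/* Illustrative fragment header: seqno travels in network byte order. */
struct frag_packet {
	uint16_t seqno;   /* __be16 on the wire */
};

int main(void)
{
	struct frag_packet pkt = { .seqno = htons(1234) };  /* as received */

	/* Convert before any arithmetic, as batadv_frag_insert_packet()
	 * does with ntohs(frag_packet->seqno). */
	uint16_t seqno = ntohs(pkt.seqno);
	unsigned int bucket = seqno % FRAG_BUFFER_COUNT;

	printf("seqno %u -> reassembly bucket %u\n", (unsigned)seqno, bucket);
	return 0;
}
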
