Searched refs:work_queued (Results 1 – 12 of 12) sorted by relevance
 63  sbi->work_queued = 0;      in flush_mdb()
 78  if (!sbi->work_queued) {   in hfs_mark_mdb_dirty()
 81  sbi->work_queued = 1;      in hfs_mark_mdb_dirty()
164 int work_queued; /* non-zero delayed work is queued */ member
 74  sbi->work_queued = 0;      in flush_superblock()
 89  if (!sbi->work_queued) {   in affs_mark_sb_dirty()
 92  sbi->work_queued = 1;      in affs_mark_sb_dirty()
105 int work_queued; /* non-zero delayed work is queued */ member
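The filesystem hits (hfs and affs above; hfsplus, ufs and reiserfs further down) all use the same delayed superblock-writeback pattern: a work_queued flag in the per-mount sb_info, taken under a spinlock, ensures at most one delayed flush is queued at a time, and the work function clears the flag before writing the superblock back. Below is a minimal sketch of that pattern; the struct and function names, the work_lock field and the dirty_writeback_interval-based delay are illustrative stand-ins, not copied from any one of these filesystems.

    #include <linux/jiffies.h>
    #include <linux/spinlock.h>
    #include <linux/workqueue.h>
    #include <linux/writeback.h>            /* dirty_writeback_interval */

    struct demo_sb_info {                   /* stand-in for the per-fs sb_info */
            spinlock_t work_lock;           /* protects sb_work and work_queued */
            struct delayed_work sb_work;    /* delayed superblock flush */
            int work_queued;                /* non-zero delayed work is queued */
    };

    /* Work callback: clear the flag first, then write the superblock back. */
    static void demo_flush_sb(struct work_struct *work)
    {
            struct demo_sb_info *sbi =
                    container_of(work, struct demo_sb_info, sb_work.work);

            spin_lock(&sbi->work_lock);
            sbi->work_queued = 0;
            spin_unlock(&sbi->work_lock);

            /* ... write the dirty superblock / MDB to disk ... */
    }

    /* Mark-dirty side: queue the delayed flush only if it is not queued yet. */
    static void demo_mark_sb_dirty(struct demo_sb_info *sbi)
    {
            unsigned long delay;

            spin_lock(&sbi->work_lock);
            if (!sbi->work_queued) {
                    delay = msecs_to_jiffies(dirty_writeback_interval * 10);
                    queue_delayed_work(system_long_wq, &sbi->sb_work, delay);
                    sbi->work_queued = 1;
            }
            spin_unlock(&sbi->work_lock);
    }

Clearing the flag at the top of the work function, before the actual flush, is what lets a mark-dirty arriving during the flush queue another round instead of being lost.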
574  bool work_queued = false;  in amdgpu_dm_irq_schedule_work()  local
581  work_queued = true;        in amdgpu_dm_irq_schedule_work()
586  if (!work_queued) {        in amdgpu_dm_irq_schedule_work()
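The amdgpu_dm hit is a different use: work_queued is a local bool recording whether queue_work() actually enqueued anything (queue_work() returns false when the work item is already pending), and the !work_queued branch is the fallback when no handler could be scheduled. A rough sketch of that return-value check follows; the handler-data struct, the list walk and the choice of system_highpri_wq are assumptions for illustration, not taken from the driver.

    #include <linux/list.h>
    #include <linux/workqueue.h>

    struct demo_irq_handler_data {          /* illustrative per-handler bookkeeping */
            struct list_head list;
            struct work_struct work;
    };

    /* Try to schedule one handler from the list; note whether anything was queued. */
    static void demo_irq_schedule_work(struct list_head *handler_list)
    {
            struct demo_irq_handler_data *hd;
            bool work_queued = false;

            list_for_each_entry(hd, handler_list, list) {
                    /* queue_work() returns false if this item is already pending */
                    if (queue_work(system_highpri_wq, &hd->work)) {
                            work_queued = true;
                            break;
                    }
            }

            if (!work_queued) {
                    /* every handler was still pending; a driver-specific
                     * fallback (e.g. queuing a fresh copy) would go here */
            }
    }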
255  sbi->work_queued = 0;      in delayed_sync_fs()
272  if (!sbi->work_queued) {   in hfsplus_mark_mdb_dirty()
275  sbi->work_queued = 1;      in hfsplus_mark_mdb_dirty()
191 int work_queued; /* non-zero delayed work is queued */ member
29 int work_queued; /* non-zero if the delayed work is queued */ member
722  sbi->work_queued = 0;      in delayed_sync_fs()
734  if (!sbi->work_queued) {   in ufs_mark_sb_dirty()
737  sbi->work_queued = 1;      in ufs_mark_sb_dirty()
100  if (sbi->work_queued == 1)  in flush_old_commits()
107  if (sbi->work_queued == 1)  in flush_old_commits()
108  sbi->work_queued = 0;       in flush_old_commits()
128  if (!sbi->work_queued) {    in reiserfs_schedule_old_flush()
131  sbi->work_queued = 1;       in reiserfs_schedule_old_flush()
142  sbi->work_queued = 2;       in reiserfs_cancel_old_flush()
177  sbi->work_queued = 0;       in reiserfs_unfreeze()
618 int work_queued; /* non-zero delayed work is queued */ member
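reiserfs is the one user in this list that gives the flag a third state: 0 means idle, 1 means a flush is queued, and reiserfs_cancel_old_flush() sets it to 2 so that reiserfs_schedule_old_flush() (which tests !work_queued) refuses to requeue until reiserfs_unfreeze() resets it to 0; flush_old_commits() also only clears the flag when it still reads 1, so it does not wipe out a concurrent cancel. Below is a hedged sketch of that small state machine; everything other than the work_queued values (field names, the delay, the symbolic constants) is an illustrative stand-in.

    #include <linux/jiffies.h>
    #include <linux/spinlock.h>
    #include <linux/workqueue.h>

    #define FLUSH_IDLE      0       /* nothing queued, scheduling allowed */
    #define FLUSH_QUEUED    1       /* delayed flush is pending */
    #define FLUSH_CANCELED  2       /* frozen/cancelled: do not requeue */

    struct demo_rs_sb_info {
            spinlock_t lock;
            struct delayed_work old_work;
            int work_queued;
    };

    static void demo_flush_old_commits(struct work_struct *work)
    {
            struct demo_rs_sb_info *sbi =
                    container_of(work, struct demo_rs_sb_info, old_work.work);

            /* ... flush the old transactions ... */

            spin_lock(&sbi->lock);
            if (sbi->work_queued == FLUSH_QUEUED)   /* don't clobber a cancel */
                    sbi->work_queued = FLUSH_IDLE;
            spin_unlock(&sbi->lock);
    }

    static void demo_schedule_old_flush(struct demo_rs_sb_info *sbi)
    {
            spin_lock(&sbi->lock);
            if (!sbi->work_queued) {        /* neither queued nor canceled */
                    queue_delayed_work(system_long_wq, &sbi->old_work, 5 * HZ);
                    sbi->work_queued = FLUSH_QUEUED;
            }
            spin_unlock(&sbi->lock);
    }

    static void demo_cancel_old_flush(struct demo_rs_sb_info *sbi)
    {
            spin_lock(&sbi->lock);
            sbi->work_queued = FLUSH_CANCELED;              /* block rescheduling */
            spin_unlock(&sbi->lock);
            cancel_delayed_work_sync(&sbi->old_work);       /* may sleep: lock dropped */
    }

    static void demo_unfreeze(struct demo_rs_sb_info *sbi)
    {
            spin_lock(&sbi->lock);
            sbi->work_queued = FLUSH_IDLE;  /* allow scheduling again */
            spin_unlock(&sbi->lock);
    }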
1538  unsigned int work_queued;  in xennet_handle_rx()  local
1545  work_queued = XEN_RING_NR_UNCONSUMED_RESPONSES(&queue->rx);  in xennet_handle_rx()
1546  if (work_queued > queue->rx_rsp_unconsumed) {  in xennet_handle_rx()
1547  queue->rx_rsp_unconsumed = work_queued;  in xennet_handle_rx()
1549  } else if (unlikely(work_queued < queue->rx_rsp_unconsumed)) {  in xennet_handle_rx()
1560  if (likely(netif_carrier_ok(queue->info->netdev) && work_queued))  in xennet_handle_rx()
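The xen-netfront hit is different in kind: work_queued is not a scheduling guard at all but a count of unconsumed RX responses in the shared ring, compared against the driver's cached rx_rsp_unconsumed to catch a producer index moving backwards, and finally used to decide whether NAPI needs to run. A simplified sketch of that flow follows; the queue struct and the plain producer/consumer indices stand in for the real Xen ring and XEN_RING_NR_UNCONSUMED_RESPONSES(), and the event-channel EOI handling done by the real driver is omitted.

    #include <linux/netdevice.h>
    #include <linux/spinlock.h>

    struct demo_netfront_queue {
            spinlock_t rx_cons_lock;
            unsigned int rx_prod;           /* stand-in for the ring producer index */
            unsigned int rx_cons;           /* stand-in for the ring consumer index */
            unsigned int rx_rsp_unconsumed; /* unconsumed count seen last time */
            struct napi_struct rx_napi;
            struct net_device *netdev;
    };

    /* Returns false if the backend misbehaved (producer index went backwards). */
    static bool demo_handle_rx(struct demo_netfront_queue *queue)
    {
            unsigned int work_queued;
            unsigned long flags;

            spin_lock_irqsave(&queue->rx_cons_lock, flags);
            /* how many responses are waiting for us in the ring right now */
            work_queued = queue->rx_prod - queue->rx_cons;

            if (work_queued > queue->rx_rsp_unconsumed) {
                    queue->rx_rsp_unconsumed = work_queued; /* new work arrived */
            } else if (unlikely(work_queued < queue->rx_rsp_unconsumed)) {
                    /* the cached count only legitimately drops when responses are
                     * consumed (not shown), so a smaller value here means the
                     * producer index went backwards: treat it as an error */
                    spin_unlock_irqrestore(&queue->rx_cons_lock, flags);
                    netdev_alert(queue->netdev, "RX producer index going backwards\n");
                    return false;
            }
            spin_unlock_irqrestore(&queue->rx_cons_lock, flags);

            /* only poke NAPI if the link is up and there is work to do */
            if (likely(netif_carrier_ok(queue->netdev) && work_queued))
                    napi_schedule(&queue->rx_napi);

            return true;
    }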