
Searched refs:lock (Results 1 – 25 of 3751) sorted by relevance


/openbmc/linux/include/linux/
spinlock_api_up.h
28 do { __acquire(lock); (void)(lock); } while (0)
43 do { __release(lock); (void)(lock); } while (0)
58 #define _raw_spin_lock(lock) __LOCK(lock) argument
60 #define _raw_read_lock(lock) __LOCK(lock) argument
61 #define _raw_write_lock(lock) __LOCK(lock) argument
63 #define _raw_spin_lock_bh(lock) __LOCK_BH(lock) argument
64 #define _raw_read_lock_bh(lock) __LOCK_BH(lock) argument
65 #define _raw_write_lock_bh(lock) __LOCK_BH(lock) argument
76 #define _raw_spin_unlock(lock) __UNLOCK(lock) argument
77 #define _raw_read_unlock(lock) __UNLOCK(lock) argument
[all …]
rwlock_api_smp.h
18 void __lockfunc _raw_read_lock(rwlock_t *lock) __acquires(lock);
19 void __lockfunc _raw_write_lock(rwlock_t *lock) __acquires(lock);
45 #define _raw_read_lock(lock) __raw_read_lock(lock) argument
49 #define _raw_write_lock(lock) __raw_write_lock(lock) argument
53 #define _raw_read_lock_bh(lock) __raw_read_lock_bh(lock) argument
57 #define _raw_write_lock_bh(lock) __raw_write_lock_bh(lock) argument
61 #define _raw_read_lock_irq(lock) __raw_read_lock_irq(lock) argument
77 #define _raw_read_trylock(lock) __raw_read_trylock(lock) argument
81 #define _raw_write_trylock(lock) __raw_write_trylock(lock) argument
85 #define _raw_read_unlock(lock) __raw_read_unlock(lock) argument
[all …]
spinlock.h
108 __raw_spin_lock_init((lock), #lock, &__key, LD_WAIT_SPIN); \
113 do { *(lock) = __RAW_SPIN_LOCK_UNLOCKED(lock); } while (0)
121 #define raw_spin_is_contended(lock) (((void)(lock), 0)) argument
215 #define raw_spin_trylock(lock) __cond_lock(lock, _raw_spin_trylock(lock)) argument
217 #define raw_spin_lock(lock) _raw_spin_lock(lock) argument
274 #define raw_spin_lock_irq(lock) _raw_spin_lock_irq(lock) argument
275 #define raw_spin_lock_bh(lock) _raw_spin_lock_bh(lock) argument
276 #define raw_spin_unlock(lock) _raw_spin_unlock(lock) argument
284 #define raw_spin_unlock_bh(lock) _raw_spin_unlock_bh(lock) argument
287 __cond_lock(lock, _raw_spin_trylock_bh(lock))
[all …]
rwlock.h
24 __rwlock_init((lock), #lock, &__key); \
28 do { *(lock) = __RW_LOCK_UNLOCKED(lock); } while (0)
52 #define read_trylock(lock) __cond_lock(lock, _raw_read_trylock(lock)) argument
53 #define write_trylock(lock) __cond_lock(lock, _raw_write_trylock(lock)) argument
55 #define write_lock(lock) _raw_write_lock(lock) argument
56 #define read_lock(lock) _raw_read_lock(lock) argument
92 #define read_lock_irq(lock) _raw_read_lock_irq(lock) argument
93 #define read_lock_bh(lock) _raw_read_lock_bh(lock) argument
96 #define read_unlock(lock) _raw_read_unlock(lock) argument
97 #define write_unlock(lock) _raw_write_unlock(lock) argument
[all …]
spinlock_api_smp.h
22 void __lockfunc _raw_spin_lock(raw_spinlock_t *lock) __acquires(lock);
28 void __lockfunc _raw_spin_lock_bh(raw_spinlock_t *lock) __acquires(lock);
47 #define _raw_spin_lock(lock) __raw_spin_lock(lock) argument
51 #define _raw_spin_lock_bh(lock) __raw_spin_lock_bh(lock) argument
55 #define _raw_spin_lock_irq(lock) __raw_spin_lock_irq(lock) argument
59 #define _raw_spin_lock_irqsave(lock) __raw_spin_lock_irqsave(lock) argument
63 #define _raw_spin_trylock(lock) __raw_spin_trylock(lock) argument
67 #define _raw_spin_trylock_bh(lock) __raw_spin_trylock_bh(lock) argument
71 #define _raw_spin_unlock(lock) __raw_spin_unlock(lock) argument
75 #define _raw_spin_unlock_bh(lock) __raw_spin_unlock_bh(lock) argument
[all …]
spinlock_rt.h
45 rt_spin_lock(lock); in spin_lock()
89 rt_spin_lock(lock); in spin_lock_bh()
94 rt_spin_lock(lock); in spin_lock_irq()
106 rt_spin_unlock(lock); in spin_unlock()
127 __cond_lock(lock, rt_spin_trylock(lock))
130 __cond_lock(lock, rt_spin_trylock_bh(lock))
133 __cond_lock(lock, rt_spin_trylock(lock))
146 __cond_lock(lock, __spin_trylock_irqsave(lock, flags))
148 #define spin_is_contended(lock) (((void)(lock), 0)) argument
152 return rt_mutex_base_is_locked(&lock->lock); in spin_is_locked()
[all …]
spinlock_up.h
31 lock->slock = 0; in arch_spin_lock()
39 lock->slock = 0; in arch_spin_trylock()
48 lock->slock = 1; in arch_spin_unlock()
54 #define arch_read_lock(lock) do { barrier(); (void)(lock); } while (0) argument
55 #define arch_write_lock(lock) do { barrier(); (void)(lock); } while (0) argument
56 #define arch_read_trylock(lock) ({ barrier(); (void)(lock); 1; }) argument
57 #define arch_write_trylock(lock) ({ barrier(); (void)(lock); 1; }) argument
62 #define arch_spin_is_locked(lock) ((void)(lock), 0) argument
64 # define arch_spin_lock(lock) do { barrier(); (void)(lock); } while (0) argument
66 # define arch_spin_trylock(lock) ({ barrier(); (void)(lock); 1; }) argument
[all …]
mutex.h
84 extern void mutex_destroy(struct mutex *lock);
188 #define mutex_lock(lock) mutex_lock_nested(lock, 0) argument
189 #define mutex_lock_interruptible(lock) mutex_lock_interruptible_nested(lock, 0) argument
190 #define mutex_lock_killable(lock) mutex_lock_killable_nested(lock, 0) argument
191 #define mutex_lock_io(lock) mutex_lock_io_nested(lock, 0) argument
200 extern void mutex_lock(struct mutex *lock);
205 # define mutex_lock_nested(lock, subclass) mutex_lock(lock) argument
206 # define mutex_lock_interruptible_nested(lock, subclass) mutex_lock_interruptible(lock) argument
207 # define mutex_lock_killable_nested(lock, subclass) mutex_lock_killable(lock) argument
208 # define mutex_lock_nest_lock(lock, nest_lock) mutex_lock(lock) argument
[all …]
lockdep.h
233 lockdep_init_map_type(&(lock)->dep_map, #lock, (lock)->dep_map.key, sub,\
239 lockdep_set_class_and_name(lock, &__lockdep_no_validate__, #lock)
244 #define lockdep_match_class(lock, key) lockdep_match_key(&(lock)->dep_map, key) argument
249 return lock->key == key; in lockdep_match_key()
291 #define lockdep_is_held(lock) lock_is_held(&(lock)->dep_map) argument
292 #define lockdep_is_held_type(lock, r) lock_is_held_type(&(lock)->dep_map, (r)) argument
304 lock_set_class(lock, lock->name, lock->key, subclass, ip); in lock_set_subclass()
483 lock(_lock); \
493 ____err = lock(_lock); \
506 lock(_lock)
[all …]
/openbmc/linux/kernel/locking/
spinlock_debug.c
23 debug_check_no_locks_freed((void *)lock, sizeof(*lock)); in __raw_spin_lock_init()
42 debug_check_no_locks_freed((void *)lock, sizeof(*lock)); in __rwlock_init()
65 lock, READ_ONCE(lock->magic), in spin_dump()
99 SPIN_BUG_ON(lock->magic != SPINLOCK_MAGIC, lock, "bad magic"); in debug_spin_unlock()
101 SPIN_BUG_ON(lock->owner != current, lock, "wrong owner"); in debug_spin_unlock()
160 RWLOCK_BUG_ON(lock->magic != RWLOCK_MAGIC, lock, "bad magic"); in do_raw_read_lock()
179 RWLOCK_BUG_ON(lock->magic != RWLOCK_MAGIC, lock, "bad magic"); in do_raw_read_unlock()
185 RWLOCK_BUG_ON(lock->magic != RWLOCK_MAGIC, lock, "bad magic"); in debug_write_lock_before()
186 RWLOCK_BUG_ON(lock->owner == current, lock, "recursion"); in debug_write_lock_before()
199 RWLOCK_BUG_ON(lock->magic != RWLOCK_MAGIC, lock, "bad magic"); in debug_write_unlock()
[all …]
mutex.c
52 osq_lock_init(&lock->osq); in __mutex_init()
489 osq_unlock(&lock->osq); in mutex_optimistic_spin()
496 osq_unlock(&lock->osq); in mutex_optimistic_spin()
560 __ww_mutex_unlock(lock); in ww_mutex_unlock()
582 MUTEX_WARN_ON(lock->magic != lock); in __mutex_lock_common()
637 __mutex_add_waiter(lock, &waiter, &lock->wait_list); in __mutex_lock_common()
856 ww_mutex_unlock(lock); in ww_mutex_deadlock_injection()
1027 mutex_lock(lock); in mutex_lock_io()
1085 MUTEX_WARN_ON(lock->magic != lock); in mutex_trylock()
1142 mutex_lock(lock); in atomic_dec_and_mutex_lock()
[all …]
rtmutex_api.c
141 __rt_mutex_unlock(&lock->rtmutex); in rt_mutex_unlock()
150 return rt_mutex_slowtrylock(lock); in rt_mutex_futex_trylock()
170 debug_rt_mutex_unlock(lock); in __rt_mutex_futex_unlock()
172 if (!rt_mutex_has_waiters(lock)) { in __rt_mutex_futex_unlock()
173 lock->owner = NULL; in __rt_mutex_futex_unlock()
216 debug_check_no_locks_freed((void *)lock, sizeof(*lock)); in __rt_mutex_init()
241 __rt_mutex_base_init(lock); in rt_mutex_init_proxy_locked()
270 rt_mutex_clear_owner(lock); in rt_mutex_proxy_unlock()
348 remove_waiter(lock, waiter); in rt_mutex_start_proxy_lock()
434 remove_waiter(lock, waiter); in rt_mutex_cleanup_proxy_lock()
[all …]
spinlock.c
154 __raw_spin_lock(lock); in _raw_spin_lock()
170 __raw_spin_lock_irq(lock); in _raw_spin_lock_irq()
178 __raw_spin_lock_bh(lock); in _raw_spin_lock_bh()
186 __raw_spin_unlock(lock); in _raw_spin_unlock()
228 __raw_read_lock(lock); in _raw_read_lock()
244 __raw_read_lock_irq(lock); in _raw_read_lock_irq()
252 __raw_read_lock_bh(lock); in _raw_read_lock_bh()
260 __raw_read_unlock(lock); in _raw_read_unlock()
300 __raw_write_lock(lock); in _raw_write_lock()
334 __raw_write_lock_bh(lock); in _raw_write_lock_bh()
[all …]
ww_mutex.h
9 __ww_waiter_first(struct mutex *lock) in __ww_waiter_first() argument
41 __ww_waiter_last(struct mutex *lock) in __ww_waiter_last() argument
64 return __mutex_owner(lock); in __ww_mutex_owner()
75 raw_spin_lock(&lock->wait_lock); in lock_wait_lock()
407 lock_wait_lock(&lock->base); in ww_mutex_set_context_fastpath()
409 unlock_wait_lock(&lock->base); in ww_mutex_set_context_fastpath()
490 struct MUTEX *lock, in __ww_mutex_add_waiter() argument
561 if (lock->ctx) { in __ww_mutex_unlock()
565 if (lock->ctx->acquired > 0) in __ww_mutex_unlock()
566 lock->ctx->acquired--; in __ww_mutex_unlock()
[all …]
rtmutex.c
36 struct rt_mutex *lock, in __ww_mutex_add_waiter() argument
110 xchg_acquire(&lock->owner, rt_mutex_owner_encode(lock, owner)); in rt_mutex_set_owner()
116 WRITE_ONCE(lock->owner, rt_mutex_owner_encode(lock, NULL)); in rt_mutex_clear_owner()
258 __releases(lock->wait_lock) in unlock_rt_mutex_safe()
318 __releases(lock->wait_lock) in unlock_rt_mutex_safe()
320 lock->owner = NULL; in unlock_rt_mutex_safe()
812 lock = waiter->lock; in rt_mutex_adjust_prio_chain()
837 if (lock == orig_lock || rt_mutex_owner(lock) == top_task) { in rt_mutex_adjust_prio_chain()
1091 if (rt_mutex_owner(lock)) in try_to_take_rt_mutex()
1210 waiter->lock = lock; in task_blocks_on_rt_mutex()
[all …]
/openbmc/linux/arch/alpha/include/asm/
spinlock.h
21 return lock.lock == 0; in arch_spin_value_unlocked()
27 lock->lock = 0; in arch_spin_unlock()
46 : "=&r" (tmp), "=m" (lock->lock) in arch_spin_lock()
47 : "m"(lock->lock) : "memory"); in arch_spin_lock()
52 return !test_and_set_bit(0, &lock->lock); in arch_spin_trylock()
74 : "m" (*lock) : "memory"); in arch_read_lock()
94 : "m" (*lock) : "memory"); in arch_write_lock()
114 : "m" (*lock) : "memory"); in arch_read_trylock()
136 : "m" (*lock) : "memory"); in arch_write_trylock()
154 : "m" (*lock) : "memory"); in arch_read_unlock()
[all …]
/openbmc/linux/arch/hexagon/include/asm/
spinlock.h
28 static inline void arch_read_lock(arch_rwlock_t *lock) in arch_read_lock() argument
37 : "r" (&lock->lock) in arch_read_lock()
51 : "r" (&lock->lock) in arch_read_unlock()
69 : "r" (&lock->lock) in arch_read_trylock()
85 : "r" (&lock->lock) in arch_write_lock()
102 : "r" (&lock->lock) in arch_write_trylock()
112 lock->lock = 0; in arch_write_unlock()
124 : "r" (&lock->lock) in arch_spin_lock()
133 lock->lock = 0; in arch_spin_unlock()
147 : "r" (&lock->lock) in arch_spin_trylock()
[all …]
/openbmc/linux/drivers/gpu/drm/
drm_lock.c
63 volatile unsigned int *lock = &lock_data->hw_lock->lock; in drm_lock_take() local
67 old = *lock; in drm_lock_take()
112 volatile unsigned int *lock = &lock_data->hw_lock->lock; in drm_lock_transfer() local
116 old = *lock; in drm_lock_transfer()
127 volatile unsigned int *lock = &lock_data->hw_lock->lock; in drm_legacy_lock_free() local
139 old = *lock; in drm_legacy_lock_free()
185 master->lock.hw_lock ? master->lock.hw_lock->lock : -1, in drm_legacy_lock()
201 if (drm_lock_take(&master->lock, lock->context)) { in drm_legacy_lock()
231 dev->sigdata.lock = master->lock.hw_lock; in drm_legacy_lock()
335 _DRM_LOCK_IS_HELD(master->lock.hw_lock->lock) && in drm_legacy_i_have_hw_lock()
[all …]
/openbmc/linux/arch/ia64/include/asm/
spinlock.h
45 int *p = (int *)&lock->lock, ticket, serve; in __ticket_spin_lock()
65 int tmp = READ_ONCE(lock->lock); in __ticket_spin_trylock()
68 return ia64_cmpxchg(acq, &lock->lock, tmp, tmp + 1, sizeof (tmp)) == tmp; in __ticket_spin_trylock()
74 unsigned short *p = (unsigned short *)&lock->lock + 1, tmp; in __ticket_spin_unlock()
84 long tmp = READ_ONCE(lock->lock); in __ticket_spin_is_locked()
91 long tmp = READ_ONCE(lock->lock); in __ticket_spin_is_contended()
98 return !(((lock.lock >> TICKET_SHIFT) ^ lock.lock) & TICKET_MASK); in arch_spin_value_unlocked()
114 __ticket_spin_lock(lock); in arch_spin_lock()
256 arch_rwlock_t lock; in arch_read_trylock() member
259 old.lock = new.lock = *x; in arch_read_trylock()
[all …]
/openbmc/linux/drivers/md/persistent-data/
dm-block-manager.c
198 spin_lock(&lock->lock); in bl_down_read()
201 spin_unlock(&lock->lock); in bl_down_read()
217 spin_unlock(&lock->lock); in bl_down_read()
228 spin_lock(&lock->lock); in bl_down_read_nonblock()
241 spin_unlock(&lock->lock); in bl_down_read_nonblock()
247 spin_lock(&lock->lock); in bl_up_read()
253 spin_unlock(&lock->lock); in bl_up_read()
261 spin_lock(&lock->lock); in bl_down_write()
284 spin_unlock(&lock->lock); in bl_down_write()
294 spin_lock(&lock->lock); in bl_up_write()
[all …]
/openbmc/linux/fs/ocfs2/dlm/
dlmast.c
79 BUG_ON(!lock); in __dlm_queue_ast()
81 res = lock->lockres; in __dlm_queue_ast()
91 lock->ast_pending, lock->ml.type); in __dlm_queue_ast()
101 dlm_lock_get(lock); in __dlm_queue_ast()
132 BUG_ON(!lock); in dlm_queue_ast()
145 BUG_ON(!lock); in __dlm_queue_bast()
206 fn = lock->ast; in dlm_do_local_ast()
226 lksb = lock->lksb; in dlm_do_remote_ast()
346 lock = NULL; in dlm_proxy_ast_handler()
387 lock->ml.type, lock->ml.convert_type); in dlm_proxy_ast_handler()
[all …]
/openbmc/qemu/util/
qemu-coroutine-lock.c
51 if (lock) { in qemu_co_queue_wait_impl()
69 if (lock) { in qemu_co_queue_wait_impl()
84 if (lock) { in qemu_co_enter_next_impl()
88 if (lock) { in qemu_co_enter_next_impl()
341 lock->owners = 0; in qemu_co_rwlock_init()
385 if (lock->owners == 0 || (lock->owners > 0 && QSIMPLEQ_EMPTY(&lock->tickets))) { in qemu_co_rwlock_rdlock()
386 lock->owners++; in qemu_co_rwlock_rdlock()
413 lock->owners--; in qemu_co_rwlock_unlock()
426 lock->owners = 1; in qemu_co_rwlock_downgrade()
457 if (lock->owners == 1 && QSIMPLEQ_EMPTY(&lock->tickets)) { in qemu_co_rwlock_upgrade()
[all …]
/openbmc/linux/drivers/acpi/acpica/
utlock.c
32 lock->num_readers = 0; in acpi_ut_create_rw_lock()
45 acpi_os_delete_mutex(lock->reader_mutex); in acpi_ut_delete_rw_lock()
46 acpi_os_delete_mutex(lock->writer_mutex); in acpi_ut_delete_rw_lock()
48 lock->num_readers = 0; in acpi_ut_delete_rw_lock()
49 lock->reader_mutex = NULL; in acpi_ut_delete_rw_lock()
50 lock->writer_mutex = NULL; in acpi_ut_delete_rw_lock()
82 lock->num_readers++; in acpi_ut_acquire_read_lock()
83 if (lock->num_readers == 1) { in acpi_ut_acquire_read_lock()
89 acpi_os_release_mutex(lock->reader_mutex); in acpi_ut_acquire_read_lock()
104 lock->num_readers--; in acpi_ut_release_read_lock()
[all …]
/openbmc/linux/arch/powerpc/include/asm/
simple_spinlock.h
37 return lock.slock == 0; in arch_spin_value_unlocked()
101 splpar_spin_yield(lock); in spin_yield()
109 splpar_rw_yield(lock); in rw_yield()
122 splpar_spin_yield(lock); in arch_spin_lock()
133 lock->slock = 0; in arch_spin_unlock()
173 : "r" (&rw->lock), [eh] "n" (eh) in __arch_read_trylock()
253 : "r"(&rw->lock) in arch_read_unlock()
261 rw->lock = 0; in arch_write_unlock()
264 #define arch_spin_relax(lock) spin_yield(lock) argument
265 #define arch_read_relax(lock) rw_yield(lock) argument
[all …]
/openbmc/linux/tools/testing/selftests/bpf/progs/
linked_list.c
24 bpf_spin_lock(lock); in list_push_pop()
26 bpf_spin_unlock(lock); in list_push_pop()
33 bpf_spin_lock(lock); in list_push_pop()
35 bpf_spin_unlock(lock); in list_push_pop()
43 bpf_spin_lock(lock); in list_push_pop()
46 bpf_spin_unlock(lock); in list_push_pop()
49 bpf_spin_lock(lock); in list_push_pop()
60 bpf_spin_lock(lock); in list_push_pop()
64 bpf_spin_lock(lock); in list_push_pop()
76 bpf_spin_lock(lock); in list_push_pop()
[all …]
