Searched refs:arch_spin_unlock (Results 1 – 25 of 48) sorted by relevance

/openbmc/linux/arch/parisc/include/asm/
spinlock.h 49 static inline void arch_spin_unlock(arch_spinlock_t *x) in arch_spin_unlock() function
96 arch_spin_unlock(&(rw->lock_mutex)); in arch_read_trylock()
121 arch_spin_unlock(&(rw->lock_mutex)); in arch_write_trylock()
146 arch_spin_unlock(&(rw->lock_mutex)); in arch_read_unlock()
157 arch_spin_unlock(&(rw->lock_mutex)); in arch_write_unlock()
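The parisc (and arc, below) read-write locks are layered on an internal arch spinlock: readers and writers briefly take rw->lock_mutex, adjust a counter, and release it again. A minimal sketch of that pattern, with the counter field and its meaning assumed rather than taken from the kernel source:

  /* Sketch: rwlock trylock built on an inner arch spinlock. */
  static inline int rwlock_read_trylock_sketch(arch_rwlock_t *rw)
  {
  	int got_it = 0;

  	arch_spin_lock(&rw->lock_mutex);	/* serialize counter updates */
  	if (rw->counter > 0) {			/* assumed: positive count means readers may enter */
  		rw->counter--;
  		got_it = 1;
  	}
  	arch_spin_unlock(&rw->lock_mutex);	/* release the inner lock whether or not we got in */
  	return got_it;
  }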
/openbmc/linux/arch/arc/include/asm/
spinlock.h 67 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
261 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
315 arch_spin_unlock(&(rw->lock_mutex)); in arch_read_trylock()
340 arch_spin_unlock(&(rw->lock_mutex)); in arch_write_trylock()
365 arch_spin_unlock(&(rw->lock_mutex)); in arch_read_unlock()
376 arch_spin_unlock(&(rw->lock_mutex)); in arch_write_unlock()
smp.h 117 arch_spin_unlock(&smp_atomic_ops_lock); \
/openbmc/linux/include/linux/
spinlock_up.h 45 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
65 # define arch_spin_unlock(lock) do { barrier(); (void)(lock); } while (0) macro
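On uniprocessor builds there is no other CPU to contend with, so the UP fallback above only needs a compiler barrier; the non-debug path is literally the do/while macro on line 65. A rough sketch of the shape of the inline (debug) variant, with the field name assumed and not copied from the kernel source:

  static inline void arch_spin_unlock(arch_spinlock_t *lock)
  {
  	barrier();		/* stop the compiler from moving stores out of the critical section */
  	lock->slock = 1;	/* assumed debug-only field marking the lock as free */
  }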
/openbmc/linux/kernel/locking/
qrwlock.c 56 arch_spin_unlock(&lock->wait_lock); in queued_read_lock_slowpath()
88 arch_spin_unlock(&lock->wait_lock); in queued_write_lock_slowpath()
/openbmc/linux/arch/arm/common/
mcpm_entry.c 232 arch_spin_unlock(&mcpm_lock); in mcpm_cpu_power_up()
268 arch_spin_unlock(&mcpm_lock); in mcpm_cpu_power_down()
274 arch_spin_unlock(&mcpm_lock); in mcpm_cpu_power_down()
335 arch_spin_unlock(&mcpm_lock); in mcpm_cpu_suspend()
365 arch_spin_unlock(&mcpm_lock); in mcpm_cpu_powered_up()
/openbmc/linux/kernel/kcsan/
selftest.c 166 KCSAN_CHECK_READ_BARRIER(arch_spin_unlock(&arch_spinlock)); in test_barrier()
195 KCSAN_CHECK_WRITE_BARRIER(arch_spin_unlock(&arch_spinlock)); in test_barrier()
227 KCSAN_CHECK_RW_BARRIER(arch_spin_unlock(&arch_spinlock)); in test_barrier()
/openbmc/linux/arch/x86/mm/
kmmio.c 356 arch_spin_unlock(&kmmio_lock); in post_kmmio_handler()
467 arch_spin_unlock(&kmmio_lock); in register_kmmio_probe()
516 arch_spin_unlock(&kmmio_lock); in remove_kmmio_fault_pages()
559 arch_spin_unlock(&kmmio_lock); in unregister_kmmio_probe()
/openbmc/linux/kernel/trace/
trace_stack.c 282 arch_spin_unlock(&stack_trace_max_lock); in check_stack()
360 arch_spin_unlock(&stack_trace_max_lock); in stack_max_size_write()
410 arch_spin_unlock(&stack_trace_max_lock); in t_stop()
trace_clock.c 139 arch_spin_unlock(&trace_clock_struct.lock); in trace_clock_global()
/openbmc/linux/arch/powerpc/kvm/
book3s_xive.c 575 arch_spin_unlock(&sb->lock); in xive_vm_h_eoi()
583 arch_spin_unlock(&sb->lock); in xive_vm_h_eoi()
1365 arch_spin_unlock(&sb->lock); in kvmppc_xive_set_xive()
1387 arch_spin_unlock(&sb->lock); in kvmppc_xive_get_xive()
1426 arch_spin_unlock(&sb->lock); in kvmppc_xive_int_on()
1452 arch_spin_unlock(&sb->lock); in kvmppc_xive_int_off()
1643 arch_spin_unlock(&sb->lock); in kvmppc_xive_set_mapped()
1723 arch_spin_unlock(&sb->lock); in kvmppc_xive_clr_mapped()
2084 arch_spin_unlock(&sb->lock); in xive_pre_save_mask_irq()
2108 arch_spin_unlock(&sb->lock); in xive_pre_save_unmask_irq()
[all …]
book3s_xics.c 160 arch_spin_unlock(&ics->lock); in write_xive()
216 arch_spin_unlock(&ics->lock); in kvmppc_xics_get_xive()
473 arch_spin_unlock(&ics->lock); in icp_deliver_irq()
502 arch_spin_unlock(&ics->lock); in icp_deliver_irq()
509 arch_spin_unlock(&ics->lock); in icp_deliver_irq()
1009 arch_spin_unlock(&ics->lock); in xics_debug_show()
1217 arch_spin_unlock(&ics->lock); in xics_get_source()
1275 arch_spin_unlock(&ics->lock); in xics_set_source()
book3s_xive_native.c 272 arch_spin_unlock(&sb->lock); in xive_native_esb_fault()
411 arch_spin_unlock(&sb->lock); in kvmppc_xive_native_set_source()
459 arch_spin_unlock(&sb->lock); in kvmppc_xive_native_update_source_config()
537 arch_spin_unlock(&sb->lock); in kvmppc_xive_native_sync_source()
847 arch_spin_unlock(&sb->lock); in kvmppc_xive_reset()
929 arch_spin_unlock(&sb->lock); in kvmppc_xive_native_eq_sync()
1250 arch_spin_unlock(&sb->lock); in xive_native_debug_show()
/openbmc/linux/tools/include/linux/
spinlock.h 30 static inline void arch_spin_unlock(arch_spinlock_t *mutex) in arch_spin_unlock() function
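The copy under tools/include is a userspace shim used by perf and the selftests, which is why the parameter is named mutex: the "spinlock" there is plausibly just a pthread mutex. A hedged sketch of what such a shim looks like, with the typedef assumed:

  #include <pthread.h>

  typedef pthread_mutex_t arch_spinlock_t;	/* assumed userspace stand-in for the kernel type */

  static inline void arch_spin_unlock(arch_spinlock_t *mutex)
  {
  	pthread_mutex_unlock(mutex);		/* releasing is just a mutex unlock in userspace */
  }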
/openbmc/linux/arch/hexagon/include/asm/
spinlock.h 130 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
/openbmc/linux/include/asm-generic/
spinlock.h 63 static __always_inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
qspinlock.h 148 #define arch_spin_unlock(l) queued_spin_unlock(l) macro
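The generic spinlock.h implements a ticket lock, while qspinlock.h simply aliases arch_spin_unlock() to queued_spin_unlock(). A minimal sketch of a ticket-lock release in that style, assuming a 16-bit owner/next split and a little-endian layout (not the verbatim kernel code):

  /* Sketch: release a ticket lock by advancing the owner ticket with release
   * semantics, so the next spinning waiter observes the critical section's writes. */
  static inline void ticket_unlock_sketch(arch_spinlock_t *lock)
  {
  	u16 *owner = (u16 *)lock;			/* assumed: low half = owner, high half = next */
  	smp_store_release(owner, (u16)(*owner + 1));
  }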
/openbmc/linux/arch/alpha/include/asm/
spinlock.h 24 static inline void arch_spin_unlock(arch_spinlock_t * lock) in arch_spin_unlock() function
/openbmc/linux/arch/sh/include/asm/
spinlock-cas.h 33 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
spinlock-llsc.h 46 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
/openbmc/linux/arch/s390/lib/
spinlock.c 286 arch_spin_unlock(&rw->wait); in arch_read_lock_wait()
309 arch_spin_unlock(&rw->wait); in arch_write_lock_wait()
/openbmc/linux/arch/x86/kernel/
tsc_sync.c 286 arch_spin_unlock(&sync_lock); in check_tsc_warp()
316 arch_spin_unlock(&sync_lock); in check_tsc_warp()
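Call sites like check_tsc_warp() and the stack tracer above take the arch-level lock directly because they run where the lockdep-aware spin_lock() wrappers are unsafe (early boot, NMI, tracing internals). A minimal caller-side sketch of that pattern, using a hypothetical lock and update function:

  static arch_spinlock_t demo_lock = __ARCH_SPIN_LOCK_UNLOCKED;	/* hypothetical lock */

  static void demo_update(void)
  {
  	unsigned long flags;

  	local_irq_save(flags);		/* arch_spin_lock() does not disable interrupts itself */
  	arch_spin_lock(&demo_lock);
  	/* ... update data shared across CPUs ... */
  	arch_spin_unlock(&demo_lock);	/* pairs with arch_spin_lock(); provides release ordering */
  	local_irq_restore(flags);
  }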
/openbmc/linux/arch/sparc/include/asm/
spinlock_32.h 48 static inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() function
/openbmc/linux/arch/powerpc/include/asm/
qspinlock.h 166 #define arch_spin_unlock(l) queued_spin_unlock(l) macro
/openbmc/linux/arch/s390/include/asm/
spinlock.h 77 static inline void arch_spin_unlock(arch_spinlock_t *lp) in arch_spin_unlock() function
