#if defined(__i386__) || defined(__x86_64__)
#define barrier() asm volatile("" ::: "memory")
#define virt_mb() __sync_synchronize()
#define virt_rmb() barrier()
#define virt_wmb() barrier()
/* Atomic store should be enough, but gcc generates worse code in that case. */
#define virt_store_mb(var, value) do { \
        typeof(var) virt_store_mb_value = (value); \
        __atomic_exchange(&(var), &virt_store_mb_value, &virt_store_mb_value, \
                          __ATOMIC_SEQ_CST); \
        barrier(); \
} while (0);
/* Weak barriers should be used. If not - it's a bug */
# define mb() abort()
# define rmb() abort()
# define wmb() abort()
#else
#error Please fill in barrier macros
#endif
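For context, a minimal usage sketch of how these macros pair up in a producer/consumer exchange; the names (shared_data, ready, producer, consumer) are illustrative and not part of the header above. On x86 the TSO memory model already forbids load/load and store/store reordering, which is why virt_rmb() and virt_wmb() reduce to a compiler-only barrier(), and only the full barrier needs a real fence.

/* Usage sketch (hypothetical names), assuming the macros above are in scope. */
static int shared_data;          /* payload written by the producer           */
static int ready;                /* publication flag, initially 0             */

static void producer(void)
{
        shared_data = 42;        /* plain store of the payload                */
        virt_store_mb(ready, 1); /* store + full barrier: publish the flag    */
}

static int consumer(void)
{
        while (!ready)
                barrier();       /* compiler barrier: force a re-read of ready */
        virt_rmb();              /* order the flag read before the payload read */
        return shared_data;      /* observes 42 once ready is seen as 1       */
}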