/*
 * NOTE(review): This is NOT a compilable C header. It is an aggregated,
 * grep-style dump of wmb() -- write memory barrier -- definitions collected
 * from many architectures/ports and kernel trees. In a real translation
 * unit every line below would be a macro redefinition error. The lines are
 * kept verbatim; only annotations are added. Origins are inferred from the
 * instruction mnemonics and are marked as assumptions where uncertain.
 */
wmb
/* ^ bare token -- presumably the search term / header emitted by the tool
 * that produced this dump; confirm against the dump's origin. */

/* Alpha: dedicated "wmb" store-barrier instruction, plus compiler barrier. */
#define wmb() __asm__ __volatile__("wmb": : :"memory")
/* "dmb 2": DMB with a mode operand -- NOTE(review): looks like ARC
 * (mode 2 = order stores only); confirm which port this came from. */
#define wmb() asm volatile("dmb 2\n" : : : "memory")
/* Opaque helper taking a "st" (store) selector -- presumably an ARM port
 * (BSD-style __arm_heavy_mb); verify against the source tree. */
#define wmb() __arm_heavy_mb(st)
/* Compiler-only barrier: hardware needs no fence here (UP build or a
 * strongly-ordered configuration); barrier() just blocks compiler reordering. */
#define wmb() barrier()
/* Opaque arch helper; architecture not identifiable from this line alone --
 * TODO(review): confirm origin of c_wsync(). */
#define wmb() c_wsync()
/* Incomplete from this view: function body is outside this chunk. */
static inline void wmb(void)
/* Self-define so that later "#ifdef wmb" / "#ifndef wmb" guards see the
 * inline-function definition above as already providing wmb. */
#define wmb wmb
/* Fallback: no cheaper store-only barrier, reuse the full barrier mb(). */
#define wmb() mb()
/* Compiler-only barrier (see note above). */
#define wmb() barrier()
#define wmb() mb()
/* Empty asm with "memory" clobber: pure compiler barrier, no instruction
 * emitted -- hardware presumably orders stores already. */
#define wmb() __asm__ __volatile__("":::"memory")
/* x86 (Linux, runtime-patched): ALTERNATIVE() picks "lock addl" to the
 * stack on CPUs without SSE, "sfence" otherwise.  Truncated here -- the
 * continuation after the backslash is outside this view. */
#define wmb() asm volatile(ALTERNATIVE("lock addl $0,-4(%%esp)", "sfence", \
/* x86 (older Linux style): same idea via alternative(), keyed on
 * X86_FEATURE_XMM (SSE implies sfence is available). */
#define wmb() alternative("lock addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
/* x86: unconditional sfence (SSE assumed present). */
#define wmb() asm volatile("sfence" : : : "memory")
/* Linux generic wrapper with KCSAN instrumentation: tell the race detector
 * about the barrier, then run the arch barrier.  do/while(0) is the
 * multi-statement-macro idiom. */
#define wmb() do { kcsan_wmb(); __wmb(); } while (0)
#define wmb() mb()
/* Alpha again -- same definition as above, from a different tree/copy. */
#define wmb() __asm__ __volatile__("wmb": : :"memory")
/* 32-bit ARM userspace: indirect call to the kernel-provided "kuser"
 * memory-barrier helper at the fixed vector address 0xffff0fa0. */
#define wmb() ((void(*)(void))0xffff0fa0)()
/* ARMv7/ARM64: "dmb ishst" -- inner-shareable-domain barrier ordering
 * stores only. */
#define wmb() asm volatile("dmb ishst" ::: "memory")
#define wmb() mb()
/* "sync": full hardware barrier -- NOTE(review): mnemonic matches both
 * PowerPC and MIPS; confirm which port this line came from. */
#define wmb() __asm__ __volatile__ ("sync" : : : "memory")
/* RISC-V (Linux): RISCV_FENCE(ow, ow) emits "fence ow,ow" -- orders prior
 * device output + memory writes before subsequent ones. */
#define wmb() RISCV_FENCE(ow, ow)
#define wmb() mb()
#define wmb() mb()
/* Compiler-only barrier (see note above). */
#define wmb() __asm__ __volatile__("":::"memory")
/* x86 without SSE: a locked read-modify-write to the stack acts as a
 * serializing store fence in place of sfence. */
#define wmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
/* x86: unconditional sfence. */
#define wmb() asm volatile("sfence" ::: "memory")
#define wmb() mb()
#define wmb() mb()
/* PowerPC: "lwsync" -- lightweight sync; cheaper than full "sync" and
 * sufficient for store-store ordering to cacheable memory. */
#define wmb() asm volatile("lwsync":::"memory")