/*
 * NOTE(review): This is not a compilable source file. It appears to be an
 * aggregation (e.g. grep output) of rmb() read-memory-barrier definitions
 * collected from many different architecture headers — presumably the
 * Linux kernel's per-arch <asm/barrier.h> files. rmb is redefined
 * repeatedly, one definition is truncated, and the lines must not be
 * treated as a single translation unit. Per-line notes below give the
 * likely origin of each definition; TODO confirm against the source trees.
 */
/* Stray bare token — likely the search pattern that produced this listing. */
rmb
/* "mb" instruction with memory clobber: hardware full barrier — looks like Alpha, where rmb is a full mb. TODO confirm. */
#define rmb() __asm__ __volatile__("mb": : :"memory")
/* "dmb 1": data memory barrier with a raw numeric option — ARM-family syntax; the encoding is unusual, verify origin. */
#define rmb() asm volatile("dmb 1\n" : : : "memory")
/* Delegates to a dsb() helper (data synchronization barrier) — ARM-style; dsb() is defined elsewhere, not visible here. */
#define rmb() dsb()
/* Compiler-only barrier — presumably an arch where hardware keeps reads ordered (UP / strongly ordered); no instruction emitted. */
#define rmb() barrier()
/* Arch-specific read-sync helper; c_rsync() is defined elsewhere — origin cannot be determined from this view. */
#define rmb() c_rsync()
/* NOTE(review): truncated — the body of this inline function is not visible in this listing. */
static inline void rmb(void)
/* Self-referential define: presumably marks rmb as overridden so a generic barrier header skips its fallback definition. */
#define rmb rmb
/* Read barrier falls back to the full barrier mb() on this arch. */
#define rmb() mb()
/* Compiler-only barrier (see note above); no hardware instruction. */
#define rmb() barrier()
/* Falls back to the full barrier mb(). */
#define rmb() mb()
/* Empty asm with "memory" clobber: a pure compiler barrier — blocks compiler reordering, emits no instruction. */
#define rmb() __asm__ __volatile__("":::"memory")
/*
 * NOTE(review): the next line ends in '\' (macro continuation) but its real
 * continuation is missing from this view — truncated. Together with the line
 * after it, these look like 32-bit x86 variants: a runtime-patched
 * ALTERNATIVE/alternative() choosing between a locked add to the stack
 * (pre-SSE2 serializing fallback) and LFENCE when X86_FEATURE_XMM2 is set.
 */
#define rmb() asm volatile(ALTERNATIVE("lock addl $0,-4(%%esp)", "lfence", \
#define rmb() alternative("lock addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
/* x86: LFENCE — load-serializing barrier (requires SSE2-class hardware). */
#define rmb() asm volatile("lfence" : : : "memory")
/* Instrumented wrapper: kcsan_rmb() informs KCSAN (Kernel Concurrency Sanitizer) of the barrier, then the raw __rmb() runs. */
#define rmb() do { kcsan_rmb(); __rmb(); } while (0)
/* Falls back to the full barrier mb(). */
#define rmb() mb()
/* Duplicate of the "mb" instruction form above — presumably Alpha. TODO confirm. */
#define rmb() __asm__ __volatile__("mb": : :"memory")
/* Indirect call to fixed address 0xffff0fa0 — matches the 32-bit ARM Linux "kuser" memory-barrier helper in the vector page; verify. */
#define rmb() ((void(*)(void))0xffff0fa0)()
/* AArch64-style: "dmb ishld" orders prior loads before later memory accesses within the inner-shareable domain. */
#define rmb() asm volatile("dmb ishld" ::: "memory")
/* Falls back to the full barrier mb(). */
#define rmb() mb()
/* "sync": full hardware barrier — PowerPC or MIPS syntax; TODO confirm which arch this line came from. */
#define rmb() __asm__ __volatile__ ("sync" : : : "memory")
/* RISC-V: expands to a "fence ir,ir" — orders prior reads/device-inputs against later reads/device-inputs. */
#define rmb() RISCV_FENCE(ir, ir)
/* Falls back to the full barrier mb(). */
#define rmb() mb()
/* Falls back to the full barrier mb(). */
#define rmb() mb()
/* Pure compiler barrier (duplicate of the empty-asm form above). */
#define rmb() __asm__ __volatile__("":::"memory")
/* x86 without LFENCE: a locked add to the top of stack serves as a serializing (full) barrier. */
#define rmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory")
/* x86: LFENCE (same as above, different clobber spelling). */
#define rmb() asm volatile("lfence" ::: "memory")
/* Compiler-only barrier. */
#define rmb() barrier()
/* Falls back to the full barrier mb(). */
#define rmb() mb()
/* x86: LFENCE. */
#define rmb() asm volatile("lfence":::"memory")
/* "lwsync": PowerPC lightweight sync — orders all access pairs except store→load; TODO confirm origin. */
#define rmb() asm volatile("lwsync":::"memory")