#ifndef _COMPAT_FREEBSD_AMD64_MACHINE_ATOMIC_H_
#define _COMPAT_FREEBSD_AMD64_MACHINE_ATOMIC_H_
/*
 * Atomically load *p with acquire semantics.
 * On amd64 ordinary loads are not reordered with later loads or stores,
 * so a compiler-only barrier after the load is sufficient; no fence
 * instruction is needed.
 *
 * Fixed: previously declared to return u_int; now returns u_short to
 * match the operand width, consistent with the _int/_long variants
 * below (callers assigning to a wider type are unaffected).
 */
static __inline u_short
atomic_load_acq_short(volatile u_short *p)
{
	u_short res;

	res = *p;
	__asm volatile("" : : : "memory");
	return (res);
}
/*
 * Atomically load *p with acquire semantics.  amd64 never reorders a
 * load with a later load or store, so all that is required beyond the
 * plain load is a compiler barrier to pin subsequent accesses behind it.
 */
static __inline u_int
atomic_load_acq_int(volatile u_int *p)
{
	u_int v = *p;

	__asm volatile("" : : : "memory");
	return (v);
}
/*
 * 64-bit acquire load; see atomic_load_acq_int for the reasoning behind
 * the compiler-only barrier.
 */
static __inline u_long
atomic_load_acq_long(volatile u_long *p)
{
	u_long v = *p;

	__asm volatile("" : : : "memory");
	return (v);
}
/*
 * Store v to *p with release semantics.  The compiler barrier before the
 * store keeps earlier memory operations from being moved past it; amd64
 * does not reorder a store with earlier loads or stores, so no fence
 * instruction is required.
 */
static __inline void
atomic_store_rel_int(volatile u_int *p, u_int v)
{
__asm volatile("" : : : "memory");
*p = v;
}
/*
 * 64-bit release store; see atomic_store_rel_int for the reasoning
 * behind the compiler-only barrier.
 */
static __inline void
atomic_store_rel_long(volatile u_long *p, u_long v)
{
__asm volatile("" : : : "memory");
*p = v;
}
/*
 * Atomic compare-and-set: if *dst == expect, store src into *dst.
 * Returns non-zero on success, 0 if *dst did not contain expect.
 *
 * lock cmpxchgl compares %eax (expect) with *dst; on match it writes
 * src and sets ZF, which sete then captures into res.  The "memory"
 * clobber plus the LOCK prefix make this a full barrier.
 */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
u_char res;
__asm __volatile(
" lock ; "
" cmpxchgl %3,%1 ; "
" sete %0 ; "
"# atomic_cmpset_int"
: "=q" (res), /* success flag; any byte-addressable register */
"+m" (*dst), /* the target word, read and possibly written */
"+a" (expect) /* cmpxchg implicitly compares against %eax */
: "r" (src)
: "memory", "cc");
return (res);
}
/*
 * 64-bit atomic compare-and-set: if *dst == expect, store src into
 * *dst.  Returns non-zero on success, 0 otherwise.  Identical in
 * structure to atomic_cmpset_int but using cmpxchgq / %rax.
 */
static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{
u_char res;
__asm __volatile(
" lock ; "
" cmpxchgq %3,%1 ; "
" sete %0 ; "
"# atomic_cmpset_long"
: "=q" (res), /* success flag */
"+m" (*dst), /* target quadword */
"+a" (expect) /* cmpxchgq implicitly uses %rax */
: "r" (src)
: "memory", "cc");
return (res);
}
/*
 * Atomically set bit (v & 0x1f) in *p and return the bit's PREVIOUS
 * value (non-zero if it was already set).  The mask keeps the bit index
 * inside a single 32-bit word so btsl cannot address outside *p.
 * setc captures CF, which btsl loads with the old bit value.
 * NOTE(review): no "memory" clobber here, only "cc" -- this mirrors the
 * FreeBSD original, which does not give testandset fence semantics.
 */
static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
u_char res;
__asm __volatile(
" lock ; "
" btsl %2,%1 ; "
" setc %0 ; "
"# atomic_testandset_int"
: "=q" (res), /* old bit value via CF */
"+m" (*p)
: "Ir" (v & 0x1f) /* immediate or register bit index, mod 32 */
: "cc");
return (res);
}
/*
 * Atomically add v to *p and return the PREVIOUS value of *p.
 * lock xaddl exchanges the register with the memory operand while
 * adding, so v holds the old value on return.
 *
 * Modernized: the old "=m"(*p) output + duplicate "m"(*p) input pair
 * relied on the compiler recognizing both expressions as the same
 * address; the single "+m" read-write operand (already used by
 * atomic_set_long/atomic_clear_long in this file) states that directly.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{
	__asm __volatile(
	" lock ; "
	" xaddl %0, %1 ; "
	"# atomic_fetchadd_int"
	: "+r" (v),	/* in: addend; out: old value of *p */
	  "+m" (*p)	/* single read-modify-write memory operand */
	:
	: "cc");
	return (v);
}
/*
 * Atomically OR v into *p (*p |= v) with no fence semantics beyond the
 * implicit full barrier of the LOCK prefix.
 *
 * Modernized: replaced the archaic "=m"(*p) output + duplicate "m"(*p)
 * input idiom with a single "+m" read-write operand, matching
 * atomic_set_long/atomic_clear_long in this file.
 */
static __inline void
atomic_set_int(volatile u_int *p, u_int v)
{
	__asm volatile("lock ; orl %1,%0"
	    : "+m" (*p)
	    : "ir" (v)
	    : "cc");
}
/*
 * Atomically clear the bits of v in *p (*p &= ~v).  The complement is
 * computed at compile/setup time; the locked andl does the RMW.
 *
 * Modernized: single "+m" read-write operand instead of the old
 * "=m"/"m" duplicate-operand idiom (see atomic_set_int).
 */
static __inline void
atomic_clear_int(volatile u_int *p, u_int v)
{
	__asm volatile("lock ; andl %1,%0"
	    : "+m" (*p)
	    : "ir" (~v)
	    : "cc");
}
/*
 * Atomically subtract v from *p (*p -= v).
 *
 * Modernized: single "+m" read-write operand instead of the old
 * "=m"/"m" duplicate-operand idiom (see atomic_set_int).
 */
static __inline void
atomic_subtract_int(volatile u_int *p, u_int v)
{
	__asm volatile("lock ; subl %1,%0"
	    : "+m" (*p)
	    : "ir" (v)
	    : "cc");
}
/*
 * Atomically OR v into the quadword at *p (*p |= v); the LOCK prefix
 * supplies the implicit full barrier.
 */
static __inline void
atomic_set_long(volatile u_long *p, u_long v)
{
	__asm volatile("lock ; orq %1,%0" : "+m" (*p) : "ir" (v) : "cc");
}
/*
 * Atomically clear the bits of v in the quadword at *p (*p &= ~v).
 */
static __inline void
atomic_clear_long(volatile u_long *p, u_long v)
{
	__asm volatile(
	    "lock ; andq %1,%0"
	    : "+m" (*p)
	    : "ir" (~v)
	    : "cc");
}
/*
 * Atomically exchange *p with v; returns the previous value of *p.
 * xchgl with a memory operand is implicitly LOCKed, so no explicit
 * prefix is needed.  No "memory" clobber, mirroring the FreeBSD
 * original (swap is not a fence).
 */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{
__asm __volatile(
" xchgl %1,%0 ; "
"# atomic_swap_int"
: "+r" (v), /* in: new value; out: old *p */
"+m" (*p));
return (v);
}
/*
 * 64-bit atomic exchange; see atomic_swap_int.  xchgq is implicitly
 * LOCKed.
 */
static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{
__asm __volatile(
" xchgq %1,%0 ; "
"# atomic_swap_long"
: "+r" (v), /* in: new value; out: old *p */
"+m" (*p));
return (v);
}
/* Plain (relaxed, unordered) stores: a single aligned store is atomic
 * on amd64, so no barrier or LOCK is needed. */
#define atomic_store_short(p, v) \
(*(volatile u_short *)(p) = (u_short)(v))
#define atomic_store_int(p, v) \
(*(volatile u_int *)(p) = (u_int)(v))
/* Plain (relaxed) fixed-width loads. */
#define atomic_load_32(p) (*(volatile uint32_t *)(p))
#define atomic_load_64(p) (*(volatile uint64_t *)(p))
/* Read the old value and store zero atomically, via implicit-LOCK xchg. */
#define atomic_readandclear_int(p) atomic_swap_int(p, 0)
#define atomic_readandclear_long(p) atomic_swap_long(p, 0)
/* Fixed-width and pointer-sized aliases for the inlines above
 * (pointers and longs are both 64-bit on amd64). */
#define atomic_load_acq_32 atomic_load_acq_int
#define atomic_store_rel_32 atomic_store_rel_int
#define atomic_cmpset_32 atomic_cmpset_int
#define atomic_cmpset_64 atomic_cmpset_long
#define atomic_readandclear_64 atomic_readandclear_long
#define atomic_cmpset_ptr atomic_cmpset_long
#include_next <sys/atomic.h>
/*
 * Release fence.  amd64 never reorders a store with an earlier load or
 * store, so only the compiler must be prevented from reordering; the
 * empty asm with a "memory" clobber does exactly that.
 */
static __inline void
atomic_thread_fence_rel(void)
{
__asm __volatile(" " : : : "memory");
}
/*
 * Sequentially-consistent fence.  A LOCKed read-modify-write of a dummy
 * stack location is a full barrier on amd64 and is cheaper than mfence.
 * NOTE(review): the -8(%rsp) slot is in the red zone below the stack
 * pointer -- presumably chosen to avoid touching live data at (%rsp);
 * confirm this matches the FreeBSD/illumos original.
 */
static __inline void
atomic_thread_fence_seq_cst(void)
{
__asm __volatile("lock; addl $0,-8(%%rsp)" : : : "memory", "cc");
}
/* Full/read/write barrier shorthands mapped onto the membar_*
 * primitives provided by the #include_next'd <sys/atomic.h> above.
 * NOTE(review): mapping mb() to membar_enter() rather than a dedicated
 * full-barrier primitive follows this compat layer's convention --
 * verify the membar semantics against the platform's sys/atomic.h. */
#define mb() membar_enter()
#define rmb() membar_consumer()
#define wmb() membar_producer()
#endif