arch_try_cmpxchg

arch_try_cmpxchg(ptr, oldp, new) is the architecture layer's "try" variant of compare-and-exchange: it atomically stores new to *ptr if *ptr still equals *oldp and returns true; on failure it returns false and writes the value actually found back into *oldp, so retry loops need no separate reload. The occurrences below are shown in their original order, with short notes on the pattern each belongs to.
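As a quick orientation, here is the canonical retry idiom (a minimal sketch, not taken from the listing; the counter and function name are hypothetical). Note that the loop never reloads the value by hand, because a failed arch_try_cmpxchg() has already updated old:

    #include <linux/atomic.h>
    #include <linux/limits.h>

    /* Hypothetical caller: increment 'counter' atomically, saturating at
     * U32_MAX. On CAS failure, 'old' already holds the fresh value. */
    static u32 counter;

    static u32 inc_saturating(void)
    {
            u32 old, new;

            old = READ_ONCE(counter);
            do {
                    if (old == U32_MAX)
                            break;          /* saturated: store nothing */
                    new = old + 1;
            } while (!arch_try_cmpxchg(&counter, &old, new));

            return old;                     /* value observed before our update */
    }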
Atomic wrappers forward the operation to the embedded counter field; the identical line occurs twice here, presumably once in the 32-bit and once in the 64-bit (atomic64) implementation:

    return arch_try_cmpxchg(&v->counter, old, new);
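These one-liners are consistent with the usual architecture wrappers over the counter member; a sketch of the presumed surrounding functions (signatures follow the generic atomic naming scheme and are not confirmed by the listing):

    /* Presumed context: atomic_t/atomic64_t expose a 'counter' member and
     * the wrappers simply forward to the scalar macro. */
    static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
    {
            return arch_try_cmpxchg(&v->counter, old, new);
    }

    static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
    {
            return arch_try_cmpxchg(&v->counter, old, new);
    }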
Where the native compare-and-swap is already fully ordered and handles both widths, as these aliases suggest, the 64-bit and _local variants collapse into the base macro:

    #define arch_try_cmpxchg64 arch_try_cmpxchg
    #define arch_try_cmpxchg_local arch_try_cmpxchg
    #define arch_try_cmpxchg64_local arch_try_cmpxchg
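One practical consequence, sketched with a hypothetical helper: the _local form buys nothing here, since it expands to the same fully atomic operation:

    /* Hypothetical running-maximum update: on this architecture
     * arch_try_cmpxchg64_local() is literally arch_try_cmpxchg(), so there
     * is no cheaper CPU-local path to prefer. */
    static bool stat_store_max(u64 *stat, u64 sample)
    {
            u64 old = READ_ONCE(*stat);

            do {
                    if (sample <= old)
                            return false;   /* current maximum already larger */
            } while (!arch_try_cmpxchg64_local(stat, &old, sample));

            return true;
    }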
Exchange (xchg) is emulated with compare-and-swap retry loops. The first loop tail occurs twice in the source, apparently once for the 1-byte and once for the 2-byte case, both operating on the aligned 32-bit word that contains the operand; the 4- and 8-byte cases swap the value directly:

    } while (!arch_try_cmpxchg((u32 *)ptr, &old, new));
    } while (!arch_try_cmpxchg((u32 *)ptr, &old, x & 0xffffffff));
    } while (!arch_try_cmpxchg((u64 *)ptr, &old, x));
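A sketch of how the sub-word case can work, assuming only word-sized compare-and-swap and big-endian byte numbering (as on s390; on a little-endian machine the shift would be (addr & 3) * 8). All names are illustrative:

    /* Exchange one byte by looping CAS on the aligned 32-bit word that
     * contains it; a failed arch_try_cmpxchg() refreshes 'old', so 'new'
     * is rebuilt from current contents on every retry. */
    static u8 xchg_u8_sketch(u8 *ptr, u8 val)
    {
            unsigned long addr = (unsigned long)ptr;
            u32 *word = (u32 *)(addr & ~3UL);
            int shift = (3 - (addr & 3)) * 8;       /* big-endian position */
            u32 mask = 0xffU << shift;
            u32 old, new;

            old = READ_ONCE(*word);
            do {
                    new = (old & ~mask) | ((u32)val << shift);
            } while (!arch_try_cmpxchg(word, &old, new));

            return (old & mask) >> shift;           /* previous byte value */
    }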
The per-CPU preemption count, which this architecture keeps in the lowcore, is updated with the same loop shape:

    } while (!arch_try_cmpxchg(&get_lowcore()->preempt_count, &old, new));
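A sketch of the kind of function this loop tail plausibly ends (the surrounding code and the exact field type are assumptions):

    /* Hypothetical: add 'val' to the preemption count atomically. */
    static __always_inline void preempt_count_add_sketch(int val)
    {
            int old, new;

            old = READ_ONCE(get_lowcore()->preempt_count);
            do {
                    new = old + val;
            } while (!arch_try_cmpxchg(&get_lowcore()->preempt_count, &old, new));
    }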
The rwlock packs readers and writer state into one count word. Judging by the constants, readers occupy the low 16 bits and writer state lives above them, with 0x30000 meaning write-locked: a reader joins only while no writer bits (0xffff0000) are set, and a writer takes the lock only when the word is entirely free:

    if (!arch_try_cmpxchg(&rw->cnts, &old, 0x30000))
    return (!(old & 0xffff0000) && arch_try_cmpxchg(&rw->cnts, &old, old + 1));
    return !old && arch_try_cmpxchg(&rw->cnts, &old, 0x30000);
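A sketch of the trylock pair the last two lines come from, assuming an arch_rwlock_t with an int cnts field and the bit layout read off the constants above:

    /* Illustrative trylock pair over a combined reader/writer count word. */
    static inline int arch_read_trylock_sketch(arch_rwlock_t *rw)
    {
            int old = READ_ONCE(rw->cnts);

            /* Readers add 1 to the low half, but only while no writer
             * or write-intent bits are set. */
            return !(old & 0xffff0000) && arch_try_cmpxchg(&rw->cnts, &old, old + 1);
    }

    static inline int arch_write_trylock_sketch(arch_rwlock_t *rw)
    {
            int old = READ_ONCE(rw->cnts);

            /* Writers need the whole word free, then claim 0x30000. */
            return !old && arch_try_cmpxchg(&rw->cnts, &old, 0x30000);
    }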
The spinlock fast path tries to move a free lock word straight to this CPU's lock value:

    return likely(arch_try_cmpxchg(&lp->lock, &old, spinlock_lockval()));

A region/segment (crst) page-table entry is exchanged through its raw long value:

    return arch_try_cmpxchg((long *)crstep, &old.val, new.val);

The spinlock slow path retries the same acquisition step at several points (the identical line occurs three times in the source), plus one attempt keyed on a previously observed owner value:

    if (arch_try_cmpxchg(&lp->lock, &old, new))
    if (arch_try_cmpxchg(&lp->lock, &owner, cpu))

A waiting writer announces itself by setting a write-intent bit in the rwlock count word:

    arch_try_cmpxchg(&rw->cnts, &old, old | 0x10000))
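A sketch of the fast-path trylock the first line above sits in, assuming the lock word is zero when free and holds a per-CPU value when taken (spinlock_lockval() is taken from the listing; the function shell is not):

    /* Illustrative trylock: only succeeds if the lock word was observed
     * free (zero); the per-CPU lock value marks the new owner. */
    static inline bool arch_spin_trylock_sketch(arch_spinlock_t *lp)
    {
            int old = 0;

            return likely(arch_try_cmpxchg(&lp->lock, &old, spinlock_lockval()));
    }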
The counter-forwarding wrapper line then appears a second time as a pair, presumably another architecture's 32- and 64-bit atomic implementations, followed by a forwarding call inside a multi-line macro (note the trailing backslash):

    return arch_try_cmpxchg(&v->counter, old, new);
    arch_try_cmpxchg((ptr), (po), (n)); \
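A plausible shape for that macro, modeled on common width-checked wrappers; the BUILD_BUG_ON and the macro name are assumptions, only the forwarding line is from the listing:

    /* Hypothetical forwarding wrapper: reject wrong-sized operands at
     * build time, then hand everything to the generic macro. */
    #define arch_try_cmpxchg64_sketch(ptr, po, n)               \
    ({                                                          \
            BUILD_BUG_ON(sizeof(*(ptr)) != 8);                  \
            arch_try_cmpxchg((ptr), (po), (n));                 \
    })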
Finally, the generic atomic layer picks its raw_try_cmpxchg* ordering variants through a preprocessor cascade, and every remaining hit is one branch of it. Only lines containing this exact identifier are listed, so the interleaved branches that test arch_try_cmpxchg_relaxed are missing: the __atomic_op_fence/acquire/release helpers seen here build the ordered forms from that relaxed sibling, while the #elif branches alias the fully ordered macro when it exists:

    #if defined(arch_try_cmpxchg)
    #define raw_try_cmpxchg arch_try_cmpxchg
    __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
    __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
    #elif defined(arch_try_cmpxchg)
    #define raw_try_cmpxchg_acquire arch_try_cmpxchg
    __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
    #elif defined(arch_try_cmpxchg)
    #define raw_try_cmpxchg_release arch_try_cmpxchg
    #elif defined(arch_try_cmpxchg)
    #define raw_try_cmpxchg_relaxed arch_try_cmpxchg
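Reconstructed with the missing branches filled in as assumptions (only the #elif defined(arch_try_cmpxchg) branch and the __atomic_op_acquire line are taken from the hits above), one variant of the cascade plausibly reads:

    /* Sketch of the acquire-variant selection, assuming the standard
     * fallback pattern of the generic atomic headers. */
    #if defined(arch_try_cmpxchg_acquire)
    #define raw_try_cmpxchg_acquire arch_try_cmpxchg_acquire
    #elif defined(arch_try_cmpxchg_relaxed)
    #define raw_try_cmpxchg_acquire(...) \
            __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
    #elif defined(arch_try_cmpxchg)
    #define raw_try_cmpxchg_acquire arch_try_cmpxchg
    #else
    /* final fallback built from the plain cmpxchg form (elided) */
    #endif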