/* arch_spin_is_locked — per-architecture definitions collected below */
#define arch_spin_is_locked(x) ((x)->lock != 0)
/*
 * Locked when 'slock' differs from the architecture's unlocked sentinel.
 * NOTE(review): __ARCH_SPIN_LOCK_UNLOCKED__ is defined in this arch's
 * spinlock_types header (not visible here) — confirm its value there.
 */
#define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__)
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
/* Nonzero 'lock' word means the lock is currently held; 0 means free. */
#define arch_spin_is_locked(x) ((x)->lock != 0)
static inline int arch_spin_is_locked(arch_spinlock_t *x)
/* Delegate to the generic queued-spinlock (qspinlock) implementation. */
#define arch_spin_is_locked(l) queued_spin_is_locked(l)
static inline int arch_spin_is_locked(arch_spinlock_t *lock)
static inline int arch_spin_is_locked(arch_spinlock_t *lp)
/*
 * Locked when the 'lock' value is zero or negative; presumably a positive
 * value encodes "unlocked" — TODO confirm against this arch's lock
 * initializer and lock/unlock paths.
 */
#define arch_spin_is_locked(x) ((x)->lock <= 0)
/*
 * Held iff 'lock' <= 0. NOTE(review): this assumes the unlocked state is
 * stored as a positive value — verify against the arch's spinlock_types.
 */
#define arch_spin_is_locked(x) ((x)->lock <= 0)
/*
 * Test the first byte of the lock object through a volatile unsigned char
 * pointer: nonzero byte = locked. The volatile cast forces a fresh read
 * but implies no memory barrier or ordering guarantee.
 */
#define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)
/* Forward to the shared qspinlock query; no arch-specific state here. */
#define arch_spin_is_locked(l) queued_spin_is_locked(l)
/* Forward to the generic ticket-spinlock query. */
#define arch_spin_is_locked(l) ticket_spin_is_locked(l)
/*
 * Locked when 'slock' == 0 — the inverse of the common encoding
 * (here nonzero apparently means "unlocked"); TODO confirm against this
 * arch's __ARCH_SPIN_LOCK_UNLOCKED initializer.
 */
#define arch_spin_is_locked(x) ((x)->slock == 0)
/*
 * Uniprocessor stub: always reports "not locked". The (void)(lock)
 * evaluates the argument (avoiding unused-variable warnings and keeping
 * type checking) and the comma operator yields the constant 0.
 */
#define arch_spin_is_locked(lock) ((void)(lock), 0)
static inline bool arch_spin_is_locked(arch_spinlock_t *mutex)