qspinlock
static inline bool virt_spin_lock(struct qspinlock *lock)
static inline void queued_spin_unlock(struct qspinlock *lock)
static __always_inline int __queued_spin_trylock_steal(struct qspinlock *lock)
static __always_inline int queued_spin_trylock(struct qspinlock *lock)
void queued_spin_lock_slowpath(struct qspinlock *lock);
static __always_inline void queued_spin_lock(struct qspinlock *lock)
static inline void queued_spin_unlock(struct qspinlock *lock)
static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
static __always_inline int __queued_spin_trylock_nosteal(struct qspinlock *lock)
static __always_inline u32 trylock_clean_tail(struct qspinlock *lock, u32 tail)
static __always_inline u32 publish_tail_cpu(struct qspinlock *lock, u32 tail)
struct qspinlock *lock;
static __always_inline u32 set_mustq(struct qspinlock *lock)
static __always_inline u32 clear_mustq(struct qspinlock *lock)
static __always_inline bool try_set_sleepy(struct qspinlock *lock, u32 old)
static __always_inline void seen_sleepy_owner(struct qspinlock *lock, u32 val)
static struct qnode *get_tail_qnode(struct qspinlock *lock, int prev_cpu)
static __always_inline bool __yield_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt, bool mustq)
static __always_inline bool yield_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt)
static __always_inline bool yield_head_to_locked_owner(struct qspinlock *lock, u32 val, bool paravirt)
static __always_inline bool yield_to_prev(struct qspinlock *lock, struct qnode *node, int prev_cpu, bool paravirt)
static __always_inline bool try_to_steal_lock(struct qspinlock *lock, bool paravirt)
static __always_inline void queued_spin_lock_mcs_queue(struct qspinlock *lock, bool paravirt)
void __lockfunc queued_spin_lock_slowpath(struct qspinlock *lock)
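
The powerpc-specific entries above (try_to_steal_lock, queued_spin_lock_mcs_queue, the yield_* helpers) point at a slowpath that first tries to steal the lock a bounded number of times, yielding to a preempted owner under paravirt, and only then joins an MCS-style queue. Below is a minimal, self-contained user-space sketch of that steal-then-queue shape using C11 atomics; every toy_* name is hypothetical and the MCS queue itself is reduced to a retry loop, so this illustrates the idea rather than the kernel code.

#include <stdatomic.h>
#include <stdbool.h>

#define TOY_LOCKED      1u
#define TOY_STEAL_TRIES 16

struct toy_lock { _Atomic unsigned int val; };

/* 0 -> LOCKED with acquire ordering, so the critical section cannot float above the lock */
static bool toy_trylock(struct toy_lock *lock)
{
	unsigned int old = 0;

	return atomic_compare_exchange_strong_explicit(&lock->val, &old, TOY_LOCKED,
						       memory_order_acquire,
						       memory_order_relaxed);
}

/* stand-in for the MCS queue: the real code enqueues on a per-CPU qnode
 * and spins on its own cache line instead of hammering the lock word */
static void toy_queue_and_wait(struct toy_lock *lock)
{
	while (!toy_trylock(lock))
		;
}

static void toy_lock_slowpath(struct toy_lock *lock)
{
	/* bounded stealing keeps short critical sections cheap under light
	 * contention; the real code may also yield to a preempted owner here */
	for (int i = 0; i < TOY_STEAL_TRIES; i++) {
		if (toy_trylock(lock))
			return;
	}
	toy_queue_and_wait(lock);
}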
#include <asm/qspinlock.h>
struct qspinlock;
static inline bool virt_spin_lock(struct qspinlock *lock)
void (*queued_spin_lock_slowpath)(struct qspinlock *lock, u32 val);
extern void native_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
extern void __pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
extern void __raw_callee_save___pv_queued_spin_unlock(struct qspinlock *lock);
static __always_inline void pv_queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
static __always_inline void pv_queued_spin_unlock(struct qspinlock *lock)
static inline void native_queued_spin_unlock(struct qspinlock *lock)
static inline void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
static inline void queued_spin_unlock(struct qspinlock *lock)
void __raw_callee_save___native_queued_spin_unlock(struct qspinlock *lock);
__visible void __native_queued_spin_unlock(struct qspinlock *lock);
static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
extern void __lockfunc __pv_queued_spin_unlock(struct qspinlock *lock);
void __lockfunc __pv_queued_spin_unlock_slowpath(struct qspinlock *lock, u8 locked);
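
The x86 entries above come in native and paravirt flavours (native_queued_spin_lock_slowpath vs __pv_queued_spin_lock_slowpath, __native_queued_spin_unlock vs __pv_queued_spin_unlock), selected through a function pointer such as the queued_spin_lock_slowpath op listed above. The following self-contained sketch shows only that dispatch pattern; the toy_* names are hypothetical and this is not the kernel's pv_ops machinery.

#include <stdbool.h>

struct toy_qspinlock;   /* opaque for this sketch */

/* bare-metal path: spin/queue on the lock word directly */
static void toy_native_slowpath(struct toy_qspinlock *lock, unsigned int val)
{
	(void)lock; (void)val;
}

/* virtualized path: instead of burning cycles, ask the hypervisor to halt
 * this vCPU and rely on the unlocker to kick it awake later */
static void toy_pv_slowpath(struct toy_qspinlock *lock, unsigned int val)
{
	(void)lock; (void)val;
}

/* chosen once at boot, analogous to the op pointer in the listing above */
static void (*toy_slowpath_op)(struct toy_qspinlock *, unsigned int) =
	toy_native_slowpath;

static void toy_spinlock_init(bool running_on_hypervisor)
{
	if (running_on_hypervisor)
		toy_slowpath_op = toy_pv_slowpath;
}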
typedef struct qspinlock rqspinlock_t;
__visible void __native_queued_spin_unlock(struct qspinlock *lock)
extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
static __always_inline void queued_spin_lock(struct qspinlock *lock)
static __always_inline void queued_spin_unlock(struct qspinlock *lock)
static __always_inline bool virt_spin_lock(struct qspinlock *lock)
static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
static __always_inline int queued_spin_trylock(struct qspinlock *lock)
struct qspinlock;
typedef struct qspinlock rqspinlock_t;
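
The generic queued_spin_lock/queued_spin_trylock/queued_spin_unlock inlines listed above implement the uncontended fast path: a single compare-and-swap of the whole lock word from 0 to the locked value with acquire semantics, falling into queued_spin_lock_slowpath on any contention, and a plain release store of the locked byte on unlock. Here is a self-contained C11 sketch of that shape; it models the idea, not the kernel's asm-generic header, and it assumes a little-endian layout with the locked flag in the low byte.

#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define TOY_LOCKED_VAL 1u

union toy_qspinlock {
	_Atomic uint32_t val;                    /* whole lock word */
	struct { _Atomic uint8_t locked; } b;    /* low byte holds the locked flag */
};

/* contended path: the real slowpath sets a pending bit or queues on an
 * MCS node; a plain retry loop stands in for it here */
static void toy_slowpath(union toy_qspinlock *lock, uint32_t val)
{
	(void)val;
	for (;;) {
		uint32_t old = 0;
		if (atomic_compare_exchange_weak_explicit(&lock->val, &old, TOY_LOCKED_VAL,
							  memory_order_acquire,
							  memory_order_relaxed))
			return;
	}
}

static void toy_lock(union toy_qspinlock *lock)
{
	uint32_t val = 0;

	/* fast path: 0 -> LOCKED, acquire on success */
	if (atomic_compare_exchange_strong_explicit(&lock->val, &val, TOY_LOCKED_VAL,
						    memory_order_acquire,
						    memory_order_relaxed))
		return;

	toy_slowpath(lock, val);
}

static bool toy_trylock(union toy_qspinlock *lock)
{
	uint32_t val = atomic_load_explicit(&lock->val, memory_order_relaxed);

	if (val)
		return false;        /* someone holds it or is queued */

	return atomic_compare_exchange_strong_explicit(&lock->val, &val, TOY_LOCKED_VAL,
						       memory_order_acquire,
						       memory_order_relaxed);
}

static void toy_unlock(union toy_qspinlock *lock)
{
	/* unlock only needs release semantics on the locked byte */
	atomic_store_explicit(&lock->b.locked, 0, memory_order_release);
}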
static __always_inline bool try_cmpxchg_tail(struct qspinlock *lock, u32 tail, u32 new_tail)
void __lockfunc queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
static __always_inline void __pv_kick_node(struct qspinlock *lock, struct mcs_spinlock *node)
static __always_inline u32 __pv_wait_head_or_lock(struct qspinlock *lock, struct mcs_spinlock *node)
static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
static __always_inline void clear_pending(struct qspinlock *lock)
static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
static __always_inline u32 queued_fetch_set_pending_acquire(struct qspinlock *lock)
static __always_inline void set_locked(struct qspinlock *lock)
static __always_inline void clear_pending(struct qspinlock *lock)
static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
static __always_inline void set_pending(struct qspinlock *lock)
static __always_inline bool trylock_clear_pending(struct qspinlock *lock)
static __always_inline void set_pending(struct qspinlock *lock)
static __always_inline bool trylock_clear_pending(struct qspinlock *lock)
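
The helpers above (set_pending, queued_fetch_set_pending_acquire, clear_pending_set_locked, set_locked, xchg_tail) manipulate individual fields of the 32-bit lock word, which packs a locked byte, a pending bit, and an encoded tail (CPU number plus queue-node index) identifying the last waiter. The sketch below is a self-contained model of that packing and of a pending-bit handoff, i.e. set pending, wait for the owner to drop the locked byte, then convert pending into locked; the field widths mirror the usual small-NR_CPUS layout, but the toy_* names are hypothetical and the fallback to queueing when pending or tail are already set is omitted.

#include <stdatomic.h>
#include <stdint.h>

/* typical field layout of the 32-bit lock word (small-NR_CPUS case):
 *   bits  0- 7: locked byte
 *   bit      8: pending
 *   bits 16-17: tail index (which of the per-CPU queue nodes)
 *   bits 18-31: tail CPU + 1 (0 means "no tail")
 */
#define TOY_LOCKED      0x000000ffu
#define TOY_PENDING     0x00000100u
#define TOY_TAIL_MASK   0xffff0000u
#define TOY_LOCKED_VAL  1u

struct toy_qspinlock { _Atomic uint32_t val; };

/* first waiter: advertise itself with the pending bit instead of queueing */
static uint32_t toy_fetch_set_pending_acquire(struct toy_qspinlock *lock)
{
	return atomic_fetch_or_explicit(&lock->val, TOY_PENDING,
					memory_order_acquire);
}

/* pending waiter saw the owner leave: drop pending and take locked in one
 * atomic add (the subtraction wraps, leaving any tail bits untouched) */
static void toy_clear_pending_set_locked(struct toy_qspinlock *lock)
{
	atomic_fetch_add_explicit(&lock->val, TOY_LOCKED_VAL - TOY_PENDING,
				  memory_order_relaxed);
}

/* assumes we are the only contender (old pending/tail were clear); the real
 * slowpath falls back to the MCS queue otherwise */
static void toy_pending_wait_and_take(struct toy_qspinlock *lock)
{
	uint32_t val = toy_fetch_set_pending_acquire(lock);

	/* wait for the current owner to clear the locked byte */
	while (val & TOY_LOCKED)
		val = atomic_load_explicit(&lock->val, memory_order_acquire);

	/* we hold the pending bit, so nobody else can take the lock first */
	toy_clear_pending_set_locked(lock);
}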
struct qspinlock *lock;
static struct qspinlock **pv_hash(struct qspinlock *lock, struct pv_node *node)
struct qspinlock *old = NULL;
static struct pv_node *pv_unhash(struct qspinlock *lock)
static void pv_kick_node(struct qspinlock *lock, struct mcs_spinlock *node)
static u32 pv_wait_head_or_lock(struct qspinlock *lock, struct mcs_spinlock *node)
struct qspinlock **lp = NULL;
lp = (struct qspinlock **)1;
__visible __lockfunc void __pv_queued_spin_unlock_slowpath(struct qspinlock *lock, u8 locked)
__visible __lockfunc void __pv_queued_spin_unlock(struct qspinlock *lock)
static inline bool pv_hybrid_queued_unfair_trylock(struct qspinlock *lock)
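
The paravirt entries above cooperate around a hash table: when the waiter at the head of the queue gives up spinning and halts its vCPU, pv_wait_head_or_lock publishes a (lock, node) mapping via pv_hash so that the unlock slowpath can pv_unhash the lock and kick exactly that vCPU. The sketch below shows only that publish/lookup idea with open addressing; the toy_* names and table size are hypothetical, serialization of the slots (the real code uses cmpxchg) is elided, and the real code additionally manages a _Q_SLOW_VAL marker in the lock word.

#include <stdint.h>
#include <stddef.h>

struct toy_qspinlock;
struct toy_node { int cpu; /* which vCPU to kick on unlock */ };

#define TOY_HASH_SIZE 256   /* power of two, hypothetical */

static struct {
	struct toy_qspinlock *lock;
	struct toy_node *node;
} toy_hash_table[TOY_HASH_SIZE];

static size_t toy_hash(struct toy_qspinlock *lock)
{
	return ((uintptr_t)lock >> 4) & (TOY_HASH_SIZE - 1);
}

/* waiter side: before halting, record who is parked on this lock;
 * assumes a free slot exists and that slot updates are serialized */
static void toy_hash_insert(struct toy_qspinlock *lock, struct toy_node *node)
{
	for (size_t i = toy_hash(lock); ; i = (i + 1) & (TOY_HASH_SIZE - 1)) {
		if (!toy_hash_table[i].lock) {
			toy_hash_table[i].node = node;
			toy_hash_table[i].lock = lock;   /* publish last */
			return;
		}
	}
}

/* unlocker side: find and remove the parked waiter so it can be kicked;
 * assumes the entry was published, otherwise this loops forever */
static struct toy_node *toy_unhash(struct toy_qspinlock *lock)
{
	for (size_t i = toy_hash(lock); ; i = (i + 1) & (TOY_HASH_SIZE - 1)) {
		if (toy_hash_table[i].lock == lock) {
			struct toy_node *node = toy_hash_table[i].node;
			toy_hash_table[i].lock = NULL;
			return node;
		}
	}
}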