/* SPDX-License-Identifier: GPL-2.0 */

#ifndef _ASM_X86_NOSPEC_BRANCH_H_
#define _ASM_X86_NOSPEC_BRANCH_H_
#include <linux/static_key.h>
#include <linux/objtool.h>
#include <linux/linkage.h>
#include <asm/alternative.h>
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>
#include <asm/unwind_hints.h>
#include <asm/percpu.h>
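
/*
 * Call depth tracking, used to work around the RSB underflow speculation
 * issue on Intel SKL-era cores in software.
 *
 * The per-CPU tracking value is not a plain counter. Call entry performs
 * an arithmetic shift right by RET_DEPTH_SHIFT (5); starting from
 * RET_DEPTH_INIT (only the MSB set) the sign bit extends downwards and
 * the value saturates to all ones after ~12 nested calls. The return
 * thunk shifts the value left by the same count and, once it reaches
 * zero, refills the RSB with RSB_RET_STUFF_LOOPS stuffing iterations.
 *
 * RET_DEPTH_INIT_FROM_CALL is RET_DEPTH_INIT with one call already
 * accounted (0x8000... sar 5), and RET_DEPTH_CREDIT (all ones) represents
 * a freshly stuffed RSB with a full complement of returns available.
 */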
#define RET_DEPTH_SHIFT 5
#define RSB_RET_STUFF_LOOPS 16
#define RET_DEPTH_INIT 0x8000000000000000ULL
#define RET_DEPTH_INIT_FROM_CALL 0xfc00000000000000ULL
#define RET_DEPTH_CREDIT 0xffffffffffffffffULL
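
/*
 * With CONFIG_CALL_THUNKS_DEBUG the thunks additionally bump per-CPU
 * counters for calls, returns, RSB stuffs and context switches, so the
 * call depth accounting can be inspected at runtime.
 */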
#ifdef CONFIG_CALL_THUNKS_DEBUG
# define CALL_THUNKS_DEBUG_INC_CALLS \
incq PER_CPU_VAR(__x86_call_count);
# define CALL_THUNKS_DEBUG_INC_RETS \
incq PER_CPU_VAR(__x86_ret_count);
# define CALL_THUNKS_DEBUG_INC_STUFFS \
incq PER_CPU_VAR(__x86_stuffs_count);
# define CALL_THUNKS_DEBUG_INC_CTXSW \
incq PER_CPU_VAR(__x86_ctxsw_count);
#else
# define CALL_THUNKS_DEBUG_INC_CALLS
# define CALL_THUNKS_DEBUG_INC_RETS
# define CALL_THUNKS_DEBUG_INC_STUFFS
# define CALL_THUNKS_DEBUG_INC_CTXSW
#endif
#if defined(CONFIG_MITIGATION_CALL_DEPTH_TRACKING) && !defined(COMPILE_OFFSETS)
#include <asm/asm-offsets.h>
#define CREDIT_CALL_DEPTH \
movq $-1, PER_CPU_VAR(__x86_call_depth);
#define RESET_CALL_DEPTH \
xor %eax, %eax; \
bts $63, %rax; \
movq %rax, PER_CPU_VAR(__x86_call_depth);
#define RESET_CALL_DEPTH_FROM_CALL \
movb $0xfc, %al; \
shl $56, %rax; \
movq %rax, PER_CPU_VAR(__x86_call_depth); \
CALL_THUNKS_DEBUG_INC_CALLS
#define INCREMENT_CALL_DEPTH \
sarq $5, PER_CPU_VAR(__x86_call_depth); \
CALL_THUNKS_DEBUG_INC_CALLS
#else
#define CREDIT_CALL_DEPTH
#define RESET_CALL_DEPTH
#define RESET_CALL_DEPTH_FROM_CALL
#define INCREMENT_CALL_DEPTH
#endif
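
/*
 * Fill the CPU return stack buffer (RSB).
 *
 * Each entry planted in the RSB, if consumed by a speculative 'ret',
 * lands on an int3 speculation trap, so stale or attacker-primed entries
 * cannot steer speculation to a useful target.
 *
 * This is required in various places for retpoline and IBRS-based
 * mitigations for the Spectre variant 2 vulnerability: sometimes to
 * flush potentially bogus entries, and sometimes purely to ensure the
 * RSB does not go empty, which on some CPUs would allow predictions from
 * other (unwanted!) sources to be used.
 *
 * RSB_CLEAR_LOOPS (32) forcibly overwrites every entry of a typical RSB.
 */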
#define RETPOLINE_THUNK_SIZE 32
#define RSB_CLEAR_LOOPS 32
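
/* Common helper for __FILL_RETURN_BUFFER and __FILL_ONE_RETURN below. */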
#define __FILL_RETURN_SLOT \
ANNOTATE_INTRA_FUNCTION_CALL; \
call 772f; \
int3; \
772:
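
/*
 * Stuff the entire RSB.
 *
 * Two calls per iteration, each with its own speculation trap should the
 * return address end up getting used, with the stack pointer adjusted
 * afterwards. The trailing LFENCE fences off speculation past the fill,
 * and on 64-bit a full RSB also credits the call depth tracking.
 */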
#ifdef CONFIG_X86_64
#define __FILL_RETURN_BUFFER(reg, nr) \
mov $(nr/2), reg; \
771: \
__FILL_RETURN_SLOT \
__FILL_RETURN_SLOT \
add $(BITS_PER_LONG/8) * 2, %_ASM_SP; \
dec reg; \
jnz 771b; \
\
lfence; \
CREDIT_CALL_DEPTH \
CALL_THUNKS_DEBUG_INC_CTXSW
#else
#define __FILL_RETURN_BUFFER(reg, nr) \
.rept nr; \
__FILL_RETURN_SLOT; \
.endr; \
add $(BITS_PER_LONG/8) * nr, %_ASM_SP;
#endif
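
/*
 * Stuff a single RSB slot.
 *
 * To mitigate Post-Barrier RSB speculation, one CALL instruction must be
 * added to the stuffed slots and forced to retire (via the LFENCE) before
 * a following RET can speculatively execute.
 */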
#define __FILL_ONE_RETURN \
__FILL_RETURN_SLOT \
add $(BITS_PER_LONG/8), %_ASM_SP; \
lfence;
#ifdef __ASSEMBLER__
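
/*
 * (ab)use ANNOTATE_RETPOLINE_SAFE on a RET to annotate away 'bare' RET
 * instructions vs objtool's RETBleed validation.
 */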
#define ANNOTATE_UNRET_SAFE ANNOTATE_RETPOLINE_SAFE
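
/*
 * Abuse ANNOTATE_RETPOLINE_SAFE on a NOP to indicate UNRET_END; this
 * should eventually become its own objtool annotation.
 */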
.macro VALIDATE_UNRET_END
#if defined(CONFIG_NOINSTR_VALIDATION) && \
(defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO))
ANNOTATE_RETPOLINE_SAFE
nop
#endif
.endm
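
/*
 * Emits a conditional CS prefix byte (0x2e) for thunk calls through the
 * extended GPRs, matching what the compiler's -mindirect-branch-cs-prefix
 * option generates for its retpoline call sites.
 */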
.macro __CS_PREFIX reg:req
.irp rs,r8,r9,r10,r11,r12,r13,r14,r15
.ifc \reg,\rs
.byte 0x2e
.endif
.endr
.endm
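
/*
 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
 * indirect jmp/call which may be susceptible to the Spectre variant 2
 * attack. Without retpolines the plain indirect jmp is followed by an
 * int3 speculation trap.
 */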
.macro JMP_NOSPEC reg:req
#ifdef CONFIG_MITIGATION_RETPOLINE
__CS_PREFIX \reg
jmp __x86_indirect_thunk_\reg
#else
jmp *%\reg
int3
#endif
.endm
.macro CALL_NOSPEC reg:req
#ifdef CONFIG_MITIGATION_RETPOLINE
__CS_PREFIX \reg
call __x86_indirect_thunk_\reg
#else
call *%\reg
#endif
.endm
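
/*
 * A simpler FILL_RETURN_BUFFER macro. Don't make people use the CPP
 * monstrosity above, manually. \ftr2 optionally selects the single-slot
 * variant used for Post-Barrier RSB stuffing.
 */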
.macro FILL_RETURN_BUFFER reg:req nr:req ftr:req ftr2=ALT_NOT(X86_FEATURE_ALWAYS)
ALTERNATIVE_2 "jmp .Lskip_rsb_\@", \
__stringify(__FILL_RETURN_BUFFER(\reg,\nr)), \ftr, \
__stringify(nop;nop;__FILL_ONE_RETURN), \ftr2
.Lskip_rsb_\@:
.endm
.macro CALL_UNTRAIN_RET
#if defined(CONFIG_MITIGATION_UNRET_ENTRY) || defined(CONFIG_MITIGATION_SRSO)
ALTERNATIVE_2 "", "call entry_untrain_ret", X86_FEATURE_UNRET, \
"call srso_alias_untrain_ret", X86_FEATURE_SRSO_ALIAS
#endif
.endm
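
/*
 * Mitigate RETBleed for AMD/Hygon Zen uarch. Requires KERNEL CR3 because
 * the return thunk isn't mapped into the userspace tables (then again,
 * AMD typically has NO_MELTDOWN).
 *
 * While the untrain sequence only needs a stack, write_ibpb() clobbers
 * AX, CX and DX, so this must run at a point where a stack is available
 * but before any RET is issued.
 */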
.macro __UNTRAIN_RET ibpb_feature, call_depth_insns
#if defined(CONFIG_MITIGATION_RETHUNK) || defined(CONFIG_MITIGATION_IBPB_ENTRY)
VALIDATE_UNRET_END
CALL_UNTRAIN_RET
ALTERNATIVE_2 "", \
"call write_ibpb", \ibpb_feature, \
__stringify(\call_depth_insns), X86_FEATURE_CALL_DEPTH
#endif
.endm
#define UNTRAIN_RET \
__UNTRAIN_RET X86_FEATURE_ENTRY_IBPB, __stringify(RESET_CALL_DEPTH)
#define UNTRAIN_RET_VM \
__UNTRAIN_RET X86_FEATURE_IBPB_ON_VMEXIT, __stringify(RESET_CALL_DEPTH)
#define UNTRAIN_RET_FROM_CALL \
__UNTRAIN_RET X86_FEATURE_ENTRY_IBPB, __stringify(RESET_CALL_DEPTH_FROM_CALL)
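
/*
 * Account a call in the call depth tracking; patched in via alternatives
 * when X86_FEATURE_CALL_DEPTH is set.
 */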
.macro CALL_DEPTH_ACCOUNT
#ifdef CONFIG_MITIGATION_CALL_DEPTH_TRACKING
ALTERNATIVE "", \
__stringify(INCREMENT_CALL_DEPTH), X86_FEATURE_CALL_DEPTH
#endif
.endm
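
/*
 * Macro to execute VERW, which on affected CPUs has been overloaded by a
 * microcode update to also clear the CPU buffers (MDS and related data
 * sampling issues). VERW clobbers CFLAGS.ZF.
 *
 * Note: Only the memory operand variant of VERW clears the CPU buffers.
 * The 32-bit form uses a %cs override because the data segments may not
 * be usable (vm86 mode) and the stack segment may not be flat (ESPFIX32).
 */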
#ifdef CONFIG_X86_64
#define VERW verw x86_verw_sel(%rip)
#else
#define VERW verw %cs:x86_verw_sel
#endif
#define __CLEAR_CPU_BUFFERS __stringify(VERW)
#define CLEAR_CPU_BUFFERS \
ALTERNATIVE "", __CLEAR_CPU_BUFFERS, X86_FEATURE_CLEAR_CPU_BUF
#ifdef CONFIG_X86_64
.macro CLEAR_BRANCH_HISTORY
ALTERNATIVE "", "call clear_bhb_loop", X86_FEATURE_CLEAR_BHB_LOOP
.endm
.macro CLEAR_BRANCH_HISTORY_VMEXIT
ALTERNATIVE "", "call clear_bhb_loop", X86_FEATURE_CLEAR_BHB_VMEXIT
.endm
#else
#define CLEAR_BRANCH_HISTORY
#define CLEAR_BRANCH_HISTORY_VMEXIT
#endif
#else /* __ASSEMBLER__ */
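
/*
 * Retpoline and ITS (Indirect Target Selection) thunks are emitted into
 * arrays of fixed-size slots so the patching code can index them per
 * register.
 */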
#define ITS_THUNK_SIZE 64
typedef u8 retpoline_thunk_t[RETPOLINE_THUNK_SIZE];
typedef u8 its_thunk_t[ITS_THUNK_SIZE];
extern retpoline_thunk_t __x86_indirect_thunk_array[];
extern retpoline_thunk_t __x86_indirect_call_thunk_array[];
extern retpoline_thunk_t __x86_indirect_jump_thunk_array[];
extern its_thunk_t __x86_indirect_its_thunk_array[];
#ifdef CONFIG_MITIGATION_RETHUNK
extern void __x86_return_thunk(void);
#else
static inline void __x86_return_thunk(void) {}
#endif
#ifdef CONFIG_MITIGATION_UNRET_ENTRY
extern void retbleed_return_thunk(void);
#else
static inline void retbleed_return_thunk(void) {}
#endif
extern void srso_alias_untrain_ret(void);
#ifdef CONFIG_MITIGATION_SRSO
extern void srso_return_thunk(void);
extern void srso_alias_return_thunk(void);
#else
static inline void srso_return_thunk(void) {}
static inline void srso_alias_return_thunk(void) {}
#endif
#ifdef CONFIG_MITIGATION_ITS
extern void its_return_thunk(void);
#else
static inline void its_return_thunk(void) {}
#endif
extern void entry_untrain_ret(void);
extern void write_ibpb(void);
#ifdef CONFIG_X86_64
extern void clear_bhb_loop(void);
#endif
extern void (*x86_return_thunk)(void);
extern void __warn_thunk(void);
#ifdef CONFIG_MITIGATION_CALL_DEPTH_TRACKING
extern void call_depth_return_thunk(void);
#define CALL_DEPTH_ACCOUNT \
ALTERNATIVE("", \
__stringify(INCREMENT_CALL_DEPTH), \
X86_FEATURE_CALL_DEPTH)
DECLARE_PER_CPU_CACHE_HOT(u64, __x86_call_depth);
#ifdef CONFIG_CALL_THUNKS_DEBUG
DECLARE_PER_CPU(u64, __x86_call_count);
DECLARE_PER_CPU(u64, __x86_ret_count);
DECLARE_PER_CPU(u64, __x86_stuffs_count);
DECLARE_PER_CPU(u64, __x86_ctxsw_count);
#endif
#else
static inline void call_depth_return_thunk(void) {}
#define CALL_DEPTH_ACCOUNT ""
#endif /* CONFIG_MITIGATION_CALL_DEPTH_TRACKING */
#ifdef CONFIG_MITIGATION_RETPOLINE
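
/*
 * Declare the per-register thunks (__x86_indirect_thunk_rax, ...) by
 * expanding GEN(reg) once per GPR via asm/GEN-for-each-reg.h.
 */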
#define GEN(reg) \
extern retpoline_thunk_t __x86_indirect_thunk_ ## reg;
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) \
extern retpoline_thunk_t __x86_indirect_call_thunk_ ## reg;
#include <asm/GEN-for-each-reg.h>
#undef GEN
#define GEN(reg) \
extern retpoline_thunk_t __x86_indirect_jump_thunk_ ## reg;
#include <asm/GEN-for-each-reg.h>
#undef GEN
#ifdef CONFIG_X86_64
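
/*
 * Inline asm counterpart of __CS_PREFIX above: emits a conditional CS
 * prefix compatible with -mindirect-branch-cs-prefix.
 */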
#define __CS_PREFIX(reg) \
".irp rs,r8,r9,r10,r11,r12,r13,r14,r15\n" \
".ifc \\rs," reg "\n" \
".byte 0x2e\n" \
".endif\n" \
".endr\n"
#define CALL_NOSPEC __CS_PREFIX("%V[thunk_target]") \
"call __x86_indirect_thunk_%V[thunk_target]\n"
# define THUNK_TARGET(addr) [thunk_target] "r" (addr)
#else
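
/*
 * For i386 we use the original ret-equivalent retpoline, because
 * otherwise we'd run out of registers. We don't care about CET
 * compatibility here, anyway.
 */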
# define CALL_NOSPEC \
ALTERNATIVE_2( \
ANNOTATE_RETPOLINE_SAFE "\n" \
"call *%[thunk_target]\n", \
" jmp 904f;\n" \
" .align 16\n" \
"901: call 903f;\n" \
"902: pause;\n" \
" lfence;\n" \
" jmp 902b;\n" \
" .align 16\n" \
"903: lea 4(%%esp), %%esp;\n" \
" pushl %[thunk_target];\n" \
" ret;\n" \
" .align 16\n" \
"904: call 901b;\n", \
X86_FEATURE_RETPOLINE, \
"lfence;\n" \
ANNOTATE_RETPOLINE_SAFE "\n" \
"call *%[thunk_target]\n", \
X86_FEATURE_RETPOLINE_LFENCE)
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif
#else
# define CALL_NOSPEC "call *%[thunk_target]\n"
# define THUNK_TARGET(addr) [thunk_target] "rm" (addr)
#endif /* CONFIG_MITIGATION_RETPOLINE */
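
/*
 * Usage sketch (illustrative; 'fn' is a hypothetical function pointer and
 * real call sites add their own output, input and clobber constraints):
 *
 *	asm volatile(CALL_NOSPEC : : THUNK_TARGET(fn));
 */

/* The Spectre V2 mitigation variants */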
enum spectre_v2_mitigation {
SPECTRE_V2_NONE,
SPECTRE_V2_RETPOLINE,
SPECTRE_V2_LFENCE,
SPECTRE_V2_EIBRS,
SPECTRE_V2_EIBRS_RETPOLINE,
SPECTRE_V2_EIBRS_LFENCE,
SPECTRE_V2_IBRS,
};
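
/* The Spectre V2 userspace mitigation variants */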
enum spectre_v2_user_mitigation {
SPECTRE_V2_USER_NONE,
SPECTRE_V2_USER_STRICT,
SPECTRE_V2_USER_STRICT_PREFERRED,
SPECTRE_V2_USER_PRCTL,
SPECTRE_V2_USER_SECCOMP,
};
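
/* The Speculative Store Bypass disable variants */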
enum ssb_mitigation {
SPEC_STORE_BYPASS_NONE,
SPEC_STORE_BYPASS_AUTO,
SPEC_STORE_BYPASS_DISABLE,
SPEC_STORE_BYPASS_PRCTL,
SPEC_STORE_BYPASS_SECCOMP,
};
static __always_inline
void alternative_msr_write(unsigned int msr, u64 val, unsigned int feature)
{
asm volatile(ALTERNATIVE("", "wrmsr", %c[feature])
: : "c" (msr),
"a" ((u32)val),
"d" ((u32)(val >> 32)),
[feature] "i" (feature)
: "memory");
}
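
/*
 * Set when an IBPB is still owed before returning to userspace after a
 * VM exit, as part of the VMSCAPE mitigation.
 */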
DECLARE_PER_CPU(bool, x86_ibpb_exit_to_user);
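
/* Issue a full Indirect Branch Prediction Barrier if the CPU supports it. */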
static inline void indirect_branch_prediction_barrier(void)
{
asm_inline volatile(ALTERNATIVE("", "call write_ibpb", X86_FEATURE_IBPB)
: ASM_CALL_CONSTRAINT
:: "rax", "rcx", "rdx", "memory");
}
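
/* The Intel SPEC CTRL MSR base value cache */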
extern u64 x86_spec_ctrl_base;
DECLARE_PER_CPU(u64, x86_spec_ctrl_current);
extern void update_spec_ctrl_cond(u64 val);
extern u64 spec_ctrl_current(void);
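
/*
 * With retpoline, we must use IBRS to restrict branch prediction
 * before calling into firmware.
 *
 * (Implemented as CPP macros due to header hell.)
 */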
#define firmware_restrict_branch_speculation_start() \
do { \
preempt_disable(); \
alternative_msr_write(MSR_IA32_SPEC_CTRL, \
spec_ctrl_current() | SPEC_CTRL_IBRS, \
X86_FEATURE_USE_IBRS_FW); \
alternative_msr_write(MSR_IA32_PRED_CMD, PRED_CMD_IBPB, \
X86_FEATURE_USE_IBPB_FW); \
} while (0)
#define firmware_restrict_branch_speculation_end() \
do { \
alternative_msr_write(MSR_IA32_SPEC_CTRL, \
spec_ctrl_current(), \
X86_FEATURE_USE_IBRS_FW); \
preempt_enable(); \
} while (0)
DECLARE_STATIC_KEY_FALSE(switch_to_cond_stibp);
DECLARE_STATIC_KEY_FALSE(switch_mm_cond_ibpb);
DECLARE_STATIC_KEY_FALSE(switch_mm_always_ibpb);
DECLARE_STATIC_KEY_FALSE(switch_vcpu_ibpb);
DECLARE_STATIC_KEY_FALSE(cpu_buf_idle_clear);
DECLARE_STATIC_KEY_FALSE(switch_mm_cond_l1d_flush);
extern u16 x86_verw_sel;
#include <asm/segment.h>
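
/**
 * x86_clear_cpu_buffers - Buffer clearing support for different x86 CPU
 *			   vulnerabilities
 *
 * Uses the otherwise unused and obsolete VERW instruction, which microcode
 * on affected CPUs repurposes to flush the CPU buffers when executed with
 * a memory operand. __KERNEL_DS comes from the asm/segment.h include
 * above; the "cc" clobber covers VERW modifying CFLAGS.ZF.
 */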
static __always_inline void x86_clear_cpu_buffers(void)
{
static const u16 ds = __KERNEL_DS;
asm volatile("verw %[ds]" : : [ds] "m" (ds) : "cc");
}
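
/**
 * x86_idle_clear_cpu_buffers - Buffer clearing support in idle for the
 *				MDS and TSA vulnerabilities
 *
 * Clear CPU buffers before entering idle if the cpu_buf_idle_clear key is
 * enabled, so data cannot be sampled by the sibling thread while idle.
 */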
static __always_inline void x86_idle_clear_cpu_buffers(void)
{
if (static_branch_likely(&cpu_buf_idle_clear))
x86_clear_cpu_buffers();
}
#endif /* __ASSEMBLER__ */
#endif /* _ASM_X86_NOSPEC_BRANCH_H_ */