BPF_REG_5
len += push_r64(BUF(buf, len), BPF_REG_5);
[BPF_REG_5] = {ARC_R_22, ARC_R_23},
[BPF_REG_5] = {STACK_OFFSET(BPF_R5_HI), STACK_OFFSET(BPF_R5_LO)},
const s8 *r5 = bpf2a32[BPF_REG_5];
[BPF_REG_5] = A64_R(4),
[BPF_REG_5] = LOONGARCH_GPR_A4,
[BPF_REG_5] = {MIPS_R_T5, MIPS_R_T4},
[BPF_REG_5] = MIPS_R_A4,
dst = regmap[BPF_REG_5];
[BPF_REG_5] = {HPPA_R(11), HPPA_R(12)},
reg = bpf_get_reg64_offset(regmap[BPF_REG_5], tmp, offset_sp, ctx);
LOAD_PARAM(HPPA_REG_ARG4, BPF_REG_5);
[BPF_REG_5] = HPPA_R(9),
emit_hppa_copy(regmap[BPF_REG_5], HPPA_REG_ARG4, ctx);
if (bpf_is_seen_register(ctx, bpf_to_ppc(BPF_REG_5))) {
EMIT(PPC_RAW_STW(bpf_to_ppc(BPF_REG_5) - 1, _R1, 8));
EMIT(PPC_RAW_STW(bpf_to_ppc(BPF_REG_5), _R1, 12));
ctx->b2p[BPF_REG_5] = _R22;
ctx->b2p[BPF_REG_5] = _R7;
const s8 *r5 = bpf2rv32[BPF_REG_5];
[BPF_REG_5] = {RV_REG_S4, RV_REG_S3},
[BPF_REG_5] = RV_REG_A4,
REG_SET_SEEN(BPF_REG_5);
[BPF_REG_5] = 6,
[BPF_REG_5] = O4,
[BPF_REG_5] = 0, /* R8 */
[BPF_REG_5] = offsetof(struct pt_regs, r8),
return (1 << reg) & (BIT(BPF_REG_5) |
[BPF_REG_5] = {STACK_OFFSET(40), STACK_OFFSET(44)},
const u8 *r5 = bpf2ia32[BPF_REG_5];
(1 << BPF_REG_5))
#define BPF_REG_ARG5 BPF_REG_5
#define BPF_R5 regs[BPF_REG_5]
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
for (i = BPF_REG_1; i <= BPF_REG_5; i++)
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
__mark_reg_not_init(env, &callee->regs[BPF_REG_5]);
struct bpf_reg_state *data_len_reg = &regs[BPF_REG_5];
BPF_REG_0, BPF_REG_1, BPF_REG_2, BPF_REG_3, BPF_REG_4, BPF_REG_5
node_offset_reg = BPF_REG_5;
insn_buf[0] = BPF_MOV64_IMM(BPF_REG_5, (__force __s32)GFP_ATOMIC);
insn_buf[0] = BPF_MOV64_IMM(BPF_REG_5, (__force __s32)GFP_KERNEL);
for (i = BPF_REG_1; i <= BPF_REG_5; i++) {
for (i = BPF_REG_1; i <= BPF_REG_5; i++)
for (i = BPF_REG_1; i <= BPF_REG_5; i++)
WARN_ON_ONCE(regno < BPF_REG_2 || regno > BPF_REG_5);
#define R5 BPF_REG_5
#define BPF_REG_ARG5 BPF_REG_5
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 14),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_B, BPF_REG_4, BPF_REG_5, 0),
BPF_LDX_MEM(BPF_B, BPF_REG_4, BPF_REG_5, 1),
BPF_LDX_MEM(BPF_B, BPF_REG_4, BPF_REG_5, 2),
BPF_LDX_MEM(BPF_B, BPF_REG_4, BPF_REG_5, 3),
BPF_LDX_MEM(BPF_H, BPF_REG_4, BPF_REG_5, 0),
BPF_LDX_MEM(BPF_H, BPF_REG_4, BPF_REG_5, 2),
BPF_LDX_MEM(BPF_W, BPF_REG_4, BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 14),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_4, BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 14),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_4, BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 14),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 4),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_4, BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_6, BPF_REG_5, 0),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_6, BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_3),
BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_2),
BPF_ALU64_IMM(BPF_LSH, BPF_REG_5, 2),
BPF_ALU64_IMM(BPF_ADD, BPF_REG_5, 14),
BPF_JMP_IMM(BPF_JSGE, BPF_REG_5, 0, 1),
BPF_ALU64_REG(BPF_ADD, BPF_REG_6, BPF_REG_5),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_6),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_6, BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_2),
BPF_ALU64_REG(BPF_SUB, BPF_REG_5, BPF_REG_6),
BPF_ALU64_REG(BPF_ADD, BPF_REG_5, BPF_REG_7),
BPF_MOV64_REG(BPF_REG_4, BPF_REG_5),
BPF_LDX_MEM(BPF_W, BPF_REG_6, BPF_REG_5, 0),
: __imm_insn(ld_ind, BPF_LD_IND(BPF_W, BPF_REG_5, -0x200000))
BPF_MOV64_IMM(BPF_REG_5, 0), \
BPF_MOV64_REG(BPF_REG_5, BPF_REG_10),
BPF_ATOMIC_OP(BPF_DW, BPF_CMPXCHG, BPF_REG_10, BPF_REG_5, -8),
BPF_MOV64_REG(BPF_REG_5, BPF_REG_10),
BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, BPF_REG_10, BPF_REG_5, -8),
BPF_MOV32_REG(BPF_REG_5, BPF_REG_10),
BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, BPF_REG_10, BPF_REG_5, -8),
BPF_MOV32_REG(BPF_REG_5, BPF_REG_10),
BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, BPF_REG_10, BPF_REG_5, -8),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 1, BPF_ADD | BPF_FETCH, 2, 3),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_AND | BPF_FETCH, 0x011, 0x010),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_OR | BPF_FETCH, 0x011, 0x011),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XOR | BPF_FETCH, 0x011, 0x001),
__ATOMIC_FETCH_OP_TEST(BPF_REG_4, BPF_REG_5, 0x010, BPF_XCHG, 0x011, 0x011),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_IMM(BPF_REG_5, 1),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),
BPF_MOV64_IMM(BPF_REG_5, 8), \
BPF_LD_IMM64(BPF_REG_5, 0xeeff0d413122ULL),
BPF_ALU32_REG(BPF_MUL, BPF_REG_5, BPF_REG_1),
BPF_JMP_REG(BPF_JEQ, BPF_REG_5, BPF_REG_0, 2),
BPF_MOV64_IMM(BPF_REG_5, 0),
BPF_MOV64_REG(BPF_REG_0, BPF_REG_5),