R1
#define _ASM_MACRO_1R(OP, R1, ENC) \
".macro " #OP " " #R1 "\n\t" \
"parse_r __" #R1 ", \\" #R1 "\n\t" \
#define _ASM_MACRO_1R1I(OP, R1, I2, ENC) \
".macro " #OP " " #R1 ", " #I2 "\n\t" \
"parse_r __" #R1 ", \\" #R1 "\n\t" \
#define _ASM_MACRO_2R(OP, R1, R2, ENC) \
".macro " #OP " " #R1 ", " #R2 "\n\t" \
"parse_r __" #R1 ", \\" #R1 "\n\t" \
#define _ASM_MACRO_3R(OP, R1, R2, R3, ENC) \
".macro " #OP " " #R1 ", " #R2 ", " #R3 "\n\t" \
"parse_r __" #R1 ", \\" #R1 "\n\t" \
#define _ASM_MACRO_2R_1S(OP, R1, R2, SEL3, ENC) \
".macro " #OP " " #R1 ", " #R2 ", " #SEL3 " = 0\n\t" \
"parse_r __" #R1 ", \\" #R1 "\n\t" \
unsigned long newbase = R1(regs->iir)?regs->gr[R1(regs->iir)]:0;
if (ret == 0 && modify && R1(regs->iir))
regs->gr[R1(regs->iir)] = newbase;
/*
 * Emit a register-register compare: SUBCC src1, src2 with the result
 * discarded into G0, so only the condition codes are updated.
 * NOTE(review): SUBCC/RS1/RS2/RD/G0 and the `prog` cursor are defined
 * elsewhere in this file — looks like a SPARC instruction encoder;
 * confirm against the surrounding JIT code.
 */
#define emit_cmp(R1, R2) \
*prog++ = (SUBCC | RS1(R1) | RS2(R2) | RD(G0))
/*
 * Emit a compare of src1 against a signed 13-bit immediate (S13);
 * IMMED selects the immediate form, result discarded into G0 so only
 * condition codes change.
 * NOTE(review): trailing ';' in the expansion (unlike emit_cmp) leaves
 * an empty statement at each use site — harmless but inconsistent.
 */
#define emit_cmpi(R1, IMM) \
*prog++ = (SUBCC | IMMED | RS1(R1) | S13(IMM) | RD(G0));
/*
 * Emit a bit test: ANDCC src1, src2 with the result discarded into G0 —
 * sets condition codes (zero flag) without storing the AND result.
 */
#define emit_btst(R1, R2) \
*prog++ = (ANDCC | RS1(R1) | RS2(R2) | RD(G0))
/*
 * Emit a bit test against a signed 13-bit immediate: ANDCC src1, #IMM
 * into G0 — condition codes only, no result stored.
 */
#define emit_btsti(R1, IMM) \
*prog++ = (ANDCC | IMMED | RS1(R1) | S13(IMM) | RD(G0));
/* Emit SUB: R3 = R1 - R2 (register-register form, no condition codes). */
#define emit_sub(R1, R2, R3) \
*prog++ = (SUB | RS1(R1) | RS2(R2) | RD(R3))
/* Emit SUB immediate: R3 = R1 - sign-extended 13-bit IMM. */
#define emit_subi(R1, IMM, R3) \
*prog++ = (SUB | IMMED | RS1(R1) | S13(IMM) | RD(R3))
/* Emit ADD: R3 = R1 + R2 (register-register form). */
#define emit_add(R1, R2, R3) \
*prog++ = (ADD | RS1(R1) | RS2(R2) | RD(R3))
/* Emit ADD immediate: R3 = R1 + sign-extended 13-bit IMM. */
#define emit_addi(R1, IMM, R3) \
*prog++ = (ADD | IMMED | RS1(R1) | S13(IMM) | RD(R3))
/* Emit AND: R3 = R1 & R2 (register-register form). */
#define emit_and(R1, R2, R3) \
*prog++ = (AND | RS1(R1) | RS2(R2) | RD(R3))
/* Emit AND immediate: R3 = R1 & sign-extended 13-bit IMM. */
#define emit_andi(R1, IMM, R3) \
*prog++ = (AND | IMMED | RS1(R1) | S13(IMM) | RD(R3))
/*
 * Emit a register-register compare (SUBCC into G0, condition codes
 * only) through the emit() helper, which records the word into CTX.
 * NOTE(review): this macro shares its name with an earlier
 * `*prog++`-style emit_cmp — presumably these come from two different
 * source files; verify before merging this text into a single file.
 */
#define emit_cmp(R1, R2, CTX) \
emit(SUBCC | RS1(R1) | RS2(R2) | RD(G0), CTX)
/*
 * Emit a compare of R1 against a signed 13-bit immediate via emit();
 * result goes to G0, so only condition codes are affected.
 */
#define emit_cmpi(R1, IMM, CTX) \
emit(SUBCC | IMMED | RS1(R1) | S13(IMM) | RD(G0), CTX)
/*
 * Emit a bit test (ANDCC into G0, condition codes only) via the
 * emit() helper, recording into CTX.
 */
#define emit_btst(R1, R2, CTX) \
emit(ANDCC | RS1(R1) | RS2(R2) | RD(G0), CTX)
/*
 * Emit a bit test of R1 against a signed 13-bit immediate (ANDCC into
 * G0, condition codes only) via the emit() helper.
 */
#define emit_btsti(R1, IMM, CTX) \
emit(ANDCC | IMMED | RS1(R1) | S13(IMM) | RD(G0), CTX)
state->if_agc_cfg.R1 = (u16) (ulIfAgcR1);
state->rf_agc_cfg.R1 = (u16) (ulRfAgcR1);
u16 R1;
u32 R1 = state->if_agc_cfg.R1;
if (R2 == 0 && (R1 == 0 || R3 == 0))
Vmax = (3300 * R2) / (R1 + R2);
Vmin = (3300 * Rpar) / (R1 + Rpar);
wm8739_write(sd, R1, (vol_r & 0x1f) | mute);
cl(scc, R1, TxINT_ENAB); /* force an ABORT, but don't */
wr(scc,R1,0); /* disable interrupts */
seq_printf(seq, "\tR %2.2x %2.2x XX ", InReg(scc->ctrl,R0), InReg(scc->ctrl,R1));
status = InReg(scc->ctrl,R1); /* read receiver status */
wr(scc,R1,0); /* no W/REQ operation */
or(scc,R1,INT_ALL_Rx|TxINT_ENAB|EXT_INT_ENAB); /* enable interrupts */
or(scc, R1, TxINT_ENAB); /* t_maxkeyup may have reset these */
s32 R1, R2, R3;
R1 = (s32) le32_to_cpu(il->card_alive_init.therm_r1[1]);
R1 = (s32) le32_to_cpu(il->card_alive_init.therm_r1[0]);
D_TEMP("Calib values R[1-3]: %d %d %d R4: %d\n", R1, R2, R3, vt);
if (R3 == R1) {
temperature /= (R3 - R1);
ASPEED_PINCTRL_PIN(R1),
ASPEED_SB_PINCONF(PIN_CONFIG_BIAS_PULL_DOWN, L3, R1, SCU8C, 26),
ASPEED_SB_PINCONF(PIN_CONFIG_BIAS_DISABLE, L3, R1, SCU8C, 26),
SIG_EXPR_LIST_DECL_SINGLE(R1, SDA8, I2C8, I2C8_DESC);
PIN_DECL_1(R1, GPIOK7, SDA8);
FUNC_GROUP_DECL(I2C8, P2, R1);
if (evt[i][R1] & SLG51000_STA_ILIM_FLAG_MASK)
if (!(evt[i][R1] & SLG51000_STA_ILIM_FLAG_MASK) &&
(evt[i][R1] & SLG51000_STA_VOUT_OK_FLAG_MASK)) {
if (evt[SLG51000_SCTL_EVT][R1] &
u32 R1;
up->curregs[R1] = EXT_INT_ENAB | INT_ALL_Rx | TxINT_ENAB;
regval = read_zsreg(channel, R1);
unsigned char stat = read_zsreg(channel, R1);
write_zsreg(channel, R1,
regs[R1] & ~(RxINT_MASK | TxINT_ENAB | EXT_INT_ENAB));
write_zsreg(channel, R1, regs[R1]);
r1 = read_zsreg(channel, R1);
up->curregs[R1] &= ~RxINT_MASK;
unsigned char stat = read_zsreg(channel, R1);
up->curregs[R1] |= EXT_INT_ENAB | INT_ALL_Rx | TxINT_ENAB;
up->curregs[R1] &= ~(EXT_INT_ENAB | TxINT_ENAB | RxINT_MASK);
|| (read_zsreg(uap, R1) & ALL_SNT) == 0) {
unsigned char stat = read_zsreg(uap, R1);
write_zsreg(uap, R1,
regs[R1] & ~(RxINT_MASK | TxINT_ENAB | EXT_INT_ENAB));
write_zsreg(uap, R1, regs[R1]);
write_zsreg(uap, R1, uap->curregs[1] & ~TxINT_ENAB);
write_zsreg(uap, R1, uap->curregs[1]);
write_zsreg(uap, R1, uap->curregs[1]);
r1 = read_zsreg(uap, R1);
uap->curregs[R1] &= ~RxINT_MASK;
write_zsreg(uap, R1, 0);
r1 = read_zsreg(channel, R1);
up->curregs[R1] = EXT_INT_ENAB | INT_ALL_Rx | TxINT_ENAB;
up->curregs[R1] = EXT_INT_ENAB | INT_ALL_Rx | TxINT_ENAB;
regval = read_zsreg(channel, R1);
unsigned char stat = read_zsreg(channel, R1);
write_zsreg(channel, R1,
regs[R1] & ~(RxINT_MASK | TxINT_ENAB | EXT_INT_ENAB));
write_zsreg(channel, R1, regs[R1]);
r1 = read_zsreg(channel, R1);
up->curregs[R1] &= ~RxINT_MASK;
up->curregs[R1] |= EXT_INT_ENAB | INT_ALL_Rx | TxINT_ENAB;
up->curregs[R1] &= ~(EXT_INT_ENAB | TxINT_ENAB | RxINT_MASK);
write_zsreg(zport, R1, zport->regs[1]);
write_zsreg(zport, R1, zport->regs[1]);
while (!(read_zsreg(zport, R1) & ALL_SNT) && --loops) {
write_zsreg(zport, R1, regs[1]);
status = read_zsreg(zport, R1);
write_zsreg(zport_a, R1, zport_a->regs[1]);
write_zsreg(zport, R1, zport->regs[1]);
write_zsreg(zport_a, R1, zport_a->regs[1]);
status = read_zsreg(zport, R1) & (Rx_OVR | FRM_ERR | PAR_ERR);
write_zsreg(zport, R1, zport->regs[1]);
R1(a, b, c, d, e, f, g, h, K[12], W1(12), W2(16));
R1(d, a, b, c, h, e, f, g, K[13], W1(13), W2(17));
R1(c, d, a, b, g, h, e, f, K[14], W1(14), W2(18));
R1(b, c, d, a, f, g, h, e, K[15], W1(15), W2(19));
R1(a, b, c, d, e, f, g, h, K[0], I(0), I(4));
R1(d, a, b, c, h, e, f, g, K[1], I(1), I(5));
R1(c, d, a, b, g, h, e, f, K[2], I(2), I(6));
R1(b, c, d, a, f, g, h, e, K[3], I(3), I(7));
R1(a, b, c, d, e, f, g, h, K[4], W1(4), I(8));
R1(d, a, b, c, h, e, f, g, K[5], W1(5), I(9));
R1(c, d, a, b, g, h, e, f, K[6], W1(6), I(10));
R1(b, c, d, a, f, g, h, e, K[7], W1(7), I(11));
R1(a, b, c, d, e, f, g, h, K[8], W1(8), I(12));
R1(d, a, b, c, h, e, f, g, K[9], W1(9), I(13));
R1(c, d, a, b, g, h, e, f, K[10], W1(10), I(14));
R1(b, c, d, a, f, g, h, e, K[11], W1(11), I(15));
BPF_LD_IMM64(R1, -3),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 6),
BPF_ALU64_IMM(BPF_ADD, R1, 1),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
BPF_ALU64_IMM(BPF_ADD, R1, 1),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
BPF_ALU64_IMM(BPF_ADD, R1, 1),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
BPF_LD_IMM64(R1, -3),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 4),
BPF_ALU64_IMM(BPF_ADD, R1, 2),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 2),
BPF_ALU64_IMM(BPF_ADD, R1, 2),
BPF_JMP_IMM(BPF_JSGE, R1, 0, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JGT, R1, 2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_IMM(BPF_JGT, R1, 1, 1),
BPF_LD_IMM64(R1, 2),
BPF_JMP_IMM(BPF_JLT, R1, 3, 1),
BPF_LD_IMM64(R1, 1),
BPF_JMP_IMM(BPF_JLT, R1, -1, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JGE, R1, 2, 1),
i += __bpf_ld_imm64(&insns[i], R1, dst);
BPF_LD_IMM64(R1, 2),
BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
insns[i++] = BPF_ALU32_REG(op, R1, R2);
BPF_LD_IMM64(R1, 3), /* note: this takes 2 insns */
BPF_JMP_IMM(BPF_JGT, R1, 2, -6), /* goto out */
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JGE, R1, 3, 1),
BPF_LD_IMM64(R1, 2), /* note: this takes 2 insns */
BPF_JMP_IMM(BPF_JLT, R1, 3, -6), /* goto out */
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JLE, R1, 3, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JNE, R1, 2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JEQ, R1, 3, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JSET, R1, 2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JSET, R1, 0xffffffff, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSGT, R1, R2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSLT, R2, R1, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSLT, R1, R2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSGE, R1, R2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSLE, R2, R1, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JSLE, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JGT, R1, R2, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JGT, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JLT, R2, R1, 1),
BPF_LD_IMM64(R1, -1),
BPF_JMP_REG(BPF_JLT, R2, R1, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JGE, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JGE, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JLE, R2, R1, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JLE, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JGE, R1, R2, 2),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JGE, R1, R2, 0),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JGE, R1, R2, 4),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JLE, R2, R1, 2),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JLE, R2, R1, 0),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JLE, R2, R1, 4),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JNE, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JSET, R1, R2, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_REG(BPF_JSET, R1, R2, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
BPF_MOV64_REG(R2, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
BPF_MOV64_REG(R2, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfefbbc12),
BPF_MOV64_REG(R2, R1),
BPF_LD_IMM64(R1, -17104896),
BPF_JMP_IMM(BPF_JNE, R1, -17104896, 1),
BPF_LD_IMM64(R1, 0xfefb0000),
BPF_JMP_IMM(BPF_JNE, R1, 0xfefb0000, 1),
BPF_LD_IMM64(R1, 0x7efb0000),
BPF_JMP_IMM(BPF_JNE, R1, 0x7efb0000, 1),
BPF_LD_IMM64(R1, 0x123456789abcdefULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_MOV64_REG(R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU64_REG(BPF_OR, R0, R1),
BPF_ALU64_IMM(BPF_MOV, R1, R1), \
BPF_JMP_IMM(BPF_JNE, R1, R1, 9), \
BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R9, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R9, R1),
BPF_ALU64_IMM(BPF_MOV, R1, 1), \
BPF_ATOMIC_OP(width, op, R10, R1, -8), \
BPF_JMP_IMM(BPF_JNE, R1, 1, 9), \
BPF_LD_IMM64(R1, 0x0123456789acbdefULL),\
BPF_ALU64_REG(BPF_MOV, R0, R1), \
BPF_ALU32_REG(BPF_##op, R2, R1), \
BPF_ALU64_REG(BPF_SUB, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ATOMIC_OP(BPF_W, BPF_##op, R10, R1, -4), \
BPF_ALU64_REG(BPF_SUB, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_LD_IMM64(R1, 0x0123456789acbdefULL),
BPF_ALU64_REG(BPF_MOV, R2, R1),
BPF_ATOMIC_OP(BPF_W, BPF_CMPXCHG, R10, R1, -4),
BPF_ALU64_REG(BPF_SUB, R1, R2),
BPF_ALU64_REG(BPF_MOV, R2, R1),
BPF_ALU64_REG(BPF_OR, R1, R2),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_REG(BPF_SUB, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_LD_IMM64(R1, 0xfedcba9876543210ULL),\
BPF_ALU64_REG(BPF_MOV, R3, R1), \
BPF_JMP32_IMM(BPF_##op, R0, R1, 1), \
BPF_ALU64_REG(BPF_SUB, R1, R3), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_JMP_IMM(BPF_JSET, R1, 0, 1),
BPF_JMP_IMM(BPF_JLT, R1, 0, 1),
BPF_JMP_IMM(BPF_JGE, R1, 0, 1),
BPF_JMP_IMM(BPF_JGT, R1, U32_MAX, 1),
BPF_JMP_IMM(BPF_JLE, R1, U32_MAX, 1),
BPF_JMP32_IMM(BPF_JSGT, R1, S32_MAX, 1),
BPF_JMP32_IMM(BPF_JSGE, R1, S32_MIN, 1),
BPF_JMP32_IMM(BPF_JSLT, R1, S32_MIN, 1),
BPF_JMP32_IMM(BPF_JSLE, R1, S32_MAX, 1),
BPF_JMP_REG(BPF_JEQ, R1, R1, 1),
BPF_JMP_REG(BPF_JGE, R1, R1, 1),
BPF_JMP_REG(BPF_JLE, R1, R1, 1),
BPF_JMP_REG(BPF_JSGE, R1, R1, 1),
BPF_JMP_REG(BPF_JSLE, R1, R1, 1),
BPF_JMP_REG(BPF_JNE, R1, R1, 1),
BPF_JMP_REG(BPF_JGT, R1, R1, 1),
BPF_JMP_REG(BPF_JLT, R1, R1, 1),
BPF_JMP_REG(BPF_JSGT, R1, R1, 1),
BPF_JMP_REG(BPF_JSLT, R1, R1, 1),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_ALU64_IMM(BPF_ADD, R1, 2),
BPF_ALU64_IMM(BPF_ADD, R1, 3),
BPF_ALU64_IMM(BPF_ADD, R1, 4),
BPF_ALU64_IMM(BPF_MOV, R1, 1),
BPF_STX_MEM(BPF_DW, R3, R1, -8),
BPF_JMP_REG(BPF_JNE, R0, R1, 3),
BPF_LDX_MEM(BPF_W, R2, R1, 0),
BPF_STX_MEM(BPF_W, R1, R2, 0),
BPF_LDX_MEM(BPF_W, R2, R1, 0),
BPF_STX_MEM(BPF_W, R1, R2, 0),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LDX_MEM(BPF_DW, R1, R10, -8),
BPF_ALU32_REG(BPF_MOV, R0, R1),
BPF_LDX_MEM(BPF_W, R2, R1, 0),
BPF_STX_MEM(BPF_W, R1, R2, 0),
BPF_LDX_MEM(BPF_W, R2, R1, 0),
BPF_STX_MEM(BPF_W, R1, R2, 0),
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
insns[i++] = BPF_LDX_MEM(BPF_DW, R1, R10, -8);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
insns[i++] = BPF_LDX_MEM(BPF_W, R1, R10, -4);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_STX_MEM(BPF_DW, R10, R1, -8);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 2);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
i += __bpf_ld_imm64(&insns[i], R1, (u32)dst);
insns[i++] = BPF_STX_MEM(BPF_W, R10, R1, -4);
insns[i++] = BPF_JMP32_REG(BPF_JEQ, R1, R3, 2);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
i += __bpf_ld_imm64(&insn[i], R1, res);
insn[i++] = BPF_MOV64_REG(R1, R10);
i += __bpf_ld_imm64(&insn[i], R1, mem);
i += __bpf_ld_imm64(&insn[i], R1, mem);
insn[i++] = BPF_ALU64_REG(BPF_OP(op), R1, R10);
insn[i++] = BPF_JMP_REG(BPF_JEQ, R0, R1, 2);
insn[i++] = BPF_JMP32_REG(BPF_JEQ, R0, R1, 2);
i += __bpf_ld_imm64(&insn[i], R1, imm);
insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
i += __bpf_ld_imm64(&insn[i], R1, imm);
insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_JMP_IMM(op, R1, imm, 1);
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_JMP32_IMM(op, R1, imm, 1);
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_JMP_REG(op, R1, R2, 1);
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_JMP32_REG(op, R1, R2, 1);
insns[1] = BPF_ALU64_IMM(BPF_MOV, R1, r1);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JEQ, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JNE, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSET, R1, 0x82, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGT, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JGE, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLT, R1, 0x80000000, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JLE, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGT, R1, -2, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSGE, R1, -2, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLT, R1, -1, 0);
struct bpf_insn jmp = BPF_JMP_IMM(BPF_JSLE, R1, -1, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JEQ, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JNE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JSET, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JGT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JGE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JLT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JLE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JSGE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP_REG(BPF_JSLE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JEQ, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JNE, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSET, R1, 0x82, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGT, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JGE, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLT, R1, 0x80000000, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JLE, R1, 1234, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGT, R1, -2, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSGE, R1, -2, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLT, R1, -1, 0);
struct bpf_insn jmp = BPF_JMP32_IMM(BPF_JSLE, R1, -1, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JEQ, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JNE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSET, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JGE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JLE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSGE, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLT, R1, R2, 0);
struct bpf_insn jmp = BPF_JMP32_REG(BPF_JSLE, R1, R2, 0);
BPF_ALU64_IMM(BPF_MOV, R1, 1),
BPF_ALU64_IMM(BPF_ADD, R1, 2),
BPF_ALU64_REG(BPF_SUB, R1, R2),
BPF_ALU64_IMM(BPF_ADD, R1, -1),
BPF_ALU64_IMM(BPF_MUL, R1, 3),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_ALU64_IMM(BPF_MOV, R1, -1),
BPF_ALU64_REG(BPF_MUL, R1, R2),
BPF_JMP_IMM(BPF_JEQ, R1, 0xfffffffd, 1),
BPF_ALU32_IMM(BPF_MOV, R1, -1),
BPF_ALU64_REG(BPF_MUL, R1, R2),
BPF_ALU64_IMM(BPF_RSH, R1, 8),
BPF_JMP_IMM(BPF_JEQ, R1, 0x2ffffff, 1),
BPF_ALU64_IMM(BPF_MOV, R1, -1),
BPF_ALU32_REG(BPF_MUL, R1, R2),
BPF_ALU64_IMM(BPF_RSH, R1, 8),
BPF_JMP_IMM(BPF_JEQ, R1, 0xffffff, 1),
BPF_ALU64_IMM(BPF_MOV, R1, 1),
BPF_ALU64_IMM(BPF_ADD, R1, 20),
BPF_ALU64_IMM(BPF_SUB, R1, 10),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_ALU64_REG(BPF_ADD, R1, R0),
BPF_ALU64_REG(BPF_ADD, R1, R1),
BPF_ALU64_REG(BPF_ADD, R1, R2),
BPF_ALU64_REG(BPF_ADD, R1, R3),
BPF_ALU64_REG(BPF_ADD, R1, R4),
BPF_ALU64_REG(BPF_ADD, R1, R5),
BPF_ALU64_REG(BPF_ADD, R1, R6),
BPF_ALU64_REG(BPF_ADD, R1, R7),
BPF_ALU64_REG(BPF_ADD, R1, R8),
BPF_ALU64_REG(BPF_ADD, R1, R9), /* R1 == 456 */
BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
BPF_ALU64_REG(BPF_ADD, R2, R1),
BPF_ALU64_REG(BPF_ADD, R3, R1),
BPF_ALU64_REG(BPF_ADD, R4, R1),
BPF_ALU64_REG(BPF_ADD, R5, R1),
BPF_ALU64_REG(BPF_ADD, R6, R1),
BPF_ALU64_REG(BPF_ADD, R7, R1),
BPF_ALU64_REG(BPF_ADD, R8, R1),
BPF_ALU64_REG(BPF_ADD, R9, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU64_IMM(BPF_ADD, R1, 10),
BPF_ALU32_REG(BPF_ADD, R0, R1),
BPF_ALU32_REG(BPF_ADD, R1, R0),
BPF_ALU32_REG(BPF_ADD, R1, R1),
BPF_ALU32_REG(BPF_ADD, R1, R2),
BPF_ALU32_REG(BPF_ADD, R1, R3),
BPF_ALU32_REG(BPF_ADD, R1, R4),
BPF_ALU32_REG(BPF_ADD, R1, R5),
BPF_ALU32_REG(BPF_ADD, R1, R6),
BPF_ALU32_REG(BPF_ADD, R1, R7),
BPF_ALU32_REG(BPF_ADD, R1, R8),
BPF_ALU32_REG(BPF_ADD, R1, R9), /* R1 == 456 */
BPF_JMP_IMM(BPF_JEQ, R1, 456, 1),
BPF_ALU32_REG(BPF_ADD, R2, R1),
BPF_ALU32_REG(BPF_ADD, R3, R1),
BPF_ALU32_REG(BPF_ADD, R4, R1),
BPF_ALU32_REG(BPF_ADD, R5, R1),
BPF_ALU32_REG(BPF_ADD, R6, R1),
BPF_ALU32_REG(BPF_ADD, R7, R1),
BPF_ALU32_REG(BPF_ADD, R8, R1),
BPF_ALU32_REG(BPF_ADD, R9, R1),
BPF_ALU64_IMM(BPF_MOV, R1, 1),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_ALU64_REG(BPF_SUB, R1, R0),
BPF_ALU64_REG(BPF_SUB, R1, R2),
BPF_ALU64_REG(BPF_SUB, R1, R3),
BPF_ALU64_REG(BPF_SUB, R1, R4),
BPF_ALU64_REG(BPF_SUB, R1, R5),
BPF_ALU64_REG(BPF_SUB, R1, R6),
BPF_ALU64_REG(BPF_SUB, R1, R7),
BPF_ALU64_REG(BPF_SUB, R1, R8),
BPF_ALU64_REG(BPF_SUB, R1, R9),
BPF_ALU64_IMM(BPF_SUB, R1, 10),
BPF_ALU64_REG(BPF_SUB, R2, R1),
BPF_ALU64_REG(BPF_SUB, R3, R1),
BPF_ALU64_REG(BPF_SUB, R4, R1),
BPF_ALU64_REG(BPF_SUB, R5, R1),
BPF_ALU64_REG(BPF_SUB, R6, R1),
BPF_ALU64_REG(BPF_SUB, R7, R1),
BPF_ALU64_REG(BPF_SUB, R8, R1),
BPF_ALU64_REG(BPF_SUB, R9, R1),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_ALU64_REG(BPF_XOR, R1, R1),
BPF_JMP_REG(BPF_JEQ, R0, R1, 1),
BPF_ALU64_IMM(BPF_MOV, R1, -1),
BPF_ALU64_REG(BPF_SUB, R1, R1),
BPF_JMP_REG(BPF_JEQ, R1, R2, 1),
BPF_ALU64_IMM(BPF_MOV, R1, -1),
BPF_ALU64_IMM(BPF_MOV, R1, 1),
BPF_ALU64_REG(BPF_SUB, R1, R1),
BPF_ALU64_IMM(BPF_MOV, R1, 1),
BPF_ALU64_REG(BPF_MUL, R0, R1),
BPF_ALU64_REG(BPF_MUL, R1, R0),
BPF_ALU64_REG(BPF_MUL, R1, R2),
BPF_ALU64_REG(BPF_MUL, R1, R3),
BPF_ALU64_REG(BPF_MUL, R1, R4),
BPF_ALU64_REG(BPF_MUL, R1, R5),
BPF_ALU64_REG(BPF_MUL, R1, R6),
BPF_ALU64_REG(BPF_MUL, R1, R7),
BPF_ALU64_REG(BPF_MUL, R1, R8),
BPF_ALU64_REG(BPF_MUL, R1, R9),
BPF_ALU64_IMM(BPF_MUL, R1, 10),
BPF_ALU64_REG(BPF_MOV, R2, R1),
BPF_ALU64_IMM(BPF_LSH, R1, 32),
BPF_ALU64_IMM(BPF_ARSH, R1, 32),
BPF_JMP_IMM(BPF_JEQ, R1, 0xebb90000, 1),
BPF_ALU64_REG(BPF_MUL, R2, R1),
BPF_MOV64_REG(R1, R0),
BPF_MOV64_REG(R2, R1),
BPF_ALU64_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_MOV64_REG(R1, R0),
BPF_MOV64_REG(R2, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_MOV64_REG(R1, R0),
BPF_MOV64_REG(R2, R1),
BPF_LD_IMM64(R1, 0x0LL),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_MOV64_IMM(R1, 1),
BPF_ALU32_REG(BPF_RSH, R0, R1),
BPF_JMP32_IMM(BPF_JEQ, R1, 0, 3),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_JMP32_IMM(BPF_JNE, R1, 0, 1),
BPF_LD_IMM64(R1, 0x567800001234LL),
BPF_MOV64_REG(R2, R1),
i = __bpf_ld_imm64(insns, R1, 0x0123456789abcdefULL);
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG(BPF_MOV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
BPF_ALU32_REG(BPF_MOV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
BPF_ALU64_REG(BPF_MOV, R0, R1),
insns[i++] = BPF_ALU32_REG(op, R0, R1);
insns[i++] = BPF_ALU64_REG(op, R0, R1);
BPF_MOVSX32_REG(R1, R3, 8),
BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
BPF_MOVSX32_REG(R1, R3, 16),
BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
BPF_MOVSX64_REG(R1, R3, 8),
BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
BPF_MOVSX64_REG(R1, R3, 16),
BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
BPF_MOVSX64_REG(R1, R3, 32),
BPF_JMP_REG(BPF_JEQ, R2, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG(BPF_ADD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
BPF_ALU32_REG(BPF_ADD, R0, R1),
BPF_LD_IMM64(R1, 4294967294U),
BPF_ALU32_REG(BPF_ADD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_LD_IMM64(R1, 4294967294U),
BPF_ALU64_REG(BPF_ADD, R0, R1),
BPF_LD_IMM64(R1, 4294967296ULL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU32_REG(BPF_SUB, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
BPF_ALU32_REG(BPF_SUB, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967294U),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 3),
BPF_ALU32_REG(BPF_MUL, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0x7FFFFFF8),
BPF_ALU32_REG(BPF_MUL, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, -1),
BPF_ALU32_REG(BPF_MUL, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 3),
BPF_ALU64_REG(BPF_MUL, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
BPF_ALU64_REG(BPF_MUL, R0, R1),
BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
BPF_ALU64_REG(BPF_MUL, R0, R1),
BPF_LD_IMM64(R1, 0x123456789abcdef0LL),
BPF_ALU64_REG(BPF_MUL, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG(BPF_DIV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967295U),
BPF_ALU32_REG(BPF_DIV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG(BPF_DIV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2147483647),
BPF_ALU64_REG(BPF_DIV, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG(BPF_MOD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 4294967293U),
BPF_ALU32_REG(BPF_MOD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG(BPF_MOD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2147483645),
BPF_ALU64_REG(BPF_MOD, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG_OFF(BPF_DIV, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG_OFF(BPF_DIV, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG_OFF(BPF_MOD, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG_OFF(BPF_MOD, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG(BPF_AND, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_ALU32_REG(BPF_AND, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG(BPF_AND, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_ALU64_REG(BPF_AND, R0, R1),
BPF_LD_IMM64(R1, 0x0000000080a0c0e0LL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
insn[i++] = BPF_ALU64_REG(BPF_MOV, R1, R3);
insn[i++] = BPF_ALU32_IMM(op, R1, imm);
BPF_LD_IMM64(R1, 0x00000000090b0d0fLL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
insn[i++] = BPF_ALU32_REG(op, R1, R2);
BPF_LD_IMM64(R1, 0x0123456780a0c0e0LL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU32_REG(BPF_OR, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_ALU32_REG(BPF_OR, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 2),
BPF_ALU64_REG(BPF_OR, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_ALU64_REG(BPF_OR, R0, R1),
insn[i++] = BPF_ALU64_IMM(op, R1, imm);
insn[i++] = BPF_ALU64_REG(op, R1, R2);
BPF_LD_IMM64(R1, 0x00000000f9fbfdffLL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_LD_IMM64(R1, 0x012345678fafcfefLL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_LD_IMM64(R1, 0xfffffffff9fbfdffLL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 6),
insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R4, 1);
BPF_ALU32_REG(BPF_XOR, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_ALU32_REG(BPF_XOR, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 6),
BPF_ALU64_REG(BPF_XOR, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_ALU64_REG(BPF_XOR, R0, R1),
BPF_LD_IMM64(R1, 0x00000000795b3d1fLL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_LD_IMM64(R1, 0x0123456786a4c2e0LL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_LD_IMM64(R1, 0xfedcba98795b3d1fLL),
BPF_JMP_REG(BPF_JEQ, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU32_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 31),
BPF_ALU32_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU32_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 31),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 36),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 36),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 32),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 32),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_LSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU32_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 31),
BPF_ALU32_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 20),
BPF_ALU32_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 1),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 31),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 36),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 36),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 32),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 32),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_RSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 7),
BPF_ALU32_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 40),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 12),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 36),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 36),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 32),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 32),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
BPF_ALU32_IMM(BPF_MOV, R1, 0),
BPF_ALU64_REG(BPF_ARSH, R0, R1),
insn[i++] = BPF_ALU64_IMM(BPF_MOV, R1, val);
insn[i++] = BPF_ALU32_REG(op, R1, R1);
insn[i++] = BPF_ALU64_REG(op, R1, R1);
insn[i++] = BPF_JMP_REG(BPF_JEQ, R1, R2, 1);
BPF_ALU64_REG(BPF_MOV, R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
BPF_ALU64_REG(BPF_MOV, R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
BPF_ALU64_REG(BPF_MOV, R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
BPF_ALU64_REG(BPF_MOV, R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
BPF_ALU64_REG(BPF_MOV, R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
BPF_ALU64_REG(BPF_MOV, R1, R0),
BPF_ALU64_IMM(BPF_RSH, R1, 32),
BPF_ALU32_REG(BPF_ADD, R0, R1), /* R1 = 0 */
BPF_LD_IMM64(R1, 0x0102030405060708ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8182838485868788ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_ALU64_IMM(BPF_ADD, R1, 512),
BPF_STX_MEM(BPF_B, R1, R2, -256),
BPF_LDX_MEM(BPF_B, R0, R1, -256),
BPF_STX_MEM(BPF_B, R1, R2, 256),
BPF_LDX_MEM(BPF_B, R0, R1, 256),
BPF_STX_MEM(BPF_B, R1, R2, 4096),
BPF_LDX_MEM(BPF_B, R0, R1, 4096),
BPF_LD_IMM64(R1, 0x0102030405060708ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8182838485868788ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_ALU64_IMM(BPF_ADD, R1, 512),
BPF_STX_MEM(BPF_H, R1, R2, -256),
BPF_LDX_MEM(BPF_H, R0, R1, -256),
BPF_STX_MEM(BPF_H, R1, R2, 256),
BPF_LDX_MEM(BPF_H, R0, R1, 256),
BPF_STX_MEM(BPF_H, R1, R2, 8192),
BPF_LDX_MEM(BPF_H, R0, R1, 8192),
BPF_STX_MEM(BPF_H, R1, R2, 13),
BPF_LDX_MEM(BPF_H, R0, R1, 13),
BPF_LD_IMM64(R1, 0x0102030405060708ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8182838485868788ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_ALU64_IMM(BPF_ADD, R1, 512),
BPF_STX_MEM(BPF_W, R1, R2, -256),
BPF_LDX_MEM(BPF_W, R0, R1, -256),
BPF_STX_MEM(BPF_W, R1, R2, 256),
BPF_LDX_MEM(BPF_W, R0, R1, 256),
BPF_STX_MEM(BPF_W, R1, R2, 16384),
BPF_LDX_MEM(BPF_W, R0, R1, 16384),
BPF_STX_MEM(BPF_W, R1, R2, 13),
BPF_LDX_MEM(BPF_W, R0, R1, 13),
BPF_LD_IMM64(R1, 0x0102030405060708ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_JMP_REG(BPF_JNE, R0, R1, 1),
BPF_LD_IMM64(R1, 0x8182838485868788ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_JMP_REG(BPF_JNE, R0, R1, 1),
BPF_ALU64_IMM(BPF_ADD, R1, 512),
BPF_STX_MEM(BPF_DW, R1, R2, -256),
BPF_LDX_MEM(BPF_DW, R0, R1, -256),
BPF_STX_MEM(BPF_DW, R1, R2, 256),
BPF_LDX_MEM(BPF_DW, R0, R1, 256),
BPF_STX_MEM(BPF_DW, R1, R2, 32760),
BPF_LDX_MEM(BPF_DW, R0, R1, 32760),
BPF_STX_MEM(BPF_DW, R1, R2, 13),
BPF_LDX_MEM(BPF_DW, R0, R1, 13),
BPF_LD_IMM64(R1, 0xdead0000000000f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0xdead00000000f123ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x00000000deadbeefULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0x8090a0b0c0d0e0f0ULL),
BPF_STX_MEM(BPF_DW, R10, R1, -8),
BPF_LD_IMM64(R1, 0xffLL),
BPF_STX_MEM(BPF_B, R10, R1, -40),
BPF_LD_IMM64(R1, 0xffffLL),
BPF_STX_MEM(BPF_H, R10, R1, -40),
BPF_LD_IMM64(R1, 0xffffffffLL),
BPF_STX_MEM(BPF_W, R10, R1, -40),
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_LD_IMM64(R1, 0x0123456789abcdefLL),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R10), \
BPF_ALU64_REG(BPF_SUB, R0, R1), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_LD_IMM64(R1, (update) | BPF_ATOMIC_POISON(width)), \
BPF_ATOMIC_OP(width, op, R10, R1, -40), \
BPF_ALU64_REG(BPF_MOV, R1, R0), \
BPF_ALU64_IMM(BPF_RSH, R1, 32), \
BPF_ALU64_REG(BPF_OR, R0, R1), \
BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_JMP_REG(BPF_JNE, R0, R1, 1),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_JMP_REG(BPF_JNE, R0, R1, 1),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_JMP_REG(BPF_JNE, R0, R1, 1),
BPF_ALU64_REG(BPF_SUB, R0, R1),
BPF_LD_IMM64(R1, 0x0123456789abcdefULL),
BPF_ALU64_REG(BPF_MOV, R0, R1),
BPF_STX_MEM(BPF_DW, R10, R1, -40),
BPF_ALU32_IMM(BPF_MOV, R1, 4321),
BPF_JMP32_REG(BPF_JEQ, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 1234),
BPF_JMP32_REG(BPF_JEQ, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 1234),
BPF_JMP32_REG(BPF_JNE, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 4321),
BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 7),
BPF_JMP32_REG(BPF_JSET, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 8 | 2),
BPF_JMP32_REG(BPF_JNE, R0, R1, 1),
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_ALU64_IMM(op, R1, imm);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_JMP32_REG(BPF_JGT, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
BPF_JMP32_REG(BPF_JGT, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_JMP32_REG(BPF_JGE, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
BPF_JMP32_REG(BPF_JGE, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
BPF_JMP32_REG(BPF_JLT, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 0xffffffff),
BPF_JMP32_REG(BPF_JLT, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffd),
BPF_JMP32_REG(BPF_JLE, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, 0xfffffffe),
BPF_JMP32_REG(BPF_JLE, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
BPF_JMP32_REG(BPF_JSGT, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
BPF_JMP32_REG(BPF_JSGT, R0, R1, 1),
i += __bpf_ld_imm64(&insns[i], R1, dst);
insns[i++] = BPF_ALU32_IMM(op, R1, imm);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
BPF_JMP32_REG(BPF_JSGE, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
BPF_JMP32_REG(BPF_JSGE, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
BPF_JMP32_REG(BPF_JSLT, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, -12345677),
BPF_JMP32_REG(BPF_JSLT, R0, R1, 1),
BPF_ALU32_IMM(BPF_MOV, R1, -12345679),
BPF_JMP32_REG(BPF_JSLE, R0, R1, 2),
BPF_ALU32_IMM(BPF_MOV, R1, -12345678),
BPF_JMP32_REG(BPF_JSLE, R0, R1, 1),
BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
BPF_JMP_IMM(BPF_JSLT, R1, -1, 1),
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
BPF_JMP_IMM(BPF_JSGT, R1, -2, 1),
i += __bpf_ld_imm64(&insns[i], R1, dst);
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
BPF_JMP_IMM(BPF_JSGT, R1, -1, 1),
BPF_LD_IMM64(R1, 0xfffffffffffffffeLL),
BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
insns[i++] = BPF_ALU64_REG(op, R1, R2);
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
insns[i++] = BPF_JMP_REG(BPF_JEQ, R1, R3, 1);
BPF_JMP_IMM(BPF_JSLE, R1, -1, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 6),
BPF_ALU64_IMM(BPF_SUB, R1, 1),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
BPF_ALU64_IMM(BPF_SUB, R1, 1),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
BPF_ALU64_IMM(BPF_SUB, R1, 1),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
BPF_LD_IMM64(R1, 3),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 4),
BPF_ALU64_IMM(BPF_SUB, R1, 2),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 2),
BPF_ALU64_IMM(BPF_SUB, R1, 2),
BPF_JMP_IMM(BPF_JSLE, R1, 0, 1),
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
BPF_JMP_IMM(BPF_JSGE, R1, -2, 1),
BPF_LD_IMM64(R1, 0xffffffffffffffffLL),
BPF_JMP_IMM(BPF_JSGE, R1, -1, 1),
u8 bval, R1 = 0, R2;
while ((R1 < 0x0b || R1 > 0x0e) && R1 != 0x12 && count <= 50000) {
R1 = bval & 0x1F;
if (R2 != R1)
R1 = R2;