R8
R(R8);
BUILD_KVM_GPR_ACCESSORS(r8, R8)
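/*
 * BUILD_KVM_GPR_ACCESSORS(r8, R8) is presumably x86 KVM's accessor
 * generator (arch/x86/kvm/kvm_cache_regs.h): it expands into
 * kvm_r8_read()/kvm_r8_write() helpers for the cached VCPU_REGS_R8 slot.
 */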
COPY(R8); COPY(R9); COPY(R10); COPY(R11);
case R8:
[R8 >> 3] = HOST_R8,
case R8:
PUTREG(R8, r8);
GETREG(R8, r8);
DEFINE_LONGS(HOST_R8, R8);
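/*
 * The case R8 / PUTREG / GETREG / DEFINE_LONGS(HOST_R8, R8) fragments look
 * like User-Mode Linux (arch/x86/um) ptrace glue: mapping the userspace R8
 * offset to the HOST_R8 slot and copying r8 between the two register views.
 * Provenance is assumed from the macro names, not shown here.
 */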
wm8739_write(sd, R8, 0x020);
wm8739_write(sd, R8, 0x000);
wm8739_write(sd, R8, 0x018);
wm8739_write(sd, R8, 0x000);
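/*
 * The wm8739_write() calls rewrite a single codec register (R8) with
 * different values; in the wm8739 V4L2 driver that register controls the
 * audio sampling setup, so the three values presumably correspond to the
 * driver's supported audio clock frequencies (assumption, not shown above).
 */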
#define S8(_p, _m, _v) W8(_p, _m, R8(_p, _m) | (_v))
#define C8(_p, _m, _v) W8(_p, _m, R8(_p, _m) & ~(_v))
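/*
 * Note the different meaning of "R8" here: R8(_p, _m) is an 8-bit register
 * read accessor, not the CPU register. S8() sets bits by read-modify-write
 * (read with R8(), OR in _v, write back with W8()); C8() clears bits the
 * same way with & ~(_v).
 */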
BPF_ALU64_IMM(BPF_MOV, R8, R8), \
BPF_JMP_IMM(BPF_JNE, R8, R8, 2), \
BPF_TEST_CLOBBER_ALU(ALU64_IMM, AND, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, OR, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, XOR, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, LSH, R8, 12),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, RSH, R8, 12),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, ARSH, R8, 12),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, ADD, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, SUB, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, MUL, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, DIV, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_IMM, MOD, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, AND, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, OR, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, XOR, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, LSH, R8, 12),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, RSH, R8, 12),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, ARSH, R8, 12),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, ADD, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, SUB, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, MUL, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, DIV, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU32_IMM, MOD, R8, 123456789),
BPF_TEST_CLOBBER_ALU(ALU64_REG, AND, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, OR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, XOR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, LSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, RSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, ARSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, ADD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, SUB, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, MUL, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, DIV, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU64_REG, MOD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, AND, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, OR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, XOR, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, LSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, RSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, ARSH, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, ADD, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, SUB, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, MUL, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, DIV, R8, R1),
BPF_TEST_CLOBBER_ALU(ALU32_REG, MOD, R8, R1),
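/*
 * Hypothetical sketch, not the in-tree macro: what a BPF_TEST_CLOBBER_ALU
 * style entry is checking. A 32-bit JIT may implement complex 64-bit ALU
 * operations (MUL, DIV, MOD, shifts) by calling out to helper functions,
 * and a buggy implementation can clobber unrelated BPF registers around
 * that call. The idea: park a sentinel in a scratch register, run the
 * operation under test on another register, and only return the expected
 * value if the sentinel survived. The struct bpf_test layout, the INTERNAL
 * flag and the R0..R9 aliases are assumed from lib/test_bpf.c.
 */
#include <linux/filter.h>

#define BPF_TEST_CLOBBER_ALU_SKETCH(alu, op, dst, src)			\
	{								\
		#alu "_" #op ": " #dst " op must not clobber R9",	\
		.u.insns_int = {					\
			/* Park a sentinel in R9 */			\
			BPF_ALU64_IMM(BPF_MOV, R9, 123456789),		\
			/* Run the operation under test */		\
			BPF_##alu(BPF_##op, dst, src),			\
			/* Fail (return 0) unless R9 is intact */	\
			BPF_JMP_IMM(BPF_JEQ, R9, 123456789, 1),		\
			BPF_ALU64_IMM(BPF_MOV, R9, 0),			\
			BPF_MOV64_REG(R0, R9),				\
			BPF_EXIT_INSN(),				\
		},							\
		INTERNAL,						\
		{ },							\
		{ { 0, 123456789 } },					\
	}
/* Instantiated like the entries above, e.g.:
 * BPF_TEST_CLOBBER_ALU_SKETCH(ALU64_REG, MUL, R8, R1),
 */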
BPF_ALU64_IMM(BPF_MOV, R8, 8), \
BPF_JMP_IMM(BPF_JNE, R8, 8, 2), \
BPF_ALU64_IMM(BPF_MOV, R8, 8),
BPF_ALU64_IMM(BPF_ADD, R8, 20),
BPF_ALU64_IMM(BPF_SUB, R8, 10),
BPF_ALU64_REG(BPF_ADD, R0, R8),
BPF_ALU64_REG(BPF_ADD, R1, R8),
BPF_ALU64_REG(BPF_ADD, R2, R8),
BPF_ALU64_REG(BPF_ADD, R3, R8),
BPF_ALU64_REG(BPF_ADD, R4, R8),
BPF_ALU64_REG(BPF_ADD, R5, R8),
BPF_ALU64_REG(BPF_ADD, R6, R8),
BPF_ALU64_REG(BPF_ADD, R7, R8),
BPF_ALU64_REG(BPF_ADD, R8, R0),
BPF_ALU64_REG(BPF_ADD, R8, R1),
BPF_ALU64_REG(BPF_ADD, R8, R2),
BPF_ALU64_REG(BPF_ADD, R8, R3),
BPF_ALU64_REG(BPF_ADD, R8, R4),
BPF_ALU64_REG(BPF_ADD, R8, R5),
BPF_ALU64_REG(BPF_ADD, R8, R6),
BPF_ALU64_REG(BPF_ADD, R8, R7),
BPF_ALU64_REG(BPF_ADD, R8, R8),
BPF_ALU64_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
BPF_ALU64_REG(BPF_ADD, R9, R8),
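/*
 * Only the R8-related lines of the 64-bit ADD test appear to be shown here:
 * R8 is seeded with a small constant, every register pair is cross-added,
 * and the final JEQ checks that R8 reaches the expected sum (985799). The
 * ALU32 ADD, SUB and MUL fragments below appear to follow the same pattern.
 */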
BPF_ALU32_IMM(BPF_MOV, R8, 8),
BPF_ALU64_IMM(BPF_ADD, R8, 10),
BPF_ALU32_REG(BPF_ADD, R0, R8),
BPF_ALU32_REG(BPF_ADD, R1, R8),
BPF_ALU32_REG(BPF_ADD, R2, R8),
BPF_ALU32_REG(BPF_ADD, R3, R8),
BPF_ALU32_REG(BPF_ADD, R4, R8),
BPF_ALU32_REG(BPF_ADD, R5, R8),
BPF_ALU32_REG(BPF_ADD, R6, R8),
BPF_ALU32_REG(BPF_ADD, R7, R8),
BPF_ALU32_REG(BPF_ADD, R8, R0),
BPF_ALU32_REG(BPF_ADD, R8, R1),
BPF_ALU32_REG(BPF_ADD, R8, R2),
BPF_ALU32_REG(BPF_ADD, R8, R3),
BPF_ALU32_REG(BPF_ADD, R8, R4),
BPF_ALU32_REG(BPF_ADD, R8, R5),
BPF_ALU32_REG(BPF_ADD, R8, R6),
BPF_ALU32_REG(BPF_ADD, R8, R7),
BPF_ALU32_REG(BPF_ADD, R8, R8),
BPF_ALU32_REG(BPF_ADD, R8, R9), /* R8 == 985799 */
BPF_JMP_IMM(BPF_JEQ, R8, 985799, 1),
BPF_ALU32_REG(BPF_ADD, R9, R8),
BPF_ALU64_IMM(BPF_MOV, R8, 8),
BPF_ALU64_REG(BPF_SUB, R0, R8),
BPF_ALU64_REG(BPF_SUB, R1, R8),
BPF_ALU64_REG(BPF_SUB, R2, R8),
BPF_ALU64_REG(BPF_SUB, R3, R8),
BPF_ALU64_REG(BPF_SUB, R4, R8),
BPF_ALU64_REG(BPF_SUB, R5, R8),
BPF_ALU64_REG(BPF_SUB, R6, R8),
BPF_ALU64_REG(BPF_SUB, R7, R8),
BPF_ALU64_REG(BPF_SUB, R8, R0),
BPF_ALU64_REG(BPF_SUB, R8, R1),
BPF_ALU64_REG(BPF_SUB, R8, R2),
BPF_ALU64_REG(BPF_SUB, R8, R3),
BPF_ALU64_REG(BPF_SUB, R8, R4),
BPF_ALU64_REG(BPF_SUB, R8, R5),
BPF_ALU64_REG(BPF_SUB, R8, R6),
BPF_ALU64_REG(BPF_SUB, R8, R7),
BPF_ALU64_REG(BPF_SUB, R8, R9),
BPF_ALU64_IMM(BPF_SUB, R8, 10),
BPF_ALU64_REG(BPF_SUB, R9, R8),
BPF_ALU64_REG(BPF_SUB, R0, R8),
BPF_ALU64_IMM(BPF_MOV, R8, -1),
BPF_ALU64_REG(BPF_XOR, R8, R8),
BPF_JMP_REG(BPF_JEQ, R7, R8, 1),
BPF_ALU64_REG(BPF_SUB, R8, R8),
BPF_JMP_REG(BPF_JEQ, R9, R8, 1),
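/*
 * Zeroing-idiom check: after seeding R8 with -1, both XOR R8,R8 and
 * SUB R8,R8 must leave R8 == 0; the JEQ comparisons against R7 and R9
 * presumably test against registers zeroed elsewhere in the same program
 * (only the R8 lines are shown).
 */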
BPF_ALU64_IMM(BPF_MOV, R8, 8),
BPF_ALU64_REG(BPF_MUL, R0, R8),
BPF_ALU64_REG(BPF_MUL, R1, R8),
BPF_ALU64_REG(BPF_MUL, R2, R8),
BPF_MOV64_REG(R8, R7),
BPF_MOV64_REG(R9, R8),
BPF_ALU64_IMM(BPF_MOV, R8, 0),
BPF_ALU64_REG(BPF_ADD, R0, R8),
BPF_MOV64_REG(R8, R7),
BPF_MOV64_REG(R9, R8),
BPF_ALU32_IMM(BPF_MOV, R8, 0),
BPF_ALU64_REG(BPF_ADD, R0, R8),
BPF_MOV64_REG(R8, R7),
BPF_MOV64_REG(R9, R8),
BPF_LD_IMM64(R8, 0x0LL),
BPF_ALU64_REG(BPF_ADD, R0, R8),
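/*
 * The three trailing fragments repeat one pattern with R8 cleared three
 * different ways: ALU64 MOV immediate, ALU32 MOV immediate, and
 * BPF_LD_IMM64 (which, unlike the MOV forms, encodes a full 64-bit
 * immediate and occupies two instruction slots). Each variant is then
 * added into R0, presumably to confirm it really produced zero.
 */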