IMM
mtfsfi(unsigned int crfD, unsigned int IMM)
__FPU_FPSCR |= (IMM & 0xf) << ((7 - crfD) << 2);
printk("%s: %d %x: %08lx\n", __func__, crfD, IMM, __FPU_FPSCR);
/* Emit a compare-with-immediate: SUBCC R1 - simm13(IMM) -> %g0 (result
 * discarded, only condition codes set).  NOTE(review): appears to be SPARC
 * BPF JIT code writing one encoded instruction word into *prog — confirm
 * IMM fits in a signed 13-bit field before use. */
#define emit_cmpi(R1, IMM) \
*prog++ = (SUBCC | IMMED | RS1(R1) | S13(IMM) | RD(G0));
/* Emit a bit-test-with-immediate: ANDCC R1 & simm13(IMM) -> %g0 (result
 * discarded, condition codes set for a following branch).  NOTE(review):
 * writes a single encoded instruction word into *prog; IMM is presumably
 * limited to a signed 13-bit immediate — verify at call sites. */
#define emit_btsti(R1, IMM) \
*prog++ = (ANDCC | IMMED | RS1(R1) | S13(IMM) | RD(G0));
/* Emit R3 = R1 - simm13(IMM).  Unlike emit_cmpi above, this intentionally
 * has no trailing semicolon — callers supply it. */
#define emit_subi(R1, IMM, R3) \
*prog++ = (SUB | IMMED | RS1(R1) | S13(IMM) | RD(R3))
/* Emit R3 = R1 + simm13(IMM).  No trailing semicolon; callers supply it. */
#define emit_addi(R1, IMM, R3) \
*prog++ = (ADD | IMMED | RS1(R1) | S13(IMM) | RD(R3))
/* Emit R3 = R1 & simm13(IMM).  No trailing semicolon; callers supply it. */
#define emit_andi(R1, IMM, R3) \
*prog++ = (AND | IMMED | RS1(R1) | S13(IMM) | RD(R3))
/* Compare-with-immediate via the emit() helper: SUBCC R1 - simm13(IMM)
 * -> %g0, condition codes only.  NOTE(review): this variant (taking a CTX
 * argument) is from a different JIT than the *prog++ variant above —
 * presumably the eBPF JIT; emit() is expected to record the insn in CTX. */
#define emit_cmpi(R1, IMM, CTX) \
emit(SUBCC | IMMED | RS1(R1) | S13(IMM) | RD(G0), CTX)
/* Bit-test-with-immediate via emit(): ANDCC R1 & simm13(IMM) -> %g0,
 * condition codes only; CTX receives the encoded instruction. */
#define emit_btsti(R1, IMM, CTX) \
emit(ANDCC | IMMED | RS1(R1) | S13(IMM) | RD(G0), CTX)
append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
#define BPF_ALU64_IMM_OFF(OP, DST, IMM, OFF) \
.imm = IMM })
/* 64-bit ALU op with immediate operand: convenience wrapper that forwards
 * to the _OFF variant with a zero offset field. */
#define BPF_ALU64_IMM(OP, DST, IMM) \
BPF_ALU64_IMM_OFF(OP, DST, IMM, 0)
#define BPF_ALU32_IMM_OFF(OP, DST, IMM, OFF) \
.imm = IMM })
/* 32-bit ALU op with immediate operand: convenience wrapper that forwards
 * to the _OFF variant with a zero offset field. */
#define BPF_ALU32_IMM(OP, DST, IMM) \
BPF_ALU32_IMM_OFF(OP, DST, IMM, 0)
#define BPF_MOV64_IMM(DST, IMM) \
.imm = IMM })
#define BPF_MOV32_IMM(DST, IMM) \
.imm = IMM })
/* Load a 64-bit immediate into DST: wrapper around the _RAW variant with
 * source register 0 (plain immediate, no special src encoding). */
#define BPF_LD_IMM64(DST, IMM) \
BPF_LD_IMM64_RAW(DST, 0, IMM)
#define BPF_LD_IMM64_RAW(DST, SRC, IMM) \
.imm = (__u32) (IMM) }), \
.imm = ((__u64) (IMM)) >> 32 })
#define BPF_MOV64_RAW(TYPE, DST, SRC, IMM) \
.imm = IMM })
#define BPF_MOV32_RAW(TYPE, DST, SRC, IMM) \
.imm = IMM })
#define BPF_LD_ABS(SIZE, IMM) \
.imm = IMM })
#define BPF_LD_IND(SIZE, SRC, IMM) \
.imm = IMM })
#define BPF_ST_MEM(SIZE, DST, OFF, IMM) \
.imm = IMM })
#define BPF_JMP_IMM(OP, DST, IMM, OFF) \
.imm = IMM })
#define BPF_JMP32_IMM(OP, DST, IMM, OFF) \
.imm = IMM })
#define BPF_JMP32_A(IMM) \
.imm = IMM })
#define BPF_CALL_KFUNC(OFF, IMM) \
.imm = IMM })
#define BPF_RAW_INSN(CODE, DST, SRC, OFF, IMM) \
.imm = IMM })
/* Shorthand expanding to __NESTED_SYNC(IMM).  NOTE(review): __NESTED_SYNC
 * and the IMM token it receives are defined elsewhere — semantics cannot be
 * confirmed from this fragment. */
#define NESTED_SYNC_IMM __NESTED_SYNC(IMM)
INSN_3(LD, IMM, DW)
DST = DST OP IMM; \
DST = (u32) DST OP (u32) IMM; \
DST = DST OP IMM; \
DST = (u32) DST OP (u32) IMM; \
DST = (u32) IMM;
DST = IMM;
DST = (u64) (u32) (((s32) DST) >> IMM);
(*(s64 *) &DST) >>= IMM;
div64_u64_rem(DST, IMM, &AX);
AX = div64_s64(DST, IMM);
DST = DST - AX * IMM;
DST = do_div(AX, (u32) IMM);
AX = do_div(AX, abs_s32((s32)IMM));
DST = div64_u64(DST, IMM);
DST = div64_s64(DST, IMM);
do_div(AX, (u32) IMM);
do_div(AX, abs_s32((s32)IMM));
if (((s32)DST < 0) == ((s32)IMM < 0))
switch (IMM) {
switch (IMM) {
switch (IMM) {
if ((SIGN##64) DST CMP_OP (SIGN##64) IMM) { \
if ((SIGN##32) DST CMP_OP (SIGN##32) IMM) { \
*(SIZE *)(unsigned long) (DST + insn->off) = IMM; \
switch (IMM) {
#define BPF_LD_ABS(SIZE, IMM) \
.imm = IMM })
#define BPF_ST_MEM(SIZE, DST, OFF, IMM) \
.imm = IMM })
#define BPF_JMP_IMM(OP, DST, IMM, OFF) \
.imm = IMM })
#define BPF_JMP32_IMM(OP, DST, IMM, OFF) \
.imm = IMM })
#define BPF_RAW_INSN(CODE, DST, SRC, OFF, IMM) \
.imm = IMM })
#define BPF_ALU64_IMM(OP, DST, IMM) \
.imm = IMM })
#define BPF_ALU32_IMM(OP, DST, IMM) \
.imm = IMM })
#define BPF_MOV64_IMM(DST, IMM) \
.imm = IMM })
#define BPF_MOV32_IMM(DST, IMM) \
.imm = IMM })
/* Load a 64-bit immediate into DST: wrapper around the _RAW variant with
 * source register 0 (plain immediate, no special src encoding). */
#define BPF_LD_IMM64(DST, IMM) \
BPF_LD_IMM64_RAW(DST, 0, IMM)
#define BPF_LD_IMM64_RAW(DST, SRC, IMM) \
.imm = (__u32) (IMM) }), \
.imm = ((__u64) (IMM)) >> 32 })
.imm = IMM })
#define BPF_MOV32_IMM(DST, IMM) \
.imm = IMM })
#define BPF_MOV64_RAW(TYPE, DST, SRC, IMM) \
.imm = IMM })
#define BPF_MOV32_RAW(TYPE, DST, SRC, IMM) \
.imm = IMM })
#define BPF_LD_ABS(SIZE, IMM) \
.imm = IMM })
#define BPF_LD_IND(SIZE, SRC, IMM) \
.imm = IMM })
#define BPF_ST_MEM(SIZE, DST, OFF, IMM) \
.imm = IMM })
#define BPF_JMP_IMM(OP, DST, IMM, OFF) \
.imm = IMM })
#define BPF_JMP32_IMM(OP, DST, IMM, OFF) \
.imm = IMM })
#define BPF_JMP32_A(IMM) \
.imm = IMM })
#define BPF_RAW_INSN(CODE, DST, SRC, OFF, IMM) \
.imm = IMM })
/* Load a 64-bit immediate into DST: wrapper around the _RAW variant with
 * source register 0 (plain immediate, no special src encoding). */
#define BPF_LD_IMM64(DST, IMM) \
BPF_LD_IMM64_RAW(DST, 0, IMM)
#define BPF_LD_IMM64_RAW(DST, SRC, IMM) \
.imm = (__u32) (IMM) }), \
.imm = ((__u64) (IMM)) >> 32 })
#define BPF_ALU64_IMM(OP, DST, IMM) \
.imm = IMM })
#define BPF_ALU32_IMM(OP, DST, IMM) \
.imm = IMM })
#define BPF_MOV64_IMM(DST, IMM) \
#define DEFINE_SIMPLE_JUMP_TABLE_PROG(NAME, SRC_REG, OFF, IMM, OUTCOME) \
: __imm_insn(gotox_r0, BPF_RAW_INSN(BPF_JMP | BPF_JA | BPF_X, BPF_REG_0, (SRC_REG), (OFF) , (IMM))) \