R5
wm8739_write(sd, R5, 0x000);
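/* Note on naming: in the wm8739 codec driver above, R5 is just the index of
 * the chip's register 5, while the Z85xx serial/SCC code below uses R5 for
 * write register 5 (WR5) and the BPF tests at the end use it for BPF
 * register 5. Same identifier, three unrelated meanings. */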
if ( (grp1 & TXGROUP) && (scc2->wreg[R5] & RTS) )
if ( !(scc->wreg[R5] & RTS) )
if ( !(scc->wreg[R5] & RTS) )
case PARAM_DTR: return CAST((scc->wreg[R5] & DTR) ? 1 : 0);
case PARAM_RTS: return CAST((scc->wreg[R5] & RTS) ? 1 : 0);
wr(scc,R5,Tx8|DTR|TxCRC_ENAB); /* TX 8 bits/char, TX CRC on, TX itself still disabled, DTR set */
or(scc,R5, TxENAB);
scc->wreg[R5] |= RTS;
or(scc,R5,RTS|TxENAB); /* set the RTS line and enable TX */
cl(scc,R5,RTS|TxENAB); /* drop RTS and disable TX */
or(scc,R5, TxENAB);
scc->wreg[R5] |= RTS;
or(scc,R5,RTS|TxENAB); /* raise RTS and enable TX */
cl(scc,R5,RTS|TxENAB); /* drop RTS and disable TX */
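/*
 * The wr()/or()/cl() calls above maintain a shadow copy of WR5 (the Z8530
 * transmit-control register) because the chip's write registers cannot be
 * read back. A minimal sketch of how such helpers are typically built, with
 * names matching the calls above (OutReg() and the scc_channel layout are
 * assumptions here):
 */
static inline void wr(struct scc_channel *scc, unsigned char reg,
		      unsigned char val)
{
	OutReg(scc->ctrl, reg, (scc->wreg[reg] = val));	  /* replace shadow, write chip */
}

static inline void or(struct scc_channel *scc, unsigned char reg,
		      unsigned char val)
{
	OutReg(scc->ctrl, reg, (scc->wreg[reg] |= val));  /* set bits in shadow, write chip */
}

static inline void cl(struct scc_channel *scc, unsigned char reg,
		      unsigned char val)
{
	OutReg(scc->ctrl, reg, (scc->wreg[reg] &= ~val)); /* clear bits in shadow, write chip */
}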
ASPEED_PINCTRL_PIN(R5),
SIG_EXPR_LIST_DECL_SINGLE(R5, VPIB8, VPI24, VPI_24_RSVD_DESC, R5_DESC, COND2);
SIG_EXPR_LIST_DECL_SINGLE(R5, TXD2, TXD2, R5_DESC, COND2);
PIN_DECL_2(R5, GPIOM6, VPIB8, TXD2);
FUNC_GROUP_DECL(TXD2, R5);
FUNC_GROUP_DECL(VPI24, T1, U2, P4, P3, Y1, AB2, AA1, Y2, AA2, P5, R5, T5, V3,
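/*
 * ASPEED pinctrl, read bottom-up: ball R5 is registered as a pin, given two
 * signal expressions (VPIB8, one bit of the 24-bit video-in bus selected by
 * the VPI24 function, and TXD2, the UART2 transmit line), and GPIOM6 as its
 * GPIO fallback; COND2 names the strap condition gating the mux. TXD2 gets a
 * single-pin function group, while VPI24 claims R5 together with the other
 * bus balls listed in FUNC_GROUP_DECL above.
 */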
up->curregs[R5] = TxENAB | Tx8;
write_zsreg(channel, R5, regs[R5] & ~TxENAB);
write_zsreg(channel, R5, regs[R5]);
up->curregs[R5] |= set_bits;
up->curregs[R5] &= ~clear_bits;
write_zsreg(channel, R5, up->curregs[R5]);
new_reg = (up->curregs[R5] | set_bits) & ~clear_bits;
if (new_reg != up->curregs[R5]) {
up->curregs[R5] = new_reg;
write_zsreg(channel, R5, up->curregs[R5]);
up->curregs[R5] |= TxENAB;
up->curregs[R5] &= ~TxENAB;
up->curregs[R5] &= ~SND_BRK;
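/*
 * Both shapes above (unconditional |=/&= followed by a write, and the
 * new_reg compare-before-write variant) are the same shadow-register
 * read-modify-write on WR5. A hedged sketch of the conditional form as a
 * helper; the function name is hypothetical and the channel type is assumed
 * to match whatever write_zsreg() takes in the surrounding driver:
 */
static void zilog_rmw_r5(struct uart_sunzilog_port *up,
			 struct zilog_channel __iomem *channel,
			 unsigned char set_bits, unsigned char clear_bits)
{
	unsigned char new_reg = (up->curregs[R5] | set_bits) & ~clear_bits;

	if (new_reg != up->curregs[R5]) {	/* skip the chip access if nothing changed */
		up->curregs[R5] = new_reg;
		write_zsreg(channel, R5, up->curregs[R5]);
	}
}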
uap->curregs[R5] |= DTR;
write_zsreg(uap, R5, uap->curregs[R5]);
uap->curregs[R5] &= ~DTR;
write_zsreg(uap, R5, uap->curregs[R5]);
write_zsreg(uap, R5, regs[R5] & ~TxENABLE);
write_zsreg(uap, R5, regs[R5]);
write_zsreg(uap, R5, uap->curregs[R5] | TxENABLE | RTS | DTR);
uap->curregs[R5] |= set_bits;
uap->curregs[R5] &= ~clear_bits;
write_zsreg(uap, R5, uap->curregs[R5]);
set_bits, clear_bits, uap->curregs[R5]);
new_reg = (uap->curregs[R5] | set_bits) & ~clear_bits;
if (new_reg != uap->curregs[R5]) {
uap->curregs[R5] = new_reg;
write_zsreg(uap, R5, uap->curregs[R5]);
uap->curregs[R5] = Tx8 | RTS;
uap->curregs[R5] |= DTR;
uap->curregs[R5] |= TxENABLE;	/* update the shadow first, then write it out */
write_zsreg(uap, R5, uap->curregs[R5]);
uap->curregs[R5] |= DTR;
write_zsreg(uap, R5, uap->curregs[R5]);
uap->curregs[R5] &= ~DTR;
write_zsreg(uap, R5, uap->curregs[R5]);
uap->curregs[R5] &= ~TxENABLE;
uap->curregs[R5] &= ~SND_BRK;
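/*
 * The DTR set/clear pairs above are the guts of a serial core .set_mctrl
 * hook. A sketch of the usual mapping from TIOCM_* bits to WR5 bits; the
 * function name is hypothetical, TIOCM_RTS/TIOCM_DTR come from the serial
 * core, and locking is omitted:
 */
static void zilog_like_set_mctrl(struct uart_pmac_port *uap, unsigned int mctrl)
{
	unsigned char set_bits = 0, clear_bits = 0;

	if (mctrl & TIOCM_RTS)
		set_bits |= RTS;
	else
		clear_bits |= RTS;
	if (mctrl & TIOCM_DTR)
		set_bits |= DTR;
	else
		clear_bits |= DTR;

	uap->curregs[R5] = (uap->curregs[R5] | set_bits) & ~clear_bits;
	write_zsreg(uap, R5, uap->curregs[R5]);
}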
up->curregs[R5] = TxENAB | Tx8;
up->curregs[R5] = TxENAB | Tx8;
write_zsreg(channel, R5, regs[R5] & ~TxENAB);
write_zsreg(channel, R5, regs[R5]);
up->curregs[R5] |= set_bits;
up->curregs[R5] &= ~clear_bits;
write_zsreg(channel, R5, up->curregs[R5]);
new_reg = (up->curregs[R5] | set_bits) & ~clear_bits;
if (new_reg != up->curregs[R5]) {
up->curregs[R5] = new_reg;
write_zsreg(channel, R5, up->curregs[R5]);
up->curregs[R5] |= TxENAB;
up->curregs[R5] &= ~TxENAB;
up->curregs[R5] &= ~SND_BRK;
up->curregs[R5] &= ~TxN_MASK;
up->curregs[R5] |= Tx5;
up->curregs[R5] |= Tx6;
up->curregs[R5] |= Tx7;
up->curregs[R5] |= Tx8;
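/*
 * The TxN_MASK/Tx5..Tx8 lines above come from termios handling: WR5's
 * character-size field is cleared, then exactly one width bit is set from
 * the CSIZE bits of c_cflag. Sketch of the selection (cflag is assumed to
 * hold the termios c_cflag value):
 */
up->curregs[R5] &= ~TxN_MASK;		/* drop the previous TX width */
switch (cflag & CSIZE) {
case CS5:
	up->curregs[R5] |= Tx5;
	break;
case CS6:
	up->curregs[R5] |= Tx6;
	break;
case CS7:
	up->curregs[R5] |= Tx7;
	break;
default:				/* CS8 */
	up->curregs[R5] |= Tx8;
	break;
}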
write_zsreg(zport, R5, zport->regs[R5]);
write_zsreg(zport, R5, zport->regs[R5]);
write_zsreg(zport, R5, regs[R5] & ~TxENAB);
write_zsreg(zport, R5, regs[R5]);
write_zsreg(zport_a, R5, zport_a->regs[R5]);
write_zsreg(zport, R5, zport->regs[R5]);
write_zsreg(zport, R5, zport->regs[R5]);
write_zsreg(zport, R5, zport->regs[R5]);
write_zsreg(zport, R5, zport->regs[R5]);
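/*
 * From here on R5 changes meaning once more: it is eBPF register 5 in
 * test_bpf-style instruction arrays, not a Zilog write register. The long
 * ADD/SUB chains below push a known value through every register R0-R9 and
 * then compare against the expected result (36518, per the comments in the
 * source), so a JIT that miscompiles any register mapping fails the
 * conditional jump.
 */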
BPF_ALU64_IMM(BPF_MOV, R5, R5), /* imm is R5's enum value, i.e. 5 */ \
BPF_JMP_IMM(BPF_JNE, R5, R5, 5), /* jump 5 insns ahead if R5 != 5 */ \
BPF_ALU64_IMM(BPF_MOV, R5, 5), \
BPF_JMP_IMM(BPF_JNE, R5, 5, 5), \
i += __bpf_ld_imm64(&insns[i], R5, keep);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
i += __bpf_ld_imm64(&insns[i], R5, keep);
insns[i++] = BPF_JMP_REG(BPF_JEQ, R0, R5, 1);
BPF_ALU64_IMM(BPF_MOV, R5, 5),
BPF_ALU64_IMM(BPF_ADD, R5, 20),
BPF_ALU64_IMM(BPF_SUB, R5, 10),
BPF_ALU64_REG(BPF_ADD, R0, R5),
BPF_ALU64_REG(BPF_ADD, R1, R5),
BPF_ALU64_REG(BPF_ADD, R2, R5),
BPF_ALU64_REG(BPF_ADD, R3, R5),
BPF_ALU64_REG(BPF_ADD, R4, R5),
BPF_ALU64_REG(BPF_ADD, R5, R0),
BPF_ALU64_REG(BPF_ADD, R5, R1),
BPF_ALU64_REG(BPF_ADD, R5, R2),
BPF_ALU64_REG(BPF_ADD, R5, R3),
BPF_ALU64_REG(BPF_ADD, R5, R4),
BPF_ALU64_REG(BPF_ADD, R5, R5),
BPF_ALU64_REG(BPF_ADD, R5, R6),
BPF_ALU64_REG(BPF_ADD, R5, R7),
BPF_ALU64_REG(BPF_ADD, R5, R8),
BPF_ALU64_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
BPF_ALU64_REG(BPF_ADD, R6, R5),
BPF_ALU64_REG(BPF_ADD, R7, R5),
BPF_ALU64_REG(BPF_ADD, R8, R5),
BPF_ALU64_REG(BPF_ADD, R9, R5),
BPF_ALU32_IMM(BPF_MOV, R5, 5),
BPF_ALU64_IMM(BPF_ADD, R5, 10),
BPF_ALU32_REG(BPF_ADD, R0, R5),
BPF_ALU32_REG(BPF_ADD, R1, R5),
BPF_ALU32_REG(BPF_ADD, R2, R5),
BPF_ALU32_REG(BPF_ADD, R3, R5),
BPF_ALU32_REG(BPF_ADD, R4, R5),
BPF_ALU32_REG(BPF_ADD, R5, R0),
BPF_ALU32_REG(BPF_ADD, R5, R1),
BPF_ALU32_REG(BPF_ADD, R5, R2),
BPF_ALU32_REG(BPF_ADD, R5, R3),
BPF_ALU32_REG(BPF_ADD, R5, R4),
BPF_ALU32_REG(BPF_ADD, R5, R5),
BPF_ALU32_REG(BPF_ADD, R5, R6),
BPF_ALU32_REG(BPF_ADD, R5, R7),
BPF_ALU32_REG(BPF_ADD, R5, R8),
BPF_ALU32_REG(BPF_ADD, R5, R9), /* R5 == 36518 */
BPF_JMP_IMM(BPF_JEQ, R5, 36518, 1),
BPF_ALU32_REG(BPF_ADD, R6, R5),
BPF_ALU32_REG(BPF_ADD, R7, R5),
BPF_ALU32_REG(BPF_ADD, R8, R5),
BPF_ALU32_REG(BPF_ADD, R9, R5),
BPF_ALU64_IMM(BPF_MOV, R5, 5),
BPF_ALU64_REG(BPF_SUB, R0, R5),
BPF_ALU64_REG(BPF_SUB, R1, R5),
BPF_ALU64_REG(BPF_SUB, R2, R5),
BPF_ALU64_REG(BPF_SUB, R3, R5),
BPF_ALU64_REG(BPF_SUB, R4, R5),
BPF_ALU64_REG(BPF_SUB, R5, R0),
BPF_ALU64_REG(BPF_SUB, R5, R1),
BPF_ALU64_REG(BPF_SUB, R5, R2),
BPF_ALU64_REG(BPF_SUB, R5, R3),
BPF_ALU64_REG(BPF_SUB, R5, R4),
BPF_ALU64_REG(BPF_SUB, R5, R6),
BPF_ALU64_REG(BPF_SUB, R5, R7),
BPF_ALU64_REG(BPF_SUB, R5, R8),
BPF_ALU64_REG(BPF_SUB, R5, R9),
BPF_ALU64_IMM(BPF_SUB, R5, 10),
BPF_ALU64_REG(BPF_SUB, R6, R5),
BPF_ALU64_REG(BPF_SUB, R7, R5),
BPF_ALU64_REG(BPF_SUB, R8, R5),
BPF_ALU64_REG(BPF_SUB, R9, R5),
BPF_ALU64_REG(BPF_SUB, R0, R5),
BPF_ALU64_IMM(BPF_MOV, R5, -1),
BPF_ALU64_REG(BPF_XOR, R5, R5),	/* xor with self: even -1 must become 0 */
BPF_JMP_REG(BPF_JEQ, R5, R4, 1),
BPF_ALU64_IMM(BPF_MOV, R5, 1),
BPF_ALU64_REG(BPF_SUB, R5, R5),	/* subtract from self: R5 must become 0 */
BPF_JMP_REG(BPF_JEQ, R5, R6, 1),
BPF_ALU64_IMM(BPF_MOV, R5, 5),
BPF_ALU64_REG(BPF_MUL, R0, R5),
BPF_ALU64_REG(BPF_MUL, R1, R5),
BPF_ALU64_REG(BPF_MUL, R2, R5),
BPF_MOV64_REG(R5, R4),
BPF_MOV64_REG(R6, R5),
BPF_ALU64_IMM(BPF_MOV, R5, 0),
BPF_ALU64_REG(BPF_ADD, R0, R5),
BPF_MOV64_REG(R5, R4),
BPF_MOV64_REG(R6, R5),
BPF_ALU32_IMM(BPF_MOV, R5, 0),
BPF_ALU64_REG(BPF_ADD, R0, R5),
BPF_MOV64_REG(R5, R4),
BPF_MOV64_REG(R6, R5),
BPF_LD_IMM64(R5, 0x0LL),
BPF_ALU64_REG(BPF_ADD, R0, R5),
BPF_LD_IMM64(R5, (update) | BPF_ATOMIC_POISON(width)), \
BPF_ATOMIC_OP(width, op, R10, R5, -40), \
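/*
 * The final pair loads an update value, optionally poisoned in its upper
 * 32 bits, and applies an atomic op of the given width to the stack slot at
 * R10-40: a 32-bit (BPF_W) atomic must ignore the poisoned upper half. In
 * lib/test_bpf.c the poison helper looks roughly like this (treat the exact
 * constant as an assumption):
 */
#define BPF_ATOMIC_POISON(width) \
	((width) == BPF_W ? (0xbaadf00dULL << 32) : 0)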