fpsr
/*
 * Fragment (enclosing function not visible in this chunk).
 * Reads the PA-RISC FP status register: "fstd %fr0,0(rX)" stores the
 * 64-bit status word held in fr0 to memory at &fpsr.
 */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
/*
 * Bits 41-42 appear to be the hardware rounding-mode field; map[]
 * presumably translates the hardware encoding to the API constant —
 * TODO(review): confirm against the PA-RISC 2.0 FPSR layout.
 */
return map[(fpsr >> 41) & 0x03];
/* Fragment: read FPSR (fr0) into a local via a store-to-memory. */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
/*
 * Returns a 5-bit field at bits 32-36 — presumably the five FP
 * exception-enable bits — TODO(review): confirm against the PA-RISC
 * FPSR bit layout.
 */
return ((fpsr >> 32) & 0x1f);
/* Fragment: read FPSR (fr0) into a local via a store-to-memory. */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
/*
 * Returns the 2-bit field at bits 41-42 raw (unlike the map[]-based
 * variant above) — presumably the rounding-mode field; verify both
 * callers expect the same encoding.
 */
return ((fpsr >> 41) & 0x3);
/* Fragment: read FPSR (fr0) into a local via a store-to-memory. */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
/*
 * Returns the 5-bit field at bits 59-63 — presumably the sticky
 * exception-flag bits — TODO(review): confirm against the PA-RISC
 * FPSR bit layout.
 */
return ((fpsr >> 59) & 0x1f);
/*
 * Fragment: read-modify-write of the FP status register.  "fstd"
 * stores fr0 to memory; "fldd" loads the modified word back into fr0.
 */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m"(fpsr) : "r"(&fpsr));
old = (fpsr >> 32) & 0x1f;	/* previous value of the 5-bit field at 32-36 */
/*
 * NOTE(review): the AND mask 0xffffffe000000000 clears bits 32-36 AND
 * the entire low 32 bits before installing the new mask — presumably
 * intentional (low word may hold exception detail that must be
 * discarded on write-back) — confirm against the architecture manual.
 */
fpsr = (fpsr & 0xffffffe000000000LL) | ((u_int64_t)(mask & 0x1f) << 32);
__asm__ volatile("fldd 0(%0),%%fr0" : : "r"(&fpsr), "m"(fpsr));
/*
 * Fragment: read-modify-write of the rounding-mode field (bits 41-42).
 */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
old = (fpsr >> 41) & 0x03;	/* previous rounding mode (presumed) */
/*
 * NOTE(review): the right-hand operand of this OR is missing from this
 * chunk — the expression below is truncated mid-statement; the full
 * statement presumably ORs in the new rounding-mode bits at bit 41.
 */
fpsr = (fpsr & 0xfffff9ff00000000LL) |
/* Write the modified status word back into fr0. */
__asm__ volatile("fldd 0(%0),%%fr0" : : "r"(&fpsr), "m"(fpsr));
/*
 * Fragment: read-modify-write of the 5-bit field at bits 59-63 —
 * presumably the sticky exception flags.
 */
u_int64_t fpsr;
__asm__ volatile("fstd %%fr0,0(%1)" : "=m" (fpsr) : "r" (&fpsr));
old = (fpsr >> 59) & 0x1f;	/* previous flag bits (presumed) */
/*
 * NOTE(review): the AND mask also clears the low 32 bits, as in the
 * mask-setting variant above — confirm this is intentional.
 */
fpsr = (fpsr & 0x07ffffff00000000LL) | ((u_int64_t)(mask & 0x1f) << 59);
__asm__ volatile("fldd 0(%0),%%fr0" : : "r"(&fpsr), "m"(fpsr));
/*
 * Fragments from several fenv-style routines (enclosing functions not
 * visible in this chunk).  Only two operations appear, repeated:
 *   fstd %fr0,0(rX)  — store the FP status register (fr0) to u.fpsr
 *                      so its fields can be read/modified in memory;
 *   fldd 0(rX),%fr0  — load u.fpsr back into fr0 to install changes.
 * "u" is presumably a union overlaying u_int64_t with bit fields —
 * declared outside this chunk; TODO(review): confirm its definition.
 */
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m" (u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m"(u.fpsr));
/* Declaration stranded between fragments — its function body is not
 * visible here. */
unsigned long long fpsr;
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m"(u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m"(u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m"(u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m"(u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
__asm__ volatile ("fldd 0(%0), %%fr0" : : "r" (&u.fpsr), "m" (u.fpsr));
__asm__ volatile ("fstd %%fr0, 0(%1)" : "=m" (u.fpsr) :
"r" (&u.fpsr));
/*
 * Fragments from a second FP-status variant (enclosing functions not
 * visible).  Pattern: compute a new status word from a saved original
 * (ofpsr), masking/setting the FP_RM field at bits 41-42
 * ((9 + 32) == 41), then load it into fr0 with "fldds".
 * NOTE(review): "fldds" here vs "fldd" elsewhere — presumably the
 * same load with a completer; confirm against the assembler reference.
 * Several assignments below are truncated mid-expression (dangling
 * "|") — their right-hand operands fell outside this chunk.
 */
u_int64_t ofpsr, fpsr;
fpsr = (ofpsr & ~((u_int64_t)FP_RM << (9 + 32))) |	/* truncated */
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&fpsr), "m" (fpsr));
u_int64_t ofpsr, fpsr;
fpsr = (ofpsr & ~((u_int64_t)FP_RM << (9 + 32))) |	/* truncated */
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&fpsr), "m" (fpsr));
u_int64_t ofpsr, fpsr;
/* Set the whole FP_RM field. */
fpsr = ofpsr | ((u_int64_t)FP_RM << (9 + 32));
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&fpsr), "m" (fpsr));
/* Restore the saved original status word (note: loads from &ofpsr). */
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&ofpsr), "m" (fpsr));
u_int64_t ofpsr, fpsr;
fpsr = ofpsr | ((u_int64_t)FP_RM << (9 + 32));
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&fpsr), "m" (fpsr));
u_int64_t ofpsr, fpsr;
fpsr = (ofpsr & ~((u_int64_t)FP_RM << (9 + 32))) |	/* truncated */
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&fpsr), "m" (fpsr));
u_int64_t ofpsr, fpsr;
fpsr = (ofpsr & ~((u_int64_t)FP_RM << (9 + 32))) |	/* truncated */
__asm__ volatile("fldds 0(%0), %%fr0" :: "r" (&fpsr), "m" (fpsr));
/*
 * Fragment (m88k-style): read control register fcr62 — presumably the
 * FP status register on this architecture — clear the bits selected by
 * excepts, copy in the corresponding bits from *flagp, and write back.
 */
unsigned int fpsr;
__asm__ volatile ("fldcr %0, %%fcr62" : "=r" (fpsr));
fpsr &= ~excepts;
fpsr |= *flagp & excepts;
__asm__ volatile ("fstcr %0, %%fcr62" : : "r" (fpsr));
/* Fragment: read fcr62 and return only the bits selected by excepts. */
unsigned int fpsr;
__asm__ volatile ("fldcr %0, %%fcr62" : "=r" (fpsr));
return (fpsr & excepts);
/*
 * Fragment: install a saved environment's status word with all
 * exception-flag bits cleared.  fpcr is declared but its use is not
 * visible in this chunk.
 */
unsigned int fpsr, fpcr;
fpsr = envp->__status;
fpsr &= ~FE_ALL_EXCEPT;
__asm__ volatile ("fstcr %0, %%fcr62" : : "r" (fpsr));
/*
 * Fragment: read the current status word and re-raise whatever
 * exception bits it carries via feraiseexcept().
 */
unsigned int fpsr;
__asm__ volatile ("fldcr %0, %%fcr62" : "=r" (fpsr));
feraiseexcept(fpsr);
/* Fragment: clear the excepts bits in fcr62 (read-modify-write). */
unsigned int fpsr;
__asm__ volatile ("fldcr %0, %%fcr62" : "=r" (fpsr));
fpsr &= ~excepts;
__asm__ volatile ("fstcr %0, %%fcr62" : : "r" (fpsr));
/* Fragment: store the selected exception bits from fcr62 into *flagp. */
unsigned int fpsr;
__asm__ volatile ("fldcr %0, %%fcr62" : "=r" (fpsr));
*flagp = fpsr & excepts;
/*
 * Fragments (AArch64-style FPU context switch, enclosing functions not
 * visible): WRITE_SPECIALREG restores the saved FPSR from the
 * per-thread FP state (fp->fp_sr); READ_SPECIALREG saves the live
 * FPSR back into it.  The two pairs presumably come from separate
 * load/save routines — confirm in the full file.
 */
WRITE_SPECIALREG(fpsr, fp->fp_sr);
fp->fp_sr = READ_SPECIALREG(fpsr);
WRITE_SPECIALREG(fpsr, fp->fp_sr);
fp->fp_sr = READ_SPECIALREG(fpsr);
/* Stranded declaration — its function body is not visible here. */
uint64_t fpsr;
/* Convenience alias: trapframe's fpsr lives inside tf_regs. */
#define tf_fpsr tf_regs.fpsr
/* Stranded declaration — its function body is not visible here. */
unsigned long fpsr;
/* Row from a register-name table (N() presumably pairs a printable
 * name with a register accessor — macro defined outside this chunk). */
N("fprl", fprl), N("fpit", fpit), N("fpsr", fpsr), N("fpcr", fpcr),