Side-by-side diff between revision 1.20 (2017/10/31 18:23:29) and revision 1.21 (2017/11/03 07:14:24).
Line 125 __KERNEL_RCSID(0, "$NetBSD$"); |
|
Line 125 __KERNEL_RCSID(0, "$NetBSD$"); |
|
#define stts() HYPERVISOR_fpu_taskswitch(1) |
#define stts() HYPERVISOR_fpu_taskswitch(1) |
#endif |
#endif |
|
|
|
/*
 * Mask of valid MXCSR bits on this CPU.  Zero until probed at boot
 * by fpuinit_mxcsr_mask().
 */
static uint32_t x86_fpu_mxcsr_mask __read_mostly = 0;
|
|
static inline union savefpu * |
static inline union savefpu * |
process_fpframe(struct lwp *lwp) |
process_fpframe(struct lwp *lwp) |
{ |
{ |
Line 226 fpuinit(struct cpu_info *ci) |
|
Line 228 fpuinit(struct cpu_info *ci) |
|
} |
} |
|
|
/*
 * Get the value of MXCSR_MASK supported by the CPU, by executing FXSAVE
 * and reading back the mask field the hardware deposits in the save
 * area.  A zero mask in the save area means the CPU did not report one,
 * in which case the architecturally defined default
 * (__INITIAL_MXCSR_MASK__) is used instead.
 *
 * Runs briefly with interrupts disabled and the FPU enabled (CR0.EM and
 * CR0.TS cleared) so FXSAVE cannot trap; both are restored on the way
 * out.  Called once at boot; writes x86_fpu_mxcsr_mask.
 */
void
fpuinit_mxcsr_mask(void)
{
	/* FXSAVE requires a 16-byte-aligned save area. */
	union savefpu fpusave __aligned(16);
	u_long cr0, psl;

	/*
	 * Pre-zero the area: the mxcsr_mask check below relies on the
	 * field being 0 if the CPU does not write it.
	 */
	memset(&fpusave, 0, sizeof(fpusave));

	/* Disable interrupts, and enable FPU */
	psl = x86_read_psl();
	x86_disable_intr();

	cr0 = rcr0();
	lcr0(cr0 & ~(CR0_EM|CR0_TS));

	/* Fill in the FPU area */
	fxsave(&fpusave);

	/* Restore previous state */
	lcr0(cr0);
	x86_write_psl(psl);

	if (fpusave.sv_xmm.fx_mxcsr_mask == 0) {
		/* CPU reported no mask; fall back to the default. */
		x86_fpu_mxcsr_mask = __INITIAL_MXCSR_MASK__;
	} else {
		x86_fpu_mxcsr_mask = fpusave.sv_xmm.fx_mxcsr_mask;
	}
}
|
|
|
/* |
* This is a synchronous trap on either an x87 instruction (due to an |
* This is a synchronous trap on either an x87 instruction (due to an |
* unmasked error on the previous x87 instruction) or on an SSE/SSE2 etc |
* unmasked error on the previous x87 instruction) or on an SSE/SSE2 etc |
* instruction due to an error on the instruction itself. |
* instruction due to an error on the instruction itself. |
Line 515 fpu_save_area_clear(struct lwp *l, unsig |
|
Line 548 fpu_save_area_clear(struct lwp *l, unsig |
|
if (i386_use_fxsave) { |
if (i386_use_fxsave) { |
memset(&fpu_save->sv_xmm, 0, x86_fpu_save_size); |
memset(&fpu_save->sv_xmm, 0, x86_fpu_save_size); |
fpu_save->sv_xmm.fx_mxcsr = __INITIAL_MXCSR__; |
fpu_save->sv_xmm.fx_mxcsr = __INITIAL_MXCSR__; |
fpu_save->sv_xmm.fx_mxcsr_mask = __INITIAL_MXCSR_MASK__; |
fpu_save->sv_xmm.fx_mxcsr_mask = x86_fpu_mxcsr_mask; |
fpu_save->sv_xmm.fx_cw = x87_cw; |
fpu_save->sv_xmm.fx_cw = x87_cw; |
} else { |
} else { |
memset(&fpu_save->sv_87, 0, x86_fpu_save_size); |
memset(&fpu_save->sv_87, 0, x86_fpu_save_size); |
Line 537 fpu_save_area_reset(struct lwp *l) |
|
Line 570 fpu_save_area_reset(struct lwp *l) |
|
*/ |
*/ |
if (i386_use_fxsave) { |
if (i386_use_fxsave) { |
fpu_save->sv_xmm.fx_mxcsr = __INITIAL_MXCSR__; |
fpu_save->sv_xmm.fx_mxcsr = __INITIAL_MXCSR__; |
fpu_save->sv_xmm.fx_mxcsr_mask = __INITIAL_MXCSR_MASK__; |
fpu_save->sv_xmm.fx_mxcsr_mask = x86_fpu_mxcsr_mask; |
fpu_save->sv_xmm.fx_tw = 0; |
fpu_save->sv_xmm.fx_tw = 0; |
fpu_save->sv_xmm.fx_cw = pcb->pcb_fpu_dflt_cw; |
fpu_save->sv_xmm.fx_cw = pcb->pcb_fpu_dflt_cw; |
} else { |
} else { |
Line 576 process_write_fpregs_xmm(struct lwp *l, |
|
Line 609 process_write_fpregs_xmm(struct lwp *l, |
|
/* |
/* |
* Invalid bits in mxcsr or mxcsr_mask will cause faults. |
* Invalid bits in mxcsr or mxcsr_mask will cause faults. |
*/ |
*/ |
fpu_save->sv_xmm.fx_mxcsr_mask &= __INITIAL_MXCSR_MASK__; |
fpu_save->sv_xmm.fx_mxcsr_mask &= x86_fpu_mxcsr_mask; |
fpu_save->sv_xmm.fx_mxcsr &= fpu_save->sv_xmm.fx_mxcsr_mask; |
fpu_save->sv_xmm.fx_mxcsr &= fpu_save->sv_xmm.fx_mxcsr_mask; |
|
|
/* |
/* |