version 1.31.2.5, 2019/01/18 08:50:13 |
version 1.32, 2018/04/02 20:54:47 |
|
|
*/ |
*/ |
|
|
#include "opt_ddb.h" |
#include "opt_ddb.h" |
#include "opt_kasan.h" |
|
|
|
#define ALIGN_TEXT .align 16,0x90 |
#define ALIGN_TEXT .align 16,0x90 |
|
|
|
|
|
|
.text |
.text |
|
|
/*
 * int splraise(int s);
 */
|
ENTRY(splraise)

	movl	CPUVAR(ILEVEL),%eax	/* %eax = old IPL (the return value) */

	cmpl	%edi,%eax

	cmoval	%eax,%edi		/* keep the higher of (old, requested) */

	movl	%edi,CPUVAR(ILEVEL)	/* splraise never lowers the IPL */

	ret

END(splraise)
|
|
|
/*
 * NOTE(review): merge-conflict residue — nearly every line in this region
 * appears twice, and literal "Line NNN ..." markers have replaced the real
 * IDTVEC(softintr)/IDTVEC_END(softintr) lines together with much of the
 * handler body.  The duplicated comment openers also leave an unterminated
 * comment.  This cannot assemble as-is; restore it from revision history.
 * Only this note is added — the residue below is untouched.
 */
#ifndef XEN
#ifndef XEN
/*
/*
 * Xsoftintr()
 * Xsoftintr()
Line 117 IDTVEC(softintr)

Line 105 IDTVEC(softintr)

	movq	L_PCB(%r15),%rcx
	movq	L_PCB(%r15),%rcx
	movq	%rdi,CPUVAR(CURLWP)
	movq	%rdi,CPUVAR(CURLWP)


#ifdef KASAN

	/* clear the new stack */

	pushq	%rax

	pushq	%rdx

	pushq	%rcx

	callq	_C_LABEL(kasan_softint)

	popq	%rcx

	popq	%rdx

	popq	%rax

#endif



	/* save old context */
	/* save old context */
	movq	%rsp,PCB_RSP(%rcx)
	movq	%rsp,PCB_RSP(%rcx)
	movq	%rbp,PCB_RBP(%rcx)
	movq	%rbp,PCB_RBP(%rcx)
Line 160 IDTVEC_END(softintr)

Line 137 IDTVEC_END(softintr)

|
/*
 * void softintr_ret(void);
 *
 * Trampoline returned to by cpu_switchto() when an interrupt handler
 * blocks.  Re-enters the Xspllower()/Xdoreti() processing loop.
 *
 *	%rax		prevlwp from cpu_switchto()
 *	%r13		resume address inside Xspllower/Xdoreti
 */
ENTRY(softintr_ret)
	incl	CPUVAR(MTX_COUNT)	/* re-adjust after mi_switch */
	movl	$0,L_CTXSWTCH(%rax)	/* %rax from cpu_switchto */
	cli				/* pending-interrupt scan runs with interrupts off */
	jmp	*%r13			/* back to Xspllower/Xdoreti loop */
END(softintr_ret)
|
/*
 * void softint_trigger(uintptr_t machdep);
 *
 * Software interrupt registration: mark the given soft interrupt
 * pending on the current CPU.
 */
ENTRY(softint_trigger)
	orl	%edi,CPUVAR(IPENDING)	/* atomic on local cpu */
	ret
END(softint_trigger)
|
|
|
|
/*
 * Xrecurse_preempt()
 *
 * Handles preemption interrupts via Xspllower().
 */
IDTVEC(recurse_preempt)
	movl	$IPL_PREEMPT,CPUVAR(ILEVEL)
	sti
	xorq	%rdi,%rdi		/* NULL argument to kpreempt() */
	call	_C_LABEL(kpreempt)
	cli				/* loop re-entry requires interrupts off */
	jmp	*%r13			/* back to Xspllower */
IDTVEC_END(recurse_preempt)
|
|
/*
 * Xresume_preempt()
 *
 * Handles preemption interrupts via Xdoreti().
 */
IDTVEC(resume_preempt)
	movl	$IPL_PREEMPT,CPUVAR(ILEVEL)
	sti
	testq	$SEL_RPL,TF_CS(%rsp)	/* trapped in kernel or user mode? */
	jnz	1f
	movq	TF_RIP(%rsp),%rdi	/* interrupted PC, for kpreempt() */
	call	_C_LABEL(kpreempt)	/* from kernel */
	cli
	jmp	*%r13			/* back to Xdoreti */
1:
	call	_C_LABEL(preempt)	/* from user */
	cli
	jmp	*%r13			/* back to Xdoreti */
IDTVEC_END(resume_preempt)
|
|
|
/*
 * NOTE(review): this is a second, duplicate definition of splraise — the
 * same symbol is defined earlier in this file (merge artifact).  One of
 * the two copies must be removed before the file can assemble.
 *
 * int splraise(int s);
 */

ENTRY(splraise)

	movl	CPUVAR(ILEVEL),%eax	/* %eax = old IPL (the return value) */

	cmpl	%edi,%eax

	cmoval	%eax,%edi		/* keep the higher of (old, requested) */

	movl	%edi,CPUVAR(ILEVEL)	/* splraise never lowers the IPL */

	ret

END(splraise)
|
|
/*
 * NOTE(review): merge-conflict residue — the body of spllower() is missing
 * and every surviving line (comment opener, .align, END/LABEL markers)
 * appears twice; the comment below is also unterminated.  Restore this
 * function from revision history.  Only this note is added.
 */
/*
/*
 * void spllower(int s);
 * void spllower(int s);


	.align	16
	.align	16
END(spllower)
END(spllower)
LABEL(spllower_end)
LABEL(spllower_end)

#endif /* !XEN */

|
/*
 * NOTE(review): merge-conflict residue — the body of cx8_spllower() and
 * its hotpatch region are missing; literal "Line NNN ..." markers replaced
 * the real LABEL(cx8_spllower_patch) line and the surviving lines are
 * duplicated.  Restore from revision history.  Only this note is added.
 */
/*
/*
 * void cx8_spllower(int s);
 * void cx8_spllower(int s);
Line 279 LABEL(cx8_spllower_patch)

Line 268 LABEL(cx8_spllower_patch)

END(cx8_spllower_patch)
END(cx8_spllower_patch)
END(cx8_spllower)
END(cx8_spllower)
LABEL(cx8_spllower_end)
LABEL(cx8_spllower_end)
#endif /* !XEN */

|
|
/*
 * void Xspllower(int s);
 *
 * Process pending interrupts after lowering the IPL.
 *
 * Important registers:
 *	%ebx - cpl (saved across the recurse jumps)
 *	%r13 - address to resume loop at
 */
IDTVEC(spllower)
	pushq	%rbx			/* callee-saved regs used as loop state */
	pushq	%r13
	pushq	%r12
	movl	%edi,%ebx
	leaq	1f(%rip),%r13		/* address to resume loop at */
1:	movl	%ebx,%eax		/* get cpl */
#if !defined(XEN)
	movl	CPUVAR(IUNMASK)(,%rax,4),%eax
	CLI(si)
	andl	CPUVAR(IPENDING),%eax	/* any non-masked bits left? */
	jz	2f
	bsrl	%eax,%eax		/* highest pending source first */
	btrl	%eax,CPUVAR(IPENDING)
	movq	CPUVAR(ISOURCES)(,%rax,8),%rax
	jmp	*IS_RECURSE(%rax)	/* returns to 1b via %r13 */
#endif
2:
#if defined(XEN)
	movl	CPUVAR(XUNMASK)(,%rax,4),%eax
	CLI(si)
	andl	CPUVAR(XPENDING),%eax	/* any non-masked bits left? */
	jz	3f
	bsrl	%eax,%eax
	btrl	%eax,CPUVAR(XPENDING)
	movq	CPUVAR(XSOURCES)(,%rax,8),%rax
	jmp	*IS_RECURSE(%rax)	/* returns to 1b via %r13 */
#endif
3:
	movl	%ebx,CPUVAR(ILEVEL)	/* nothing pending: commit new IPL */
	STI(si)
	popq	%r12
	popq	%r13
	popq	%rbx
	ret
IDTVEC_END(spllower)
|
|
/* |
/* |
* void Xdoreti(void); |
* void Xdoreti(void); |
|
|
decl CPUVAR(IDEPTH) |
decl CPUVAR(IDEPTH) |
leaq 1f(%rip),%r13 |
leaq 1f(%rip),%r13 |
1: movl %ebx,%eax |
1: movl %ebx,%eax |
#if !defined(XEN) |
|
movl CPUVAR(IUNMASK)(,%rax,4),%eax |
movl CPUVAR(IUNMASK)(,%rax,4),%eax |
CLI(si) |
CLI(si) |
andl CPUVAR(IPENDING),%eax |
andl CPUVAR(IPENDING),%eax |
|
|
btrl %eax,CPUVAR(IPENDING) |
btrl %eax,CPUVAR(IPENDING) |
movq CPUVAR(ISOURCES)(,%rax,8),%rax |
movq CPUVAR(ISOURCES)(,%rax,8),%rax |
jmp *IS_RESUME(%rax) |
jmp *IS_RESUME(%rax) |
#endif |
2: /* Check for ASTs on exit to user mode. */ |
2: |
|
#if defined(XEN) |
|
movl CPUVAR(XUNMASK)(,%rax,4),%eax |
|
CLI(si) |
|
andl CPUVAR(XPENDING),%eax |
|
jz 3f |
|
bsrl %eax,%eax /* slow, but not worth optimizing */ |
|
btrl %eax,CPUVAR(XPENDING) |
|
movq CPUVAR(XSOURCES)(,%rax,8),%rax |
|
jmp *IS_RESUME(%rax) |
|
#endif |
|
3: /* Check for ASTs on exit to user mode. */ |
|
movl %ebx,CPUVAR(ILEVEL) |
movl %ebx,CPUVAR(ILEVEL) |
5: |
5: |
testb $SEL_RPL,TF_CS(%rsp) |
testb $SEL_RPL,TF_CS(%rsp) |