[PATCH] KVM: Clean up AMD SVM debug registers load and unload
By letting gcc choose the temporary register for us, we lose arch dependency and some ugliness. Conceivably gcc will also generate marginally better code.

Signed-off-by: Avi Kivity <avi@qumranet.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent fd24dc4af6
commit 5aff458e9c
@@ -1345,53 +1345,18 @@ static void kvm_reput_irq(struct kvm_vcpu *vcpu)
 
 static void save_db_regs(unsigned long *db_regs)
 {
-#ifdef __x86_64__
-	asm ("mov %%dr0, %%rax \n\t"
-	     "mov %%rax, %[dr0] \n\t"
-	     "mov %%dr1, %%rax \n\t"
-	     "mov %%rax, %[dr1] \n\t"
-	     "mov %%dr2, %%rax \n\t"
-	     "mov %%rax, %[dr2] \n\t"
-	     "mov %%dr3, %%rax \n\t"
-	     "mov %%rax, %[dr3] \n\t"
-	     : [dr0] "=m"(db_regs[0]),
-	       [dr1] "=m"(db_regs[1]),
-	       [dr2] "=m"(db_regs[2]),
-	       [dr3] "=m"(db_regs[3])
-	     : : "rax");
-#else
-	asm ("mov %%dr0, %%eax \n\t"
-	     "mov %%eax, %[dr0] \n\t"
-	     "mov %%dr1, %%eax \n\t"
-	     "mov %%eax, %[dr1] \n\t"
-	     "mov %%dr2, %%eax \n\t"
-	     "mov %%eax, %[dr2] \n\t"
-	     "mov %%dr3, %%eax \n\t"
-	     "mov %%eax, %[dr3] \n\t"
-	     : [dr0] "=m"(db_regs[0]),
-	       [dr1] "=m"(db_regs[1]),
-	       [dr2] "=m"(db_regs[2]),
-	       [dr3] "=m"(db_regs[3])
-	     : : "eax");
-#endif
+	asm volatile ("mov %%dr0, %0" : "=r"(db_regs[0]));
+	asm volatile ("mov %%dr1, %0" : "=r"(db_regs[1]));
+	asm volatile ("mov %%dr2, %0" : "=r"(db_regs[2]));
+	asm volatile ("mov %%dr3, %0" : "=r"(db_regs[3]));
 }
 
 static void load_db_regs(unsigned long *db_regs)
 {
-	asm volatile ("mov %[dr0], %%dr0 \n\t"
-		      "mov %[dr1], %%dr1 \n\t"
-		      "mov %[dr2], %%dr2 \n\t"
-		      "mov %[dr3], %%dr3 \n\t"
-		      :
-		      : [dr0] "r"(db_regs[0]),
-			[dr1] "r"(db_regs[1]),
-			[dr2] "r"(db_regs[2]),
-			[dr3] "r"(db_regs[3])
-#ifdef __x86_64__
-		      : "rax");
-#else
-		      : "eax");
-#endif
+	asm volatile ("mov %0, %%dr0" : : "r"(db_regs[0]));
+	asm volatile ("mov %0, %%dr1" : : "r"(db_regs[1]));
+	asm volatile ("mov %0, %%dr2" : : "r"(db_regs[2]));
+	asm volatile ("mov %0, %%dr3" : : "r"(db_regs[3]));
 }
 
 static int svm_vcpu_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run)
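As a side note, the technique the commit message describes, dropping the hand-picked %rax/%eax temporary and letting gcc allocate the register through an "=r" constraint, can be tried outside the kernel. The sketch below is not part of the patch; it reads the code segment selector (%cs), which, unlike %dr0-%dr3, is accessible at any privilege level, and the function names are made up for illustration.

/* Illustration only -- not from the patch.  Reads %cs from user mode
 * using the same two styles contrasted in the diff above.
 * Build on x86/x86-64 with: gcc -O2 demo.c
 */
#include <stdio.h>

/* Old style: a hard-coded temporary register forces one copy of the
 * asm per architecture and requires an explicit clobber. */
static unsigned long read_cs_explicit(void)
{
	unsigned long cs;
#ifdef __x86_64__
	asm ("mov %%cs, %%rax \n\t"
	     "mov %%rax, %[cs] \n\t"
	     : [cs] "=m"(cs) : : "rax");
#else
	asm ("mov %%cs, %%eax \n\t"
	     "mov %%eax, %[cs] \n\t"
	     : [cs] "=m"(cs) : : "eax");
#endif
	return cs;
}

/* New style: "=r" lets gcc pick any suitable register, so a single
 * version serves both 32-bit and 64-bit builds and no clobber list
 * is needed -- the same shape as the new save_db_regs(). */
static unsigned long read_cs_constraint(void)
{
	unsigned long cs;
	asm volatile ("mov %%cs, %0" : "=r"(cs));
	return cs;
}

int main(void)
{
	printf("explicit:   %#lx\n", read_cs_explicit());
	printf("constraint: %#lx\n", read_cs_constraint());
	return 0;
}

Because gcc already knows which registers are free at that point, the constraint form also avoids forcing a round trip through a fixed register, which is the likely source of the "marginally better code" remark in the commit message.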