hv: cleanup inline assembly code in vmx.c a little bit

1. We can explicitly use a specific register to avoid having one more
   register allocated.
2. If we explicitly assign the register, it is not necessary to
   add that register to the clobber list, according to the GCC manual.
3. For vmptrld, we also add "memory" to the clobber list.

Signed-off-by: Yin Fengwei <fengwei.yin@intel.com>
Reviewed-by: Edwin Zhai <edwin.zhai@intel.com>
Acked-by: Eddie Dong <eddie.dong@intel.com>
This commit is contained in:
Yin Fengwei 2018-07-21 22:53:10 +08:00 committed by lijinxia
parent 77c3917544
commit a4eebb0ef8
1 changed file with 10 additions and 11 deletions

View File

@ -55,12 +55,12 @@ static inline int exec_vmxon(void *addr)
/* Ensure previous operations successful */ /* Ensure previous operations successful */
if (status == 0) { if (status == 0) {
/* Turn VMX on */ /* Turn VMX on */
asm volatile ("mov %1, %%rax\n" asm volatile (
"vmxon (%%rax)\n" "vmxon (%%rax)\n"
"pushfq\n" "pushfq\n"
"pop %0\n":"=r" (rflags) "pop %0\n":"=r" (rflags)
: "r"(addr) : "a"(addr)
: "%rax", "cc", "memory"); : "cc", "memory");
/* if carry and zero flags are clear operation success */ /* if carry and zero flags are clear operation success */
if ((rflags & (RFLAGS_C | RFLAGS_Z)) != 0U) { if ((rflags & (RFLAGS_C | RFLAGS_Z)) != 0U) {
@ -153,12 +153,12 @@ int exec_vmclear(void *addr)
ASSERT(status == 0, "Incorrect arguments"); ASSERT(status == 0, "Incorrect arguments");
asm volatile ( asm volatile (
"mov %1, %%rax\n"
"vmclear (%%rax)\n" "vmclear (%%rax)\n"
"pushfq\n" "pushfq\n"
"pop %0\n":"=r" (rflags) "pop %0\n"
: "r"(addr) :"=r" (rflags)
: "%rax", "cc", "memory"); : "a"(addr)
: "cc", "memory");
/* if carry and zero flags are clear operation success */ /* if carry and zero flags are clear operation success */
if ((rflags & (RFLAGS_C | RFLAGS_Z)) != 0U) { if ((rflags & (RFLAGS_C | RFLAGS_Z)) != 0U) {
@ -179,13 +179,12 @@ int exec_vmptrld(void *addr)
ASSERT(status == 0, "Incorrect arguments"); ASSERT(status == 0, "Incorrect arguments");
asm volatile ( asm volatile (
"mov %1, %%rax\n"
"vmptrld (%%rax)\n" "vmptrld (%%rax)\n"
"pushfq\n" "pushfq\n"
"pop %0\n" "pop %0\n"
: "=r" (rflags) : "=r" (rflags)
: "r"(addr) : "a"(addr)
: "%rax", "cc"); : "cc", "memory");
/* if carry and zero flags are clear operation success */ /* if carry and zero flags are clear operation success */
if ((rflags & (RFLAGS_C | RFLAGS_Z)) != 0U) { if ((rflags & (RFLAGS_C | RFLAGS_Z)) != 0U) {
@ -1153,7 +1152,7 @@ static void init_host_state(__unused struct vcpu *vcpu)
(((trbase_lo >> 56U) & 0xffUL) << 24U); (((trbase_lo >> 56U) & 0xffUL) << 24U);
/* SS segment override for upper32 bits of base in ia32e mode */ /* SS segment override for upper32 bits of base in ia32e mode */
asm volatile ("mov %0,%%rax\n" asm volatile (
".byte 0x36\n" ".byte 0x36\n"
"movq 8(%%rax),%%rax\n":"=a" (trbase_hi):"0"(trbase)); "movq 8(%%rax),%%rax\n":"=a" (trbase_hi):"0"(trbase));
realtrbase = realtrbase | (trbase_hi << 32U); realtrbase = realtrbase | (trbase_hi << 32U);