x86/retpoline/hyperv: Convert assembler indirect jumps

Convert all indirect jumps in hyperv inline asm code to use non-speculative
sequences when CONFIG_RETPOLINE is enabled.

Signed-off-by: David Woodhouse <dwmw@amazon.co.uk>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Arjan van de Ven <arjan@linux.intel.com>
Acked-by: Ingo Molnar <mingo@kernel.org>
Cc: gnomes@lxorguk.ukuu.org.uk
Cc: Rik van Riel <riel@redhat.com>
Cc: Andi Kleen <ak@linux.intel.com>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: thomas.lendacky@amd.com
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Jiri Kosina <jikos@kernel.org>
Cc: Andy Lutomirski <luto@amacapital.net>
Cc: Dave Hansen <dave.hansen@intel.com>
Cc: Kees Cook <keescook@google.com>
Cc: Tim Chen <tim.c.chen@linux.intel.com>
Cc: Greg Kroah-Hartman <gregkh@linux-foundation.org>
Cc: Paul Turner <pjt@google.com>
Link: https://lkml.kernel.org/r/1515707194-20531-9-git-send-email-dwmw@amazon.co.uk
This commit is contained in:
David Woodhouse 2018-01-11 21:46:30 +00:00 committed by Thomas Gleixner
parent 9351803bd8
commit e70e5892b2

View File

@@ -7,6 +7,7 @@
 #include <linux/nmi.h>
 #include <asm/io.h>
 #include <asm/hyperv.h>
+#include <asm/nospec-branch.h>
 
 /*
  * The below CPUID leaves are present if VersionAndFeatures.HypervisorPresent
@@ -186,10 +187,11 @@ static inline u64 hv_do_hypercall(u64 control, void *input, void *output)
 		return U64_MAX;
 
 	__asm__ __volatile__("mov %4, %%r8\n"
-			     "call *%5"
+			     CALL_NOSPEC
			     : "=a" (hv_status), ASM_CALL_CONSTRAINT,
			       "+c" (control), "+d" (input_address)
-			     : "r" (output_address), "m" (hv_hypercall_pg)
+			     : "r" (output_address),
+			       THUNK_TARGET(hv_hypercall_pg)
			     : "cc", "memory", "r8", "r9", "r10", "r11");
 #else
 	u32 input_address_hi = upper_32_bits(input_address);
@@ -200,13 +202,13 @@ static inline u64 hv_do_hypercall(u64 control, void *input, void *output)
 	if (!hv_hypercall_pg)
 		return U64_MAX;
 
-	__asm__ __volatile__("call *%7"
+	__asm__ __volatile__(CALL_NOSPEC
			     : "=A" (hv_status),
			       "+c" (input_address_lo), ASM_CALL_CONSTRAINT
			     : "A" (control),
			       "b" (input_address_hi),
			       "D"(output_address_hi), "S"(output_address_lo),
-			       "m" (hv_hypercall_pg)
+			       THUNK_TARGET(hv_hypercall_pg)
			     : "cc", "memory");
 #endif /* !x86_64 */
 	return hv_status;
@@ -227,10 +229,10 @@ static inline u64 hv_do_fast_hypercall8(u16 code, u64 input1)
 #ifdef CONFIG_X86_64
 	{
-		__asm__ __volatile__("call *%4"
+		__asm__ __volatile__(CALL_NOSPEC
				     : "=a" (hv_status), ASM_CALL_CONSTRAINT,
				       "+c" (control), "+d" (input1)
-				     : "m" (hv_hypercall_pg)
+				     : THUNK_TARGET(hv_hypercall_pg)
				     : "cc", "r8", "r9", "r10", "r11");
 	}
 #else
@@ -238,13 +240,13 @@ static inline u64 hv_do_fast_hypercall8(u16 code, u64 input1)
 	u32 input1_hi = upper_32_bits(input1);
 	u32 input1_lo = lower_32_bits(input1);
 
-	__asm__ __volatile__ ("call *%5"
+	__asm__ __volatile__ (CALL_NOSPEC
			      : "=A"(hv_status),
			        "+c"(input1_lo),
			        ASM_CALL_CONSTRAINT
			      : "A" (control),
			        "b" (input1_hi),
-			        "m" (hv_hypercall_pg)
+			        THUNK_TARGET(hv_hypercall_pg)
			      : "cc", "edi", "esi");
 	}
 #endif