diff --git a/arch/x86/Kconfig b/arch/x86/Kconfig
index 7101ac64bb20..595c06b32b3a 100644
--- a/arch/x86/Kconfig
+++ b/arch/x86/Kconfig
@@ -215,6 +215,7 @@ config X86
 	select HAVE_FUNCTION_ARG_ACCESS_API
 	select HAVE_STACKPROTECTOR		if CC_HAS_SANE_STACKPROTECTOR
 	select HAVE_STACK_VALIDATION		if X86_64
+	select HAVE_STATIC_CALL
 	select HAVE_RSEQ
 	select HAVE_SYSCALL_TRACEPOINTS
 	select HAVE_UNSTABLE_SCHED_CLOCK
diff --git a/arch/x86/include/asm/static_call.h b/arch/x86/include/asm/static_call.h
new file mode 100644
index 000000000000..07aa8791cbfe
--- /dev/null
+++ b/arch/x86/include/asm/static_call.h
@@ -0,0 +1,23 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef _ASM_STATIC_CALL_H
+#define _ASM_STATIC_CALL_H
+
+#include <asm/text-patching.h>
+
+/*
+ * For CONFIG_HAVE_STATIC_CALL, this is a permanent trampoline which
+ * does a direct jump to the function.  The direct jump gets patched by
+ * static_call_update().
+ */
+#define ARCH_DEFINE_STATIC_CALL_TRAMP(name, func)			\
+	asm(".pushsection .text, \"ax\"				\n"	\
+	    ".align 4						\n"	\
+	    ".globl " STATIC_CALL_TRAMP_STR(name) "		\n"	\
+	    STATIC_CALL_TRAMP_STR(name) ":			\n"	\
+	    "	.byte 0xe9 # jmp.d32				\n"	\
+	    "	.long " #func " - (. + 4)			\n"	\
+	    ".type " STATIC_CALL_TRAMP_STR(name) ", @function	\n"	\
+	    ".size " STATIC_CALL_TRAMP_STR(name) ", . - " STATIC_CALL_TRAMP_STR(name) " \n" \
+	    ".popsection					\n")
+
+#endif /* _ASM_STATIC_CALL_H */
diff --git a/arch/x86/kernel/Makefile b/arch/x86/kernel/Makefile
index e77261db2391..de09af019e23 100644
--- a/arch/x86/kernel/Makefile
+++ b/arch/x86/kernel/Makefile
@@ -68,6 +68,7 @@ obj-y			+= tsc.o tsc_msr.o io_delay.o rtc.o
 obj-y			+= pci-iommu_table.o
 obj-y			+= resource.o
 obj-y			+= irqflags.o
+obj-y			+= static_call.o
 
 obj-y			+= process.o
 obj-y			+= fpu/
diff --git a/arch/x86/kernel/static_call.c b/arch/x86/kernel/static_call.c
new file mode 100644
index 000000000000..0565825970af
--- /dev/null
+++ b/arch/x86/kernel/static_call.c
@@ -0,0 +1,31 @@
+// SPDX-License-Identifier: GPL-2.0
+#include <linux/static_call.h>
+#include <linux/memory.h>
+#include <linux/bug.h>
+#include <asm/text-patching.h>
+
+static void __static_call_transform(void *insn, u8 opcode, void *func)
+{
+	const void *code = text_gen_insn(opcode, insn, func);
+
+	if (WARN_ONCE(*(u8 *)insn != opcode,
+		      "unexpected static call insn opcode 0x%x at %pS\n",
+		      opcode, insn))
+		return;
+
+	if (memcmp(insn, code, CALL_INSN_SIZE) == 0)
+		return;
+
+	text_poke_bp(insn, code, CALL_INSN_SIZE, NULL);
+}
+
+void arch_static_call_transform(void *site, void *tramp, void *func)
+{
+	mutex_lock(&text_mutex);
+
+	if (tramp)
+		__static_call_transform(tramp, JMP32_INSN_OPCODE, func);
+
+	mutex_unlock(&text_mutex);
+}
+EXPORT_SYMBOL_GPL(arch_static_call_transform);
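
For context, a minimal usage sketch of the call site this trampoline backs, assuming the generic static_call API from the rest of the series (DEFINE_STATIC_CALL(), static_call() and static_call_update() in <linux/static_call.h>). The my_call key, my_func()/my_fast_func() targets and the module wrapper below are illustrative names only, not part of this patch:

/* Illustrative sketch only -- not part of this patch. */
#include <linux/module.h>
#include <linux/printk.h>
#include <linux/static_call.h>

static int my_func(int x)		/* hypothetical default target */
{
	return x + 1;
}

static int my_fast_func(int x)		/* hypothetical replacement target */
{
	return x + 2;
}

/*
 * On HAVE_STATIC_CALL configs, DEFINE_STATIC_CALL() expands
 * ARCH_DEFINE_STATIC_CALL_TRAMP() above, emitting the
 * STATIC_CALL_TRAMP(my_call) trampoline: a 5-byte "jmp.d32 my_func".
 */
DEFINE_STATIC_CALL(my_call, my_func);

static int __init my_call_demo_init(void)
{
	int v;

	/* Calls my_func() through the trampoline's direct jump. */
	v = static_call(my_call)(1);
	pr_info("static call returned %d\n", v);

	/*
	 * Retargets the trampoline at my_fast_func();
	 * arch_static_call_transform() rewrites the jmp via text_poke_bp().
	 */
	static_call_update(my_call, my_fast_func);

	v = static_call(my_call)(1);
	pr_info("after static_call_update(): %d\n", v);

	return 0;
}
module_init(my_call_demo_init);

MODULE_LICENSE("GPL");

The point of the trampoline being a plain jmp.d32 is that the call site pays the cost of a direct call plus a direct jump rather than an indirect branch through a function pointer, while still being retargetable at runtime by patching the jump's 32-bit displacement.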