x86: Change {JMP,CALL}_NOSPEC argument

In order to change the {JMP,CALL}_NOSPEC macros to call out-of-line
versions of the retpoline magic, we need to remove the '%' from the
argument, such that we can paste it onto symbol names.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Josh Poimboeuf <jpoimboe@redhat.com>
Link: https://lkml.kernel.org/r/20200428191700.151623523@infradead.org
This commit is contained in:
Peter Zijlstra 2020-04-22 17:16:40 +02:00
parent ca3f0d80dd
commit 34fdce6981
11 changed files with 35 additions and 35 deletions

View File

@@ -2758,7 +2758,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
pxor INC, STATE4 pxor INC, STATE4
movdqu IV, 0x30(OUTP) movdqu IV, 0x30(OUTP)
CALL_NOSPEC %r11 CALL_NOSPEC r11
movdqu 0x00(OUTP), INC movdqu 0x00(OUTP), INC
pxor INC, STATE1 pxor INC, STATE1
@@ -2803,7 +2803,7 @@ SYM_FUNC_START(aesni_xts_crypt8)
_aesni_gf128mul_x_ble() _aesni_gf128mul_x_ble()
movups IV, (IVP) movups IV, (IVP)
CALL_NOSPEC %r11 CALL_NOSPEC r11
movdqu 0x40(OUTP), INC movdqu 0x40(OUTP), INC
pxor INC, STATE1 pxor INC, STATE1

View File

@@ -1228,7 +1228,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_16way)
vpxor 14 * 16(%rax), %xmm15, %xmm14; vpxor 14 * 16(%rax), %xmm15, %xmm14;
vpxor 15 * 16(%rax), %xmm15, %xmm15; vpxor 15 * 16(%rax), %xmm15, %xmm15;
CALL_NOSPEC %r9; CALL_NOSPEC r9;
addq $(16 * 16), %rsp; addq $(16 * 16), %rsp;

View File

@@ -1339,7 +1339,7 @@ SYM_FUNC_START_LOCAL(camellia_xts_crypt_32way)
vpxor 14 * 32(%rax), %ymm15, %ymm14; vpxor 14 * 32(%rax), %ymm15, %ymm14;
vpxor 15 * 32(%rax), %ymm15, %ymm15; vpxor 15 * 32(%rax), %ymm15, %ymm15;
CALL_NOSPEC %r9; CALL_NOSPEC r9;
addq $(16 * 32), %rsp; addq $(16 * 32), %rsp;

View File

@@ -75,7 +75,7 @@
.text .text
SYM_FUNC_START(crc_pcl) SYM_FUNC_START(crc_pcl)
#define bufp %rdi #define bufp rdi
#define bufp_dw %edi #define bufp_dw %edi
#define bufp_w %di #define bufp_w %di
#define bufp_b %dil #define bufp_b %dil
@@ -105,9 +105,9 @@ SYM_FUNC_START(crc_pcl)
## 1) ALIGN: ## 1) ALIGN:
################################################################ ################################################################
mov bufp, bufptmp # rdi = *buf mov %bufp, bufptmp # rdi = *buf
neg bufp neg %bufp
and $7, bufp # calculate the unalignment amount of and $7, %bufp # calculate the unalignment amount of
# the address # the address
je proc_block # Skip if aligned je proc_block # Skip if aligned
@@ -123,13 +123,13 @@ SYM_FUNC_START(crc_pcl)
do_align: do_align:
#### Calculate CRC of unaligned bytes of the buffer (if any) #### Calculate CRC of unaligned bytes of the buffer (if any)
movq (bufptmp), tmp # load a quadward from the buffer movq (bufptmp), tmp # load a quadward from the buffer
add bufp, bufptmp # align buffer pointer for quadword add %bufp, bufptmp # align buffer pointer for quadword
# processing # processing
sub bufp, len # update buffer length sub %bufp, len # update buffer length
align_loop: align_loop:
crc32b %bl, crc_init_dw # compute crc32 of 1-byte crc32b %bl, crc_init_dw # compute crc32 of 1-byte
shr $8, tmp # get next byte shr $8, tmp # get next byte
dec bufp dec %bufp
jne align_loop jne align_loop
proc_block: proc_block:
@@ -169,10 +169,10 @@ continue_block:
xor crc2, crc2 xor crc2, crc2
## branch into array ## branch into array
lea jump_table(%rip), bufp lea jump_table(%rip), %bufp
movzxw (bufp, %rax, 2), len movzxw (%bufp, %rax, 2), len
lea crc_array(%rip), bufp lea crc_array(%rip), %bufp
lea (bufp, len, 1), bufp lea (%bufp, len, 1), %bufp
JMP_NOSPEC bufp JMP_NOSPEC bufp
################################################################ ################################################################
@@ -218,9 +218,9 @@ LABEL crc_ %i
## 4) Combine three results: ## 4) Combine three results:
################################################################ ################################################################
lea (K_table-8)(%rip), bufp # first entry is for idx 1 lea (K_table-8)(%rip), %bufp # first entry is for idx 1
shlq $3, %rax # rax *= 8 shlq $3, %rax # rax *= 8
pmovzxdq (bufp,%rax), %xmm0 # 2 consts: K1:K2 pmovzxdq (%bufp,%rax), %xmm0 # 2 consts: K1:K2
leal (%eax,%eax,2), %eax # rax *= 3 (total *24) leal (%eax,%eax,2), %eax # rax *= 3 (total *24)
subq %rax, tmp # tmp -= rax*24 subq %rax, tmp # tmp -= rax*24

View File

@@ -816,7 +816,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */ /* kernel thread */
1: movl %edi, %eax 1: movl %edi, %eax
CALL_NOSPEC %ebx CALL_NOSPEC ebx
/* /*
* A kernel thread is allowed to return here after successfully * A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve() * calling do_execve(). Exit to userspace to complete the execve()
@@ -1501,7 +1501,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception_read_cr2)
TRACE_IRQS_OFF TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer movl %esp, %eax # pt_regs pointer
CALL_NOSPEC %edi CALL_NOSPEC edi
jmp ret_from_exception jmp ret_from_exception
SYM_CODE_END(common_exception_read_cr2) SYM_CODE_END(common_exception_read_cr2)
@@ -1522,7 +1522,7 @@ SYM_CODE_START_LOCAL_NOALIGN(common_exception)
TRACE_IRQS_OFF TRACE_IRQS_OFF
movl %esp, %eax # pt_regs pointer movl %esp, %eax # pt_regs pointer
CALL_NOSPEC %edi CALL_NOSPEC edi
jmp ret_from_exception jmp ret_from_exception
SYM_CODE_END(common_exception) SYM_CODE_END(common_exception)

View File

@@ -349,7 +349,7 @@ SYM_CODE_START(ret_from_fork)
/* kernel thread */ /* kernel thread */
UNWIND_HINT_EMPTY UNWIND_HINT_EMPTY
movq %r12, %rdi movq %r12, %rdi
CALL_NOSPEC %rbx CALL_NOSPEC rbx
/* /*
* A kernel thread is allowed to return here after successfully * A kernel thread is allowed to return here after successfully
* calling do_execve(). Exit to userspace to complete the execve() * calling do_execve(). Exit to userspace to complete the execve()

View File

@@ -118,22 +118,22 @@
.macro JMP_NOSPEC reg:req .macro JMP_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE #ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *\reg), \ ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), \
__stringify(RETPOLINE_JMP \reg), X86_FEATURE_RETPOLINE, \ __stringify(RETPOLINE_JMP %\reg), X86_FEATURE_RETPOLINE,\
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *\reg), X86_FEATURE_RETPOLINE_AMD __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; jmp *%\reg), X86_FEATURE_RETPOLINE_AMD
#else #else
jmp *\reg jmp *%\reg
#endif #endif
.endm .endm
.macro CALL_NOSPEC reg:req .macro CALL_NOSPEC reg:req
#ifdef CONFIG_RETPOLINE #ifdef CONFIG_RETPOLINE
ANNOTATE_NOSPEC_ALTERNATIVE ANNOTATE_NOSPEC_ALTERNATIVE
ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *\reg), \ ALTERNATIVE_2 __stringify(ANNOTATE_RETPOLINE_SAFE; call *%\reg),\
__stringify(RETPOLINE_CALL \reg), X86_FEATURE_RETPOLINE,\ __stringify(RETPOLINE_CALL %\reg), X86_FEATURE_RETPOLINE,\
__stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *\reg), X86_FEATURE_RETPOLINE_AMD __stringify(lfence; ANNOTATE_RETPOLINE_SAFE; call *%\reg), X86_FEATURE_RETPOLINE_AMD
#else #else
call *\reg call *%\reg
#endif #endif
.endm .endm

View File

@@ -189,5 +189,5 @@ return_to_handler:
movl %eax, %ecx movl %eax, %ecx
popl %edx popl %edx
popl %eax popl %eax
JMP_NOSPEC %ecx JMP_NOSPEC ecx
#endif #endif

View File

@@ -301,7 +301,7 @@ trace:
* function tracing is enabled. * function tracing is enabled.
*/ */
movq ftrace_trace_function, %r8 movq ftrace_trace_function, %r8
CALL_NOSPEC %r8 CALL_NOSPEC r8
restore_mcount_regs restore_mcount_regs
jmp fgraph_trace jmp fgraph_trace
@@ -338,6 +338,6 @@ SYM_CODE_START(return_to_handler)
movq 8(%rsp), %rdx movq 8(%rsp), %rdx
movq (%rsp), %rax movq (%rsp), %rax
addq $24, %rsp addq $24, %rsp
JMP_NOSPEC %rdi JMP_NOSPEC rdi
SYM_CODE_END(return_to_handler) SYM_CODE_END(return_to_handler)
#endif #endif

View File

@@ -153,7 +153,7 @@ SYM_FUNC_START(csum_partial)
negl %ebx negl %ebx
lea 45f(%ebx,%ebx,2), %ebx lea 45f(%ebx,%ebx,2), %ebx
testl %esi, %esi testl %esi, %esi
JMP_NOSPEC %ebx JMP_NOSPEC ebx
# Handle 2-byte-aligned regions # Handle 2-byte-aligned regions
20: addw (%esi), %ax 20: addw (%esi), %ax
@@ -436,7 +436,7 @@ SYM_FUNC_START(csum_partial_copy_generic)
andl $-32,%edx andl $-32,%edx
lea 3f(%ebx,%ebx), %ebx lea 3f(%ebx,%ebx), %ebx
testl %esi, %esi testl %esi, %esi
JMP_NOSPEC %ebx JMP_NOSPEC ebx
1: addl $64,%esi 1: addl $64,%esi
addl $64,%edi addl $64,%edi
SRC(movb -32(%edx),%bl) ; SRC(movb (%edx),%bl) SRC(movb -32(%edx),%bl) ; SRC(movb (%edx),%bl)

View File

@@ -21,7 +21,7 @@ SYM_FUNC_START(__efi_call)
mov %r8, %r9 mov %r8, %r9
mov %rcx, %r8 mov %rcx, %r8
mov %rsi, %rcx mov %rsi, %rcx
CALL_NOSPEC %rdi CALL_NOSPEC rdi
leave leave
ret ret
SYM_FUNC_END(__efi_call) SYM_FUNC_END(__efi_call)