x86/copy_user_64: Remove .fixup usage
Place the anonymous .fixup code at the tail of the regular functions.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Reviewed-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211110101325.068505810@infradead.org
commit acba44d243
parent c6dbd3e5e6
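In rough outline, the change takes exception-handler stubs that used to be emitted into the separate .fixup section and places them at the tail of the function that owns them, so each exception-table entry points at a label inside that function. Below is a minimal before/after sketch of the pattern (illustrative only, not taken from the patch; the .Lfixup label name is hypothetical):

	/* before: the recovery code lives in the out-of-line .fixup section */
1:	movb (%rsi),%al			/* user access that may fault */
	movb %al,(%rdi)
	xorl %eax,%eax
	RET

	.section .fixup,"ax"
2:	addl %ecx,%edx			/* fold the remaining count back into %edx */
	jmp .Lcopy_user_handle_tail
	.previous

	_ASM_EXTABLE_CPY(1b, 2b)	/* a fault at 1: jumps to 2: in .fixup */

	/* after: the same recovery code sits at the end of the function itself */
1:	movb (%rsi),%al			/* user access that may fault */
	movb %al,(%rdi)
	xorl %eax,%eax
	RET

.Lfixup:				/* hypothetical name; the patch keeps labels such as 12: or .Lcopy_user_handle_align */
	addl %ecx,%edx			/* fold the remaining count back into %edx */
	jmp .Lcopy_user_handle_tail

	_ASM_EXTABLE_CPY(1b, .Lfixup)	/* a fault at 1: now jumps to an in-function label */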
arch/x86/lib/copy_user_64.S
@@ -32,14 +32,10 @@
 	decl %ecx
 	jnz 100b
 102:
-	.section .fixup,"ax"
-103:	addl %ecx,%edx			/* ecx is zerorest also */
-	jmp .Lcopy_user_handle_tail
-	.previous
 
-	_ASM_EXTABLE_CPY(100b, 103b)
-	_ASM_EXTABLE_CPY(101b, 103b)
-	.endm
+	_ASM_EXTABLE_CPY(100b, .Lcopy_user_handle_align)
+	_ASM_EXTABLE_CPY(101b, .Lcopy_user_handle_align)
+	.endm
 
 /*
  * copy_user_generic_unrolled - memory copy with exception handling.
@@ -107,7 +103,6 @@ SYM_FUNC_START(copy_user_generic_unrolled)
 	ASM_CLAC
 	RET
 
-	.section .fixup,"ax"
 30:	shll $6,%ecx
 	addl %ecx,%edx
 	jmp 60f
@@ -115,7 +110,6 @@ SYM_FUNC_START(copy_user_generic_unrolled)
 	jmp 60f
 50:	movl %ecx,%edx
 60:	jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, 30b)
 	_ASM_EXTABLE_CPY(2b, 30b)
@@ -166,20 +160,16 @@ SYM_FUNC_START(copy_user_generic_string)
 	movl %edx,%ecx
 	shrl $3,%ecx
 	andl $7,%edx
-1:	rep
-	movsq
+1:	rep movsq
 2:	movl %edx,%ecx
-3:	rep
-	movsb
+3:	rep movsb
 	xorl %eax,%eax
 	ASM_CLAC
 	RET
 
-	.section .fixup,"ax"
 11:	leal (%rdx,%rcx,8),%ecx
 12:	movl %ecx,%edx		/* ecx is zerorest also */
 	jmp .Lcopy_user_handle_tail
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, 11b)
 	_ASM_EXTABLE_CPY(3b, 12b)
@@ -203,16 +193,13 @@ SYM_FUNC_START(copy_user_enhanced_fast_string)
 	cmpl $64,%edx
 	jb .L_copy_short_string	/* less then 64 bytes, avoid the costly 'rep' */
 	movl %edx,%ecx
-1:	rep
-	movsb
+1:	rep movsb
 	xorl %eax,%eax
 	ASM_CLAC
 	RET
 
-	.section .fixup,"ax"
 12:	movl %ecx,%edx		/* ecx is zerorest also */
 	jmp .Lcopy_user_handle_tail
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, 12b)
 SYM_FUNC_END(copy_user_enhanced_fast_string)
@@ -240,6 +227,11 @@ SYM_CODE_START_LOCAL(.Lcopy_user_handle_tail)
 	RET
 
 	_ASM_EXTABLE_CPY(1b, 2b)
+
+.Lcopy_user_handle_align:
+	addl %ecx,%edx			/* ecx is zerorest also */
+	jmp .Lcopy_user_handle_tail
+
 SYM_CODE_END(.Lcopy_user_handle_tail)
 
 /*
@@ -350,7 +342,6 @@ SYM_FUNC_START(__copy_user_nocache)
 	sfence
 	RET
 
-	.section .fixup,"ax"
 .L_fixup_4x8b_copy:
 	shll $6,%ecx
 	addl %ecx,%edx
@@ -366,7 +357,6 @@ SYM_FUNC_START(__copy_user_nocache)
 .L_fixup_handle_tail:
 	sfence
 	jmp .Lcopy_user_handle_tail
-	.previous
 
 	_ASM_EXTABLE_CPY(1b, .L_fixup_4x8b_copy)
 	_ASM_EXTABLE_CPY(2b, .L_fixup_4x8b_copy)