x86: prepare merging futex_32/64.h

Replace .quad/.long with a define and use the same asm syntax
for i386 and x86_64.

Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
This commit is contained in:
Thomas Gleixner 2008-01-30 13:30:20 +01:00 committed by Ingo Molnar
parent 0e078e2f50
commit 2f2239d1d5
2 changed files with 44 additions and 44 deletions

View File

@ -4,6 +4,8 @@
#ifdef __KERNEL__
#include <linux/futex.h>
#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/system.h>
#include <asm/processor.h>
@ -17,8 +19,8 @@
jmp 2b\n\
.previous\n\
.section __ex_table,\"a\"\n\
.align 8\n\
.long 1b,3b\n\
.align 8\n" \
_ASM_PTR "1b,3b\n \
.previous" \
: "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
: "i" (-EFAULT), "0" (oparg), "1" (0))
@ -35,8 +37,8 @@
jmp 3b\n\
.previous\n\
.section __ex_table,\"a\"\n\
.align 8\n\
.long 1b,4b,2b,4b\n\
.align 8\n" \
_ASM_PTR "1b,4b,2b,4b\n \
.previous" \
: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr), \
"=&r" (tem) \
@ -56,37 +58,33 @@ futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
return -EFAULT;
#ifndef CONFIG_X86_BSWAP
if (op == FUTEX_OP_SET && boot_cpu_data.x86 == 3)
return -ENOSYS;
#endif
pagefault_disable();
if (op == FUTEX_OP_SET)
__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
else {
#ifndef CONFIG_X86_BSWAP
if (boot_cpu_data.x86 == 3)
ret = -ENOSYS;
else
#endif
switch (op) {
case FUTEX_OP_SET:
__futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
break;
case FUTEX_OP_ADD:
__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret,
oldval, uaddr, oparg);
__futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
uaddr, oparg);
break;
case FUTEX_OP_OR:
__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr,
oparg);
__futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
break;
case FUTEX_OP_ANDN:
__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr,
~oparg);
__futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
break;
case FUTEX_OP_XOR:
__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr,
oparg);
__futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
break;
default:
ret = -ENOSYS;
}
}
pagefault_enable();
@ -120,7 +118,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
" .section __ex_table, \"a\" \n"
" .align 8 \n"
" .long 1b,3b \n"
_ASM_PTR " 1b,3b \n"
" .previous \n"
: "=a" (oldval), "+m" (*uaddr)

View File

@ -4,6 +4,8 @@
#ifdef __KERNEL__
#include <linux/futex.h>
#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/system.h>
#include <asm/uaccess.h>
@ -16,11 +18,11 @@
jmp 2b\n\
.previous\n\
.section __ex_table,\"a\"\n\
.align 8\n\
.quad 1b,3b\n\
.align 8\n" \
_ASM_PTR "1b,3b\n \
.previous" \
: "=r" (oldval), "=r" (ret), "=m" (*uaddr) \
: "i" (-EFAULT), "m" (*uaddr), "0" (oparg), "1" (0))
: "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
: "i" (-EFAULT), "0" (oparg), "1" (0))
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
__asm__ __volatile ( \
@ -34,12 +36,12 @@
jmp 3b\n\
.previous\n\
.section __ex_table,\"a\"\n\
.align 8\n\
.quad 1b,4b,2b,4b\n\
.align 8\n" \
_ASM_PTR "1b,4b,2b,4b\n \
.previous" \
: "=&a" (oldval), "=&r" (ret), "=m" (*uaddr), \
: "=&a" (oldval), "=&r" (ret), "+m" (*uaddr), \
"=&r" (tem) \
: "r" (oparg), "i" (-EFAULT), "m" (*uaddr), "1" (0))
: "r" (oparg), "i" (-EFAULT), "1" (0))
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
@ -110,10 +112,10 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
" .section __ex_table, \"a\" \n"
" .align 8 \n"
" .quad 1b,3b \n"
_ASM_PTR " 1b,3b \n"
" .previous \n"
: "=a" (oldval), "=m" (*uaddr)
: "=a" (oldval), "+m" (*uaddr)
: "i" (-EFAULT), "r" (newval), "0" (oldval)
: "memory"
);