mirror of https://github.com/torvalds/linux.git (synced 2024-11-21 19:41:42 +00:00)
locking/atomic: make atomic*_{cmp,}xchg optional
Most architectures define the atomic/atomic64 xchg and cmpxchg operations in terms of arch_xchg and arch_cmpxchg respectively. Add fallbacks for these cases and remove the trivial definitions from arch code. On some architectures the existing definitions are kept, as they are used to build other arch_atomic*() operations.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-5-mark.rutland@arm.com
This commit is contained in:
parent a7bafa7969
commit d12157efc8
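For orientation before the per-architecture hunks: the end state this patch converges on is that an architecture need only supply arch_xchg()/arch_cmpxchg() (plus whatever ordering variants it has), and the generated fallback header derives the arch_atomic*_{cmp,}xchg() family from them. A minimal sketch of the derived operations, mirroring the fallback bodies added below (the surrounding #ifndef plumbing is elided):

static __always_inline int
arch_atomic_xchg(atomic_t *v, int new)
{
        /* Ordering is inherited from arch_xchg() itself. */
        return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg

static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg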
@@ -200,16 +200,6 @@ ATOMIC_OPS(xor, xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define arch_atomic64_cmpxchg(v, old, new) \
-        (arch_cmpxchg(&((v)->counter), old, new))
-#define arch_atomic64_xchg(v, new) \
-        (arch_xchg(&((v)->counter), new))
-
-#define arch_atomic_cmpxchg(v, old, new) \
-        (arch_cmpxchg(&((v)->counter), old, new))
-#define arch_atomic_xchg(v, new) \
-        (arch_xchg(&((v)->counter), new))
-
 /**
  * arch_atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
@@ -22,30 +22,6 @@
 #include <asm/atomic-spinlock.h>
 #endif
 
-#define arch_atomic_cmpxchg(v, o, n) \
-({ \
-        arch_cmpxchg(&((v)->counter), (o), (n)); \
-})
-
-#ifdef arch_cmpxchg_relaxed
-#define arch_atomic_cmpxchg_relaxed(v, o, n) \
-({ \
-        arch_cmpxchg_relaxed(&((v)->counter), (o), (n)); \
-})
-#endif
-
-#define arch_atomic_xchg(v, n) \
-({ \
-        arch_xchg(&((v)->counter), (n)); \
-})
-
-#ifdef arch_xchg_relaxed
-#define arch_atomic_xchg_relaxed(v, n) \
-({ \
-        arch_xchg_relaxed(&((v)->counter), (n)); \
-})
-#endif
-
 /*
  * 64-bit atomics
  */
@@ -159,6 +159,7 @@ arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)
 
         return prev;
 }
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
 
 static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
 {
@@ -179,6 +180,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
 
         return prev;
 }
+#define arch_atomic64_xchg arch_atomic64_xchg
 
 /**
  * arch_atomic64_dec_if_positive - decrement by 1 if old value positive
@@ -210,6 +210,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
 
         return ret;
 }
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg
 
 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
 
@@ -240,8 +241,6 @@ ATOMIC_OPS(xor, ^=, eor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
 #ifndef CONFIG_GENERIC_ATOMIC64
 typedef struct {
         s64 counter;
@@ -142,24 +142,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
 #define arch_atomic_fetch_xor arch_atomic_fetch_xor
 
-#define arch_atomic_xchg_relaxed(v, new) \
-        arch_xchg_relaxed(&((v)->counter), (new))
-#define arch_atomic_xchg_acquire(v, new) \
-        arch_xchg_acquire(&((v)->counter), (new))
-#define arch_atomic_xchg_release(v, new) \
-        arch_xchg_release(&((v)->counter), (new))
-#define arch_atomic_xchg(v, new) \
-        arch_xchg(&((v)->counter), (new))
-
-#define arch_atomic_cmpxchg_relaxed(v, old, new) \
-        arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
-#define arch_atomic_cmpxchg_acquire(v, old, new) \
-        arch_cmpxchg_acquire(&((v)->counter), (old), (new))
-#define arch_atomic_cmpxchg_release(v, old, new) \
-        arch_cmpxchg_release(&((v)->counter), (old), (new))
-#define arch_atomic_cmpxchg(v, old, new) \
-        arch_cmpxchg(&((v)->counter), (old), (new))
-
 #define arch_atomic_andnot arch_atomic_andnot
 
 /*
@@ -209,16 +191,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
 #define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
 
-#define arch_atomic64_xchg_relaxed arch_atomic_xchg_relaxed
-#define arch_atomic64_xchg_acquire arch_atomic_xchg_acquire
-#define arch_atomic64_xchg_release arch_atomic_xchg_release
-#define arch_atomic64_xchg arch_atomic_xchg
-
-#define arch_atomic64_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
-#define arch_atomic64_cmpxchg_acquire arch_atomic_cmpxchg_acquire
-#define arch_atomic64_cmpxchg_release arch_atomic_cmpxchg_release
-#define arch_atomic64_cmpxchg arch_atomic_cmpxchg
-
 #define arch_atomic64_andnot arch_atomic64_andnot
 
 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
@@ -195,41 +195,6 @@ arch_atomic_dec_if_positive(atomic_t *v)
 }
 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
 
-#define ATOMIC_OP() \
-static __always_inline \
-int arch_atomic_xchg_relaxed(atomic_t *v, int n) \
-{ \
-        return __xchg_relaxed(n, &(v->counter), 4); \
-} \
-static __always_inline \
-int arch_atomic_cmpxchg_relaxed(atomic_t *v, int o, int n) \
-{ \
-        return __cmpxchg_relaxed(&(v->counter), o, n, 4); \
-} \
-static __always_inline \
-int arch_atomic_cmpxchg_acquire(atomic_t *v, int o, int n) \
-{ \
-        return __cmpxchg_acquire(&(v->counter), o, n, 4); \
-} \
-static __always_inline \
-int arch_atomic_cmpxchg(atomic_t *v, int o, int n) \
-{ \
-        return __cmpxchg(&(v->counter), o, n, 4); \
-}
-
-#define ATOMIC_OPS() \
-        ATOMIC_OP()
-
-ATOMIC_OPS()
-
-#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed
-#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
-#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
-#define arch_atomic_cmpxchg arch_atomic_cmpxchg
-
-#undef ATOMIC_OPS
-#undef ATOMIC_OP
-
 #else
 #include <asm-generic/atomic.h>
 #endif
@@ -36,12 +36,6 @@ static inline void arch_atomic_set(atomic_t *v, int new)
  */
 #define arch_atomic_read(v) READ_ONCE((v)->counter)
 
-#define arch_atomic_xchg(v, new) \
-        (arch_xchg(&((v)->counter), (new)))
-
-#define arch_atomic_cmpxchg(v, old, new) \
-        (arch_cmpxchg(&((v)->counter), (old), (new)))
-
 #define ATOMIC_OP(op) \
 static inline void arch_atomic_##op(int i, atomic_t *v) \
 { \
@@ -207,13 +207,6 @@ ATOMIC64_FETCH_OP(xor, ^)
 #undef ATOMIC64_FETCH_OP
 #undef ATOMIC64_OP
 
-#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), old, new))
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
-#define arch_atomic64_cmpxchg(v, old, new) \
-        (arch_cmpxchg(&((v)->counter), old, new))
-#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
 #define arch_atomic_add(i,v) (void)arch_atomic_add_return((i), (v))
 #define arch_atomic_sub(i,v) (void)arch_atomic_sub_return((i), (v))
 
@@ -181,9 +181,6 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
         return result;
 }
 
-#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), (new)))
-
 /*
  * arch_atomic_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic_t
@@ -342,10 +339,6 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
         return result;
 }
 
-#define arch_atomic64_cmpxchg(v, o, n) \
-        ((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), (new)))
-
 /*
  * arch_atomic64_dec_if_positive - decrement by 1 if old value positive
  * @v: pointer of type atomic64_t
@@ -158,12 +158,7 @@ static inline int arch_atomic_inc_and_test(atomic_t *v)
 }
 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
 
-#ifdef CONFIG_RMW_INSNS
-
-#define arch_atomic_cmpxchg(v, o, n) ((int)arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
-#else /* !CONFIG_RMW_INSNS */
+#ifndef CONFIG_RMW_INSNS
 
 static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
 {
@@ -177,6 +172,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
         local_irq_restore(flags);
         return prev;
 }
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg
 
 static inline int arch_atomic_xchg(atomic_t *v, int new)
 {
@@ -189,6 +185,7 @@ static inline int arch_atomic_xchg(atomic_t *v, int new)
         local_irq_restore(flags);
         return prev;
 }
+#define arch_atomic_xchg arch_atomic_xchg
 
 #endif /* !CONFIG_RMW_INSNS */
 
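A note on the m68k hunks above, the one place this patch flips a conditional rather than just deleting code: with the generic fallbacks available, the CONFIG_RMW_INSNS side needs no explicit definitions at all, while the irq-based implementations that remain must advertise themselves so the fallback layer does not emit a second definition. The detection relies on the kernel's usual name-macro convention; a sketch of roughly what the remaining m68k path looks like (body paraphrased from the context lines above, not copied from the tree):

static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        unsigned long flags;
        int prev;

        /* UP-only fallback: serialize against interrupts, not other CPUs. */
        local_irq_save(flags);
        prev = v->counter;
        if (prev == old)
                v->counter = new;
        local_irq_restore(flags);
        return prev;
}
/* The self-referencing macro tells the fallback header the op exists. */
#define arch_atomic_cmpxchg arch_atomic_cmpxchg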
@@ -33,17 +33,6 @@ static __always_inline void arch_##pfx##_set(pfx##_t *v, type i) \
 { \
         WRITE_ONCE(v->counter, i); \
 } \
-\
-static __always_inline type \
-arch_##pfx##_cmpxchg(pfx##_t *v, type o, type n) \
-{ \
-        return arch_cmpxchg(&v->counter, o, n); \
-} \
-\
-static __always_inline type arch_##pfx##_xchg(pfx##_t *v, type n) \
-{ \
-        return arch_xchg(&v->counter, n); \
-}
 
 ATOMIC_OPS(atomic, int)
 
@@ -130,7 +130,4 @@ static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
 
 #include <asm/cmpxchg.h>
 
-#define arch_atomic_xchg(ptr, v) (arch_xchg(&(ptr)->counter, (v)))
-#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (old), (new)))
-
 #endif /* __ASM_OPENRISC_ATOMIC_H */
@@ -73,10 +73,6 @@ static __inline__ int arch_atomic_read(const atomic_t *v)
         return READ_ONCE((v)->counter);
 }
 
-/* exported interface */
-#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
 #define ATOMIC_OP(op, c_op) \
 static __inline__ void arch_atomic_##op(int i, atomic_t *v) \
 { \
@@ -218,11 +214,6 @@ arch_atomic64_read(const atomic64_t *v)
         return READ_ONCE((v)->counter);
 }
 
-/* exported interface */
-#define arch_atomic64_cmpxchg(v, o, n) \
-        ((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
 #endif /* !CONFIG_64BIT */
 
 
@@ -126,18 +126,6 @@ ATOMIC_OPS(xor, xor, "", K)
 #undef ATOMIC_OP_RETURN_RELAXED
 #undef ATOMIC_OP
 
-#define arch_atomic_cmpxchg(v, o, n) \
-        (arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic_cmpxchg_relaxed(v, o, n) \
-        arch_cmpxchg_relaxed(&((v)->counter), (o), (n))
-#define arch_atomic_cmpxchg_acquire(v, o, n) \
-        arch_cmpxchg_acquire(&((v)->counter), (o), (n))
-
-#define arch_atomic_xchg(v, new) \
-        (arch_xchg(&((v)->counter), new))
-#define arch_atomic_xchg_relaxed(v, new) \
-        arch_xchg_relaxed(&((v)->counter), (new))
-
 /**
  * atomic_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
@@ -396,18 +384,6 @@ static __inline__ s64 arch_atomic64_dec_if_positive(atomic64_t *v)
 }
 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
 
-#define arch_atomic64_cmpxchg(v, o, n) \
-        (arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic64_cmpxchg_relaxed(v, o, n) \
-        arch_cmpxchg_relaxed(&((v)->counter), (o), (n))
-#define arch_atomic64_cmpxchg_acquire(v, o, n) \
-        arch_cmpxchg_acquire(&((v)->counter), (o), (n))
-
-#define arch_atomic64_xchg(v, new) \
-        (arch_xchg(&((v)->counter), new))
-#define arch_atomic64_xchg_relaxed(v, new) \
-        arch_xchg_relaxed(&((v)->counter), (new))
-
 /**
  * atomic64_fetch_add_unless - add unless the number is a given value
  * @v: pointer of type atomic64_t
@@ -238,78 +238,6 @@ static __always_inline s64 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a,
 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
 #endif
 
-/*
- * atomic_{cmp,}xchg is required to have exactly the same ordering semantics as
- * {cmp,}xchg and the operations that return, so they need a full barrier.
- */
-#define ATOMIC_OP(c_t, prefix, size) \
-static __always_inline \
-c_t arch_atomic##prefix##_xchg_relaxed(atomic##prefix##_t *v, c_t n) \
-{ \
-        return __xchg_relaxed(&(v->counter), n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_xchg_acquire(atomic##prefix##_t *v, c_t n) \
-{ \
-        return __xchg_acquire(&(v->counter), n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_xchg_release(atomic##prefix##_t *v, c_t n) \
-{ \
-        return __xchg_release(&(v->counter), n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_xchg(atomic##prefix##_t *v, c_t n) \
-{ \
-        return __arch_xchg(&(v->counter), n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_cmpxchg_relaxed(atomic##prefix##_t *v, \
-                c_t o, c_t n) \
-{ \
-        return __cmpxchg_relaxed(&(v->counter), o, n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_cmpxchg_acquire(atomic##prefix##_t *v, \
-                c_t o, c_t n) \
-{ \
-        return __cmpxchg_acquire(&(v->counter), o, n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_cmpxchg_release(atomic##prefix##_t *v, \
-                c_t o, c_t n) \
-{ \
-        return __cmpxchg_release(&(v->counter), o, n, size); \
-} \
-static __always_inline \
-c_t arch_atomic##prefix##_cmpxchg(atomic##prefix##_t *v, c_t o, c_t n) \
-{ \
-        return __cmpxchg(&(v->counter), o, n, size); \
-}
-
-#ifdef CONFIG_GENERIC_ATOMIC64
-#define ATOMIC_OPS() \
-        ATOMIC_OP(int,   , 4)
-#else
-#define ATOMIC_OPS() \
-        ATOMIC_OP(int,   , 4) \
-        ATOMIC_OP(s64, 64, 8)
-#endif
-
-ATOMIC_OPS()
-
-#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed
-#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
-#define arch_atomic_xchg_release arch_atomic_xchg_release
-#define arch_atomic_xchg arch_atomic_xchg
-#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
-#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
-#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
-#define arch_atomic_cmpxchg arch_atomic_cmpxchg
-
-#undef ATOMIC_OPS
-#undef ATOMIC_OP
-
 static __always_inline bool arch_atomic_inc_unless_negative(atomic_t *v)
 {
         int prev, rc;
@@ -30,9 +30,6 @@
 #include <asm/atomic-irq.h>
 #endif
 
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
-#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
-
 #endif /* CONFIG_CPU_J2 */
 
 #endif /* __ASM_SH_ATOMIC_H */
@@ -24,7 +24,9 @@ int arch_atomic_fetch_and(int, atomic_t *);
 int arch_atomic_fetch_or(int, atomic_t *);
 int arch_atomic_fetch_xor(int, atomic_t *);
 int arch_atomic_cmpxchg(atomic_t *, int, int);
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg
 int arch_atomic_xchg(atomic_t *, int);
+#define arch_atomic_xchg arch_atomic_xchg
 int arch_atomic_fetch_add_unless(atomic_t *, int, int);
 void arch_atomic_set(atomic_t *, int);
 
@@ -49,17 +49,6 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
-
-static inline int arch_atomic_xchg(atomic_t *v, int new)
-{
-        return arch_xchg(&v->counter, new);
-}
-
-#define arch_atomic64_cmpxchg(v, o, n) \
-        ((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
 s64 arch_atomic64_dec_if_positive(atomic64_t *v);
 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
 
@@ -257,7 +257,4 @@ ATOMIC_OPS(xor)
 #undef ATOMIC_OP_RETURN
 #undef ATOMIC_OP
 
-#define arch_atomic_cmpxchg(v, o, n) ((int)arch_cmpxchg(&((v)->counter), (o), (n)))
-#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))
-
 #endif /* _XTENSA_ATOMIC_H */
@@ -130,7 +130,4 @@ ATOMIC_OP(xor, ^)
 #define arch_atomic_read(v) READ_ONCE((v)->counter)
 #define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
 
-#define arch_atomic_xchg(ptr, v) (arch_xchg(&(ptr)->counter, (u32)(v)))
-#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (u32)(old), (u32)(new)))
-
 #endif /* __ASM_GENERIC_ATOMIC_H */
@@ -1091,9 +1091,48 @@ arch_atomic_fetch_xor(int i, atomic_t *v)
 #endif /* arch_atomic_fetch_xor_relaxed */
 
 #ifndef arch_atomic_xchg_relaxed
+#ifdef arch_atomic_xchg
 #define arch_atomic_xchg_acquire arch_atomic_xchg
 #define arch_atomic_xchg_release arch_atomic_xchg
 #define arch_atomic_xchg_relaxed arch_atomic_xchg
+#endif /* arch_atomic_xchg */
+
+#ifndef arch_atomic_xchg
+static __always_inline int
+arch_atomic_xchg(atomic_t *v, int new)
+{
+        return arch_xchg(&v->counter, new);
+}
+#define arch_atomic_xchg arch_atomic_xchg
+#endif
+
+#ifndef arch_atomic_xchg_acquire
+static __always_inline int
+arch_atomic_xchg_acquire(atomic_t *v, int new)
+{
+        return arch_xchg_acquire(&v->counter, new);
+}
+#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
+#endif
+
+#ifndef arch_atomic_xchg_release
+static __always_inline int
+arch_atomic_xchg_release(atomic_t *v, int new)
+{
+        return arch_xchg_release(&v->counter, new);
+}
+#define arch_atomic_xchg_release arch_atomic_xchg_release
+#endif
+
+#ifndef arch_atomic_xchg_relaxed
+static __always_inline int
+arch_atomic_xchg_relaxed(atomic_t *v, int new)
+{
+        return arch_xchg_relaxed(&v->counter, new);
+}
+#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed
+#endif
+
 #else /* arch_atomic_xchg_relaxed */
 
 #ifndef arch_atomic_xchg_acquire
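The shape of the new fallback block above is worth spelling out. Inside the outer "#ifndef arch_atomic_xchg_relaxed" branch (i.e. the architecture supplied no ordering variants), a fully-ordered arch_atomic_xchg, if present, is first reused for the _acquire/_release/_relaxed names; any name still missing after that is then generated from the corresponding arch_xchg*() primitive. A condensed sketch of the preprocessor flow (the comments are mine, not from the generated header):

#ifndef arch_atomic_xchg_relaxed        /* no ordering variants from the arch */
#ifdef arch_atomic_xchg                 /* ...but a fully-ordered op exists */
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#endif
#ifndef arch_atomic_xchg                /* otherwise synthesize it */
static __always_inline int
arch_atomic_xchg(atomic_t *v, int new)
{
        return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg
#endif
/* ...the same #ifndef dance repeats for _acquire, _release and _relaxed... */
#else /* arch_atomic_xchg_relaxed */
/* pre-existing path: build the other orderings around the relaxed op */
#endif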
@@ -1133,9 +1172,48 @@ arch_atomic_xchg(atomic_t *v, int i)
 #endif /* arch_atomic_xchg_relaxed */
 
 #ifndef arch_atomic_cmpxchg_relaxed
+#ifdef arch_atomic_cmpxchg
 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
 #define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
+#endif /* arch_atomic_cmpxchg */
+
+#ifndef arch_atomic_cmpxchg
+static __always_inline int
+arch_atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+        return arch_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg
+#endif
+
+#ifndef arch_atomic_cmpxchg_acquire
+static __always_inline int
+arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
+{
+        return arch_cmpxchg_acquire(&v->counter, old, new);
+}
+#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
+#endif
+
+#ifndef arch_atomic_cmpxchg_release
+static __always_inline int
+arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
+{
+        return arch_cmpxchg_release(&v->counter, old, new);
+}
+#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
+#endif
+
+#ifndef arch_atomic_cmpxchg_relaxed
+static __always_inline int
+arch_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
+{
+        return arch_cmpxchg_relaxed(&v->counter, old, new);
+}
+#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
+#endif
+
 #else /* arch_atomic_cmpxchg_relaxed */
 
 #ifndef arch_atomic_cmpxchg_acquire
@@ -2225,9 +2303,48 @@ arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
 #endif /* arch_atomic64_fetch_xor_relaxed */
 
 #ifndef arch_atomic64_xchg_relaxed
+#ifdef arch_atomic64_xchg
 #define arch_atomic64_xchg_acquire arch_atomic64_xchg
 #define arch_atomic64_xchg_release arch_atomic64_xchg
 #define arch_atomic64_xchg_relaxed arch_atomic64_xchg
+#endif /* arch_atomic64_xchg */
+
+#ifndef arch_atomic64_xchg
+static __always_inline s64
+arch_atomic64_xchg(atomic64_t *v, s64 new)
+{
+        return arch_xchg(&v->counter, new);
+}
+#define arch_atomic64_xchg arch_atomic64_xchg
+#endif
+
+#ifndef arch_atomic64_xchg_acquire
+static __always_inline s64
+arch_atomic64_xchg_acquire(atomic64_t *v, s64 new)
+{
+        return arch_xchg_acquire(&v->counter, new);
+}
+#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
+#endif
+
+#ifndef arch_atomic64_xchg_release
+static __always_inline s64
+arch_atomic64_xchg_release(atomic64_t *v, s64 new)
+{
+        return arch_xchg_release(&v->counter, new);
+}
+#define arch_atomic64_xchg_release arch_atomic64_xchg_release
+#endif
+
+#ifndef arch_atomic64_xchg_relaxed
+static __always_inline s64
+arch_atomic64_xchg_relaxed(atomic64_t *v, s64 new)
+{
+        return arch_xchg_relaxed(&v->counter, new);
+}
+#define arch_atomic64_xchg_relaxed arch_atomic64_xchg_relaxed
+#endif
+
 #else /* arch_atomic64_xchg_relaxed */
 
 #ifndef arch_atomic64_xchg_acquire
@@ -2267,9 +2384,48 @@ arch_atomic64_xchg(atomic64_t *v, s64 i)
 #endif /* arch_atomic64_xchg_relaxed */
 
 #ifndef arch_atomic64_cmpxchg_relaxed
+#ifdef arch_atomic64_cmpxchg
 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
 #define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
+#endif /* arch_atomic64_cmpxchg */
+
+#ifndef arch_atomic64_cmpxchg
+static __always_inline s64
+arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+{
+        return arch_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
+#endif
+
+#ifndef arch_atomic64_cmpxchg_acquire
+static __always_inline s64
+arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
+{
+        return arch_cmpxchg_acquire(&v->counter, old, new);
+}
+#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
+#endif
+
+#ifndef arch_atomic64_cmpxchg_release
+static __always_inline s64
+arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
+{
+        return arch_cmpxchg_release(&v->counter, old, new);
+}
+#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
+#endif
+
+#ifndef arch_atomic64_cmpxchg_relaxed
+static __always_inline s64
+arch_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
+{
+        return arch_cmpxchg_relaxed(&v->counter, old, new);
+}
+#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg_relaxed
+#endif
+
 #else /* arch_atomic64_cmpxchg_relaxed */
 
 #ifndef arch_atomic64_cmpxchg_acquire
@@ -2597,4 +2753,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
 #endif
 
 #endif /* _LINUX_ATOMIC_FALLBACK_H */
-// 9f0fd6ed53267c6ec64e36cd18e6fd8df57ea277
+// e1cee558cc61cae887890db30fcdf93baca9f498
scripts/atomic/fallbacks/cmpxchg (new file, +7)
@@ -0,0 +1,7 @@
+cat <<EOF
+static __always_inline ${int}
+arch_${atomic}_cmpxchg${order}(${atomic}_t *v, ${int} old, ${int} new)
+{
+        return arch_cmpxchg${order}(&v->counter, old, new);
+}
+EOF
scripts/atomic/fallbacks/xchg (new file, +7)
@@ -0,0 +1,7 @@
+cat <<EOF
+static __always_inline ${int}
+arch_${atomic}_xchg${order}(${atomic}_t *v, ${int} new)
+{
+        return arch_xchg${order}(&v->counter, new);
+}
+EOF
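As a usage note on the two new templates: they are consumed by the scripts/atomic generation machinery, which (judging from the ${atomic}, ${int} and ${order} parameters; the driver script itself is not part of this diff) expands each template once per atomic type and ordering variant. For example, with atomic=atomic64, int=s64 and order=_acquire, the xchg template emits exactly the acquire fallback seen in the generated header above:

static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 new)
{
        return arch_xchg_acquire(&v->counter, new);
}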