powerpc: Optimise smp_wmb
Change 2d1b202762 ("powerpc: Fixup lwsync at runtime") removed __SUBARCH_HAS_LWSYNC, causing smp_wmb to revert back to eieio for all CPUs. This restores the behaviour introduced in 74f0609526 ("powerpc: Optimise smp_wmb on 64-bit processors").

Signed-off-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Paul Mackerras <paulus@samba.org>
commit 46d075be58
parent a4e22f02f5
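Read functionally, the patch is a preprocessor switch: with __SUBARCH_HAS_LWSYNC defined (64-bit and e500mc parts), SMPWMB resolves to the lightweight lwsync barrier; without it, smp_wmb() falls back to eieio, which is what every CPU had been getting since the define was dropped. The snippet below is a self-contained user-space sketch of that pattern, not the kernel header itself; the stringify helpers and the main() harness are re-created purely for illustration. The real change follows in the two hunks below.

/*
 * Illustrative sketch only -- not the kernel header.  Shows how a
 * compile-time switch such as __SUBARCH_HAS_LWSYNC selects the
 * instruction that smp_wmb() emits.
 */
#define __stringify_1(x)	#x
#define __stringify(x)		__stringify_1(x)	/* two-level expansion so macros expand first */

#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
#define __SUBARCH_HAS_LWSYNC
#endif

#ifdef __SUBARCH_HAS_LWSYNC
#define SMPWMB lwsync	/* the optimised store-ordering barrier on these CPUs */
#else
#define SMPWMB eieio	/* the fallback all CPUs got while the define was missing */
#endif

/* The stringified SMPWMB becomes the inline-asm template. */
#define smp_wmb() __asm__ __volatile__ (__stringify(SMPWMB) : : : "memory")

int main(void)
{
#ifdef __powerpc__
	smp_wmb();	/* assembles to "lwsync" or "eieio" depending on the switch above */
#endif
	return 0;
}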
@@ -5,6 +5,10 @@
 #include <linux/stringify.h>
 #include <asm/feature-fixups.h>
 
+#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
+#define __SUBARCH_HAS_LWSYNC
+#endif
+
 #ifndef __ASSEMBLY__
 extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
 extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
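The context lines above belong to the runtime-fixup machinery introduced by the earlier "Fixup lwsync at runtime" change: barrier sites are recorded in a __lwsync_fixup table and patched once at boot. The snippet below is a conceptual, self-contained sketch of that idea with simplified types chosen for the example; the kernel's actual do_lwsync_fixups() is more involved (offset bookkeeping, instruction-cache maintenance).

/*
 * Conceptual sketch of a boot-time lwsync fixup pass -- a simplified
 * assumption for illustration, not the kernel's do_lwsync_fixups().
 * Each entry points at a barrier that was assembled conservatively; on
 * CPUs that can use lwsync the site is rewritten with the lighter opcode.
 */
#include <stdint.h>
#include <stddef.h>

#define PPC_INST_SYNC	0x7c0004acu	/* "sync" encoding */
#define PPC_INST_LWSYNC	0x7c2004acu	/* "lwsync" encoding */

static void lwsync_fixup_sketch(uint32_t **sites, size_t nsites, int cpu_has_lwsync)
{
	size_t i;

	if (!cpu_has_lwsync)
		return;		/* leave the conservative barrier in place */

	for (i = 0; i < nsites; i++)
		*sites[i] = PPC_INST_LWSYNC;	/* real code must also flush the icache */
}

A caller would hand this the __start___lwsync_fixup/__stop___lwsync_fixup bounds declared in the context lines above, translated into instruction addresses.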
@@ -45,14 +49,14 @@
 #ifdef CONFIG_SMP
 
 #ifdef __SUBARCH_HAS_LWSYNC
-# define SMPWMB lwsync
+# define SMPWMB LWSYNC
 #else
 # define SMPWMB eieio
 #endif
 
 #define smp_mb() mb()
 #define smp_rmb() rmb()
-#define smp_wmb() __asm__ __volatile__ (__stringify(SMPWMB) : : :"memory")
+#define smp_wmb() __asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
 #define smp_read_barrier_depends() read_barrier_depends()
 #else
 #define smp_mb() barrier()
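Two details of this hunk deserve a note. SMPWMB now expands to the LWSYNC macro rather than the bare lwsync mnemonic, so the barrier appears to participate in the runtime fixup scheme, and the asm template is built with stringify_in_c() instead of __stringify(). The sketch below illustrates why a variadic stringifier matters once the stringified macro can expand to a fragment containing commas, as the fixup-aware LWSYNC seems to via its section bookkeeping; the helper definitions are re-created here for illustration and the kernel's own versions (in asm/asm-compat.h) may differ.

/*
 * Sketch: a single-argument stringifier breaks as soon as the expansion of
 * its argument contains a comma, while the variadic form still yields one
 * string.  Definitions below are illustrative re-creations, not kernel code.
 */
#include <stdio.h>

#define __stringify_1(x)	#x
#define __stringify(x)		__stringify_1(x)	/* takes one argument only */

#define __stringify_in_c(...)	#__VA_ARGS__
#define stringify_in_c(...)	__stringify_in_c(__VA_ARGS__) " "

/* A stand-in for a fixup-aware barrier macro whose expansion contains commas. */
#define FAKE_LWSYNC	.pushsection __lwsync_fixup,"a"; sync; .popsection

int main(void)
{
	/* __stringify(FAKE_LWSYNC) would not compile: after expansion the inner
	 * macro sees two arguments instead of one. */
	puts(stringify_in_c(FAKE_LWSYNC));	/* prints the whole fragment as one string */
	return 0;
}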