Merge branch 'core-hweight-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip

* 'core-hweight-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip:
  x86, hweight: Use a 32-bit popcnt for __arch_hweight32()
  arch, hweight: Fix compilation errors
  x86: Add optimized popcnt variants
  bitops: Optimize hweight() by making use of compile-time evaluation
Linus Torvalds 2010-05-18 09:17:01 -07:00
commit cb41838bbc
14 changed files with 188 additions and 62 deletions
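Net effect of the series: hweight8/16/32/64() now fold to a constant when their argument is a compile-time constant, and otherwise dispatch to an arch-optimized __arch_hweightN() (a popcnt instruction on capable x86 CPUs, the software __sw_hweightN() elsewhere). A minimal user-space sketch of that dispatch, not part of the commit, with made-up names (my_hweight32, runtime_hweight32) standing in for the kernel macros:

#include <stdio.h>

/* Stand-in for the runtime path; the kernel would call __arch_hweight32()
 * (a popcnt instruction on capable x86, __sw_hweight32() otherwise). */
static int runtime_hweight32(unsigned int w)
{
	return __builtin_popcount(w);
}

/* Mirrors the new hweight32() dispatch: constant arguments fold at compile
 * time, everything else goes to the runtime helper. */
#define my_hweight32(w) \
	(__builtin_constant_p(w) ? __builtin_popcount(w) : runtime_hweight32(w))

int main(void)
{
	volatile unsigned int x = 0xdeadbeef;       /* forces the runtime path */

	printf("%d\n", my_hweight32(0xf0f0f0f0u));  /* folds to 16 at compile time */
	printf("%d\n", my_hweight32(x));            /* runtime path: 24 bits set */
	return 0;
}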

View File

@@ -405,29 +405,31 @@ static inline int fls(int x)
 #if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
 /* Whee.  EV67 can calculate it directly. */
-static inline unsigned long hweight64(unsigned long w)
+static inline unsigned long __arch_hweight64(unsigned long w)
 {
 	return __kernel_ctpop(w);
 }
-static inline unsigned int hweight32(unsigned int w)
+static inline unsigned int __arch_weight32(unsigned int w)
 {
-	return hweight64(w);
+	return __arch_hweight64(w);
 }
-static inline unsigned int hweight16(unsigned int w)
+static inline unsigned int __arch_hweight16(unsigned int w)
 {
-	return hweight64(w & 0xffff);
+	return __arch_hweight64(w & 0xffff);
 }
-static inline unsigned int hweight8(unsigned int w)
+static inline unsigned int __arch_hweight8(unsigned int w)
 {
-	return hweight64(w & 0xff);
+	return __arch_hweight64(w & 0xff);
 }
 #else
-#include <asm-generic/bitops/hweight.h>
+#include <asm-generic/bitops/arch_hweight.h>
 #endif
+#include <asm-generic/bitops/const_hweight.h>
 #endif /* __KERNEL__ */
 #include <asm-generic/bitops/find.h>

View File

@@ -437,17 +437,18 @@ __fls (unsigned long x)
  * hweightN: returns the hamming weight (i.e. the number
  * of bits set) of a N-bit word
  */
-static __inline__ unsigned long
-hweight64 (unsigned long x)
+static __inline__ unsigned long __arch_hweight64(unsigned long x)
 {
 	unsigned long result;
 	result = ia64_popcnt(x);
 	return result;
 }
-#define hweight32(x)	(unsigned int) hweight64((x) & 0xfffffffful)
-#define hweight16(x)	(unsigned int) hweight64((x) & 0xfffful)
-#define hweight8(x)	(unsigned int) hweight64((x) & 0xfful)
+#define __arch_hweight32(x) ((unsigned int) __arch_hweight64((x) & 0xfffffffful))
+#define __arch_hweight16(x) ((unsigned int) __arch_hweight64((x) & 0xfffful))
+#define __arch_hweight8(x)  ((unsigned int) __arch_hweight64((x) & 0xfful))
+#include <asm-generic/bitops/const_hweight.h>
 #endif /* __KERNEL__ */

View File

@@ -44,7 +44,7 @@ extern void change_bit(unsigned long nr, volatile unsigned long *addr);
 #ifdef ULTRA_HAS_POPULATION_COUNT
-static inline unsigned int hweight64(unsigned long w)
+static inline unsigned int __arch_hweight64(unsigned long w)
 {
 	unsigned int res;
@@ -52,7 +52,7 @@ static inline unsigned int hweight64(unsigned long w)
 	return res;
 }
-static inline unsigned int hweight32(unsigned int w)
+static inline unsigned int __arch_hweight32(unsigned int w)
 {
 	unsigned int res;
@@ -60,7 +60,7 @@ static inline unsigned int hweight32(unsigned int w)
 	return res;
 }
-static inline unsigned int hweight16(unsigned int w)
+static inline unsigned int __arch_hweight16(unsigned int w)
 {
 	unsigned int res;
@@ -68,7 +68,7 @@ static inline unsigned int hweight16(unsigned int w)
 	return res;
 }
-static inline unsigned int hweight8(unsigned int w)
+static inline unsigned int __arch_hweight8(unsigned int w)
 {
 	unsigned int res;
@@ -78,9 +78,10 @@ static inline unsigned int hweight8(unsigned int w)
 #else
-#include <asm-generic/bitops/hweight.h>
+#include <asm-generic/bitops/arch_hweight.h>
 #endif
+#include <asm-generic/bitops/const_hweight.h>
 #include <asm-generic/bitops/lock.h>
 #endif /* __KERNEL__ */

View File

@@ -237,6 +237,11 @@ config X86_32_LAZY_GS
 	def_bool y
 	depends on X86_32 && !CC_STACKPROTECTOR
+config ARCH_HWEIGHT_CFLAGS
+	string
+	default "-fcall-saved-ecx -fcall-saved-edx" if X86_32
+	default "-fcall-saved-rdi -fcall-saved-rsi -fcall-saved-rdx -fcall-saved-rcx -fcall-saved-r8 -fcall-saved-r9 -fcall-saved-r10 -fcall-saved-r11" if X86_64
 config KTIME_SCALAR
 	def_bool X86_32
 source "init/Kconfig"

View File

@@ -42,9 +42,6 @@
 #define LOCK_PREFIX ""
 #endif
-/* This must be included *after* the definition of LOCK_PREFIX */
-#include <asm/cpufeature.h>
 struct alt_instr {
 	u8 *instr;		/* original instruction */
 	u8 *replacement;
@@ -98,6 +95,12 @@ static inline int alternatives_text_reserved(void *start, void *end)
 	"663:\n\t" newinstr "\n664:\n"		/* replacement */	\
 	".previous"
+/*
+ * This must be included *after* the definition of ALTERNATIVE due to
+ * <asm/arch_hweight.h>
+ */
+#include <asm/cpufeature.h>
 /*
  * Alternative instructions for different CPU types or capabilities.
  *

View File

@@ -0,0 +1,61 @@
+#ifndef _ASM_X86_HWEIGHT_H
+#define _ASM_X86_HWEIGHT_H
+
+#ifdef CONFIG_64BIT
+/* popcnt %edi, %eax -- redundant REX prefix for alignment */
+#define POPCNT32 ".byte 0xf3,0x40,0x0f,0xb8,0xc7"
+/* popcnt %rdi, %rax */
+#define POPCNT64 ".byte 0xf3,0x48,0x0f,0xb8,0xc7"
+#define REG_IN "D"
+#define REG_OUT "a"
+#else
+/* popcnt %eax, %eax */
+#define POPCNT32 ".byte 0xf3,0x0f,0xb8,0xc0"
+#define REG_IN "a"
+#define REG_OUT "a"
+#endif
+
+/*
+ * __sw_hweightXX are called from within the alternatives below
+ * and callee-clobbered registers need to be taken care of. See
+ * ARCH_HWEIGHT_CFLAGS in <arch/x86/Kconfig> for the respective
+ * compiler switches.
+ */
+static inline unsigned int __arch_hweight32(unsigned int w)
+{
+	unsigned int res = 0;
+
+	asm (ALTERNATIVE("call __sw_hweight32", POPCNT32, X86_FEATURE_POPCNT)
+		     : "="REG_OUT (res)
+		     : REG_IN (w));
+
+	return res;
+}
+
+static inline unsigned int __arch_hweight16(unsigned int w)
+{
+	return __arch_hweight32(w & 0xffff);
+}
+
+static inline unsigned int __arch_hweight8(unsigned int w)
+{
+	return __arch_hweight32(w & 0xff);
+}
+
+static inline unsigned long __arch_hweight64(__u64 w)
+{
+	unsigned long res = 0;
+
+#ifdef CONFIG_X86_32
+	return  __arch_hweight32((u32)w) +
+		__arch_hweight32((u32)(w >> 32));
+#else
+	asm (ALTERNATIVE("call __sw_hweight64", POPCNT64, X86_FEATURE_POPCNT)
+		     : "="REG_OUT (res)
+		     : REG_IN (w));
+#endif /* CONFIG_X86_32 */
+
+	return res;
+}
+
+#endif
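At boot, the alternatives framework rewrites the "call __sw_hweightXX" sites above with the raw popcnt encodings whenever the CPU advertises X86_FEATURE_POPCNT. A rough user-space analogue of that feature-based selection (hypothetical names; the kernel patches the call site in place rather than going through a function pointer):

#include <stdio.h>

static unsigned int sw_hweight32(unsigned int w)	/* portable fallback */
{
	unsigned int n = 0;

	for (; w; w >>= 1)
		n += w & 1;
	return n;
}

static unsigned int hw_hweight32(unsigned int w)	/* hardware popcount */
{
	return __builtin_popcount(w);	/* compiles to popcnt with -mpopcnt */
}

int main(void)
{
	/* pick the fast path once, based on a runtime CPU feature check
	 * (GCC/Clang x86 builtin) */
	unsigned int (*hweight32)(unsigned int) =
		__builtin_cpu_supports("popcnt") ? hw_hweight32 : sw_hweight32;

	printf("%u\n", hweight32(0xdeadbeef));	/* 24 */
	return 0;
}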

View File

@@ -444,7 +444,9 @@ static inline int fls(int x)
 #define ARCH_HAS_FAST_MULTIPLIER 1
-#include <asm-generic/bitops/hweight.h>
+#include <asm/arch_hweight.h>
+#include <asm-generic/bitops/const_hweight.h>
 #endif /* __KERNEL__ */

View File

@@ -0,0 +1,25 @@
+#ifndef _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_
+#define _ASM_GENERIC_BITOPS_ARCH_HWEIGHT_H_
+
+#include <asm/types.h>
+
+static inline unsigned int __arch_hweight32(unsigned int w)
+{
+	return __sw_hweight32(w);
+}
+
+static inline unsigned int __arch_hweight16(unsigned int w)
+{
+	return __sw_hweight16(w);
+}
+
+static inline unsigned int __arch_hweight8(unsigned int w)
+{
+	return __sw_hweight8(w);
+}
+
+static inline unsigned long __arch_hweight64(__u64 w)
+{
+	return __sw_hweight64(w);
+}
+#endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */

View File

@@ -0,0 +1,42 @@
+#ifndef _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_
+#define _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_
+
+/*
+ * Compile time versions of __arch_hweightN()
+ */
+#define __const_hweight8(w)		\
+      (	(!!((w) & (1ULL << 0))) +	\
+	(!!((w) & (1ULL << 1))) +	\
+	(!!((w) & (1ULL << 2))) +	\
+	(!!((w) & (1ULL << 3))) +	\
+	(!!((w) & (1ULL << 4))) +	\
+	(!!((w) & (1ULL << 5))) +	\
+	(!!((w) & (1ULL << 6))) +	\
+	(!!((w) & (1ULL << 7)))	)
+#define __const_hweight16(w) (__const_hweight8(w)  + __const_hweight8((w)  >> 8 ))
+#define __const_hweight32(w) (__const_hweight16(w) + __const_hweight16((w) >> 16))
+#define __const_hweight64(w) (__const_hweight32(w) + __const_hweight32((w) >> 32))
+
+/*
+ * Generic interface.
+ */
+#define hweight8(w)  (__builtin_constant_p(w) ? __const_hweight8(w)  : __arch_hweight8(w))
+#define hweight16(w) (__builtin_constant_p(w) ? __const_hweight16(w) : __arch_hweight16(w))
+#define hweight32(w) (__builtin_constant_p(w) ? __const_hweight32(w) : __arch_hweight32(w))
+#define hweight64(w) (__builtin_constant_p(w) ? __const_hweight64(w) : __arch_hweight64(w))
+
+/*
+ * Interface for known constant arguments
+ */
+#define HWEIGHT8(w)  (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight8(w))
+#define HWEIGHT16(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight16(w))
+#define HWEIGHT32(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight32(w))
+#define HWEIGHT64(w) (BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) + __const_hweight64(w))
+
+/*
+ * Type invariant interface to the compile time constant hweight functions.
+ */
+#define HWEIGHT(w)   HWEIGHT64((u64)w)
+
+#endif /* _ASM_GENERIC_BITOPS_CONST_HWEIGHT_H_ */
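The value of the __const_hweightN()/HWEIGHTN() forms is that they expand to plain constant expressions, so they can appear where the compiler demands a constant, which no runtime popcount could satisfy. A self-contained sketch of that property (the macro is redeclared locally so the snippet builds outside the kernel):

#include <stdio.h>

/* Standalone copy of the __const_hweight8 idea, usable outside the kernel. */
#define CONST_HWEIGHT8(w)					\
	((!!((w) & (1ULL << 0))) + (!!((w) & (1ULL << 1))) +	\
	 (!!((w) & (1ULL << 2))) + (!!((w) & (1ULL << 3))) +	\
	 (!!((w) & (1ULL << 4))) + (!!((w) & (1ULL << 5))) +	\
	 (!!((w) & (1ULL << 6))) + (!!((w) & (1ULL << 7))))

/* A pure constant expression can size arrays and feed static assertions. */
static int slots[CONST_HWEIGHT8(0xb3)];			/* 5 entries */
_Static_assert(CONST_HWEIGHT8(0xb3) == 5, "0xb3 has 5 bits set");

int main(void)
{
	printf("%zu\n", sizeof(slots) / sizeof(slots[0]));	/* 5 */
	return 0;
}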

View File

@@ -1,11 +1,7 @@
 #ifndef _ASM_GENERIC_BITOPS_HWEIGHT_H_
 #define _ASM_GENERIC_BITOPS_HWEIGHT_H_
 
-#include <asm/types.h>
-
-extern unsigned int hweight32(unsigned int w);
-extern unsigned int hweight16(unsigned int w);
-extern unsigned int hweight8(unsigned int w);
-extern unsigned long hweight64(__u64 w);
+#include <asm-generic/bitops/arch_hweight.h>
+#include <asm-generic/bitops/const_hweight.h>
 
 #endif /* _ASM_GENERIC_BITOPS_HWEIGHT_H_ */

View File

@@ -10,6 +10,11 @@
 #define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
 #endif
 
+extern unsigned int __sw_hweight8(unsigned int w);
+extern unsigned int __sw_hweight16(unsigned int w);
+extern unsigned int __sw_hweight32(unsigned int w);
+extern unsigned long __sw_hweight64(__u64 w);
+
 /*
  * Include this here because some architectures need generic_ffs/fls in
  * scope
@@ -44,31 +49,6 @@ static inline unsigned long hweight_long(unsigned long w)
 	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
 }
 
-/*
- * Clearly slow versions of the hweightN() functions, their benefit is
- * of course compile time evaluation of constant arguments.
- */
-#define HWEIGHT8(w)			\
-      (	BUILD_BUG_ON_ZERO(!__builtin_constant_p(w)) +	\
-	(!!((w) & (1ULL << 0))) +	\
-	(!!((w) & (1ULL << 1))) +	\
-	(!!((w) & (1ULL << 2))) +	\
-	(!!((w) & (1ULL << 3))) +	\
-	(!!((w) & (1ULL << 4))) +	\
-	(!!((w) & (1ULL << 5))) +	\
-	(!!((w) & (1ULL << 6))) +	\
-	(!!((w) & (1ULL << 7)))	)
-
-#define HWEIGHT16(w) (HWEIGHT8(w) + HWEIGHT8((w) >> 8))
-#define HWEIGHT32(w) (HWEIGHT16(w) + HWEIGHT16((w) >> 16))
-#define HWEIGHT64(w) (HWEIGHT32(w) + HWEIGHT32((w) >> 32))
-
-/*
- * Type invariant version that simply casts things to the
- * largest type.
- */
-#define HWEIGHT(w) HWEIGHT64((u64)(w))
-
 /**
  * rol32 - rotate a 32-bit value left
  * @word: value to rotate

View File

@@ -39,7 +39,10 @@ lib-$(CONFIG_RWSEM_XCHGADD_ALGORITHM) += rwsem.o
 lib-$(CONFIG_GENERIC_FIND_FIRST_BIT) += find_next_bit.o
 lib-$(CONFIG_GENERIC_FIND_NEXT_BIT) += find_next_bit.o
 obj-$(CONFIG_GENERIC_FIND_LAST_BIT) += find_last_bit.o
+
+CFLAGS_hweight.o = $(subst $(quote),,$(CONFIG_ARCH_HWEIGHT_CFLAGS))
 obj-$(CONFIG_GENERIC_HWEIGHT) += hweight.o
+
 obj-$(CONFIG_LOCK_KERNEL) += kernel_lock.o
 obj-$(CONFIG_BTREE) += btree.o
 obj-$(CONFIG_DEBUG_PREEMPT) += smp_processor_id.o

View File

@@ -9,7 +9,7 @@
  * The Hamming Weight of a number is the total number of bits set in it.
  */
 
-unsigned int hweight32(unsigned int w)
+unsigned int __sw_hweight32(unsigned int w)
 {
 #ifdef ARCH_HAS_FAST_MULTIPLIER
 	w -= (w >> 1) & 0x55555555;
@@ -24,29 +24,30 @@ unsigned int hweight32(unsigned int w)
 	return (res + (res >> 16)) & 0x000000FF;
 #endif
 }
-EXPORT_SYMBOL(hweight32);
+EXPORT_SYMBOL(__sw_hweight32);
 
-unsigned int hweight16(unsigned int w)
+unsigned int __sw_hweight16(unsigned int w)
 {
 	unsigned int res = w - ((w >> 1) & 0x5555);
 	res = (res & 0x3333) + ((res >> 2) & 0x3333);
 	res = (res + (res >> 4)) & 0x0F0F;
 	return (res + (res >> 8)) & 0x00FF;
 }
-EXPORT_SYMBOL(hweight16);
+EXPORT_SYMBOL(__sw_hweight16);
 
-unsigned int hweight8(unsigned int w)
+unsigned int __sw_hweight8(unsigned int w)
 {
 	unsigned int res = w - ((w >> 1) & 0x55);
 	res = (res & 0x33) + ((res >> 2) & 0x33);
 	return (res + (res >> 4)) & 0x0F;
 }
-EXPORT_SYMBOL(hweight8);
+EXPORT_SYMBOL(__sw_hweight8);
 
-unsigned long hweight64(__u64 w)
+unsigned long __sw_hweight64(__u64 w)
 {
 #if BITS_PER_LONG == 32
-	return hweight32((unsigned int)(w >> 32)) + hweight32((unsigned int)w);
+	return __sw_hweight32((unsigned int)(w >> 32)) +
+	       __sw_hweight32((unsigned int)w);
 #elif BITS_PER_LONG == 64
 #ifdef ARCH_HAS_FAST_MULTIPLIER
 	w -= (w >> 1) & 0x5555555555555555ul;
@@ -63,4 +64,4 @@ unsigned long hweight64(__u64 w)
 #endif
 #endif
 }
-EXPORT_SYMBOL(hweight64);
+EXPORT_SYMBOL(__sw_hweight64);
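For reference, the software fallback counts bits with the usual divide-and-conquer reduction: pair sums, then nibble sums, then byte folding. A stand-alone check of that scheme against a naive loop (user-space sketch; the hunk above shows only part of the 32-bit function body, so the intermediate steps here follow the standard form of the algorithm):

#include <stdio.h>

/* Same bit-sliding reduction as the generic __sw_hweight32() path. */
static unsigned int sw_hweight32(unsigned int w)
{
	unsigned int res = w - ((w >> 1) & 0x55555555);	/* 2-bit pair counts */
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);	/* 4-bit sums */
	res = (res + (res >> 4)) & 0x0F0F0F0F;			/* byte sums */
	res = res + (res >> 8);
	return (res + (res >> 16)) & 0x000000FF;		/* fold bytes */
}

/* Obvious reference implementation. */
static unsigned int naive_hweight32(unsigned int w)
{
	unsigned int n = 0;

	for (; w; w >>= 1)
		n += w & 1;
	return n;
}

int main(void)
{
	unsigned int v[] = { 0, 1, 0xff, 0xdeadbeef, 0xffffffff };

	for (unsigned int i = 0; i < sizeof(v) / sizeof(v[0]); i++)
		printf("%08x: %u %u\n", v[i], sw_hweight32(v[i]), naive_hweight32(v[i]));
	return 0;
}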

View File

@@ -245,3 +245,7 @@ quiet_cmd_lzo = LZO $@
 cmd_lzo = (cat $(filter-out FORCE,$^) | \
 	lzop -9 && $(call size_append, $(filter-out FORCE,$^))) > $@ || \
 	(rm -f $@ ; false)
+
+# misc stuff
+# ---------------------------------------------------------------------------
+quote:="