016e6d6a4f
Import a few basic bitmap functions (bitmap_{weight,fill,set,clear,or}()) and their dependencies from Linux. These are required for upcoming DMA resource allocation support for TI's K3 SoCs.

Signed-off-by: Vignesh Raghavendra <vigneshr@ti.com>
Reviewed-by: Grygorii Strashko <grygorii.strashko@ti.com>
Signed-off-by: Lokesh Vutla <lokeshvutla@ti.com>
// SPDX-License-Identifier: GPL-2.0+
#ifndef __LINUX_BITMAP_H
#define __LINUX_BITMAP_H

#include <asm/types.h>
#include <linux/types.h>
#include <linux/bitops.h>
#include <linux/string.h>
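
/*
 * Subset of the Linux bitmap API: bitmap_zero(), bitmap_fill(), bitmap_set(),
 * bitmap_clear(), bitmap_or(), bitmap_weight(), find_first_bit(),
 * find_next_bit() and for_each_set_bit().
 *
 * A bitmap is an array of unsigned long; a bitmap of @nbits bits occupies
 * BITS_TO_LONGS(nbits) words.
 */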

#ifdef __LITTLE_ENDIAN
#define BITMAP_MEM_ALIGNMENT 8
#else
#define BITMAP_MEM_ALIGNMENT (8 * sizeof(unsigned long))
#endif
#define BITMAP_MEM_MASK (BITMAP_MEM_ALIGNMENT - 1)

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))
#define small_const_nbits(nbits) \
	(__builtin_constant_p(nbits) && (nbits) <= BITS_PER_LONG)
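
/* Word-by-word OR of two bitmaps; helper behind bitmap_or(). */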
static inline void
__bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
	    const unsigned long *bitmap2, unsigned int bits)
{
	unsigned int k;
	unsigned int nr = BITS_TO_LONGS(bits);

	for (k = 0; k < nr; k++)
		dst[k] = bitmap1[k] | bitmap2[k];
}
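
/*
 * Count the set bits in the first @bits bits of @bitmap; helper behind
 * bitmap_weight().
 */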
static inline int
__bitmap_weight(const unsigned long *bitmap, unsigned int bits)
{
	unsigned int k, lim = bits / BITS_PER_LONG;
	int w = 0;

	for (k = 0; k < lim; k++)
		w += hweight_long(bitmap[k]);

	if (bits % BITS_PER_LONG)
		w += hweight_long(bitmap[k] & BITMAP_LAST_WORD_MASK(bits));

	return w;
}
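
/*
 * Set @len bits starting at bit @start: whole words are filled with ~0UL,
 * the partial first and last words are masked with BITMAP_FIRST_WORD_MASK()
 * and BITMAP_LAST_WORD_MASK().
 */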
static inline void
__bitmap_set(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_set >= 0) {
		*p |= mask_to_set;
		len -= bits_to_set;
		bits_to_set = BITS_PER_LONG;
		mask_to_set = ~0UL;
		p++;
	}
	if (len) {
		mask_to_set &= BITMAP_LAST_WORD_MASK(size);
		*p |= mask_to_set;
	}
}
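
/* Clear @len bits starting at bit @start; mirrors __bitmap_set(). */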
static inline void
__bitmap_clear(unsigned long *map, unsigned int start, int len)
{
	unsigned long *p = map + BIT_WORD(start);
	const unsigned int size = start + len;
	int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
	unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

	while (len - bits_to_clear >= 0) {
		*p &= ~mask_to_clear;
		len -= bits_to_clear;
		bits_to_clear = BITS_PER_LONG;
		mask_to_clear = ~0UL;
		p++;
	}
	if (len) {
		mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
		*p &= ~mask_to_clear;
	}
}
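
/* Zero all @nbits bits of the bitmap at @dst. */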
static inline void bitmap_zero(unsigned long *dst, int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = 0UL;
	} else {
		int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

		memset(dst, 0, len);
	}
}
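
/*
 * Find the next set bit in a memory region, starting the search at @offset;
 * returns @size if no further bits are set.
 */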
static inline unsigned long
find_next_bit(const unsigned long *addr, unsigned long size,
	      unsigned long offset)
{
	const unsigned long *p = addr + BIT_WORD(offset);
	unsigned long result = offset & ~(BITS_PER_LONG - 1);
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset %= BITS_PER_LONG;
	if (offset) {
		tmp = *(p++);
		tmp &= (~0UL << offset);
		if (size < BITS_PER_LONG)
			goto found_first;
		if (tmp)
			goto found_middle;
		size -= BITS_PER_LONG;
		result += BITS_PER_LONG;
	}
	while (size & ~(BITS_PER_LONG - 1)) {
		tmp = *(p++);
		if (tmp)
			goto found_middle;
		result += BITS_PER_LONG;
		size -= BITS_PER_LONG;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	tmp &= (~0UL >> (BITS_PER_LONG - size));
	if (tmp == 0UL)			/* Are any bits set? */
		return result + size;	/* Nope. */
found_middle:
	return result + __ffs(tmp);
}
/*
 * Find the first set bit in a memory region.
 */
static inline unsigned long find_first_bit(const unsigned long *addr, unsigned long size)
{
	unsigned long idx;

	for (idx = 0; idx * BITS_PER_LONG < size; idx++) {
		if (addr[idx])
			return min(idx * BITS_PER_LONG + __ffs(addr[idx]), size);
	}

	return size;
}
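
/* Iterate @bit over every set bit in the first @size bits of @addr. */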
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
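
/* Set all @nbits bits of the bitmap at @dst. */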
static inline void bitmap_fill(unsigned long *dst, unsigned int nbits)
{
	if (small_const_nbits(nbits)) {
		*dst = ~0UL;
	} else {
		unsigned int len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);

		memset(dst, 0xff, len);
	}
}
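
/* dst = src1 | src2, evaluated over @nbits bits. */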
static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
			     const unsigned long *src2, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		*dst = *src1 | *src2;
	else
		__bitmap_or(dst, src1, src2, nbits);
}
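
/* Return the number of set bits in the first @nbits bits of @src. */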
static inline int bitmap_weight(const unsigned long *src, unsigned int nbits)
{
	if (small_const_nbits(nbits))
		return hweight_long(*src & BITMAP_LAST_WORD_MASK(nbits));
	return __bitmap_weight(src, nbits);
}
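
/*
 * Set @nbits bits starting at @start: a constant single bit uses __set_bit(),
 * constant BITMAP_MEM_ALIGNMENT-aligned ranges use memset(), everything else
 * falls back to __bitmap_set().
 */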
static inline void bitmap_set(unsigned long *map, unsigned int start,
			      unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__set_bit(start, map);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0xff, nbits / 8);
	else
		__bitmap_set(map, start, nbits);
}
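
/*
 * Clear @nbits bits starting at @start: a constant single bit uses
 * __clear_bit(), constant BITMAP_MEM_ALIGNMENT-aligned ranges use memset(),
 * everything else falls back to __bitmap_clear().
 */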
static inline void bitmap_clear(unsigned long *map, unsigned int start,
				unsigned int nbits)
{
	if (__builtin_constant_p(nbits) && nbits == 1)
		__clear_bit(start, map);
	else if (__builtin_constant_p(start & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(start, BITMAP_MEM_ALIGNMENT) &&
		 __builtin_constant_p(nbits & BITMAP_MEM_MASK) &&
		 IS_ALIGNED(nbits, BITMAP_MEM_ALIGNMENT))
		memset((char *)map + start / 8, 0, nbits / 8);
	else
		__bitmap_clear(map, start, nbits);
}

#endif /* __LINUX_BITMAP_H */