#ifndef _PERF_LINUX_BITOPS_H_
#define _PERF_LINUX_BITOPS_H_

#include <linux/kernel.h>
#include <endian.h>
#include <byteswap.h>	/* bswap_32()/bswap_64(), used by the big-endian helpers below */
#include "common/internal.h"

#ifndef DIV_ROUND_UP
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
#endif

#define BITS_PER_BYTE 8
#define BITS_TO_LONGS(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#define BITS_TO_U64(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u64))
#define BITS_TO_U32(nr) DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(u32))

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but use bit as value to start with */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
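
/*
 * Illustrative sketch (not part of the original header): one way a caller
 * might walk the set bits of a small bitmap with for_each_set_bit(). The
 * bitmap name, its 64-bit size and the printf() are made up for the example;
 * set_bit() is the helper defined just below.
 *
 *	unsigned long map[BITS_TO_LONGS(64)] = { 0 };
 *	int bit;
 *
 *	set_bit(3, map);
 *	set_bit(42, map);
 *	for_each_set_bit(bit, map, 64)
 *		printf("bit %d is set\n", bit);	// visits 3, then 42
 */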

static inline void set_bit(int nr, unsigned long *addr)
{
	addr[nr / BITS_PER_LONG] |= 1UL << (nr % BITS_PER_LONG);
}

static inline void clear_bit(int nr, unsigned long *addr)
{
	addr[nr / BITS_PER_LONG] &= ~(1UL << (nr % BITS_PER_LONG));
}
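
/*
 * Note: set_bit() and clear_bit() above are plain read-modify-write word
 * updates, not the atomic bitops of the same name in the kernel; callers
 * touching the same word concurrently need their own locking.
 */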

/**
 * hweightN - returns the Hamming weight of an N-bit word
 * @w: the word to weigh
 *
 * The Hamming weight of a number is the total number of bits set in it.
 */

static inline unsigned int hweight32(unsigned int w)
{
	unsigned int res = w - ((w >> 1) & 0x55555555);
	res = (res & 0x33333333) + ((res >> 2) & 0x33333333);
	res = (res + (res >> 4)) & 0x0F0F0F0F;
	res = res + (res >> 8);
	return (res + (res >> 16)) & 0x000000FF;
}

static inline unsigned long hweight64(__u64 w)
{
#if BITS_PER_LONG == 32
	return hweight32((unsigned int)(w >> 32)) + hweight32((unsigned int)w);
#elif BITS_PER_LONG == 64
	__u64 res = w - ((w >> 1) & 0x5555555555555555ul);
	res = (res & 0x3333333333333333ul) + ((res >> 2) & 0x3333333333333333ul);
	res = (res + (res >> 4)) & 0x0F0F0F0F0F0F0F0Ful;
	res = res + (res >> 8);
	res = res + (res >> 16);
	return (res + (res >> 32)) & 0x00000000000000FFul;
#endif
}

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
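
/*
 * A few worked examples (illustrative only):
 *	hweight32(0x000000FF) == 8	- eight low bits set
 *	hweight32(0x80000001) == 2	- top and bottom bit set
 *	hweight64(0xF0F0F0F0F0F0F0F0ull) == 32
 */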

#define BITOP_WORD(nr) ((nr) / BITS_PER_LONG)

/**
 * __ffs - find first set bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static __always_inline unsigned long __ffs(unsigned long word)
{
	int num = 0;

#if BITS_PER_LONG == 64
	if ((word & 0xffffffff) == 0) {
		num += 32;
		word >>= 32;
	}
#endif
	if ((word & 0xffff) == 0) {
		num += 16;
		word >>= 16;
	}
	if ((word & 0xff) == 0) {
		num += 8;
		word >>= 8;
	}
	if ((word & 0xf) == 0) {
		num += 4;
		word >>= 4;
	}
	if ((word & 0x3) == 0) {
		num += 2;
		word >>= 2;
	}
	if ((word & 0x1) == 0)
		num += 1;
	return num;
}

#define ffz(x) __ffs(~(x))
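
/*
 * Worked examples (illustrative only):
 *	__ffs(0x10) == 4	- lowest set bit is bit 4
 *	__ffs(0x1)  == 0	- bit 0 set, nothing to skip
 *	ffz(0xFF)   == 8	- lowest zero bit of 0xFF is bit 8
 * As noted above, __ffs(0) (and therefore ffz(~0UL)) is undefined.
 */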

#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))
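
/*
 * Worked example (illustrative, assuming BITS_PER_LONG == 64):
 *	BITMAP_FIRST_WORD_MASK(3) == ~0UL << 3	- masks off bits 0..2 so a
 *						  search can start at bit 3
 *	BITMAP_LAST_WORD_MASK(68) == 0xFUL	- a 68-bit map ends 4 bits into
 *						  its second word, so only the
 *						  low 4 bits of that word count
 */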

/*
 * This is a common helper function for find_next_bit, find_next_zero_bit, and
 * find_next_and_bit. The differences are:
 *  - The "invert" argument, which is XORed with each fetched word before
 *    searching it for set bits.
 *  - The optional "addr2", which is ANDed with "addr1" if present.
 */
static inline unsigned long _find_next_bit(const unsigned long *addr1,
		const unsigned long *addr2, unsigned long nbits,
		unsigned long start, unsigned long invert)
{
	unsigned long tmp;

	if (start >= nbits)
		return nbits;

	tmp = addr1[start / BITS_PER_LONG];
	if (addr2)
		tmp &= addr2[start / BITS_PER_LONG];
	tmp ^= invert;

	/* Handle 1st word. */
	tmp &= BITMAP_FIRST_WORD_MASK(start);
	start = round_down(start, BITS_PER_LONG);

	while (!tmp) {
		start += BITS_PER_LONG;
		if (start >= nbits)
			return nbits;

		tmp = addr1[start / BITS_PER_LONG];
		if (addr2)
			tmp &= addr2[start / BITS_PER_LONG];
		tmp ^= invert;
	}

	return min(start + __ffs(tmp), nbits);
}
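
/*
 * How the "invert" trick above plays out (illustrative): find_next_zero_bit()
 * below passes invert == ~0UL, so every fetched word is complemented and the
 * zero bits of the bitmap show up as set bits for the same __ffs()-based scan.
 * E.g. for a word 0x7 (bits 0..2 set, bit 3 clear), tmp ^= ~0UL yields
 * 0x...fff8 and __ffs() returns 3, the first zero bit of the original word.
 */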

/*
 * Find the next set bit in a memory region.
 */
static inline unsigned long find_next_bit(const unsigned long *addr,
					  unsigned long size,
					  unsigned long offset)
{
	return _find_next_bit(addr, NULL, size, offset, 0UL);
}

static inline unsigned long find_next_zero_bit(const unsigned long *addr,
					       unsigned long size,
					       unsigned long offset)
{
	return _find_next_bit(addr, NULL, size, offset, ~0UL);
}

#define find_first_bit(addr, size) find_next_bit((addr), (size), 0)
#define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)
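
/*
 * Illustrative sketch: using find_first_zero_bit() to grab a free slot in an
 * allocation bitmap. "used" and "nr_slots" are made-up names for the example.
 *
 *	unsigned long slot = find_first_zero_bit(used, nr_slots);
 *
 *	if (slot < nr_slots)
 *		set_bit(slot, used);	// claim it
 *	// slot == nr_slots means every bit below nr_slots is already set
 */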

#if __BYTE_ORDER == __BIG_ENDIAN

static inline unsigned long ext2_swab(const unsigned long y)
{
#if BITS_PER_LONG == 64
	return (unsigned long) bswap_64((u64) y);
#elif BITS_PER_LONG == 32
	return (unsigned long) bswap_32((u32) y);
#else
#error BITS_PER_LONG not defined
#endif
}
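
/*
 * The *_le() helpers below search bitmaps stored as arrays of little-endian
 * longs (ext2-style on-disk bitmaps). On a big-endian host the word, or the
 * mask applied to it, is byte-swapped with ext2_swab() so the generic scan
 * and __ffs() still yield little-endian bit numbers; on little-endian hosts
 * (the #else branch) they collapse to the plain helpers above.
 */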

static inline unsigned long _find_next_bit_le(const unsigned long *addr1,
		const unsigned long *addr2, unsigned long nbits,
		unsigned long start, unsigned long invert)
{
	unsigned long tmp;

	if (start >= nbits)
		return nbits;

	tmp = addr1[start / BITS_PER_LONG];
	if (addr2)
		tmp &= addr2[start / BITS_PER_LONG];
	tmp ^= invert;

	/* Handle 1st word. */
	tmp &= ext2_swab(BITMAP_FIRST_WORD_MASK(start));
	start = round_down(start, BITS_PER_LONG);

	while (!tmp) {
		start += BITS_PER_LONG;
		if (start >= nbits)
			return nbits;

		tmp = addr1[start / BITS_PER_LONG];
		if (addr2)
			tmp &= addr2[start / BITS_PER_LONG];
		tmp ^= invert;
	}

	return min(start + __ffs(ext2_swab(tmp)), nbits);
}

static inline unsigned long find_next_zero_bit_le(const void *addr, unsigned long size,
						  unsigned long offset)
{
	return _find_next_bit_le(addr, NULL, size, offset, ~0UL);
}

static inline unsigned long find_next_bit_le(const void *addr, unsigned long size,
					     unsigned long offset)
{
	return _find_next_bit_le(addr, NULL, size, offset, 0UL);
}

#else

static inline unsigned long find_next_zero_bit_le(const void *addr,
		unsigned long size, unsigned long offset)
{
	return find_next_zero_bit(addr, size, offset);
}

static inline unsigned long find_next_bit_le(const void *addr,
		unsigned long size, unsigned long offset)
{
	return find_next_bit(addr, size, offset);
}

static inline unsigned long find_first_zero_bit_le(const void *addr,
		unsigned long size)
{
	return find_first_zero_bit(addr, size);
}

#endif /* __BYTE_ORDER == __BIG_ENDIAN */

#endif /* _PERF_LINUX_BITOPS_H_ */