xref: /linux/tools/include/linux/bitops.h (revision e5a52fd2b8cdb700b3c07b030e050a49ef3156b9)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _TOOLS_LINUX_BITOPS_H_
#define _TOOLS_LINUX_BITOPS_H_

#include <asm/types.h>
#include <limits.h>
#ifndef __WORDSIZE
#define __WORDSIZE (__SIZEOF_LONG__ * 8)
#endif

#ifndef BITS_PER_LONG
# define BITS_PER_LONG __WORDSIZE
#endif
#include <linux/bits.h>
#include <linux/compiler.h>

#define BITS_PER_TYPE(type)	(sizeof(type) * BITS_PER_BYTE)
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
#define BITS_TO_U64(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
#define BITS_TO_U32(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
#define BITS_TO_BYTES(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(char))
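
/*
 * For example, with 64-bit longs BITS_TO_LONGS(65) == 2, and
 * BITS_TO_BYTES(9) == 2: each macro rounds the bit count up to
 * whole units of its type.
 */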
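/*
 * Software Hamming-weight (popcount) implementations: each returns the
 * number of set bits in @w, e.g. __sw_hweight8(0xf0) == 4.
 */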
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 *
 * XXX: this needs to be asm/bitops.h, when we get to per arch optimizations
 */
#include <asm-generic/bitops.h>

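/*
 * Illustrative use of for_each_set_bit(): visit each set bit in a
 * bitmap, lowest first.  With
 *
 *	unsigned long map[BITS_TO_LONGS(128)] = { 0x11 };
 *	unsigned int bit;
 *
 *	for_each_set_bit(bit, map, 128)
 *		printf("bit %u\n", bit);
 *
 * the loop body runs for bit == 0 and bit == 4.
 */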
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

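/*
 * for_each_clear_bit() is the complement: with map[] as above, walking
 * the first 8 bits visits 1, 2, 3, 5, 6 and 7, i.e. every bit of 0x11
 * that is clear.
 */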
#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));       \
	     (bit) < (size);                                    \
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but start from the current value of @bit */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

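/*
 * Illustrative: if @bit is preset to 1 before the loop, the map[]
 * example above is scanned starting at bit 1, so bit 0 is skipped and
 * bit 4 is the first one visited.
 */
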
static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
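
/*
 * e.g. hweight_long(0xffUL) == 8; the sizeof() test folds at compile
 * time, so only one of the two calls is ever emitted.
 */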

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}
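
/*
 * fls()/fls64() return the 1-based index of the most significant set
 * bit, or 0 when no bit is set: fls_long(0) == 0, fls_long(1) == 1,
 * fls_long(0x80) == 8.
 */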

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> ((-shift) & 31));
}
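
/*
 * e.g. rol32(0x80000001, 4) == 0x00000018. Masking the right-shift
 * count as ((-shift) & 31) keeps shift == 0 well defined; the naive
 * (word >> (32 - shift)) would shift by 32, which is undefined in C.
 */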

#endif /* _TOOLS_LINUX_BITOPS_H_ */