/* bitops.h revision 3e037454bcfa4b187e8293d2121bd8c0f5a5c31c */
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

11#define for_each_bit(bit, addr, size) \
12	for ((bit) = find_first_bit((addr), (size)); \
13	     (bit) < (size); \
14	     (bit) = find_next_bit((addr), (size), (bit) + 1))


17static __inline__ int get_bitmask_order(unsigned int count)
18{
19	int order;
20
21	order = fls(count);
22	return order;	/* We could be slightly more clever with -1 here... */
23}
25static __inline__ int get_count_order(unsigned int count)
26{
27	int order;
28
29	order = fls(count) - 1;
30	if (count & (count - 1))
31		order++;
32	return order;
33}
35static inline unsigned long hweight_long(unsigned long w)
36{
37	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
38}
40/**
41 * rol32 - rotate a 32-bit value left
42 * @word: value to rotate
43 * @shift: bits to roll
44 */
45static inline __u32 rol32(__u32 word, unsigned int shift)
46{
47	return (word << shift) | (word >> (32 - shift));
48}
50/**
51 * ror32 - rotate a 32-bit value right
52 * @word: value to rotate
53 * @shift: bits to roll
54 */
55static inline __u32 ror32(__u32 word, unsigned int shift)
56{
57	return (word >> shift) | (word << (32 - shift));
58}
60static inline unsigned fls_long(unsigned long l)
61{
62	if (sizeof(l) == 4)
63		return fls(l);
64	return fls64(l);
65}

#endif