/*
 * Bitmap Module
 *
 * Copyright (C) 2010 Corentin Chary <corentin.chary@gmail.com>
 *
 * Mostly inspired by (stolen from) linux/bitmap.h and linux/bitops.h
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
 * See the COPYING.LIB file in the top-level directory.
 */

#ifndef BITMAP_H
#define BITMAP_H

#include "qemu/bitops.h"

/*
 * The available bitmap operations and their rough meaning, in the
 * case that the bitmap is a single unsigned long, are:
 *
 * Note that nbits should always be a compile-time evaluable constant.
 * Otherwise many of the inlines will generate horrible code.
 *
 * bitmap_zero(dst, nbits)                      *dst = 0UL
 * bitmap_fill(dst, nbits)                      *dst = ~0UL
 * bitmap_copy(dst, src, nbits)                 *dst = *src
 * bitmap_and(dst, src1, src2, nbits)           *dst = *src1 & *src2
 * bitmap_or(dst, src1, src2, nbits)            *dst = *src1 | *src2
 * bitmap_xor(dst, src1, src2, nbits)           *dst = *src1 ^ *src2
 * bitmap_andnot(dst, src1, src2, nbits)        *dst = *src1 & ~(*src2)
 * bitmap_complement(dst, src, nbits)           *dst = ~(*src)
 * bitmap_equal(src1, src2, nbits)              Are *src1 and *src2 equal?
 * bitmap_intersects(src1, src2, nbits)         Do *src1 and *src2 overlap?
 * bitmap_empty(src, nbits)                     Are all bits zero in *src?
 * bitmap_full(src, nbits)                      Are all bits set in *src?
 * bitmap_set(dst, pos, nbits)                  Set specified bit area
 * bitmap_set_atomic(dst, pos, nbits)           Set specified bit area with atomic ops
 * bitmap_clear(dst, pos, nbits)                Clear specified bit area
 * bitmap_test_and_clear_atomic(dst, pos, nbits)    Test and clear area
 * bitmap_find_next_zero_area(buf, len, pos, n, mask)    Find bit free area
 * bitmap_to_le(dst, src, nbits)                Convert bitmap to little endian
 * bitmap_from_le(dst, src, nbits)              Convert bitmap from little endian
 * bitmap_copy_with_src_offset(dst, src, offset, nbits)
 *                                              *dst = *src (with an offset into src)
 * bitmap_copy_with_dst_offset(dst, src, offset, nbits)
 *                                              *dst = *src (with an offset into dst)
 */
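
/*
 * Illustrative sketch (not part of the API): typical use of the helpers
 * above on a hypothetical 256-bit map called 'map'.
 *
 *   DECLARE_BITMAP(map, 256);
 *
 *   bitmap_zero(map, 256);
 *   bitmap_set(map, 16, 8);                    (mark bits 16..23 as used)
 *   if (!bitmap_empty(map, 256)) {
 *       ... at least one bit is set ...
 *   }
 */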

/*
 * The following single-bit operations, declared in "qemu/bitops.h", also
 * apply to bitmaps.
 *
 * set_bit(bit, addr)                   *addr |= (1UL << bit)
 * clear_bit(bit, addr)                 *addr &= ~(1UL << bit)
 * change_bit(bit, addr)                *addr ^= (1UL << bit)
 * test_bit(bit, addr)                  Is bit set in *addr?
 * test_and_set_bit(bit, addr)          Set bit and return old value
 * test_and_clear_bit(bit, addr)        Clear bit and return old value
 * test_and_change_bit(bit, addr)       Change bit and return old value
 * find_first_zero_bit(addr, nbits)     Position of first zero bit in *addr
 * find_first_bit(addr, nbits)          Position of first set bit in *addr
 * find_next_zero_bit(addr, nbits, bit) Position of next zero bit in *addr >= bit
 * find_next_bit(addr, nbits, bit)      Position of next set bit in *addr >= bit
 */
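
/*
 * Illustrative sketch (hypothetical names): single-bit helpers on the same
 * kind of map.  find_next_bit() returns the size argument (256 here) when
 * no further set bit exists.
 *
 *   unsigned long next;
 *
 *   set_bit(42, map);
 *   if (test_and_clear_bit(42, map)) {
 *       ... bit 42 was set and is now clear ...
 *   }
 *   next = find_next_bit(map, 256, 43);        (first set bit >= 43, or 256)
 */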

/*
 * BITMAP_FIRST_WORD_MASK(start): the bits of one word at and above
 * position (start % BITS_PER_LONG).
 * BITMAP_LAST_WORD_MASK(nbits): the bits actually used in the last word of
 * an nbits-bit map (all ones when nbits is a multiple of BITS_PER_LONG).
 */
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) & (BITS_PER_LONG - 1)))
#define BITMAP_LAST_WORD_MASK(nbits) (~0UL >> (-(nbits) & (BITS_PER_LONG - 1)))

#define DECLARE_BITMAP(name, bits) \
    unsigned long name[BITS_TO_LONGS(bits)]

#define small_nbits(nbits) \
    ((nbits) <= BITS_PER_LONG)

int slow_bitmap_empty(const unsigned long *bitmap, long bits);
int slow_bitmap_full(const unsigned long *bitmap, long bits);
int slow_bitmap_equal(const unsigned long *bitmap1,
                      const unsigned long *bitmap2, long bits);
void slow_bitmap_complement(unsigned long *dst, const unsigned long *src,
                            long bits);
int slow_bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                    const unsigned long *bitmap2, long bits);
void slow_bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                    const unsigned long *bitmap2, long bits);
void slow_bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                     const unsigned long *bitmap2, long bits);
int slow_bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                       const unsigned long *bitmap2, long bits);
int slow_bitmap_intersects(const unsigned long *bitmap1,
                           const unsigned long *bitmap2, long bits);
long slow_bitmap_count_one(const unsigned long *bitmap, long nbits);

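/* Allocate a zero-initialized bitmap of @nbits bits; returns NULL on failure. */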
static inline unsigned long *bitmap_try_new(long nbits)
{
    long len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
    return g_try_malloc0(len);
}

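/* Like bitmap_try_new(), but aborts if the allocation fails. */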
static inline unsigned long *bitmap_new(long nbits)
{
    unsigned long *ptr = bitmap_try_new(nbits);
    if (ptr == NULL) {
        abort();
    }
    return ptr;
}

static inline void bitmap_zero(unsigned long *dst, long nbits)
{
    if (small_nbits(nbits)) {
        *dst = 0UL;
    } else {
        long len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memset(dst, 0, len);
    }
}

static inline void bitmap_fill(unsigned long *dst, long nbits)
{
    size_t nlongs = BITS_TO_LONGS(nbits);
    if (!small_nbits(nbits)) {
        long len = (nlongs - 1) * sizeof(unsigned long);
        memset(dst, 0xff, len);
    }
    dst[nlongs - 1] = BITMAP_LAST_WORD_MASK(nbits);
}

static inline void bitmap_copy(unsigned long *dst, const unsigned long *src,
                               long nbits)
{
    if (small_nbits(nbits)) {
        *dst = *src;
    } else {
        long len = BITS_TO_LONGS(nbits) * sizeof(unsigned long);
        memcpy(dst, src, len);
    }
}

static inline int bitmap_and(unsigned long *dst, const unsigned long *src1,
                             const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return (*dst = *src1 & *src2) != 0;
    }
    return slow_bitmap_and(dst, src1, src2, nbits);
}

static inline void bitmap_or(unsigned long *dst, const unsigned long *src1,
                             const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        *dst = *src1 | *src2;
    } else {
        slow_bitmap_or(dst, src1, src2, nbits);
    }
}

static inline void bitmap_xor(unsigned long *dst, const unsigned long *src1,
                              const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        *dst = *src1 ^ *src2;
    } else {
        slow_bitmap_xor(dst, src1, src2, nbits);
    }
}

static inline int bitmap_andnot(unsigned long *dst, const unsigned long *src1,
                                const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return (*dst = *src1 & ~(*src2)) != 0;
    }
    return slow_bitmap_andnot(dst, src1, src2, nbits);
}

static inline void bitmap_complement(unsigned long *dst,
                                     const unsigned long *src,
                                     long nbits)
{
    if (small_nbits(nbits)) {
        *dst = ~(*src) & BITMAP_LAST_WORD_MASK(nbits);
    } else {
        slow_bitmap_complement(dst, src, nbits);
    }
}

static inline int bitmap_equal(const unsigned long *src1,
                               const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return !((*src1 ^ *src2) & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_equal(src1, src2, nbits);
    }
}

static inline int bitmap_empty(const unsigned long *src, long nbits)
{
    if (small_nbits(nbits)) {
        return !(*src & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_empty(src, nbits);
    }
}

static inline int bitmap_full(const unsigned long *src, long nbits)
{
    if (small_nbits(nbits)) {
        return !(~(*src) & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_full(src, nbits);
    }
}

static inline int bitmap_intersects(const unsigned long *src1,
                                    const unsigned long *src2, long nbits)
{
    if (small_nbits(nbits)) {
        return ((*src1 & *src2) & BITMAP_LAST_WORD_MASK(nbits)) != 0;
    } else {
        return slow_bitmap_intersects(src1, src2, nbits);
    }
}

static inline long bitmap_count_one(const unsigned long *bitmap, long nbits)
{
    if (unlikely(!nbits)) {
        return 0;
    }

    if (small_nbits(nbits)) {
        return ctpopl(*bitmap & BITMAP_LAST_WORD_MASK(nbits));
    } else {
        return slow_bitmap_count_one(bitmap, nbits);
    }
}

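/* Count the set bits in the range [offset, offset + nbits) of @bitmap. */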
static inline long bitmap_count_one_with_offset(const unsigned long *bitmap,
                                                long offset, long nbits)
{
    long aligned_offset = QEMU_ALIGN_DOWN(offset, BITS_PER_LONG);
    long redundant_bits = offset - aligned_offset;
    long bits_to_count = nbits + redundant_bits;
    const unsigned long *bitmap_start = bitmap +
                                        aligned_offset / BITS_PER_LONG;

    return bitmap_count_one(bitmap_start, bits_to_count) -
           bitmap_count_one(bitmap_start, redundant_bits);
}

void bitmap_set(unsigned long *map, long i, long len);
void bitmap_set_atomic(unsigned long *map, long i, long len);
void bitmap_clear(unsigned long *map, long start, long nr);
bool bitmap_test_and_clear_atomic(unsigned long *map, long start, long nr);
void bitmap_copy_and_clear_atomic(unsigned long *dst, unsigned long *src,
                                  long nr);
unsigned long bitmap_find_next_zero_area(unsigned long *map,
                                         unsigned long size,
                                         unsigned long start,
                                         unsigned long nr,
                                         unsigned long align_mask);

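/*
 * Reallocate @old (e.g. from bitmap_new()) to hold @new_nbits bits and clear
 * the newly added bits; the possibly-moved bitmap is returned.
 */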
static inline unsigned long *bitmap_zero_extend(unsigned long *old,
                                                long old_nbits, long new_nbits)
{
    long new_len = BITS_TO_LONGS(new_nbits) * sizeof(unsigned long);
    unsigned long *new = g_realloc(old, new_len);
    bitmap_clear(new, old_nbits, new_nbits - old_nbits);
    return new;
}

void bitmap_to_le(unsigned long *dst, const unsigned long *src,
                  long nbits);
void bitmap_from_le(unsigned long *dst, const unsigned long *src,
                    long nbits);

void bitmap_copy_with_src_offset(unsigned long *dst, const unsigned long *src,
                                 unsigned long offset, unsigned long nbits);
void bitmap_copy_with_dst_offset(unsigned long *dst, const unsigned long *src,
                                 unsigned long shift, unsigned long nbits);

#endif /* BITMAP_H */