1/*
2 * Copyright 2012-present Facebook, Inc.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#pragma once
18
19#include <cstddef>
20#include <limits>
21#include <type_traits>
22
23#include <glog/logging.h>
24
25#include <folly/Portability.h>
26#include <folly/Range.h>
27#include <folly/lang/Bits.h>
28
29namespace folly {
30
// Tag type: same layout and semantics as Unaligned<T>, but selects the
// BitsTraits specialization (below) whose accessors run with address
// sanitizer disabled.
template <class T>
struct UnalignedNoASan : public Unaligned<T> {};
33
34// As a general rule, bit operations work on unsigned values only;
35// right-shift is arithmetic for signed values, and that can lead to
36// unpleasant bugs.
37
38namespace detail {
39
/**
 * Helper class to make Bits<T> (below) work with both aligned values
 * (T, where T is an unsigned integral type) or unaligned values
 * (Unaligned<T>, where T is an unsigned integral type)
 *
 * The primary template is intentionally left undefined; only the partial
 * specializations below are usable.
 */
template <class T, class Enable = void>
struct BitsTraits;
47
48// Partial specialization for Unaligned<T>, where T is unsigned integral
49// loadRMW is the same as load, but it indicates that it loads for a
50// read-modify-write operation (we write back the bits we won't change);
51// silence the GCC warning in that case.
52template <class T>
53struct BitsTraits<
54 Unaligned<T>,
55 typename std::enable_if<(std::is_integral<T>::value)>::type> {
56 typedef T UnderlyingType;
57 static T load(const Unaligned<T>& x) {
58 return x.value;
59 }
60 static void store(Unaligned<T>& x, T v) {
61 x.value = v;
62 }
63 static T loadRMW(const Unaligned<T>& x) {
64 FOLLY_PUSH_WARNING
65 FOLLY_GNU_DISABLE_WARNING("-Wuninitialized")
66 FOLLY_GCC_DISABLE_WARNING("-Wmaybe-uninitialized")
67 return x.value;
68 FOLLY_POP_WARNING
69 }
70};
71
72// Special version that allows one to disable address sanitizer on demand.
73template <class T>
74struct BitsTraits<
75 UnalignedNoASan<T>,
76 typename std::enable_if<(std::is_integral<T>::value)>::type> {
77 typedef T UnderlyingType;
78 static T FOLLY_DISABLE_ADDRESS_SANITIZER load(const UnalignedNoASan<T>& x) {
79 return x.value;
80 }
81 static void FOLLY_DISABLE_ADDRESS_SANITIZER
82 store(UnalignedNoASan<T>& x, T v) {
83 x.value = v;
84 }
85 static T FOLLY_DISABLE_ADDRESS_SANITIZER
86 loadRMW(const UnalignedNoASan<T>& x) {
87 FOLLY_PUSH_WARNING
88 FOLLY_GNU_DISABLE_WARNING("-Wuninitialized")
89 FOLLY_GCC_DISABLE_WARNING("-Wmaybe-uninitialized")
90 return x.value;
91 FOLLY_POP_WARNING
92 }
93};
94
95// Partial specialization for T, where T is unsigned integral
96template <class T>
97struct BitsTraits<
98 T,
99 typename std::enable_if<(std::is_integral<T>::value)>::type> {
100 typedef T UnderlyingType;
101 static T load(const T& x) {
102 return x;
103 }
104 static void store(T& x, T v) {
105 x = v;
106 }
107 static T loadRMW(const T& x) {
108 FOLLY_PUSH_WARNING
109 FOLLY_GNU_DISABLE_WARNING("-Wuninitialized")
110 FOLLY_GCC_DISABLE_WARNING("-Wmaybe-uninitialized")
111 return x;
112 FOLLY_POP_WARNING
113 }
114};
115
116} // namespace detail
117
/**
 * Wrapper class with static methods for various bit-level operations,
 * treating an array of T as an array of bits (in little-endian order).
 * (T is either an unsigned integral type or Unaligned<X>, where X is
 * an unsigned integral type)
 */
template <class T, class Traits = detail::BitsTraits<T>>
struct Bits {
  typedef typename Traits::UnderlyingType UnderlyingType;
  typedef T type;
  static_assert(sizeof(T) == sizeof(UnderlyingType), "Size mismatch");

  /**
   * Number of bits in a block.
   */
  static constexpr size_t bitsPerBlock = std::numeric_limits<
      typename std::make_unsigned<UnderlyingType>::type>::digits;

  /**
   * Block index of the given bit.
   */
  static constexpr size_t blockIndex(size_t bit) {
    return bit / bitsPerBlock;
  }

  /**
   * Offset in block of the given bit.
   */
  static constexpr size_t bitOffset(size_t bit) {
    return bit % bitsPerBlock;
  }

  /**
   * Number of blocks used by the given number of bits.
   */
  static constexpr size_t blockCount(size_t nbits) {
    return nbits / bitsPerBlock + (nbits % bitsPerBlock != 0);
  }

  /**
   * Set the given bit.
   */
  static void set(T* p, size_t bit);

  /**
   * Clear the given bit.
   */
  static void clear(T* p, size_t bit);

  /**
   * Test the given bit.
   */
  static bool test(const T* p, size_t bit);

  /**
   * Set count contiguous bits starting at bitStart to the values
   * from the least significant count bits of value; little endian.
   * (value & 1 becomes the bit at bitStart, etc)
   * Precondition: count <= sizeof(T) * 8
   * Precondition: value can fit in 'count' bits
   */
  static void set(T* p, size_t bitStart, size_t count, UnderlyingType value);

  /**
   * Get count contiguous bits starting at bitStart.
   * Precondition: count <= sizeof(T) * 8
   */
  static UnderlyingType get(const T* p, size_t bitStart, size_t count);

  /**
   * Count the number of bits set in a range of blocks.
   */
  static size_t count(const T* begin, const T* end);

 private:
  // Same as set, assumes all bits are in the same block.
  // (bitStart < sizeof(T) * 8, bitStart + count <= sizeof(T) * 8)
  static void
  innerSet(T* p, size_t bitStart, size_t count, UnderlyingType value);

  // Same as get, assumes all bits are in the same block.
  // (bitStart < sizeof(T) * 8, bitStart + count <= sizeof(T) * 8)
  static UnderlyingType innerGet(const T* p, size_t bitStart, size_t count);

  // Convenience constants of the underlying type.
  static constexpr UnderlyingType zero = UnderlyingType(0);
  static constexpr UnderlyingType one = UnderlyingType(1);

  using UnsignedType = typename std::make_unsigned<UnderlyingType>::type;
  // Mask with the low 'count' bits set. The count == bitsPerBlock case is
  // handled separately because a full-width shift would be undefined.
  static constexpr UnderlyingType ones(size_t count) {
    return (count < bitsPerBlock)
        ? static_cast<UnderlyingType>((UnsignedType{1} << count) - 1)
        : ~zero;
  }
};
212
213// gcc 4.8 needs more -Wmaybe-uninitialized tickling, as it propagates the
214// taint upstream from loadRMW
215FOLLY_PUSH_WARNING
216FOLLY_GNU_DISABLE_WARNING("-Wuninitialized")
217FOLLY_GCC_DISABLE_WARNING("-Wmaybe-uninitialized")
218
219template <class T, class Traits>
220inline void Bits<T, Traits>::set(T* p, size_t bit) {
221 T& block = p[blockIndex(bit)];
222 Traits::store(block, Traits::loadRMW(block) | (one << bitOffset(bit)));
223}
224
225template <class T, class Traits>
226inline void Bits<T, Traits>::clear(T* p, size_t bit) {
227 T& block = p[blockIndex(bit)];
228 Traits::store(block, Traits::loadRMW(block) & ~(one << bitOffset(bit)));
229}
230
template <class T, class Traits>
inline void Bits<T, Traits>::set(
    T* p,
    size_t bitStart,
    size_t count,
    UnderlyingType value) {
  DCHECK_LE(count, sizeof(UnderlyingType) * 8);
  size_t cut = bitsPerBlock - count;
  if (cut != 8 * sizeof(UnderlyingType)) {
    // Verify the precondition that value fits in 'count' bits. Skipped when
    // count == 0 (cut is then the full type width, and a full-width shift
    // would be undefined).
    using U = typename std::make_unsigned<UnderlyingType>::type;
    DCHECK_EQ(value, UnderlyingType(U(value) << cut) >> cut);
  }
  size_t idx = blockIndex(bitStart);
  size_t offset = bitOffset(bitStart);
  if (std::is_signed<UnderlyingType>::value) {
    // For signed types, strip the sign-extension bits so the masking below
    // sees only the low 'count' bits.
    value &= ones(count);
  }
  if (offset + count <= bitsPerBlock) {
    // Fast path: the whole field lies within a single block.
    innerSet(p + idx, offset, count, value);
  } else {
    // The field straddles a block boundary: write the low bits into the
    // current block and the remaining high bits into the next one
    // (little endian).
    size_t countInThisBlock = bitsPerBlock - offset;
    size_t countInNextBlock = count - countInThisBlock;

    UnderlyingType thisBlock = UnderlyingType(value & ones(countInThisBlock));
    UnderlyingType nextBlock = UnderlyingType(value >> countInThisBlock);
    if (std::is_signed<UnderlyingType>::value) {
      nextBlock &= ones(countInNextBlock);
    }
    innerSet(p + idx, offset, countInThisBlock, thisBlock);
    innerSet(p + idx + 1, 0, countInNextBlock, nextBlock);
  }
}
263
264template <class T, class Traits>
265inline void Bits<T, Traits>::innerSet(
266 T* p,
267 size_t offset,
268 size_t count,
269 UnderlyingType value) {
270 // Mask out bits and set new value
271 UnderlyingType v = Traits::loadRMW(*p);
272 v &= ~(ones(count) << offset);
273 v |= (value << offset);
274 Traits::store(*p, v);
275}
276
277FOLLY_POP_WARNING
278
279template <class T, class Traits>
280inline bool Bits<T, Traits>::test(const T* p, size_t bit) {
281 return Traits::load(p[blockIndex(bit)]) & (one << bitOffset(bit));
282}
283
template <class T, class Traits>
inline auto Bits<T, Traits>::get(const T* p, size_t bitStart, size_t count)
    -> UnderlyingType {
  // An empty field reads as zero. This early-out also protects the
  // sign-extension step below, which would otherwise shift by the full
  // type width (undefined) when count == 0.
  if (count == 0) {
    return UnderlyingType{};
  }

  DCHECK_LE(count, sizeof(UnderlyingType) * 8);
  size_t idx = blockIndex(bitStart);
  size_t offset = bitOffset(bitStart);
  UnderlyingType ret;
  if (offset + count <= bitsPerBlock) {
    // Fast path: the whole field lies within a single block.
    ret = innerGet(p + idx, offset, count);
  } else {
    // The field straddles a block boundary: read the low part from the
    // current block and the high part from the next one (little endian).
    size_t countInThisBlock = bitsPerBlock - offset;
    size_t countInNextBlock = count - countInThisBlock;
    UnderlyingType thisBlockValue = innerGet(p + idx, offset, countInThisBlock);
    UnderlyingType nextBlockValue = innerGet(p + idx + 1, 0, countInNextBlock);
    ret = (nextBlockValue << countInThisBlock) | thisBlockValue;
  }
  if (std::is_signed<UnderlyingType>::value) {
    // Sign-extend: move the field's top bit into the sign position, then
    // shift back down (arithmetic shift for signed types).
    size_t emptyBits = bitsPerBlock - count;
    ret <<= emptyBits;
    ret >>= emptyBits;
  }
  return ret;
}
311
312template <class T, class Traits>
313inline auto Bits<T, Traits>::innerGet(const T* p, size_t offset, size_t count)
314 -> UnderlyingType {
315 return (Traits::load(*p) >> offset) & ones(count);
316}
317
318template <class T, class Traits>
319inline size_t Bits<T, Traits>::count(const T* begin, const T* end) {
320 size_t n = 0;
321 for (; begin != end; ++begin) {
322 n += popcount(Traits::load(*begin));
323 }
324 return n;
325}
326
327} // namespace folly
328