/*
 * utilasm.h
 *
 * Inline-assembly helper macros (x86 BMI/BMI2 bit-manipulation
 * instructions: shrx, shlx, bts, btr, bt).
 */
5 | |
6 | #ifndef INCLUDE_UTILASM_H_ |
7 | #define INCLUDE_UTILASM_H_ |
8 | |
9 | #include <roaring/portability.h> |
10 | |
/* Use logical &&: defined() yields 0/1 so bitwise & happened to work,
 * but && is the conventional, short-circuiting form. */
#if defined(USE_BMI) && defined(ROARING_INLINE_ASM)
#define ASMBITMANIPOPTIMIZATION  // optimization flag

// destReg = srcReg >> bitsReg (BMI2 shrx)
#define ASM_SHIFT_RIGHT(srcReg, bitsReg, destReg)   \
    __asm volatile("shrx %1, %2, %0"                \
                   : "=r"(destReg) /* write */      \
                   : "r"(bitsReg), /* read only */  \
                     "r"(srcReg)   /* read only */  \
    )

// srcReg >>= bitsReg, in place (BMI2 shrx)
#define ASM_INPLACESHIFT_RIGHT(srcReg, bitsReg)     \
    __asm volatile("shrx %1, %0, %0"                \
                   : "+r"(srcReg) /* read/write */  \
                   : "r"(bitsReg) /* read only */   \
    )

// destReg = srcReg << bitsReg (BMI2 shlx)
#define ASM_SHIFT_LEFT(srcReg, bitsReg, destReg)    \
    __asm volatile("shlx %1, %2, %0"                \
                   : "=r"(destReg) /* write */      \
                   : "r"(bitsReg), /* read only */  \
                     "r"(srcReg)   /* read only */  \
    )

// Set bit `testBit` of `testByte` to 1 and increment `count` if that bit
// was previously clear: bts copies the old bit value into CF, then
// `sbb $-1, count` computes count = count + 1 - CF.
#define ASM_SET_BIT_INC_WAS_CLEAR(testByte, testBit, count) \
    __asm volatile(                                         \
        "bts %2, %0\n"                                      \
        "sbb $-1, %1\n"                                     \
        : "+r"(testByte), /* read/write */                  \
          "+r"(count)     /* read/write */                  \
        : "r"(testBit)    /* read only */                   \
    )

// Clear bit `testBit` of `testByte` and decrement `count` if that bit
// was previously set: btr copies the old bit value into CF, then
// `sbb $0, count` computes count = count - CF.
#define ASM_CLEAR_BIT_DEC_WAS_SET(testByte, testBit, count) \
    __asm volatile(                                         \
        "btr %2, %0\n"                                      \
        "sbb $0, %1\n"                                      \
        : "+r"(testByte), /* read/write */                  \
          "+r"(count)     /* read/write */                  \
        : "r"(testBit)    /* read only */                   \
    )

// count = -1 if bit `testBit` of `testByte` is set, 0 otherwise:
// bt loads the tested bit into CF and `sbb count, count` broadcasts CF
// across the register (could use setb to get 0/1 instead).
#define ASM_BT64(testByte, testBit, count)          \
    __asm volatile(                                 \
        "bt %2,%1\n"                                \
        "sbb %0,%0"                                 \
        : "=r"(count)    /* write */                \
        : "r"(testByte), /* read only */            \
          "r"(testBit)   /* read only */            \
    )

#endif  // defined(USE_BMI) && defined(ROARING_INLINE_ASM)
69 | #endif /* INCLUDE_UTILASM_H_ */ |
70 | |