1 | // |
2 | // Copyright 2017 The Abseil Authors. |
3 | // |
4 | // Licensed under the Apache License, Version 2.0 (the "License"); |
5 | // you may not use this file except in compliance with the License. |
6 | // You may obtain a copy of the License at |
7 | // |
8 | // https://www.apache.org/licenses/LICENSE-2.0 |
9 | // |
10 | // Unless required by applicable law or agreed to in writing, software |
11 | // distributed under the License is distributed on an "AS IS" BASIS, |
12 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
13 | // See the License for the specific language governing permissions and |
14 | // limitations under the License. |
15 | // |
16 | |
17 | #ifndef ABSL_BASE_INTERNAL_UNALIGNED_ACCESS_H_ |
18 | #define ABSL_BASE_INTERNAL_UNALIGNED_ACCESS_H_ |
19 | |
20 | #include <string.h> |
21 | #include <cstdint> |
22 | |
23 | #include "absl/base/attributes.h" |
24 | |
25 | // unaligned APIs |
26 | |
27 | // Portable handling of unaligned loads, stores, and copies. |
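//
// Example (a usage sketch; `buf` is a hypothetical caller-owned buffer):
//
//   char buf[16] = {0};
//   // Read and write 32-bit values at addresses that are not 4-byte aligned.
//   uint32_t v = ABSL_INTERNAL_UNALIGNED_LOAD32(buf + 1);
//   ABSL_INTERNAL_UNALIGNED_STORE32(buf + 3, v);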
28 | |
// The unaligned API is C++ only. The declarations use C++ features
// (namespaces, inline) that are absent from or incompatible with C.
31 | #if defined(__cplusplus) |
32 | |
#if defined(ADDRESS_SANITIZER) || defined(THREAD_SANITIZER) || \
    defined(MEMORY_SANITIZER)
// Consider an unaligned load/store of 4 bytes from address 0x...05.
// AddressSanitizer will treat it as a 3-byte access to the range 05:07 and
// will miss a bug if 08 is the first unaddressable byte.
// ThreadSanitizer will also treat it as a 3-byte access to 05:07 and will
// miss a race between this access and some other access to 08.
40 | // MemorySanitizer will correctly propagate the shadow on unaligned stores |
41 | // and correctly report bugs on unaligned loads, but it may not properly |
42 | // update and report the origin of the uninitialized memory. |
43 | // For all three tools, replacing an unaligned access with a tool-specific |
44 | // callback solves the problem. |
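//
// Concretely (an illustrative sketch, with `buf` standing in for the 8
// addressable bytes before the first unaddressable one):
//
//   char buf[8];
//   UnalignedLoad32(buf + 5);  // touches bytes 5, 6, 7 and the
//                              // unaddressable byte 8
//
// A tool that models this as a 3-byte access to bytes 5..7 misses byte 8,
// while the __sanitizer_* callbacks below let each tool check the full
// 4-byte access.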
45 | |
46 | // Make sure uint16_t/uint32_t/uint64_t are defined. |
47 | #include <stdint.h> |
48 | |
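// These entry points are provided by the sanitizer runtimes; routing
// unaligned accesses through them lets each tool check the full access.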
49 | extern "C" { |
50 | uint16_t __sanitizer_unaligned_load16(const void *p); |
51 | uint32_t __sanitizer_unaligned_load32(const void *p); |
52 | uint64_t __sanitizer_unaligned_load64(const void *p); |
53 | void __sanitizer_unaligned_store16(void *p, uint16_t v); |
54 | void __sanitizer_unaligned_store32(void *p, uint32_t v); |
55 | void __sanitizer_unaligned_store64(void *p, uint64_t v); |
56 | } // extern "C" |
57 | |
58 | namespace absl { |
59 | namespace base_internal { |
60 | |
61 | inline uint16_t UnalignedLoad16(const void *p) { |
62 | return __sanitizer_unaligned_load16(p); |
63 | } |
64 | |
65 | inline uint32_t UnalignedLoad32(const void *p) { |
66 | return __sanitizer_unaligned_load32(p); |
67 | } |
68 | |
69 | inline uint64_t UnalignedLoad64(const void *p) { |
70 | return __sanitizer_unaligned_load64(p); |
71 | } |
72 | |
73 | inline void UnalignedStore16(void *p, uint16_t v) { |
74 | __sanitizer_unaligned_store16(p, v); |
75 | } |
76 | |
77 | inline void UnalignedStore32(void *p, uint32_t v) { |
78 | __sanitizer_unaligned_store32(p, v); |
79 | } |
80 | |
81 | inline void UnalignedStore64(void *p, uint64_t v) { |
82 | __sanitizer_unaligned_store64(p, v); |
83 | } |
84 | |
85 | } // namespace base_internal |
86 | } // namespace absl |
87 | |
88 | #define ABSL_INTERNAL_UNALIGNED_LOAD16(_p) \ |
89 | (absl::base_internal::UnalignedLoad16(_p)) |
90 | #define ABSL_INTERNAL_UNALIGNED_LOAD32(_p) \ |
91 | (absl::base_internal::UnalignedLoad32(_p)) |
92 | #define ABSL_INTERNAL_UNALIGNED_LOAD64(_p) \ |
93 | (absl::base_internal::UnalignedLoad64(_p)) |
94 | |
95 | #define ABSL_INTERNAL_UNALIGNED_STORE16(_p, _val) \ |
96 | (absl::base_internal::UnalignedStore16(_p, _val)) |
97 | #define ABSL_INTERNAL_UNALIGNED_STORE32(_p, _val) \ |
98 | (absl::base_internal::UnalignedStore32(_p, _val)) |
99 | #define ABSL_INTERNAL_UNALIGNED_STORE64(_p, _val) \ |
100 | (absl::base_internal::UnalignedStore64(_p, _val)) |
101 | |
#else  // !(ADDRESS_SANITIZER || THREAD_SANITIZER || MEMORY_SANITIZER)
103 | |
104 | namespace absl { |
105 | namespace base_internal { |
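
// Fallback implementations. A memcpy with a compile-time-constant size avoids
// the undefined behavior of dereferencing a misaligned pointer, and compilers
// typically lower it to a single (possibly unaligned) load or store where the
// target supports one.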
106 | |
107 | inline uint16_t UnalignedLoad16(const void *p) { |
108 | uint16_t t; |
109 | memcpy(&t, p, sizeof t); |
110 | return t; |
111 | } |
112 | |
113 | inline uint32_t UnalignedLoad32(const void *p) { |
114 | uint32_t t; |
115 | memcpy(&t, p, sizeof t); |
116 | return t; |
117 | } |
118 | |
119 | inline uint64_t UnalignedLoad64(const void *p) { |
120 | uint64_t t; |
121 | memcpy(&t, p, sizeof t); |
122 | return t; |
123 | } |
124 | |
125 | inline void UnalignedStore16(void *p, uint16_t v) { memcpy(p, &v, sizeof v); } |
126 | |
127 | inline void UnalignedStore32(void *p, uint32_t v) { memcpy(p, &v, sizeof v); } |
128 | |
129 | inline void UnalignedStore64(void *p, uint64_t v) { memcpy(p, &v, sizeof v); } |
130 | |
131 | } // namespace base_internal |
132 | } // namespace absl |
133 | |
134 | #define ABSL_INTERNAL_UNALIGNED_LOAD16(_p) \ |
135 | (absl::base_internal::UnalignedLoad16(_p)) |
136 | #define ABSL_INTERNAL_UNALIGNED_LOAD32(_p) \ |
137 | (absl::base_internal::UnalignedLoad32(_p)) |
138 | #define ABSL_INTERNAL_UNALIGNED_LOAD64(_p) \ |
139 | (absl::base_internal::UnalignedLoad64(_p)) |
140 | |
141 | #define ABSL_INTERNAL_UNALIGNED_STORE16(_p, _val) \ |
142 | (absl::base_internal::UnalignedStore16(_p, _val)) |
143 | #define ABSL_INTERNAL_UNALIGNED_STORE32(_p, _val) \ |
144 | (absl::base_internal::UnalignedStore32(_p, _val)) |
145 | #define ABSL_INTERNAL_UNALIGNED_STORE64(_p, _val) \ |
146 | (absl::base_internal::UnalignedStore64(_p, _val)) |
147 | |
#endif  // ADDRESS_SANITIZER || THREAD_SANITIZER || MEMORY_SANITIZER
149 | |
150 | #endif // defined(__cplusplus), end of unaligned API |
151 | |
152 | #endif // ABSL_BASE_INTERNAL_UNALIGNED_ACCESS_H_ |
153 | |