// Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#include <cstring>
#include <memory>

#include "platform/assert.h"
#include "vm/heap/freelist.h"
#include "vm/pointer_tagging.h"
#include "vm/unit_test.h"

namespace dart {

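// Attempts to allocate 'size' bytes from 'free_list'. When the backing region
// is kept read/execute ('is_protected'), the returned block is re-protected
// before it is handed back to the caller.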
static uword Allocate(FreeList* free_list, intptr_t size, bool is_protected) {
  uword result = free_list->TryAllocate(size, is_protected);
  if ((result != 0u) && is_protected) {
    VirtualMemory::Protect(reinterpret_cast<void*>(result), size,
                           VirtualMemory::kReadExecute);
  }
  return result;
}

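// Returns a block to 'free_list'. For protected regions the block is made
// writable so the free-list header can be written, then flipped back to
// read/execute.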
static void Free(FreeList* free_list,
                 uword address,
                 intptr_t size,
                 bool is_protected) {
  if (is_protected) {
    VirtualMemory::Protect(reinterpret_cast<void*>(address), size,
                           VirtualMemory::kReadWrite);
  }
  free_list->Free(address, size);
  if (is_protected) {
    VirtualMemory::Protect(reinterpret_cast<void*>(address), size,
                           VirtualMemory::kReadExecute);
  }
}

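// Exercises a free list backed by 'region': allocates, frees, and splits
// blocks of several sizes, optionally keeping the region read/execute between
// operations.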
static void TestFreeList(VirtualMemory* region,
                         FreeList* free_list,
                         bool is_protected) {
  const intptr_t kSmallObjectSize = 4 * kWordSize;
  const intptr_t kMediumObjectSize = 16 * kWordSize;
  const intptr_t kLargeObjectSize = 8 * KB;
  uword blob = region->start();
  // Enqueue the large blob as one free block.
  free_list->Free(blob, region->size());

  if (is_protected) {
    // Write-protect the whole region.
    region->Protect(VirtualMemory::kReadExecute);
  }

  // Allocate a small object. Expect it to be positioned as the first element.
  uword small_object = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(blob, small_object);
  // Freeing and allocating should give us the same memory back.
  Free(free_list, small_object, kSmallObjectSize, is_protected);
  small_object = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(blob, small_object);
  // Split the remainder further with small and medium objects.
  uword small_object2 = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(blob + kSmallObjectSize, small_object2);
  uword med_object = Allocate(free_list, kMediumObjectSize, is_protected);
  EXPECT_EQ(small_object2 + kSmallObjectSize, med_object);
  // Allocate a large object.
  uword large_object = Allocate(free_list, kLargeObjectSize, is_protected);
  EXPECT_EQ(med_object + kMediumObjectSize, large_object);
  // Make sure that small objects can still split the remainder.
  uword small_object3 = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(large_object + kLargeObjectSize, small_object3);
  // Split the large object.
  Free(free_list, large_object, kLargeObjectSize, is_protected);
  uword small_object4 = Allocate(free_list, kSmallObjectSize, is_protected);
  EXPECT_EQ(large_object, small_object4);
  // Get the full remainder of the large object.
  large_object =
      Allocate(free_list, kLargeObjectSize - kSmallObjectSize, is_protected);
  EXPECT_EQ(small_object4 + kSmallObjectSize, large_object);
  // Get another large object from the large unallocated remainder.
  uword large_object2 = Allocate(free_list, kLargeObjectSize, is_protected);
  EXPECT_EQ(small_object3 + kSmallObjectSize, large_object2);
}

TEST_CASE(FreeList) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 1 * MB;
  VirtualMemory* region =
      VirtualMemory::Allocate(kBlobSize, /*is_executable=*/false, "test");

  TestFreeList(region, free_list, false);

  // Delete the memory associated with the test.
  delete region;
  delete free_list;
}

TEST_CASE(FreeListProtected) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 1 * MB;
  VirtualMemory* region =
      VirtualMemory::Allocate(kBlobSize, /*is_executable=*/false, "test");

  TestFreeList(region, free_list, true);

  // Delete the memory associated with the test.
  delete region;
  delete free_list;
}

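// Fills a write-protected region completely with minimally sized objects,
// expects the next allocation to fail, and then frees everything again.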
TEST_CASE(FreeListProtectedTinyObjects) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 1 * MB;
  const intptr_t kObjectSize = 2 * kWordSize;
  uword* objects = new uword[kBlobSize / kObjectSize];

  VirtualMemory* blob =
      VirtualMemory::Allocate(kBlobSize, /*is_executable=*/false, "test");
  ASSERT(Utils::IsAligned(blob->start(), 4096));
  blob->Protect(VirtualMemory::kReadWrite);

  // Enqueue the large blob as one free block.
  free_list->Free(blob->start(), blob->size());

  // Write-protect the whole region.
  blob->Protect(VirtualMemory::kReadExecute);

  // Allocate small objects.
  for (intptr_t i = 0; i < blob->size() / kObjectSize; i++) {
    objects[i] = Allocate(free_list, kObjectSize, /*is_protected=*/true);
  }

  // All space is occupied. Expect failed allocation.
  ASSERT(Allocate(free_list, kObjectSize, /*is_protected=*/true) == 0);

  // Free all objects again. Make the whole region writable for this.
  blob->Protect(VirtualMemory::kReadWrite);
  for (intptr_t i = 0; i < blob->size() / kObjectSize; i++) {
    free_list->Free(objects[i], kObjectSize);
  }

  // Delete the memory associated with the test.
  delete blob;
  delete free_list;
  delete[] objects;
}

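// Allocates and frees variable-sized objects in a write-protected region so
// that the free list holds more than one element before the final allocation.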
TEST_CASE(FreeListProtectedVariableSizeObjects) {
  FreeList* free_list = new FreeList();
  const intptr_t kBlobSize = 8 * KB;
  const intptr_t kMinSize = 2 * kWordSize;
  uword* objects = new uword[kBlobSize / kMinSize];
  for (intptr_t i = 0; i < kBlobSize / kMinSize; ++i) {
    objects[i] = static_cast<uword>(NULL);
  }

  VirtualMemory* blob =
      VirtualMemory::Allocate(kBlobSize, /*is_executable=*/false, "test");
  ASSERT(Utils::IsAligned(blob->start(), 4096));
  blob->Protect(VirtualMemory::kReadWrite);

  // Enqueue the large blob as one free block.
  free_list->Free(blob->start(), blob->size());

  // Write-protect the whole region.
  blob->Protect(VirtualMemory::kReadExecute);

  // Allocate and free objects so that the free list has more than one element.
  uword e0 = Allocate(free_list, 1 * KB, true);
  ASSERT(e0);
  uword e1 = Allocate(free_list, 3 * KB, true);
  ASSERT(e1);
  uword e2 = Allocate(free_list, 2 * KB, true);
  ASSERT(e2);
  uword e3 = Allocate(free_list, 2 * KB, true);
  ASSERT(e3);

  Free(free_list, e1, 3 * KB, true);
  Free(free_list, e2, 2 * KB, true);
  e0 = Allocate(free_list, 3 * KB - 2 * kWordSize, true);
  ASSERT(e0);

  // Delete the memory associated with the test.
  delete blob;
  delete free_list;
  delete[] objects;
}

static void TestRegress38528(intptr_t header_overlap) {
  // Test the following scenario.
  //
  //   | <------------ free list element -----------------> |
  //   | <allocated code> | <header> | <remainder - header> | <other code> |
  //                           ^
  //                           page boundary around here, depending on
  //                           header_overlap
  //
  // It is important that after the allocation has been re-protected, the
  // "<other code>" region is also still executable (and not writable).
  std::unique_ptr<FreeList> free_list(new FreeList());
  const uword page = VirtualMemory::PageSize();
  std::unique_ptr<VirtualMemory> blob(
      VirtualMemory::Allocate(2 * page, /*is_executable=*/false, "test"));
  const intptr_t remainder_size = page / 2;
  const intptr_t alloc_size = page - header_overlap * kObjectAlignment;
  void* const other_code =
      reinterpret_cast<void*>(blob->start() + alloc_size + remainder_size);

  // Load a simple function into the "other code" section which just returns.
  // This is used to ensure that it's still executable.
#if defined(HOST_ARCH_X64) || defined(HOST_ARCH_IA32)
  const uint8_t ret[1] = {0xC3};  // ret
#elif defined(HOST_ARCH_ARM)
  const uint8_t ret[4] = {0x1e, 0xff, 0x2f, 0xe1};  // bx lr
#elif defined(HOST_ARCH_ARM64)
  const uint8_t ret[4] = {0xc0, 0x03, 0x5f, 0xd6};  // ret
#else
#error "Unknown architecture."
#endif
  memcpy(other_code, ret, sizeof(ret));  // NOLINT

  free_list->Free(blob->start(), alloc_size + remainder_size);
  blob->Protect(VirtualMemory::kReadExecute);  // not writable
  Allocate(free_list.get(), alloc_size, /*is_protected=*/true);
  VirtualMemory::Protect(blob->address(), alloc_size,
                         VirtualMemory::kReadExecute);
  reinterpret_cast<void (*)()>(other_code)();
}

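// Runs the scenario above with the free-list header shifted a few object
// alignments to either side of the page boundary.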
TEST_CASE(Regress38528) {
  for (const intptr_t i : {-2, -1, 0, 1, 2}) {
    TestRegress38528(i);
  }
}

}  // namespace dart