/*
 * Copyright (c) 2018, 2019, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2018, SAP.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metaspace.hpp"
#include "runtime/mutex.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/os.hpp"
#include "utilities/align.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/ostream.hpp"
#include "unittest.hpp"

#define NUM_PARALLEL_METASPACES                 50
#define MAX_PER_METASPACE_ALLOCATION_WORDSIZE   (512 * K)

//#define DEBUG_VERBOSE true

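// Debug aid, only compiled with DEBUG_VERBOSE: retrieves the current counts of free
// chunks (specialized/small/medium/humongous) from a VM-side test hook and prints them.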
#ifdef DEBUG_VERBOSE

struct chunkmanager_statistics_t {
  int num_specialized_chunks;
  int num_small_chunks;
  int num_medium_chunks;
  int num_humongous_chunks;
};

extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out);

static void print_chunkmanager_statistics(outputStream* st, Metaspace::MetadataType mdType) {
  chunkmanager_statistics_t stat;
  test_metaspace_retrieve_chunkmanager_statistics(mdType, &stat);
  st->print_cr("free chunks: %d / %d / %d / %d", stat.num_specialized_chunks, stat.num_small_chunks,
               stat.num_medium_chunks, stat.num_humongous_chunks);
}

#endif

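// Word sizes of the non-humongous chunk levels (specialized, small, medium),
// retrieved from the VM via a test-only hook.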
struct chunk_geometry_t {
  size_t specialized_chunk_word_size;
  size_t small_chunk_word_size;
  size_t medium_chunk_word_size;
};

extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out);


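// Test fixture managing up to NUM_PARALLEL_METASPACES ClassLoaderMetaspace instances.
// Each slot tracks its metaspace, its lock and the number of words allocated from it;
// do_test() randomly allocates from and deletes these spaces to stress the underlying
// chunk management.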
class MetaspaceAllocationTest : public ::testing::Test {
protected:

  struct {
    size_t allocated;
    Mutex* lock;
    ClassLoaderMetaspace* space;
    bool is_empty() const { return allocated == 0; }
    bool is_full() const { return allocated >= MAX_PER_METASPACE_ALLOCATION_WORDSIZE; }
  } _spaces[NUM_PARALLEL_METASPACES];

  chunk_geometry_t _chunk_geometry;

  virtual void SetUp() {
    ::memset(_spaces, 0, sizeof(_spaces));
    test_metaspace_retrieve_chunk_geometry(Metaspace::NonClassType, &_chunk_geometry);
  }

  virtual void TearDown() {
    for (int i = 0; i < NUM_PARALLEL_METASPACES; i ++) {
      if (_spaces[i].space != NULL) {
        delete _spaces[i].space;
        delete _spaces[i].lock;
      }
    }
  }

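  // Creates the ClassLoaderMetaspace (and, if necessary, its lock) for slot i.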
  void create_space(int i) {
    assert(i >= 0 && i < NUM_PARALLEL_METASPACES, "Sanity");
    assert(_spaces[i].space == NULL && _spaces[i].allocated == 0, "Sanity");
    if (_spaces[i].lock == NULL) {
      _spaces[i].lock = new Mutex(Monitor::native, "gtest-MetaspaceAllocationTest-lock", false, Monitor::_safepoint_check_never);
      ASSERT_TRUE(_spaces[i].lock != NULL);
    }
    // Let every ~10th space be an unsafe anonymous one to test different allocation patterns.
    const Metaspace::MetaspaceType msType = (os::random() % 100 < 10) ?
      Metaspace::UnsafeAnonymousMetaspaceType : Metaspace::StandardMetaspaceType;
    {
      // Take the lock during space creation, since this is what happens in the VM too
      // (see ClassLoaderData::metaspace_non_null(), which we mimic here).
      MutexLocker ml(_spaces[i].lock, Mutex::_no_safepoint_check_flag);
      _spaces[i].space = new ClassLoaderMetaspace(_spaces[i].lock, msType);
    }
    _spaces[i].allocated = 0;
    ASSERT_TRUE(_spaces[i].space != NULL);
  }

  // Returns the index of a random space in [0..metaspaces) which is empty,
  // non-empty or full, depending on the requested fill grade.
  // Returns -1 if no matching space exists.
  enum fillgrade { fg_empty, fg_non_empty, fg_full };
  int get_random_matching_space(int metaspaces, fillgrade fg) {
    const int start_index = os::random() % metaspaces;
    int i = start_index;
    do {
      if (fg == fg_empty && _spaces[i].is_empty()) {
        return i;
      } else if ((fg == fg_full && _spaces[i].is_full()) ||
                 (fg == fg_non_empty && !_spaces[i].is_full() && !_spaces[i].is_empty())) {
        return i;
      }
      i ++;
      if (i == metaspaces) {
        i = 0;
      }
    } while (i != start_index);
    return -1;
  }

  int get_random_empty_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_empty); }
  int get_random_non_empty_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_non_empty); }
  int get_random_full_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_full); }

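  // Runs the allocation stress test:
  //   mdType                            - metadata type to allocate (class or non-class)
  //   metaspaces                        - number of parallel metaspaces to use (<= NUM_PARALLEL_METASPACES)
  //   phases                            - number of allocation/deallocation phases to run
  //   allocs_per_phase                  - maximum number of allocations per allocation phase
  //   probability_for_large_allocations - chance [0.0..1.0] of a large (at least medium-chunk-sized)
  //                                       allocation, used to force humongous chunks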
  void do_test(Metaspace::MetadataType mdType, int metaspaces, int phases, int allocs_per_phase,
               float probability_for_large_allocations // 0.0-1.0
  ) {
    // Alternate between breathing in (allocating n blocks for a random Metaspace) and
    // breathing out (deleting a random Metaspace). The intent is to stress the coalescing
    // and splitting of free chunks.
    int phases_done = 0;
    bool allocating = true;
    while (phases_done < phases) {
      bool force_switch = false;
      if (allocating) {
        // Allocate space from metaspace, with a preference for completely empty spaces. This
        // should provide a good mixture of metaspaces in the virtual space.
        int index = get_random_empty_space(metaspaces);
        if (index == -1) {
          index = get_random_non_empty_space(metaspaces);
        }
        if (index == -1) {
          // All spaces are full, switch to freeing.
          force_switch = true;
        } else {
          // Create the space if it does not yet exist.
          if (_spaces[index].space == NULL) {
            create_space(index);
          }
          // Allocate a bunch of blocks from it. Mostly small stuff but mix in large allocations
          // to force humongous chunk allocations.
          int allocs_done = 0;
          while (allocs_done < allocs_per_phase && !_spaces[index].is_full()) {
            size_t size = 0;
            int r = os::random() % 1000;
            if ((float)r < probability_for_large_allocations * 1000.0) {
              size = (os::random() % _chunk_geometry.medium_chunk_word_size) + _chunk_geometry.medium_chunk_word_size;
            } else {
              size = os::random() % 64;
            }
            // Note: In contrast to space creation, no need to lock here. ClassLoaderMetaspace::allocate() will lock itself.
            MetaWord* const p = _spaces[index].space->allocate(size, mdType);
            if (p == NULL) {
              // We very probably hit the metaspace "until-gc" limit.
#ifdef DEBUG_VERBOSE
              tty->print_cr("OOM for " SIZE_FORMAT " words.", size);
#endif
              // Just switch to deallocation and resume tests.
              force_switch = true;
              break;
            } else {
              _spaces[index].allocated += size;
              allocs_done ++;
            }
          }
        }
      } else {
        // Freeing: find a metaspace and delete it, with preference for completely filled spaces.
        int index = get_random_full_space(metaspaces);
        if (index == -1) {
          index = get_random_non_empty_space(metaspaces);
        }
        if (index == -1) {
          force_switch = true;
        } else {
          assert(_spaces[index].space != NULL && _spaces[index].allocated > 0, "Sanity");
          // Note: do not lock here. In the "wild" (the VM), we do not do so either (see ~ClassLoaderData()).
          delete _spaces[index].space;
          _spaces[index].space = NULL;
          _spaces[index].allocated = 0;
        }
      }

      if (force_switch) {
        allocating = !allocating;
      } else {
        // Periodically switch between allocating and freeing, but prefer allocation because
        // we want to intermingle allocations of multiple metaspaces.
        allocating = os::random() % 5 < 4;
      }
      phases_done ++;
#ifdef DEBUG_VERBOSE
      int metaspaces_in_use = 0;
      size_t total_allocated = 0;
      for (int i = 0; i < metaspaces; i ++) {
        if (_spaces[i].allocated > 0) {
          total_allocated += _spaces[i].allocated;
          metaspaces_in_use ++;
        }
      }
      tty->print("%d:\tspaces: %d total words: " SIZE_FORMAT "\t\t\t", phases_done, metaspaces_in_use, total_allocated);
      print_chunkmanager_statistics(tty, mdType);
#endif
    }
#ifdef DEBUG_VERBOSE
    tty->print_cr("Test finished.");
    MetaspaceUtils::print_metaspace_map(tty, mdType);
    print_chunkmanager_statistics(tty, mdType);
#endif
  }
};


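// Sanity-checks the chunk geometry retrieved in SetUp(): all sizes are non-zero,
// strictly increasing, and each larger chunk size is a multiple of the next smaller one.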
TEST_F(MetaspaceAllocationTest, chunk_geometry) {
  ASSERT_GT(_chunk_geometry.specialized_chunk_word_size, (size_t) 0);
  ASSERT_GT(_chunk_geometry.small_chunk_word_size, _chunk_geometry.specialized_chunk_word_size);
  ASSERT_EQ(_chunk_geometry.small_chunk_word_size % _chunk_geometry.specialized_chunk_word_size, (size_t)0);
  ASSERT_GT(_chunk_geometry.medium_chunk_word_size, _chunk_geometry.small_chunk_word_size);
  ASSERT_EQ(_chunk_geometry.medium_chunk_word_size % _chunk_geometry.small_chunk_word_size, (size_t)0);
}


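// The stress tests: "single_space" variants use one metaspace, "multi_space" variants use
// NUM_PARALLEL_METASPACES; class vs. non-class selects the metadata type. The "_2" variant
// mixes in a small probability of humongous allocations.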
TEST_VM_F(MetaspaceAllocationTest, single_space_nonclass) {
  do_test(Metaspace::NonClassType, 1, 1000, 100, 0);
}

TEST_VM_F(MetaspaceAllocationTest, single_space_class) {
  do_test(Metaspace::ClassType, 1, 1000, 100, 0);
}

TEST_VM_F(MetaspaceAllocationTest, multi_space_nonclass) {
  do_test(Metaspace::NonClassType, NUM_PARALLEL_METASPACES, 100, 1000, 0.0);
}

TEST_VM_F(MetaspaceAllocationTest, multi_space_class) {
  do_test(Metaspace::ClassType, NUM_PARALLEL_METASPACES, 100, 1000, 0.0);
}

TEST_VM_F(MetaspaceAllocationTest, multi_space_nonclass_2) {
  // Many metaspaces, with humongous chunks mixed in.
  do_test(Metaspace::NonClassType, NUM_PARALLEL_METASPACES, 100, 1000, .006f);
}