1 | /* |
2 | * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved. |
3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 | * |
5 | * This code is free software; you can redistribute it and/or modify it |
6 | * under the terms of the GNU General Public License version 2 only, as |
7 | * published by the Free Software Foundation. |
8 | * |
9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
12 | * version 2 for more details (a copy is included in the LICENSE file that |
13 | * accompanied this code). |
14 | * |
15 | * You should have received a copy of the GNU General Public License version |
16 | * 2 along with this work; if not, write to the Free Software Foundation, |
17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
18 | * |
19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
20 | * or visit www.oracle.com if you need additional information or have any |
21 | * questions. |
22 | * |
23 | */ |
24 | |
25 | #ifndef SHARE_MEMORY_ALLOCATION_HPP |
26 | #define SHARE_MEMORY_ALLOCATION_HPP |
27 | |
28 | #include "runtime/globals.hpp" |
29 | #include "utilities/globalDefinitions.hpp" |
30 | #include "utilities/macros.hpp" |
31 | |
32 | #include <new> |
33 | |
34 | class Thread; |
35 | |
36 | class AllocFailStrategy { |
37 | public: |
38 | enum AllocFailEnum { EXIT_OOM, RETURN_NULL }; |
39 | }; |
40 | typedef AllocFailStrategy::AllocFailEnum AllocFailType; |
41 | |
42 | // The virtual machine must never call one of the implicitly declared |
43 | // global allocation or deletion functions. (Such calls may result in |
44 | // link-time or run-time errors.) For convenience and documentation of |
45 | // intended use, classes in the virtual machine may be derived from one |
46 | // of the following allocation classes, some of which define allocation |
47 | // and deletion functions. |
// Note: std::malloc and std::free should never be called directly.
49 | |
50 | // |
51 | // For objects allocated in the resource area (see resourceArea.hpp). |
52 | // - ResourceObj |
53 | // |
// For objects allocated in the C-heap (managed by malloc/free and tracked with NMT)
55 | // - CHeapObj |
56 | // |
57 | // For objects allocated on the stack. |
58 | // - StackObj |
59 | // |
// For classes used as namespaces.
61 | // - AllStatic |
62 | // |
63 | // For classes in Metaspace (class data) |
64 | // - MetaspaceObj |
65 | // |
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that must avoid allocating
// vtbl entries in their objects should therefore not derive from the
// printable subclasses.
70 | // |
// The following macros and functions should be used to allocate memory
// directly in the resource area or in the C-heap. The _OBJ variants
// of the NEW/FREE_C_HEAP macros allocate/deallocate simple objects
// that do not inherit from CHeapObj; note that constructors and
// destructors are not called. The preferred way to allocate objects
// is with the new operator.
//
// WARNING: The array variants must only be used for a homogeneous array
// where all objects are of the exact type specified. If subtypes are
// stored in the array, you must take care to call destructors as
// needed.
82 | // |
83 | // NEW_RESOURCE_ARRAY(type, size) |
84 | // NEW_RESOURCE_OBJ(type) |
//   NEW_C_HEAP_ARRAY(type, size, memflags)
86 | // NEW_C_HEAP_OBJ(type, memflags) |
87 | // FREE_C_HEAP_ARRAY(type, old) |
//   FREE_C_HEAP_OBJ(objname)
//   char* AllocateHeap(size_t size, MEMFLAGS flags);
90 | // void FreeHeap(void* p); |
91 | // |
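//
// Example (illustrative only; assumes a plain struct that does not
// inherit from CHeapObj, so no constructor or destructor runs):
//
//   struct Entry { int id; int hash; };
//   Entry* e = NEW_C_HEAP_OBJ(Entry, mtInternal);        // raw storage
//   Entry* a = NEW_C_HEAP_ARRAY(Entry, 16, mtInternal);  // raw array
//   ...
//   FREE_C_HEAP_ARRAY(Entry, a);
//   FREE_C_HEAP_OBJ(e);
//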
92 | |
// In non-product mode we introduce a superclass for all allocation classes
94 | // that supports printing. |
95 | // We avoid the superclass in product mode to save space. |
96 | |
97 | #ifdef PRODUCT |
98 | #define ALLOCATION_SUPER_CLASS_SPEC |
99 | #else |
100 | #define ALLOCATION_SUPER_CLASS_SPEC : public AllocatedObj |
101 | class AllocatedObj { |
102 | public: |
103 | // Printing support |
104 | void print() const; |
105 | void print_value() const; |
106 | |
107 | virtual void print_on(outputStream* st) const; |
108 | virtual void print_value_on(outputStream* st) const; |
109 | }; |
110 | #endif |
111 | |
112 | #define MEMORY_TYPES_DO(f) \ |
113 | /* Memory type by sub systems. It occupies lower byte. */ \ |
114 | f(mtJavaHeap, "Java Heap") /* Java heap */ \ |
115 | f(mtClass, "Class") /* Java classes */ \ |
116 | f(mtThread, "Thread") /* thread objects */ \ |
117 | f(mtThreadStack, "Thread Stack") \ |
118 | f(mtCode, "Code") /* generated code */ \ |
119 | f(mtGC, "GC") \ |
120 | f(mtCompiler, "Compiler") \ |
121 | f(mtJVMCI, "JVMCI") \ |
122 | f(mtInternal, "Internal") /* memory used by VM, but does not belong to */ \ |
123 | /* any of above categories, and not used by */ \ |
124 | /* NMT */ \ |
125 | f(mtOther, "Other") /* memory not used by VM */ \ |
126 | f(mtSymbol, "Symbol") \ |
127 | f(mtNMT, "Native Memory Tracking") /* memory used by NMT */ \ |
128 | f(mtClassShared, "Shared class space") /* class data sharing */ \ |
129 | f(mtChunk, "Arena Chunk") /* chunk that holds content of arenas */ \ |
130 | f(mtTest, "Test") /* Test type for verifying NMT */ \ |
131 | f(mtTracing, "Tracing") \ |
132 | f(mtLogging, "Logging") \ |
133 | f(mtStatistics, "Statistics") \ |
134 | f(mtArguments, "Arguments") \ |
135 | f(mtModule, "Module") \ |
136 | f(mtSafepoint, "Safepoint") \ |
137 | f(mtSynchronizer, "Synchronization") \ |
138 | f(mtNone, "Unknown") \ |
139 | //end |
140 | |
141 | #define MEMORY_TYPE_DECLARE_ENUM(type, human_readable) \ |
142 | type, |
143 | |
144 | /* |
145 | * Memory types |
146 | */ |
147 | enum MemoryType { |
148 | MEMORY_TYPES_DO(MEMORY_TYPE_DECLARE_ENUM) |
  mt_number_of_types // number of memory types (mtDontTrack
                     // is not included as a valid type)
151 | }; |
152 | |
153 | typedef MemoryType MEMFLAGS; |
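
// Example (illustrative): MEMORY_TYPES_DO is an X-macro, so passing a
// different two-parameter macro generates a parallel table. The name
// table below is hypothetical and not part of this header:
//
//   #define MEMORY_TYPE_DECLARE_NAME(type, human_readable) human_readable,
//   static const char* const memory_type_names[mt_number_of_types] = {
//     MEMORY_TYPES_DO(MEMORY_TYPE_DECLARE_NAME)
//   };
//   // memory_type_names[mtGC] == "GC"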
154 | |
155 | |
156 | #if INCLUDE_NMT |
157 | |
158 | extern bool NMT_track_callsite; |
159 | |
160 | #else |
161 | |
162 | const bool NMT_track_callsite = false; |
163 | |
164 | #endif // INCLUDE_NMT |
165 | |
166 | class NativeCallStack; |
167 | |
168 | |
169 | char* AllocateHeap(size_t size, |
170 | MEMFLAGS flags, |
171 | const NativeCallStack& stack, |
172 | AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM); |
173 | char* AllocateHeap(size_t size, |
174 | MEMFLAGS flags, |
175 | AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM); |
176 | |
177 | char* ReallocateHeap(char *old, |
178 | size_t size, |
179 | MEMFLAGS flag, |
180 | AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM); |
181 | |
182 | void FreeHeap(void* p); |
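
// Example (illustrative): direct C-heap allocation that degrades
// gracefully instead of exiting the VM on OOM:
//
//   char* buf = AllocateHeap(4096, mtInternal, AllocFailStrategy::RETURN_NULL);
//   if (buf != NULL) {
//     ...
//     FreeHeap(buf);
//   }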
183 | |
184 | template <MEMFLAGS F> class CHeapObj ALLOCATION_SUPER_CLASS_SPEC { |
185 | public: |
186 | ALWAYSINLINE void* operator new(size_t size) throw() { |
187 | return (void*)AllocateHeap(size, F); |
188 | } |
189 | |
190 | ALWAYSINLINE void* operator new(size_t size, |
191 | const NativeCallStack& stack) throw() { |
192 | return (void*)AllocateHeap(size, F, stack); |
193 | } |
194 | |
195 | ALWAYSINLINE void* operator new(size_t size, const std::nothrow_t&, |
196 | const NativeCallStack& stack) throw() { |
197 | return (void*)AllocateHeap(size, F, stack, AllocFailStrategy::RETURN_NULL); |
198 | } |
199 | |
200 | ALWAYSINLINE void* operator new(size_t size, const std::nothrow_t&) throw() { |
201 | return (void*)AllocateHeap(size, F, AllocFailStrategy::RETURN_NULL); |
202 | } |
203 | |
204 | ALWAYSINLINE void* operator new[](size_t size) throw() { |
205 | return (void*)AllocateHeap(size, F); |
206 | } |
207 | |
208 | ALWAYSINLINE void* operator new[](size_t size, |
209 | const NativeCallStack& stack) throw() { |
210 | return (void*)AllocateHeap(size, F, stack); |
211 | } |
212 | |
213 | ALWAYSINLINE void* operator new[](size_t size, const std::nothrow_t&, |
214 | const NativeCallStack& stack) throw() { |
215 | return (void*)AllocateHeap(size, F, stack, AllocFailStrategy::RETURN_NULL); |
216 | } |
217 | |
218 | ALWAYSINLINE void* operator new[](size_t size, const std::nothrow_t&) throw() { |
219 | return (void*)AllocateHeap(size, F, AllocFailStrategy::RETURN_NULL); |
220 | } |
221 | |
222 | void operator delete(void* p) { FreeHeap(p); } |
223 | void operator delete [] (void* p) { FreeHeap(p); } |
224 | }; |
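
// Example (illustrative): a VM-internal class tagged for NMT accounting.
// Plain new/delete on such a class route through AllocateHeap/FreeHeap:
//
//   class MyCache : public CHeapObj<mtInternal> {
//    public:
//     explicit MyCache(int capacity) : _capacity(capacity) {}
//    private:
//     int _capacity;
//   };
//
//   MyCache* c = new MyCache(64);                 // exits VM on OOM
//   MyCache* d = new (std::nothrow) MyCache(64);  // NULL on OOM
//   delete c;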
225 | |
226 | // Base class for objects allocated on the stack only. |
// Calling new or delete will result in a fatal error.
228 | |
229 | class StackObj ALLOCATION_SUPER_CLASS_SPEC { |
230 | private: |
231 | void* operator new(size_t size) throw(); |
232 | void* operator new [](size_t size) throw(); |
233 | #ifdef __IBMCPP__ |
234 | public: |
235 | #endif |
236 | void operator delete(void* p); |
237 | void operator delete [](void* p); |
238 | }; |
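
// Example (illustrative): RAII-style helpers are typical StackObj
// subclasses. Since operator new is inaccessible, heap allocation is
// rejected at compile time (and would be a fatal error if reached):
//
//   class ScopeMark : public StackObj {
//    public:
//     ScopeMark()  { /* enter scope */ }
//     ~ScopeMark() { /* leave scope */ }
//   };
//
//   { ScopeMark sm; ... }             // OK: automatic storage
//   // ScopeMark* p = new ScopeMark;  // disallowed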
239 | |
240 | // Base class for objects stored in Metaspace. |
// Calling delete will result in a fatal error.
242 | // |
243 | // Do not inherit from something with a vptr because this class does |
244 | // not introduce one. This class is used to allocate both shared read-only |
245 | // and shared read-write classes. |
246 | // |
247 | |
248 | class ClassLoaderData; |
249 | class MetaspaceClosure; |
250 | |
251 | class MetaspaceObj { |
252 | friend class VMStructs; |
253 | // When CDS is enabled, all shared metaspace objects are mapped |
254 | // into a single contiguous memory block, so we can use these |
255 | // two pointers to quickly determine if something is in the |
256 | // shared metaspace. |
257 | // When CDS is not enabled, both pointers are set to NULL. |
258 | static void* _shared_metaspace_base; // (inclusive) low address |
259 | static void* _shared_metaspace_top; // (exclusive) high address |
260 | |
261 | public: |
262 | |
263 | // Returns true if the pointer points to a valid MetaspaceObj. A valid |
264 | // MetaspaceObj is MetaWord-aligned and contained within either |
265 | // non-shared or shared metaspace. |
266 | static bool is_valid(const MetaspaceObj* p); |
267 | |
268 | static bool is_shared(const MetaspaceObj* p) { |
269 | // If no shared metaspace regions are mapped, _shared_metaspace_{base,top} will |
270 | // both be NULL and all values of p will be rejected quickly. |
271 | return (((void*)p) < _shared_metaspace_top && |
272 | ((void*)p) >= _shared_metaspace_base); |
273 | } |
274 | bool is_shared() const { return MetaspaceObj::is_shared(this); } |
275 | |
276 | void print_address_on(outputStream* st) const; // nonvirtual address printing |
277 | |
278 | static void set_shared_metaspace_range(void* base, void* top) { |
279 | _shared_metaspace_base = base; |
280 | _shared_metaspace_top = top; |
281 | } |
282 | |
283 | static void expand_shared_metaspace_range(void* top) { |
    assert(top >= _shared_metaspace_top, "must be");
285 | _shared_metaspace_top = top; |
286 | } |
287 | |
288 | static void* shared_metaspace_base() { return _shared_metaspace_base; } |
289 | static void* shared_metaspace_top() { return _shared_metaspace_top; } |
290 | |
291 | #define METASPACE_OBJ_TYPES_DO(f) \ |
292 | f(Class) \ |
293 | f(Symbol) \ |
294 | f(TypeArrayU1) \ |
295 | f(TypeArrayU2) \ |
296 | f(TypeArrayU4) \ |
297 | f(TypeArrayU8) \ |
298 | f(TypeArrayOther) \ |
299 | f(Method) \ |
300 | f(ConstMethod) \ |
301 | f(MethodData) \ |
302 | f(ConstantPool) \ |
303 | f(ConstantPoolCache) \ |
304 | f(Annotations) \ |
305 | f(MethodCounters) |
306 | |
307 | #define METASPACE_OBJ_TYPE_DECLARE(name) name ## Type, |
308 | #define METASPACE_OBJ_TYPE_NAME_CASE(name) case name ## Type: return #name; |
309 | |
310 | enum Type { |
311 | // Types are MetaspaceObj::ClassType, MetaspaceObj::SymbolType, etc |
312 | METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_DECLARE) |
313 | _number_of_types |
314 | }; |
315 | |
316 | static const char * type_name(Type type) { |
317 | switch(type) { |
318 | METASPACE_OBJ_TYPES_DO(METASPACE_OBJ_TYPE_NAME_CASE) |
319 | default: |
320 | ShouldNotReachHere(); |
321 | return NULL; |
322 | } |
323 | } |
324 | |
325 | static MetaspaceObj::Type array_type(size_t elem_size) { |
326 | switch (elem_size) { |
327 | case 1: return TypeArrayU1Type; |
328 | case 2: return TypeArrayU2Type; |
329 | case 4: return TypeArrayU4Type; |
330 | case 8: return TypeArrayU8Type; |
331 | default: |
332 | return TypeArrayOtherType; |
333 | } |
334 | } |
335 | |
336 | void* operator new(size_t size, ClassLoaderData* loader_data, |
337 | size_t word_size, |
338 | Type type, Thread* thread) throw(); |
339 | // can't use TRAPS from this header file. |
340 | void operator delete(void* p) { ShouldNotCallThis(); } |
341 | |
342 | // Declare a *static* method with the same signature in any subclass of MetaspaceObj |
343 | // that should be read-only by default. See symbol.hpp for an example. This function |
344 | // is used by the templates in metaspaceClosure.hpp |
345 | static bool is_read_only_by_default() { return false; } |
346 | }; |
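
// Example (illustrative): classifying a metaspace pointer and mapping an
// element size to an array type:
//
//   const MetaspaceObj* m = ...;
//   if (MetaspaceObj::is_valid(m) && m->is_shared()) {
//     // m lies inside the mapped CDS archive range
//   }
//   MetaspaceObj::Type t = MetaspaceObj::array_type(2);  // TypeArrayU2Type
//   const char* n = MetaspaceObj::type_name(t);          // "TypeArrayU2"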
347 | |
// Base class for classes that constitute namespaces.
349 | |
350 | class Arena; |
351 | |
352 | class AllStatic { |
353 | public: |
354 | AllStatic() { ShouldNotCallThis(); } |
355 | ~AllStatic() { ShouldNotCallThis(); } |
356 | }; |
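
// Example (illustrative): AllStatic subclasses bundle related static
// functions under one name; instantiating one is a fatal error:
//
//   class PowerOfTwo : public AllStatic {
//    public:
//     static bool is_power_of_2(size_t x) { return x != 0 && (x & (x - 1)) == 0; }
//   };
//
//   bool ok = PowerOfTwo::is_power_of_2(64);  // fine
//   // PowerOfTwo p;                          // fatal: ShouldNotCallThis()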
357 | |
358 | |
359 | extern char* resource_allocate_bytes(size_t size, |
360 | AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM); |
361 | extern char* resource_allocate_bytes(Thread* thread, size_t size, |
362 | AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM); |
363 | extern char* resource_reallocate_bytes( char *old, size_t old_size, size_t new_size, |
364 | AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM); |
365 | extern void resource_free_bytes( char *old, size_t size ); |
366 | |
367 | //---------------------------------------------------------------------- |
// Base class for objects allocated in the resource area by default.
// Optionally, objects may be allocated on the C heap with
// new (ResourceObj::C_HEAP, memflags) Foo(...) or in an Arena with
// new (&arena) Foo(...).
// ResourceObjs can be embedded within other objects, but then don't use
// new or delete (the allocation_type is unknown). If new is used to allocate,
// use delete to deallocate.
374 | class ResourceObj ALLOCATION_SUPER_CLASS_SPEC { |
375 | public: |
376 | enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 }; |
377 | static void set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN; |
378 | #ifdef ASSERT |
379 | private: |
  // When this object is allocated on the stack the new() operator is not
  // called but garbage on the stack may look like a valid allocation_type.
  // Store the negated 'this' pointer when new() is called to distinguish cases.
  // Use the second array element for a verification value to distinguish garbage.
384 | uintptr_t _allocation_t[2]; |
385 | bool is_type_set() const; |
386 | void initialize_allocation_info(); |
387 | public: |
388 | allocation_type get_allocation_type() const; |
389 | bool allocated_on_stack() const { return get_allocation_type() == STACK_OR_EMBEDDED; } |
390 | bool allocated_on_res_area() const { return get_allocation_type() == RESOURCE_AREA; } |
391 | bool allocated_on_C_heap() const { return get_allocation_type() == C_HEAP; } |
392 | bool allocated_on_arena() const { return get_allocation_type() == ARENA; } |
393 | protected: |
394 | ResourceObj(); // default constructor |
395 | ResourceObj(const ResourceObj& r); // default copy constructor |
396 | ResourceObj& operator=(const ResourceObj& r); // default copy assignment |
397 | ~ResourceObj(); |
398 | #endif // ASSERT |
399 | |
400 | public: |
401 | void* operator new(size_t size, allocation_type type, MEMFLAGS flags) throw(); |
402 | void* operator new [](size_t size, allocation_type type, MEMFLAGS flags) throw(); |
403 | void* operator new(size_t size, const std::nothrow_t& nothrow_constant, |
404 | allocation_type type, MEMFLAGS flags) throw(); |
405 | void* operator new [](size_t size, const std::nothrow_t& nothrow_constant, |
406 | allocation_type type, MEMFLAGS flags) throw(); |
407 | |
408 | void* operator new(size_t size, Arena *arena) throw(); |
409 | |
410 | void* operator new [](size_t size, Arena *arena) throw(); |
411 | |
412 | void* operator new(size_t size) throw() { |
413 | address res = (address)resource_allocate_bytes(size); |
414 | DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);) |
415 | return res; |
416 | } |
417 | |
418 | void* operator new(size_t size, const std::nothrow_t& nothrow_constant) throw() { |
419 | address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL); |
420 | DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);) |
421 | return res; |
422 | } |
423 | |
424 | void* operator new [](size_t size) throw() { |
425 | address res = (address)resource_allocate_bytes(size); |
426 | DEBUG_ONLY(set_allocation_type(res, RESOURCE_AREA);) |
427 | return res; |
428 | } |
429 | |
430 | void* operator new [](size_t size, const std::nothrow_t& nothrow_constant) throw() { |
431 | address res = (address)resource_allocate_bytes(size, AllocFailStrategy::RETURN_NULL); |
432 | DEBUG_ONLY(if (res != NULL) set_allocation_type(res, RESOURCE_AREA);) |
433 | return res; |
434 | } |
435 | |
436 | void operator delete(void* p); |
437 | void operator delete [](void* p); |
438 | }; |
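
// Example (illustrative): the same ResourceObj subclass can be placed in
// the resource area (the default), on the C heap, or in an Arena; in debug
// builds get_allocation_type() records which placement was used:
//
//   class Node : public ResourceObj { ... };
//
//   Node* r = new Node();                                    // resource area
//   Node* c = new (ResourceObj::C_HEAP, mtInternal) Node();  // C heap
//   Node* a = new (&some_arena) Node();                      // arena
//   delete c;  // only the C_HEAP placement may be deleted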
439 | |
// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap or in
// the resource area.
443 | |
444 | #define NEW_RESOURCE_ARRAY(type, size)\ |
445 | (type*) resource_allocate_bytes((size) * sizeof(type)) |
446 | |
447 | #define NEW_RESOURCE_ARRAY_RETURN_NULL(type, size)\ |
448 | (type*) resource_allocate_bytes((size) * sizeof(type), AllocFailStrategy::RETURN_NULL) |
449 | |
450 | #define NEW_RESOURCE_ARRAY_IN_THREAD(thread, type, size)\ |
451 | (type*) resource_allocate_bytes(thread, (size) * sizeof(type)) |
452 | |
453 | #define NEW_RESOURCE_ARRAY_IN_THREAD_RETURN_NULL(thread, type, size)\ |
454 | (type*) resource_allocate_bytes(thread, (size) * sizeof(type), AllocFailStrategy::RETURN_NULL) |
455 | |
456 | #define REALLOC_RESOURCE_ARRAY(type, old, old_size, new_size)\ |
457 | (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type), (new_size) * sizeof(type)) |
458 | |
459 | #define REALLOC_RESOURCE_ARRAY_RETURN_NULL(type, old, old_size, new_size)\ |
460 | (type*) resource_reallocate_bytes((char*)(old), (old_size) * sizeof(type),\ |
461 | (new_size) * sizeof(type), AllocFailStrategy::RETURN_NULL) |
462 | |
463 | #define FREE_RESOURCE_ARRAY(type, old, size)\ |
464 | resource_free_bytes((char*)(old), (size) * sizeof(type)) |
465 | |
466 | #define FREE_FAST(old)\ |
467 | /* nop */ |
468 | |
469 | #define NEW_RESOURCE_OBJ(type)\ |
470 | NEW_RESOURCE_ARRAY(type, 1) |
471 | |
472 | #define NEW_RESOURCE_OBJ_RETURN_NULL(type)\ |
473 | NEW_RESOURCE_ARRAY_RETURN_NULL(type, 1) |
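
// Example (illustrative): resource-area arrays are released wholesale when
// the enclosing ResourceMark goes out of scope (see resourceArea.hpp):
//
//   {
//     ResourceMark rm;
//     char* buf = NEW_RESOURCE_ARRAY(char, 256);
//     buf = REALLOC_RESOURCE_ARRAY(char, buf, 256, 512);
//   } // everything resource-allocated above is freed here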
474 | |
475 | #define NEW_C_HEAP_ARRAY3(type, size, memflags, pc, allocfail)\ |
476 | (type*) AllocateHeap((size) * sizeof(type), memflags, pc, allocfail) |
477 | |
478 | #define NEW_C_HEAP_ARRAY2(type, size, memflags, pc)\ |
479 | (type*) (AllocateHeap((size) * sizeof(type), memflags, pc)) |
480 | |
481 | #define NEW_C_HEAP_ARRAY(type, size, memflags)\ |
482 | (type*) (AllocateHeap((size) * sizeof(type), memflags)) |
483 | |
484 | #define NEW_C_HEAP_ARRAY2_RETURN_NULL(type, size, memflags, pc)\ |
485 | NEW_C_HEAP_ARRAY3(type, (size), memflags, pc, AllocFailStrategy::RETURN_NULL) |
486 | |
487 | #define NEW_C_HEAP_ARRAY_RETURN_NULL(type, size, memflags)\ |
488 | NEW_C_HEAP_ARRAY3(type, (size), memflags, CURRENT_PC, AllocFailStrategy::RETURN_NULL) |
489 | |
490 | #define REALLOC_C_HEAP_ARRAY(type, old, size, memflags)\ |
491 | (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), memflags)) |
492 | |
493 | #define REALLOC_C_HEAP_ARRAY_RETURN_NULL(type, old, size, memflags)\ |
494 | (type*) (ReallocateHeap((char*)(old), (size) * sizeof(type), memflags, AllocFailStrategy::RETURN_NULL)) |
495 | |
496 | #define FREE_C_HEAP_ARRAY(type, old) \ |
497 | FreeHeap((char*)(old)) |
498 | |
// allocate an object of 'type' on the C heap without calling its constructor
500 | #define NEW_C_HEAP_OBJ(type, memflags)\ |
501 | NEW_C_HEAP_ARRAY(type, 1, memflags) |
502 | |
503 | #define NEW_C_HEAP_OBJ_RETURN_NULL(type, memflags)\ |
504 | NEW_C_HEAP_ARRAY_RETURN_NULL(type, 1, memflags) |
505 | |
// deallocate an object of 'type' from the C heap without calling its destructor
507 | #define FREE_C_HEAP_OBJ(objname)\ |
508 | FreeHeap((char*)objname); |
509 | |
510 | // for statistics |
511 | #ifndef PRODUCT |
512 | class AllocStats : StackObj { |
513 | julong start_mallocs, start_frees; |
514 | julong start_malloc_bytes, start_mfree_bytes, start_res_bytes; |
515 | public: |
516 | AllocStats(); |
517 | |
518 | julong num_mallocs(); // since creation of receiver |
519 | julong alloc_bytes(); |
520 | julong num_frees(); |
521 | julong free_bytes(); |
522 | julong resource_bytes(); |
523 | void print(); |
524 | }; |
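
// Example (illustrative, non-product builds only): bracket a phase with
// AllocStats to report allocation churn since construction:
//
//   AllocStats stats;
//   ... run the phase being measured ...
//   tty->print_cr("mallocs since mark: " JULONG_FORMAT, stats.num_mallocs());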
525 | #endif |
526 | |
527 | |
528 | //------------------------------ReallocMark--------------------------------- |
529 | // Code which uses REALLOC_RESOURCE_ARRAY should check an associated |
530 | // ReallocMark, which is declared in the same scope as the reallocated |
531 | // pointer. Any operation that could __potentially__ cause a reallocation |
532 | // should check the ReallocMark. |
533 | class ReallocMark: public StackObj { |
534 | protected: |
535 | NOT_PRODUCT(int _nesting;) |
536 | |
537 | public: |
538 | ReallocMark() PRODUCT_RETURN; |
539 | void check() PRODUCT_RETURN; |
540 | }; |
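
// Example (illustrative): a growing resource array paired with a
// ReallocMark so that reallocation hazards are caught in debug builds:
//
//   ReallocMark nesting_check;
//   int* data = NEW_RESOURCE_ARRAY(int, len);
//   while (needs_more_room) {
//     nesting_check.check();  // assert the mark's invariants still hold
//     data = REALLOC_RESOURCE_ARRAY(int, data, len, 2 * len);
//     len *= 2;
//   }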
541 | |
542 | // Helper class to allocate arrays that may become large. |
543 | // Uses the OS malloc for allocations smaller than ArrayAllocatorMallocLimit |
544 | // and uses mapped memory for larger allocations. |
545 | // Most OS mallocs do something similar but Solaris malloc does not revert |
546 | // to mapped memory for large allocations. By default ArrayAllocatorMallocLimit |
547 | // is set so that we always use malloc except for Solaris where we set the |
548 | // limit to get mapped memory. |
549 | template <class E> |
550 | class ArrayAllocator : public AllStatic { |
551 | private: |
552 | static bool should_use_malloc(size_t length); |
553 | |
554 | static E* allocate_malloc(size_t length, MEMFLAGS flags); |
555 | static E* allocate_mmap(size_t length, MEMFLAGS flags); |
556 | |
557 | static void free_malloc(E* addr, size_t length); |
558 | static void free_mmap(E* addr, size_t length); |
559 | |
560 | public: |
561 | static E* allocate(size_t length, MEMFLAGS flags); |
562 | static E* reallocate(E* old_addr, size_t old_length, size_t new_length, MEMFLAGS flags); |
563 | static void free(E* addr, size_t length); |
564 | }; |
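
// Example (illustrative): callers pair allocate() with free() of the same
// length; whether malloc or mmap backs the array is decided internally:
//
//   size_t len = 4 * M;  // element count (M from globalDefinitions.hpp)
//   int* big = ArrayAllocator<int>::allocate(len, mtGC);
//   ...
//   big = ArrayAllocator<int>::reallocate(big, len, 2 * len, mtGC);
//   ArrayAllocator<int>::free(big, 2 * len);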
565 | |
// Uses mmapped memory for all allocations. All allocations are initially
567 | // zero-filled. No pre-touching. |
568 | template <class E> |
569 | class MmapArrayAllocator : public AllStatic { |
570 | private: |
571 | static size_t size_for(size_t length); |
572 | |
573 | public: |
574 | static E* allocate_or_null(size_t length, MEMFLAGS flags); |
575 | static E* allocate(size_t length, MEMFLAGS flags); |
576 | static void free(E* addr, size_t length); |
577 | }; |
578 | |
// Uses malloc'ed memory for all allocations.
580 | template <class E> |
581 | class MallocArrayAllocator : public AllStatic { |
582 | public: |
583 | static size_t size_for(size_t length); |
584 | |
585 | static E* allocate(size_t length, MEMFLAGS flags); |
586 | static void free(E* addr); |
587 | }; |
588 | |
589 | #endif // SHARE_MEMORY_ALLOCATION_HPP |
590 | |