1/*
2 * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#ifndef SHARE_MEMORY_ALLOCATION_INLINE_HPP
26#define SHARE_MEMORY_ALLOCATION_INLINE_HPP
27
28#include "runtime/atomic.hpp"
29#include "runtime/os.hpp"
30#include "services/memTracker.hpp"
31#include "utilities/align.hpp"
32#include "utilities/globalDefinitions.hpp"
33
34// Explicit C-heap memory management
35
#ifndef PRODUCT
// Increments unsigned long value for statistics (not atomic on MP).
// NOTE: even on SPARC/X86 this is only a torn-free load and store, not an
// atomic read-modify-write — concurrent increments can still be lost.
// Acceptable here because the counters are non-product statistics only.
inline void inc_stat_counter(volatile julong* dest, julong add_value) {
#if defined(SPARC) || defined(X86)
  // Sparc and X86 have atomic jlong (8 bytes) instructions
  julong value = Atomic::load(dest);
  value += add_value;
  Atomic::store(value, dest);
#else
  // possible word-tearing during load/store
  *dest += add_value;
#endif
}
#endif
50
51template <class E>
52size_t MmapArrayAllocator<E>::size_for(size_t length) {
53 size_t size = length * sizeof(E);
54 int alignment = os::vm_allocation_granularity();
55 return align_up(size, alignment);
56}
57
58template <class E>
59E* MmapArrayAllocator<E>::allocate_or_null(size_t length, MEMFLAGS flags) {
60 size_t size = size_for(length);
61 int alignment = os::vm_allocation_granularity();
62
63 char* addr = os::reserve_memory(size, NULL, alignment, flags);
64 if (addr == NULL) {
65 return NULL;
66 }
67
68 if (os::commit_memory(addr, size, !ExecMem)) {
69 return (E*)addr;
70 } else {
71 os::release_memory(addr, size);
72 return NULL;
73 }
74}
75
76template <class E>
77E* MmapArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
78 size_t size = size_for(length);
79 int alignment = os::vm_allocation_granularity();
80
81 char* addr = os::reserve_memory(size, NULL, alignment, flags);
82 if (addr == NULL) {
83 vm_exit_out_of_memory(size, OOM_MMAP_ERROR, "Allocator (reserve)");
84 }
85
86 os::commit_memory_or_exit(addr, size, !ExecMem, "Allocator (commit)");
87
88 return (E*)addr;
89}
90
91template <class E>
92void MmapArrayAllocator<E>::free(E* addr, size_t length) {
93 bool result = os::release_memory((char*)addr, size_for(length));
94 assert(result, "Failed to release memory");
95}
96
97template <class E>
98size_t MallocArrayAllocator<E>::size_for(size_t length) {
99 return length * sizeof(E);
100}
101
102template <class E>
103E* MallocArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
104 return (E*)AllocateHeap(size_for(length), flags);
105}
106
107template<class E>
108void MallocArrayAllocator<E>::free(E* addr) {
109 FreeHeap(addr);
110}
111
112template <class E>
113bool ArrayAllocator<E>::should_use_malloc(size_t length) {
114 return MallocArrayAllocator<E>::size_for(length) < ArrayAllocatorMallocLimit;
115}
116
117template <class E>
118E* ArrayAllocator<E>::allocate_malloc(size_t length, MEMFLAGS flags) {
119 return MallocArrayAllocator<E>::allocate(length, flags);
120}
121
122template <class E>
123E* ArrayAllocator<E>::allocate_mmap(size_t length, MEMFLAGS flags) {
124 return MmapArrayAllocator<E>::allocate(length, flags);
125}
126
127template <class E>
128E* ArrayAllocator<E>::allocate(size_t length, MEMFLAGS flags) {
129 if (should_use_malloc(length)) {
130 return allocate_malloc(length, flags);
131 }
132
133 return allocate_mmap(length, flags);
134}
135
136template <class E>
137E* ArrayAllocator<E>::reallocate(E* old_addr, size_t old_length, size_t new_length, MEMFLAGS flags) {
138 E* new_addr = (new_length > 0)
139 ? allocate(new_length, flags)
140 : NULL;
141
142 if (new_addr != NULL && old_addr != NULL) {
143 memcpy(new_addr, old_addr, MIN2(old_length, new_length) * sizeof(E));
144 }
145
146 if (old_addr != NULL) {
147 free(old_addr, old_length);
148 }
149
150 return new_addr;
151}
152
153template<class E>
154void ArrayAllocator<E>::free_malloc(E* addr, size_t length) {
155 MallocArrayAllocator<E>::free(addr);
156}
157
158template<class E>
159void ArrayAllocator<E>::free_mmap(E* addr, size_t length) {
160 MmapArrayAllocator<E>::free(addr, length);
161}
162
163template<class E>
164void ArrayAllocator<E>::free(E* addr, size_t length) {
165 if (addr != NULL) {
166 if (should_use_malloc(length)) {
167 free_malloc(addr, length);
168 } else {
169 free_mmap(addr, length);
170 }
171 }
172}
173
174#endif // SHARE_MEMORY_ALLOCATION_INLINE_HPP
175