/*
 * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "gc/shared/blockOffsetTable.inline.hpp"
#include "gc/shared/cardTableRS.hpp"
#include "gc/shared/collectedHeap.inline.hpp"
#include "gc/shared/gcLocker.hpp"
#include "gc/shared/gcTimer.hpp"
#include "gc/shared/gcTrace.hpp"
#include "gc/shared/genCollectedHeap.hpp"
#include "gc/shared/genOopClosures.hpp"
#include "gc/shared/genOopClosures.inline.hpp"
#include "gc/shared/generation.hpp"
#include "gc/shared/generationSpec.hpp"
#include "gc/shared/space.inline.hpp"
#include "gc/shared/spaceDecorator.hpp"
#include "logging/log.hpp"
#include "memory/allocation.inline.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/java.hpp"
#include "utilities/copy.hpp"
#include "utilities/events.hpp"

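// Commit the initial_size prefix of the reserved space rs. _reserved covers
// the entire reserved range, while _virtual_space tracks the committed part.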
Generation::Generation(ReservedSpace rs, size_t initial_size) :
  _gc_manager(NULL),
  _ref_processor(NULL) {
  if (!_virtual_space.initialize(rs, initial_size)) {
    vm_exit_during_initialization("Could not reserve enough space for "
                                  "object heap");
  }
  // Mangle all of the initial generation.
  if (ZapUnusedHeapArea) {
    MemRegion mangle_region((HeapWord*)_virtual_space.low(),
                            (HeapWord*)_virtual_space.high());
    SpaceMangler::mangle_region(mangle_region);
  }
  _reserved = MemRegion((HeapWord*)_virtual_space.low_boundary(),
                        (HeapWord*)_virtual_space.high_boundary());
}

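// The initial size is taken from the GenerationSpec that GenCollectedHeap
// recorded for this generation.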
size_t Generation::initial_size() {
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  if (gch->is_young_gen(this)) {
    return gch->young_gen_spec()->init_size();
  }
  return gch->old_gen_spec()->init_size();
}

size_t Generation::max_capacity() const {
  return reserved().byte_size();
}

// By default we create a single-threaded reference processor;
// generations that need multi-threaded reference processing or discovery override this method.
void Generation::ref_processor_init() {
  assert(_ref_processor == NULL, "a reference processor already exists");
  assert(!_reserved.is_empty(), "empty generation?");
  _span_based_discoverer.set_span(_reserved);
  _ref_processor = new ReferenceProcessor(&_span_based_discoverer);    // a vanilla reference processor
  if (_ref_processor == NULL) {
    vm_exit_during_initialization("Could not allocate ReferenceProcessor object");
  }
}

void Generation::print() const { print_on(tty); }

void Generation::print_on(outputStream* st) const {
  st->print(" %-20s", name());
  st->print(" total " SIZE_FORMAT "K, used " SIZE_FORMAT "K",
            capacity()/K, used()/K);
  st->print_cr(" [" INTPTR_FORMAT ", " INTPTR_FORMAT ", " INTPTR_FORMAT ")",
               p2i(_virtual_space.low_boundary()),
               p2i(_virtual_space.high()),
               p2i(_virtual_space.high_boundary()));
}
98 | |
99 | void Generation::print_summary_info_on(outputStream* st) { |
100 | StatRecord* sr = stat_record(); |
101 | double time = sr->accumulated_time.seconds(); |
102 | st->print_cr("Accumulated %s generation GC time %3.7f secs, " |
103 | "%u GC's, avg GC time %3.7f" , |
104 | GenCollectedHeap::heap()->is_young_gen(this) ? "young" : "old" , |
105 | time, |
106 | sr->invocations, |
107 | sr->invocations > 0 ? time / sr->invocations : 0.0); |
108 | } |

// Utility iterator classes

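// GenerationIsInReservedClosure and GenerationIsInClosure record the first
// space that contains _p; space_iterate() visits the spaces in order, so any
// later matches are ignored.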
class GenerationIsInReservedClosure : public SpaceClosure {
 public:
  const void* _p;
  Space* sp;
  virtual void do_space(Space* s) {
    if (sp == NULL) {
      if (s->is_in_reserved(_p)) sp = s;
    }
  }
  GenerationIsInReservedClosure(const void* p) : _p(p), sp(NULL) {}
};

class GenerationIsInClosure : public SpaceClosure {
 public:
  const void* _p;
  Space* sp;
  virtual void do_space(Space* s) {
    if (sp == NULL) {
      if (s->is_in(_p)) sp = s;
    }
  }
  GenerationIsInClosure(const void* p) : _p(p), sp(NULL) {}
};

bool Generation::is_in(const void* p) const {
  GenerationIsInClosure blk(p);
  ((Generation*)this)->space_iterate(&blk);
  return blk.sp != NULL;
}

size_t Generation::max_contiguous_available() const {
  // The largest number of contiguous free words in this or any higher generation.
  size_t avail = contiguous_available();
  size_t old_avail = 0;
  if (GenCollectedHeap::heap()->is_young_gen(this)) {
    old_avail = GenCollectedHeap::heap()->old_gen()->contiguous_available();
  }
  return MAX2(avail, old_avail);
}

bool Generation::promotion_attempt_is_safe(size_t max_promotion_in_bytes) const {
  size_t available = max_contiguous_available();
  bool res = (available >= max_promotion_in_bytes);
  log_trace(gc)("Generation: promo attempt is%s safe: available(" SIZE_FORMAT ") %s max_promo(" SIZE_FORMAT ")",
                res ? "" : " not", available, res ? ">=" : "<", max_promotion_in_bytes);
  return res;
}

// Allocate space for obj in this generation and copy it there; if that
// fails, let the heap handle the failed promotion.
oop Generation::promote(oop obj, size_t obj_size) {
  assert(obj_size == (size_t)obj->size(), "bad obj_size passed in");

#ifndef PRODUCT
  if (GenCollectedHeap::heap()->promotion_should_fail()) {
    return NULL;
  }
#endif // #ifndef PRODUCT

  HeapWord* result = allocate(obj_size, false);
  if (result != NULL) {
    Copy::aligned_disjoint_words((HeapWord*)obj, result, obj_size);
    return oop(result);
  } else {
    GenCollectedHeap* gch = GenCollectedHeap::heap();
    return gch->handle_failed_promotion(this, obj, obj_size);
  }
}

oop Generation::par_promote(int thread_num,
                            oop obj, markOop m, size_t word_sz) {
  // A generic implementation could fall back to taking a lock, but
  // generations that support parallel promotion must override this method.
  ShouldNotCallThis();
  return NULL;
}

Space* Generation::space_containing(const void* p) const {
  GenerationIsInReservedClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  return blk.sp;
}

// Some of these are generic, unoptimized implementations; they should be
// overridden in subclasses to get better performance.

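// block_start(), block_size() and block_is_obj() all follow the same pattern:
// iterate over the spaces until the one whose reserved region contains p is
// found, then delegate the query to that space.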
class GenerationBlockStartClosure : public SpaceClosure {
 public:
  const void* _p;
  HeapWord* _start;
  virtual void do_space(Space* s) {
    if (_start == NULL && s->is_in_reserved(_p)) {
      _start = s->block_start(_p);
    }
  }
  GenerationBlockStartClosure(const void* p) { _p = p; _start = NULL; }
};

HeapWord* Generation::block_start(const void* p) const {
  GenerationBlockStartClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  return blk._start;
}

class GenerationBlockSizeClosure : public SpaceClosure {
 public:
  const HeapWord* _p;
  size_t size;
  virtual void do_space(Space* s) {
    if (size == 0 && s->is_in_reserved(_p)) {
      size = s->block_size(_p);
    }
  }
  GenerationBlockSizeClosure(const HeapWord* p) { _p = p; size = 0; }
};

size_t Generation::block_size(const HeapWord* p) const {
  GenerationBlockSizeClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  assert(blk.size > 0, "block at p should have a positive size");
  return blk.size;
}

class GenerationBlockIsObjClosure : public SpaceClosure {
 public:
  const HeapWord* _p;
  bool is_obj;
  virtual void do_space(Space* s) {
    if (!is_obj && s->is_in_reserved(_p)) {
      is_obj |= s->block_is_obj(_p);
    }
  }
  GenerationBlockIsObjClosure(const HeapWord* p) { _p = p; is_obj = false; }
};

bool Generation::block_is_obj(const HeapWord* p) const {
  GenerationBlockIsObjClosure blk(p);
  // Cast away const
  ((Generation*)this)->space_iterate(&blk);
  return blk.is_obj;
}

class GenerationOopIterateClosure : public SpaceClosure {
 public:
  OopIterateClosure* _cl;
  virtual void do_space(Space* s) {
    s->oop_iterate(_cl);
  }
  GenerationOopIterateClosure(OopIterateClosure* cl) :
    _cl(cl) {}
};

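// Apply cl to every reference in the generation by iterating over each of
// its spaces.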
void Generation::oop_iterate(OopIterateClosure* cl) {
  GenerationOopIterateClosure blk(cl);
  space_iterate(&blk);
}

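// The card-table remembered set does the actual work: it scans the parts of
// sp covered by dirty cards and applies cl to the references found there.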
void Generation::younger_refs_in_space_iterate(Space* sp,
                                               OopsInGenClosure* cl,
                                               uint n_threads) {
  CardTableRS* rs = GenCollectedHeap::heap()->rem_set();
  rs->younger_refs_in_space_iterate(sp, cl, n_threads);
}

class GenerationObjIterateClosure : public SpaceClosure {
 private:
  ObjectClosure* _cl;
 public:
  virtual void do_space(Space* s) {
    s->object_iterate(_cl);
  }
  GenerationObjIterateClosure(ObjectClosure* cl) : _cl(cl) {}
};

void Generation::object_iterate(ObjectClosure* cl) {
  GenerationObjIterateClosure blk(cl);
  space_iterate(&blk);
}

class GenerationSafeObjIterateClosure : public SpaceClosure {
 private:
  ObjectClosure* _cl;
 public:
  virtual void do_space(Space* s) {
    s->safe_object_iterate(_cl);
  }
  GenerationSafeObjIterateClosure(ObjectClosure* cl) : _cl(cl) {}
};

void Generation::safe_object_iterate(ObjectClosure* cl) {
  GenerationSafeObjIterateClosure blk(cl);
  space_iterate(&blk);
}

#if INCLUDE_SERIALGC

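// The routines below are used by the serial full GC (mark-compact):
// prepare_for_compaction() computes forwarding addresses for the live
// objects, adjust_pointers() updates references to use those addresses, and
// compact() moves the objects to their new locations.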
void Generation::prepare_for_compaction(CompactPoint* cp) {
  // Generic implementation, can be specialized
  CompactibleSpace* space = first_compaction_space();
  while (space != NULL) {
    space->prepare_for_compaction(cp);
    space = space->next_compaction_space();
  }
}

class AdjustPointersClosure: public SpaceClosure {
 public:
  void do_space(Space* sp) {
    sp->adjust_pointers();
  }
};

void Generation::adjust_pointers() {
  // Note that this is done over all spaces, not just the compactible
  // ones.
  AdjustPointersClosure blk;
  space_iterate(&blk, true);
}

void Generation::compact() {
  CompactibleSpace* sp = first_compaction_space();
  while (sp != NULL) {
    sp->compact();
    sp = sp->next_compaction_space();
  }
}

#endif // INCLUDE_SERIALGC