/*
 * Copyright (c) 2018, Red Hat, Inc. All rights reserved.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"

#include "gc/shared/gcCause.hpp"
#include "gc/shenandoah/shenandoahCollectionSet.inline.hpp"
#include "gc/shenandoah/shenandoahCollectorPolicy.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahHeuristics.hpp"
#include "gc/shenandoah/shenandoahMarkingContext.inline.hpp"
#include "logging/log.hpp"
#include "logging/logTag.hpp"

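// Order regions by decreasing amount of garbage: the regions with the most
// reclaimable garbage sort first and are the most attractive candidates.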
int ShenandoahHeuristics::compare_by_garbage(RegionData a, RegionData b) {
  if (a._garbage > b._garbage)
    return -1;
  else if (a._garbage < b._garbage)
    return 1;
  else return 0;
}

int ShenandoahHeuristics::compare_by_garbage_then_alloc_seq_ascending(RegionData a, RegionData b) {
  int r = compare_by_garbage(a, b);
  if (r != 0) {
    return r;
  }
  return compare_by_alloc_seq_ascending(a, b);
}

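// Order regions by the sequence number of their last allocation: ascending puts
// the least recently allocated-into regions first.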
int ShenandoahHeuristics::compare_by_alloc_seq_ascending(RegionData a, RegionData b) {
  if (a._seqnum_last_alloc == b._seqnum_last_alloc)
    return 0;
  else if (a._seqnum_last_alloc < b._seqnum_last_alloc)
    return -1;
  else return 1;
}

int ShenandoahHeuristics::compare_by_alloc_seq_descending(RegionData a, RegionData b) {
  return -compare_by_alloc_seq_ascending(a, b);
}

ShenandoahHeuristics::ShenandoahHeuristics() :
  _update_refs_early(false),
  _update_refs_adaptive(false),
  _region_data(NULL),
  _region_data_size(0),
  _degenerated_cycles_in_a_row(0),
  _successful_cycles_in_a_row(0),
  _bytes_in_cset(0),
  _cycle_start(os::elapsedTime()),
  _last_cycle_end(0),
  _gc_times_learned(0),
  _gc_time_penalties(0),
  _gc_time_history(new TruncatedSeq(5)),
  _metaspace_oom()
{
  if (strcmp(ShenandoahUpdateRefsEarly, "on") == 0 ||
      strcmp(ShenandoahUpdateRefsEarly, "true") == 0) {
    _update_refs_early = true;
  } else if (strcmp(ShenandoahUpdateRefsEarly, "off") == 0 ||
             strcmp(ShenandoahUpdateRefsEarly, "false") == 0) {
    _update_refs_early = false;
  } else if (strcmp(ShenandoahUpdateRefsEarly, "adaptive") == 0) {
    _update_refs_adaptive = true;
    _update_refs_early = true;
  } else {
    vm_exit_during_initialization("Unknown -XX:ShenandoahUpdateRefsEarly option: %s", ShenandoahUpdateRefsEarly);
  }

  // No unloading during concurrent mark? Communicate that to heuristics
  if (!ClassUnloadingWithConcurrentMark) {
    FLAG_SET_DEFAULT(ShenandoahUnloadClassesFrequency, 0);
  }
}

ShenandoahHeuristics::~ShenandoahHeuristics() {
  if (_region_data != NULL) {
    FREE_C_HEAP_ARRAY(RegionData, _region_data);
  }
}

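// Returns the scratch array used for per-region candidate data during collection
// set selection, allocating it on first use and growing it if the region count
// has increased since the last use.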
ShenandoahHeuristics::RegionData* ShenandoahHeuristics::get_region_data_cache(size_t num) {
  RegionData* res = _region_data;
  if (res == NULL) {
    res = NEW_C_HEAP_ARRAY(RegionData, num, mtGC);
    _region_data = res;
    _region_data_size = num;
  } else if (_region_data_size < num) {
    res = REALLOC_C_HEAP_ARRAY(RegionData, _region_data, num, mtGC);
    _region_data = res;
    _region_data_size = num;
  }
  return res;
}

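// Walk all heap regions once: immediately reclaim regions that are completely
// garbage (both regular and humongous), collect regular regions that still have
// live data as candidates, and, unless immediate garbage alone is enough (more
// than ShenandoahImmediateThreshold percent of total garbage), ask the concrete
// heuristics to pick the collection set from those candidates.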
void ShenandoahHeuristics::choose_collection_set(ShenandoahCollectionSet* collection_set) {
  assert(collection_set->count() == 0, "Must be empty");

  ShenandoahHeap* heap = ShenandoahHeap::heap();

  // Step 1. Build up the region candidates we care about, rejecting losers and accepting winners right away.

  size_t num_regions = heap->num_regions();

  RegionData* candidates = get_region_data_cache(num_regions);

  size_t cand_idx = 0;

  size_t total_garbage = 0;

  size_t immediate_garbage = 0;
  size_t immediate_regions = 0;

  size_t free = 0;
  size_t free_regions = 0;

  ShenandoahMarkingContext* const ctx = heap->complete_marking_context();

  for (size_t i = 0; i < num_regions; i++) {
    ShenandoahHeapRegion* region = heap->get_region(i);

    size_t garbage = region->garbage();
    total_garbage += garbage;

    if (region->is_empty()) {
      free_regions++;
      free += ShenandoahHeapRegion::region_size_bytes();
    } else if (region->is_regular()) {
      if (!region->has_live()) {
        // We can recycle it right away and put it in the free set.
        immediate_regions++;
        immediate_garbage += garbage;
        region->make_trash_immediate();
      } else {
        // This is our candidate for later consideration.
        candidates[cand_idx]._region = region;
        candidates[cand_idx]._garbage = garbage;
        cand_idx++;
      }
    } else if (region->is_humongous_start()) {
      // Reclaim humongous regions here, and count them as immediate garbage
#ifdef ASSERT
      bool reg_live = region->has_live();
      bool bm_live = ctx->is_marked(oop(region->bottom()));
      assert(reg_live == bm_live,
             "Humongous liveness and marks should agree. Region live: %s; Bitmap live: %s; Region Live Words: " SIZE_FORMAT,
             BOOL_TO_STR(reg_live), BOOL_TO_STR(bm_live), region->get_live_data_words());
#endif
      if (!region->has_live()) {
        heap->trash_humongous_region_at(region);

        // Count only the start region. Continuation regions are counted on the "trash" path below.
        immediate_regions++;
        immediate_garbage += garbage;
      }
    } else if (region->is_trash()) {
      // Count the just-trashed collection set regions, e.g. during coalesced CM-with-UR
      immediate_regions++;
      immediate_garbage += garbage;
    }
  }

  // Step 2. Look back at garbage statistics, and decide if we want to collect anything,
  // given the amount of immediately reclaimable garbage. If we do, figure out the collection set.

  assert (immediate_garbage <= total_garbage,
          "Cannot have more immediate garbage than total garbage: " SIZE_FORMAT "M vs " SIZE_FORMAT "M",
          immediate_garbage / M, total_garbage / M);

  size_t immediate_percent = total_garbage == 0 ? 0 : (immediate_garbage * 100 / total_garbage);

  if (immediate_percent <= ShenandoahImmediateThreshold) {
    choose_collection_set_from_regiondata(collection_set, candidates, cand_idx, immediate_garbage + free);
    collection_set->update_region_status();

    size_t cset_percent = total_garbage == 0 ? 0 : (collection_set->garbage() * 100 / total_garbage);
    log_info(gc, ergo)("Collectable Garbage: " SIZE_FORMAT "M (" SIZE_FORMAT "%% of total), " SIZE_FORMAT "M CSet, " SIZE_FORMAT " CSet regions",
                       collection_set->garbage() / M, cset_percent, collection_set->live_data() / M, collection_set->count());
  }

  log_info(gc, ergo)("Immediate Garbage: " SIZE_FORMAT "M (" SIZE_FORMAT "%% of total), " SIZE_FORMAT " regions",
                     immediate_garbage / M, immediate_percent, immediate_regions);
}

void ShenandoahHeuristics::record_gc_start() {
  // Do nothing
}

void ShenandoahHeuristics::record_gc_end() {
  // Do nothing
}

void ShenandoahHeuristics::record_cycle_start() {
  _cycle_start = os::elapsedTime();
}

void ShenandoahHeuristics::record_cycle_end() {
  _last_cycle_end = os::elapsedTime();
}

void ShenandoahHeuristics::record_phase_time(ShenandoahPhaseTimings::Phase phase, double secs) {
  // Do nothing
}

bool ShenandoahHeuristics::should_start_update_refs() {
  return _update_refs_early;
}

bool ShenandoahHeuristics::should_start_normal_gc() const {
  // Perform GC to clean up metaspace
  if (has_metaspace_oom()) {
    // Some vmTestbase/metaspace tests depend on the following line to count GC cycles
    log_info(gc)("Trigger: %s", GCCause::to_string(GCCause::_metadata_GC_threshold));
    return true;
  }

  double last_time_ms = (os::elapsedTime() - _last_cycle_end) * 1000;
  bool periodic_gc = (last_time_ms > ShenandoahGuaranteedGCInterval);
  if (periodic_gc) {
    log_info(gc)("Trigger: Time since last GC (%.0f ms) is larger than guaranteed interval (" UINTX_FORMAT " ms)",
                 last_time_ms, ShenandoahGuaranteedGCInterval);
  }
  return periodic_gc;
}

bool ShenandoahHeuristics::should_start_traversal_gc() {
  return false;
}

bool ShenandoahHeuristics::can_do_traversal_gc() {
  return false;
}

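// Keep allowing degenerated cycles until we have degenerated too many times in a
// row; past ShenandoahFullGCThreshold consecutive degenerations, upgrade to Full GC.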
bool ShenandoahHeuristics::should_degenerate_cycle() {
  return _degenerated_cycles_in_a_row <= ShenandoahFullGCThreshold;
}

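// A successful concurrent cycle resets the degeneration streak, feeds the observed
// cycle time into the learning history, and pays back part of the penalty
// accumulated from degenerated and full cycles.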
void ShenandoahHeuristics::record_success_concurrent() {
  _degenerated_cycles_in_a_row = 0;
  _successful_cycles_in_a_row++;

  _gc_time_history->add(time_since_last_gc());
  _gc_times_learned++;
  _gc_time_penalties -= MIN2<size_t>(_gc_time_penalties, Concurrent_Adjust);
}

void ShenandoahHeuristics::record_success_degenerated() {
  _degenerated_cycles_in_a_row++;
  _successful_cycles_in_a_row = 0;
  _gc_time_penalties += Degenerated_Penalty;
}

void ShenandoahHeuristics::record_success_full() {
  _degenerated_cycles_in_a_row = 0;
  _successful_cycles_in_a_row++;
  _gc_time_penalties += Full_Penalty;
}

void ShenandoahHeuristics::record_allocation_failure_gc() {
  _bytes_in_cset = 0;
}

void ShenandoahHeuristics::record_requested_gc() {
  _bytes_in_cset = 0;

  // Assume users call System.gc() when external state changes significantly,
  // which forces us to re-learn the GC timings and allocation rates.
  _gc_times_learned = 0;
}

bool ShenandoahHeuristics::can_process_references() {
  if (ShenandoahRefProcFrequency == 0) return false;
  return true;
}

bool ShenandoahHeuristics::should_process_references() {
  if (!can_process_references()) return false;
  size_t cycle = ShenandoahHeap::heap()->shenandoah_policy()->cycle_counter();
  // Process references every Nth GC cycle.
  return cycle % ShenandoahRefProcFrequency == 0;
}

bool ShenandoahHeuristics::can_unload_classes() {
  if (!ClassUnloading) return false;
  return true;
}

bool ShenandoahHeuristics::can_unload_classes_normal() {
  if (!can_unload_classes()) return false;
  if (has_metaspace_oom()) return true;
  if (!ClassUnloadingWithConcurrentMark) return false;
  if (ShenandoahUnloadClassesFrequency == 0) return false;
  return true;
}

bool ShenandoahHeuristics::should_unload_classes() {
  if (!can_unload_classes_normal()) return false;
  if (has_metaspace_oom()) return true;
  size_t cycle = ShenandoahHeap::heap()->shenandoah_policy()->cycle_counter();
  // Unload classes every Nth GC cycle.
  // This should not happen in the same cycle as process_references to amortize costs.
  // Offsetting by one is enough to break the rendezvous when periods are equal.
  // When periods are not equal, offsetting by one is just as good as any other guess.
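  // For example, with both frequencies set to 5, references are processed on
  // cycles 0, 5, 10, ..., while classes are unloaded on cycles 4, 9, 14, ...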
  return (cycle + 1) % ShenandoahUnloadClassesFrequency == 0;
}

void ShenandoahHeuristics::initialize() {
  // Nothing to do by default.
}

double ShenandoahHeuristics::time_since_last_gc() const {
  return os::elapsedTime() - _cycle_start;
}