| 1 | /* |
| 2 | * Copyright (c) 1998, 2019, Oracle and/or its affiliates. All rights reserved. |
| 3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
| 4 | * |
| 5 | * This code is free software; you can redistribute it and/or modify it |
| 6 | * under the terms of the GNU General Public License version 2 only, as |
| 7 | * published by the Free Software Foundation. |
| 8 | * |
| 9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
| 10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| 11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| 12 | * version 2 for more details (a copy is included in the LICENSE file that |
| 13 | * accompanied this code). |
| 14 | * |
| 15 | * You should have received a copy of the GNU General Public License version |
| 16 | * 2 along with this work; if not, write to the Free Software Foundation, |
| 17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
| 18 | * |
| 19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
| 20 | * or visit www.oracle.com if you need additional information or have any |
| 21 | * questions. |
| 22 | * |
| 23 | */ |
| 24 | |
| 25 | #include "precompiled.hpp" |
| 26 | #include "ci/ciReplay.hpp" |
| 27 | #include "classfile/systemDictionary.hpp" |
| 28 | #include "classfile/vmSymbols.hpp" |
| 29 | #include "compiler/compileBroker.hpp" |
| 30 | #include "compiler/compileLog.hpp" |
| 31 | #include "interpreter/linkResolver.hpp" |
| 32 | #include "jfr/jfrEvents.hpp" |
| 33 | #include "oops/objArrayKlass.hpp" |
| 34 | #include "opto/callGenerator.hpp" |
| 35 | #include "opto/parse.hpp" |
| 36 | #include "runtime/handles.inline.hpp" |
| 37 | #include "utilities/events.hpp" |
| 38 | |
| 39 | //============================================================================= |
| 40 | //------------------------------InlineTree------------------------------------- |
| 41 | InlineTree::InlineTree(Compile* c, |
| 42 | const InlineTree *caller_tree, ciMethod* callee, |
| 43 | JVMState* caller_jvms, int caller_bci, |
| 44 | float site_invoke_ratio, int max_inline_level) : |
| 45 | C(c), |
| 46 | _caller_jvms(caller_jvms), |
| 47 | _method(callee), |
| 48 | _caller_tree((InlineTree*) caller_tree), |
| 49 | _count_inline_bcs(method()->code_size_for_inlining()), |
| 50 | _site_invoke_ratio(site_invoke_ratio), |
| 51 | _max_inline_level(max_inline_level), |
| 52 | _subtrees(c->comp_arena(), 2, 0, NULL), |
| 53 | _msg(NULL) |
| 54 | { |
| 55 | #ifndef PRODUCT |
| 56 | _count_inlines = 0; |
| 57 | _forced_inline = false; |
| 58 | #endif |
| 59 | if (_caller_jvms != NULL) { |
| 60 | // Keep a private copy of the caller_jvms: |
| 61 | _caller_jvms = new (C) JVMState(caller_jvms->method(), caller_tree->caller_jvms()); |
| 62 | _caller_jvms->set_bci(caller_jvms->bci()); |
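| | // The private copy carries only the caller method, its caller chain and the |
| | // call bci, rather than aliasing the caller's full (parser-owned) JVMState. |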
| 63 | assert(!caller_jvms->should_reexecute(), "there should be no reexecute bytecode with inlining" ); |
| 64 | } |
| 65 | assert(_caller_jvms->same_calls_as(caller_jvms), "consistent JVMS" ); |
| 66 | assert((caller_tree == NULL ? 0 : caller_tree->stack_depth() + 1) == stack_depth(), "correct (redundant) depth parameter" ); |
| 67 | assert(caller_bci == this->caller_bci(), "correct (redundant) bci parameter" ); |
| 68 | // Update hierarchical counts, count_inline_bcs() and count_inlines() |
| 69 | InlineTree *caller = (InlineTree *)caller_tree; |
| 70 | for( ; caller != NULL; caller = ((InlineTree *)(caller->caller_tree())) ) { |
| 71 | caller->_count_inline_bcs += count_inline_bcs(); |
| 72 | NOT_PRODUCT(caller->_count_inlines++;) |
| 73 | } |
| 74 | } |
| 75 | |
| 76 | /** |
| 77 | * Return true when EA is on and a Java constructor is called, or |
| 78 | * a super constructor is called from an inlined Java constructor. |
| 79 | * Also return true for boxing methods. |
| 80 | * Also return true for methods returning Iterator (including Iterable::iterator()), |
| 81 | * which is essential for the performance of for-each loops. |
| 82 | */ |
| 83 | static bool is_init_with_ea(ciMethod* callee_method, |
| 84 | ciMethod* caller_method, Compile* C) { |
| 85 | if (!C->do_escape_analysis() || !EliminateAllocations) { |
| 86 | return false; // EA is off |
| 87 | } |
| 88 | if (callee_method->is_initializer()) { |
| 89 | return true; // constructor |
| 90 | } |
| 91 | if (caller_method->is_initializer() && |
| 92 | caller_method != C->method() && |
| 93 | caller_method->holder()->is_subclass_of(callee_method->holder())) { |
| 94 | return true; // super constructor is called from inlined constructor |
| 95 | } |
| 96 | if (C->eliminate_boxing() && callee_method->is_boxing_method()) { |
| 97 | return true; |
| 98 | } |
| 99 | ciType *retType = callee_method->signature()->return_type(); |
| 100 | ciKlass *iter = C->env()->Iterator_klass(); |
| 101 | if(retType->is_loaded() && iter->is_loaded() && retType->is_subtype_of(iter)) { |
| 102 | return true; |
| 103 | } |
| 104 | return false; |
| 105 | } |
| 106 | |
| 107 | /** |
| 108 | * Force inlining of unboxing accessors (when boxing elimination is enabled). |
| 109 | */ |
| 110 | static bool is_unboxing_method(ciMethod* callee_method, Compile* C) { |
| 111 | return C->eliminate_boxing() && callee_method->is_unboxing_method(); |
| 112 | } |
| 113 | |
| 114 | // positive filter: should callee be inlined? |
| 115 | bool InlineTree::should_inline(ciMethod* callee_method, ciMethod* caller_method, |
| 116 | int caller_bci, ciCallProfile& profile, |
| 117 | WarmCallInfo* wci_result) { |
| 118 | // Allows targeted inlining |
| 119 | if (C->directive()->should_inline(callee_method)) { |
| 120 | *wci_result = *(WarmCallInfo::always_hot()); |
| 121 | if (C->print_inlining() && Verbose) { |
| 122 | CompileTask::print_inline_indent(inline_level()); |
| 123 | tty->print_cr("Inlined method is hot: " ); |
| 124 | } |
| 125 | set_msg("force inline by CompileCommand" ); |
| 126 | _forced_inline = true; |
| 127 | return true; |
| 128 | } |
| 129 | |
| 130 | if (callee_method->force_inline()) { |
| 131 | set_msg("force inline by annotation" ); |
| 132 | _forced_inline = true; |
| 133 | return true; |
| 134 | } |
| 135 | |
| 136 | #ifndef PRODUCT |
| 137 | int inline_depth = inline_level()+1; |
| 138 | if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) { |
| 139 | set_msg("force inline by ciReplay" ); |
| 140 | _forced_inline = true; |
| 141 | return true; |
| 142 | } |
| 143 | #endif |
| 144 | |
| 145 | int size = callee_method->code_size_for_inlining(); |
| 146 | |
| 147 | // Check for too many throws (and not too huge) |
| 148 | if(callee_method->interpreter_throwout_count() > InlineThrowCount && |
| 149 | size < InlineThrowMaxSize ) { |
| 150 | wci_result->set_profit(wci_result->profit() * 100); |
| 151 | if (C->print_inlining() && Verbose) { |
| 152 | CompileTask::print_inline_indent(inline_level()); |
| 153 | tty->print_cr("Inlined method with many throws (throws=%d):" , callee_method->interpreter_throwout_count()); |
| 154 | } |
| 155 | set_msg("many throws" ); |
| 156 | return true; |
| 157 | } |
| 158 | |
| 159 | int default_max_inline_size = C->max_inline_size(); |
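| | // A callee whose existing compiled code is bigger than a quarter of |
| | // InlineSmallCode is treated as "medium" and rejected at cold call sites below. |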
| 160 | int inline_small_code_size = InlineSmallCode / 4; |
| 161 | int max_inline_size = default_max_inline_size; |
| 162 | |
| 163 | int call_site_count = method()->scale_count(profile.count()); |
| 164 | int invoke_count = method()->interpreter_invocation_count(); |
| 165 | |
| 166 | assert(invoke_count != 0, "require invocation count greater than zero" ); |
| 167 | int freq = call_site_count / invoke_count; |
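| | // freq is how many times this call site runs per invocation of the caller; |
| | // e.g. 400 profiled calls over 100 caller invocations gives freq == 4 (illustrative |
| | // numbers only). Frequent sites, and the boxing/EA-relevant callees checked below, |
| | // get the larger freq_inline_size() budget instead of max_inline_size(). |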
| 168 | |
| 169 | // bump the max size if the call is frequent |
| 170 | if ((freq >= InlineFrequencyRatio) || |
| 171 | (call_site_count >= InlineFrequencyCount) || |
| 172 | is_unboxing_method(callee_method, C) || |
| 173 | is_init_with_ea(callee_method, caller_method, C)) { |
| 174 | |
| 175 | max_inline_size = C->freq_inline_size(); |
| 176 | if (size <= max_inline_size && TraceFrequencyInlining) { |
| 177 | CompileTask::print_inline_indent(inline_level()); |
| 178 | tty->print_cr("Inlined frequent method (freq=%d count=%d):" , freq, call_site_count); |
| 179 | CompileTask::print_inline_indent(inline_level()); |
| 180 | callee_method->print(); |
| 181 | tty->cr(); |
| 182 | } |
| 183 | } else { |
| 184 | // Not hot. Check for medium-sized pre-existing nmethod at cold sites. |
| 185 | if (callee_method->has_compiled_code() && |
| 186 | callee_method->instructions_size() > inline_small_code_size) { |
| 187 | set_msg("already compiled into a medium method" ); |
| 188 | return false; |
| 189 | } |
| 190 | } |
| 191 | if (size > max_inline_size) { |
| 192 | if (max_inline_size > default_max_inline_size) { |
| 193 | set_msg("hot method too big" ); |
| 194 | } else { |
| 195 | set_msg("too big" ); |
| 196 | } |
| 197 | return false; |
| 198 | } |
| 199 | return true; |
| 200 | } |
| 201 | |
| 202 | |
| 203 | // negative filter: should callee NOT be inlined? |
| 204 | bool InlineTree::should_not_inline(ciMethod *callee_method, |
| 205 | ciMethod* caller_method, |
| 206 | JVMState* jvms, |
| 207 | WarmCallInfo* wci_result) { |
| 208 | |
| 209 | const char* fail_msg = NULL; |
| 210 | |
| 211 | // First check all inlining restrictions which are required for correctness |
| 212 | if (callee_method->is_abstract()) { |
| 213 | fail_msg = "abstract method" ; // // note: we allow ik->is_abstract() |
| 214 | } else if (!callee_method->holder()->is_initialized() && |
| 215 | // access allowed in the context of static initializer |
| 216 | C->needs_clinit_barrier(callee_method->holder(), caller_method)) { |
| 217 | fail_msg = "method holder not initialized" ; |
| 218 | } else if (callee_method->is_native()) { |
| 219 | fail_msg = "native method" ; |
| 220 | } else if (callee_method->dont_inline()) { |
| 221 | fail_msg = "don't inline by annotation" ; |
| 222 | } |
| 223 | |
| 224 | // one more inlining restriction |
| 225 | if (fail_msg == NULL && callee_method->has_unloaded_classes_in_signature()) { |
| 226 | fail_msg = "unloaded signature classes" ; |
| 227 | } |
| 228 | |
| 229 | if (fail_msg != NULL) { |
| 230 | set_msg(fail_msg); |
| 231 | return true; |
| 232 | } |
| 233 | |
| 234 | // ignore heuristic controls on inlining |
| 235 | if (C->directive()->should_inline(callee_method)) { |
| 236 | set_msg("force inline by CompileCommand" ); |
| 237 | return false; |
| 238 | } |
| 239 | |
| 240 | if (C->directive()->should_not_inline(callee_method)) { |
| 241 | set_msg("disallowed by CompileCommand" ); |
| 242 | return true; |
| 243 | } |
| 244 | |
| 245 | #ifndef PRODUCT |
| 246 | int caller_bci = jvms->bci(); |
| 247 | int inline_depth = inline_level()+1; |
| 248 | if (ciReplay::should_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) { |
| 249 | set_msg("force inline by ciReplay" ); |
| 250 | return false; |
| 251 | } |
| 252 | |
| 253 | if (ciReplay::should_not_inline(C->replay_inline_data(), callee_method, caller_bci, inline_depth)) { |
| 254 | set_msg("disallowed by ciReplay" ); |
| 255 | return true; |
| 256 | } |
| 257 | |
| 258 | if (ciReplay::should_not_inline(callee_method)) { |
| 259 | set_msg("disallowed by ciReplay" ); |
| 260 | return true; |
| 261 | } |
| 262 | #endif |
| 263 | |
| 264 | if (callee_method->force_inline()) { |
| 265 | set_msg("force inline by annotation" ); |
| 266 | return false; |
| 267 | } |
| 268 | |
| 269 | // Now perform checks which are heuristic |
| 270 | |
| 271 | if (is_unboxing_method(callee_method, C)) { |
| 272 | // Inline unboxing methods. |
| 273 | return false; |
| 274 | } |
| 275 | |
| 276 | if (callee_method->has_compiled_code() && |
| 277 | callee_method->instructions_size() > InlineSmallCode) { |
| 278 | set_msg("already compiled into a big method" ); |
| 279 | return true; |
| 280 | } |
| 281 | |
| 282 | // don't inline exception code unless the top method belongs to an |
| 283 | // exception class |
| 284 | if (caller_tree() != NULL && |
| 285 | callee_method->holder()->is_subclass_of(C->env()->Throwable_klass())) { |
| 286 | const InlineTree *top = this; |
| 287 | while (top->caller_tree() != NULL) top = top->caller_tree(); |
| 288 | ciInstanceKlass* k = top->method()->holder(); |
| 289 | if (!k->is_subclass_of(C->env()->Throwable_klass())) { |
| 290 | set_msg("exception method" ); |
| 291 | return true; |
| 292 | } |
| 293 | } |
| 294 | |
| 295 | // use frequency-based objections only for non-trivial methods |
| 296 | if (callee_method->code_size() <= MaxTrivialSize) { |
| 297 | return false; |
| 298 | } |
| 299 | |
| 300 | // don't use counts with -Xcomp |
| 301 | if (UseInterpreter) { |
| 302 | |
| 303 | if (!callee_method->has_compiled_code() && |
| 304 | !callee_method->was_executed_more_than(0)) { |
| 305 | set_msg("never executed" ); |
| 306 | return true; |
| 307 | } |
| 308 | |
| 309 | if (is_init_with_ea(callee_method, caller_method, C)) { |
| 310 | // Escape Analysis: inline all executed constructors |
| 311 | return false; |
| 312 | } else { |
| 313 | intx counter_high_value; |
| 314 | // Tiered compilation uses a different "high value" than non-tiered compilation. |
| 315 | // Determine the right value to use. |
| 316 | if (TieredCompilation) { |
| 317 | counter_high_value = InvocationCounter::count_limit / 2; |
| 318 | } else { |
| 319 | counter_high_value = CompileThreshold / 2; |
| 320 | } |
| 321 | if (!callee_method->was_executed_more_than(MIN2(MinInliningThreshold, counter_high_value))) { |
| 322 | set_msg("executed < MinInliningThreshold times" ); |
| 323 | return true; |
| 324 | } |
| 325 | } |
| 326 | } |
| 327 | |
| 328 | return false; |
| 329 | } |
| 330 | |
| 331 | bool InlineTree::is_not_reached(ciMethod* callee_method, ciMethod* caller_method, int caller_bci, ciCallProfile& profile) { |
| 332 | if (!UseInterpreter) { |
| 333 | return false; // -Xcomp |
| 334 | } |
| 335 | if (profile.count() > 0) { |
| 336 | return false; // reachable according to profile |
| 337 | } |
| 338 | if (!callee_method->was_executed_more_than(0)) { |
| 339 | return true; // callee was never executed |
| 340 | } |
| 341 | if (caller_method->is_not_reached(caller_bci)) { |
| 342 | return true; // call site not resolved |
| 343 | } |
| 344 | if (profile.count() == -1) { |
| 345 | return false; // immature profile; optimistically treat as reached |
| 346 | } |
| 347 | assert(profile.count() == 0, "sanity" ); |
| 348 | |
| 349 | // Profile info is scarce. |
| 350 | // Try to guess: check if the call site belongs to a start block. |
| 351 | // Call sites in a start block should be reachable if no exception is thrown earlier. |
| 352 | ciMethodBlocks* caller_blocks = caller_method->get_method_blocks(); |
| 353 | bool is_start_block = caller_blocks->block_containing(caller_bci)->start_bci() == 0; |
| 354 | if (is_start_block) { |
| 355 | return false; // treat the call as reached since it is in the start block |
| 356 | } |
| 357 | return true; // give up and treat the call site as not reached |
| 358 | } |
| 359 | |
| 360 | //-----------------------------try_to_inline----------------------------------- |
| 361 | // return true if ok |
| 362 | // Relocated from "InliningClosure::try_to_inline" |
| 363 | bool InlineTree::try_to_inline(ciMethod* callee_method, ciMethod* caller_method, |
| 364 | int caller_bci, JVMState* jvms, ciCallProfile& profile, |
| 365 | WarmCallInfo* wci_result, bool& should_delay) { |
| 366 | |
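| | // Note: setting should_delay asks the caller to defer this call site to |
| | // incremental (post-parse) inlining rather than rejecting it outright. |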
| 367 | if (ClipInlining && (int)count_inline_bcs() >= DesiredMethodLimit) { |
| 368 | if (!callee_method->force_inline() || !IncrementalInline) { |
| 369 | set_msg("size > DesiredMethodLimit" ); |
| 370 | return false; |
| 371 | } else if (!C->inlining_incrementally()) { |
| 372 | should_delay = true; |
| 373 | } |
| 374 | } |
| 375 | |
| 376 | _forced_inline = false; // Reset |
| 377 | if (!should_inline(callee_method, caller_method, caller_bci, profile, |
| 378 | wci_result)) { |
| 379 | return false; |
| 380 | } |
| 381 | if (should_not_inline(callee_method, caller_method, jvms, wci_result)) { |
| 382 | return false; |
| 383 | } |
| 384 | |
| 385 | if (InlineAccessors && callee_method->is_accessor()) { |
| 386 | // accessor methods are not subject to any of the following limits. |
| 387 | set_msg("accessor" ); |
| 388 | return true; |
| 389 | } |
| 390 | |
| 391 | // suppress a few checks for accessors and trivial methods |
| 392 | if (callee_method->code_size() > MaxTrivialSize) { |
| 393 | |
| 394 | // don't inline into giant methods |
| 395 | if (C->over_inlining_cutoff()) { |
| 396 | if ((!callee_method->force_inline() && !caller_method->is_compiled_lambda_form()) |
| 397 | || !IncrementalInline) { |
| 398 | set_msg("NodeCountInliningCutoff" ); |
| 399 | return false; |
| 400 | } else { |
| 401 | should_delay = true; |
| 402 | } |
| 403 | } |
| 404 | |
| 405 | if (!UseInterpreter && |
| 406 | is_init_with_ea(callee_method, caller_method, C)) { |
| 407 | // Escape Analysis stress testing when running Xcomp: |
| 408 | // inline constructors even if they are not reached. |
| 409 | } else if (forced_inline()) { |
| 410 | // Inlining was forced by CompilerOracle, ciReplay or annotation |
| 411 | } else if (is_not_reached(callee_method, caller_method, caller_bci, profile)) { |
| 412 | // don't inline unreached call sites |
| 413 | set_msg("call site not reached" ); |
| 414 | return false; |
| 415 | } |
| 416 | } |
| 417 | |
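| | // When inlining is globally disabled but InlineAccessors is on, anything that |
| | // was not already accepted as an accessor above is rejected here. |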
| 418 | if (!C->do_inlining() && InlineAccessors) { |
| 419 | set_msg("not an accessor" ); |
| 420 | return false; |
| 421 | } |
| 422 | |
| 423 | // Limit inlining depth in case inlining is forced or |
| 424 | // _max_inline_level was increased to compensate for lambda forms. |
| 425 | if (inline_level() > MaxForceInlineLevel) { |
| 426 | set_msg("MaxForceInlineLevel" ); |
| 427 | return false; |
| 428 | } |
| 429 | if (inline_level() > _max_inline_level) { |
| 430 | if (!callee_method->force_inline() || !IncrementalInline) { |
| 431 | set_msg("inlining too deep" ); |
| 432 | return false; |
| 433 | } else if (!C->inlining_incrementally()) { |
| 434 | should_delay = true; |
| 435 | } |
| 436 | } |
| 437 | |
| 438 | // detect direct and indirect recursive inlining |
| 439 | { |
| 440 | // count the current method and the callee |
| 441 | const bool is_compiled_lambda_form = callee_method->is_compiled_lambda_form(); |
| 442 | int inline_level = 0; |
| 443 | if (!is_compiled_lambda_form) { |
| 444 | if (method() == callee_method) { |
| 445 | inline_level++; |
| 446 | } |
| 447 | } |
| 448 | // count callers of current method and callee |
| 449 | Node* callee_argument0 = is_compiled_lambda_form ? jvms->map()->argument(jvms, 0)->uncast() : NULL; |
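| | // argument 0 (casts stripped) is the "receiver" used below to tell genuine |
| | // recursion apart from ordinary reuse of a shared compiled lambda form. |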
| 450 | for (JVMState* j = jvms->caller(); j != NULL && j->has_method(); j = j->caller()) { |
| 451 | if (j->method() == callee_method) { |
| 452 | if (is_compiled_lambda_form) { |
| 453 | // Since compiled lambda forms are heavily reused we allow recursive inlining. If it is truly |
| 454 | // a recursion (using the same "receiver") we limit inlining otherwise we can easily blow the |
| 455 | // compiler stack. |
| 456 | Node* caller_argument0 = j->map()->argument(j, 0)->uncast(); |
| 457 | if (caller_argument0 == callee_argument0) { |
| 458 | inline_level++; |
| 459 | } |
| 460 | } else { |
| 461 | inline_level++; |
| 462 | } |
| 463 | } |
| 464 | } |
| 465 | if (inline_level > MaxRecursiveInlineLevel) { |
| 466 | set_msg("recursive inlining is too deep" ); |
| 467 | return false; |
| 468 | } |
| 469 | } |
| 470 | |
| 471 | int size = callee_method->code_size_for_inlining(); |
| 472 | |
| 473 | if (ClipInlining && (int)count_inline_bcs() + size >= DesiredMethodLimit) { |
| 474 | if (!callee_method->force_inline() || !IncrementalInline) { |
| 475 | set_msg("size > DesiredMethodLimit" ); |
| 476 | return false; |
| 477 | } else if (!C->inlining_incrementally()) { |
| 478 | should_delay = true; |
| 479 | } |
| 480 | } |
| 481 | |
| 482 | // ok, inline this method |
| 483 | return true; |
| 484 | } |
| 485 | |
| 486 | //------------------------------pass_initial_checks---------------------------- |
| 487 | bool InlineTree::pass_initial_checks(ciMethod* caller_method, int caller_bci, ciMethod* callee_method) { |
| 488 | // Check if a callee_method was suggested |
| 489 | if (callee_method == NULL) { |
| 490 | return false; |
| 491 | } |
| 492 | ciInstanceKlass *callee_holder = callee_method->holder(); |
| 493 | // Check if klass of callee_method is loaded |
| 494 | if (!callee_holder->is_loaded()) { |
| 495 | return false; |
| 496 | } |
| 497 | if (!callee_holder->is_initialized() && |
| 498 | // access allowed in the context of static initializer |
| 499 | C->needs_clinit_barrier(callee_holder, caller_method)) { |
| 500 | return false; |
| 501 | } |
| 502 | if( !UseInterpreter ) /* running Xcomp */ { |
| 503 | // Check that the constant pool's call site has been visited; |
| 504 | // this is stricter than callee_holder->is_initialized(). |
| 505 | ciBytecodeStream iter(caller_method); |
| 506 | iter.force_bci(caller_bci); |
| 507 | Bytecodes::Code call_bc = iter.cur_bc(); |
| 508 | // An invokedynamic instruction does not have a klass. |
| 509 | if (call_bc != Bytecodes::_invokedynamic) { |
| 510 | int index = iter.get_index_u2_cpcache(); |
| 511 | if (!caller_method->is_klass_loaded(index, true)) { |
| 512 | return false; |
| 513 | } |
| 514 | // Try to do constant pool resolution if running Xcomp |
| 515 | if( !caller_method->check_call(index, call_bc == Bytecodes::_invokestatic) ) { |
| 516 | return false; |
| 517 | } |
| 518 | } |
| 519 | } |
| 520 | return true; |
| 521 | } |
| 522 | |
| 523 | //------------------------------check_can_parse-------------------------------- |
| 524 | const char* InlineTree::check_can_parse(ciMethod* callee) { |
| 525 | // Certain methods cannot be parsed at all: |
| 526 | if ( callee->is_native()) return "native method" ; |
| 527 | if ( callee->is_abstract()) return "abstract method" ; |
| 528 | if (!callee->has_balanced_monitors()) return "not compilable (unbalanced monitors)" ; |
| 529 | if ( callee->get_flow_analysis()->failing()) return "not compilable (flow analysis failed)" ; |
| 530 | if (!callee->can_be_parsed()) return "cannot be parsed" ; |
| 531 | return NULL; |
| 532 | } |
| 533 | |
| 534 | static void post_inlining_event(int compile_id,const char* msg, bool success, int bci, ciMethod* caller, ciMethod* callee) { |
| 535 | assert(caller != NULL, "invariant" ); |
| 536 | assert(callee != NULL, "invariant" ); |
| 537 | EventCompilerInlining event; |
| 538 | if (event.should_commit()) { |
| 539 | JfrStructCalleeMethod callee_struct; |
| 540 | callee_struct.set_type(callee->holder()->name()->as_utf8()); |
| 541 | callee_struct.set_name(callee->name()->as_utf8()); |
| 542 | callee_struct.set_descriptor(callee->signature()->as_symbol()->as_utf8()); |
| 543 | event.set_compileId(compile_id); |
| 544 | event.set_message(msg); |
| 545 | event.set_succeeded(success); |
| 546 | event.set_bci(bci); |
| 547 | event.set_caller(caller->get_Method()); |
| 548 | event.set_callee(callee_struct); |
| 549 | event.commit(); |
| 550 | } |
| 551 | } |
| 552 | |
| 553 | //------------------------------print_inlining--------------------------------- |
| 554 | void InlineTree::print_inlining(ciMethod* callee_method, int caller_bci, |
| 555 | ciMethod* caller_method, bool success) const { |
| 556 | const char* inline_msg = msg(); |
| 557 | assert(inline_msg != NULL, "just checking" ); |
| 558 | if (C->log() != NULL) { |
| 559 | if (success) { |
| 560 | C->log()->inline_success(inline_msg); |
| 561 | } else { |
| 562 | C->log()->inline_fail(inline_msg); |
| 563 | } |
| 564 | } |
| 565 | CompileTask::print_inlining_ul(callee_method, inline_level(), |
| 566 | caller_bci, inline_msg); |
| 567 | if (C->print_inlining()) { |
| 568 | C->print_inlining(callee_method, inline_level(), caller_bci, inline_msg); |
| 569 | guarantee(callee_method != NULL, "would crash in post_inlining_event" ); |
| 570 | if (Verbose) { |
| 571 | const InlineTree *top = this; |
| 572 | while (top->caller_tree() != NULL) { top = top->caller_tree(); } |
| 573 | //tty->print(" bcs: %d+%d invoked: %d", top->count_inline_bcs(), callee_method->code_size(), callee_method->interpreter_invocation_count()); |
| 574 | } |
| 575 | } |
| 576 | post_inlining_event(C->compile_id(), inline_msg, success, caller_bci, caller_method, callee_method); |
| 577 | } |
| 578 | |
| 579 | //------------------------------ok_to_inline----------------------------------- |
| 580 | WarmCallInfo* InlineTree::ok_to_inline(ciMethod* callee_method, JVMState* jvms, ciCallProfile& profile, WarmCallInfo* initial_wci, bool& should_delay) { |
| 581 | assert(callee_method != NULL, "caller checks for optimized virtual!" ); |
| 582 | assert(!should_delay, "should be initialized to false" ); |
| 583 | #ifdef ASSERT |
| 584 | // Make sure the incoming jvms has the same information content as me. |
| 585 | // This means that we can eventually make this whole class AllStatic. |
| 586 | if (jvms->caller() == NULL) { |
| 587 | assert(_caller_jvms == NULL, "redundant instance state" ); |
| 588 | } else { |
| 589 | assert(_caller_jvms->same_calls_as(jvms->caller()), "redundant instance state" ); |
| 590 | } |
| 591 | assert(_method == jvms->method(), "redundant instance state" ); |
| 592 | #endif |
| 593 | int caller_bci = jvms->bci(); |
| 594 | ciMethod* caller_method = jvms->method(); |
| 595 | |
| 596 | // Do some initial checks. |
| 597 | if (!pass_initial_checks(caller_method, caller_bci, callee_method)) { |
| 598 | set_msg("failed initial checks" ); |
| 599 | print_inlining(callee_method, caller_bci, caller_method, false /* !success */); |
| 600 | return NULL; |
| 601 | } |
| 602 | |
| 603 | // Do some parse checks. |
| 604 | set_msg(check_can_parse(callee_method)); |
| 605 | if (msg() != NULL) { |
| 606 | print_inlining(callee_method, caller_bci, caller_method, false /* !success */); |
| 607 | return NULL; |
| 608 | } |
| 609 | |
| 610 | // Check if inlining policy says no. |
| 611 | WarmCallInfo wci = *(initial_wci); |
| 612 | bool success = try_to_inline(callee_method, caller_method, caller_bci, |
| 613 | jvms, profile, &wci, should_delay); |
| 614 | |
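| | // With InlineWarmCalls off (its default), wci is simply forced to always_hot |
| | // or always_cold below, according to the boolean decision just made. |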
| 615 | #ifndef PRODUCT |
| 616 | if (InlineWarmCalls && (PrintOpto || C->print_inlining())) { |
| 617 | bool cold = wci.is_cold(); |
| 618 | bool hot = !cold && wci.is_hot(); |
| 619 | bool old_cold = !success; |
| 620 | if (old_cold != cold || (Verbose || WizardMode)) { |
| 621 | if (msg() == NULL) { |
| 622 | set_msg("OK" ); |
| 623 | } |
| 624 | tty->print(" OldInlining= %4s : %s\n WCI=" , |
| 625 | old_cold ? "cold" : "hot" , msg()); |
| 626 | wci.print(); |
| 627 | } |
| 628 | } |
| 629 | #endif |
| 630 | if (success) { |
| 631 | wci = *(WarmCallInfo::always_hot()); |
| 632 | } else { |
| 633 | wci = *(WarmCallInfo::always_cold()); |
| 634 | } |
| 635 | |
| 636 | if (!InlineWarmCalls) { |
| 637 | if (!wci.is_cold() && !wci.is_hot()) { |
| 638 | // Do not inline the warm calls. |
| 639 | wci = *(WarmCallInfo::always_cold()); |
| 640 | } |
| 641 | } |
| 642 | |
| 643 | if (!wci.is_cold()) { |
| 644 | // Inline! |
| 645 | if (msg() == NULL) { |
| 646 | set_msg("inline (hot)" ); |
| 647 | } |
| 648 | print_inlining(callee_method, caller_bci, caller_method, true /* success */); |
| 649 | build_inline_tree_for_callee(callee_method, jvms, caller_bci); |
| 650 | if (InlineWarmCalls && !wci.is_hot()) { |
| 651 | return new (C) WarmCallInfo(wci); // copy to heap |
| 652 | } |
| 653 | return WarmCallInfo::always_hot(); |
| 654 | } |
| 655 | |
| 656 | // Do not inline |
| 657 | if (msg() == NULL) { |
| 658 | set_msg("too cold to inline" ); |
| 659 | } |
| 660 | print_inlining(callee_method, caller_bci, caller_method, false /* !success */ ); |
| 661 | return NULL; |
| 662 | } |
| 663 | |
| 664 | //------------------------------compute_callee_frequency----------------------- |
| 665 | float InlineTree::compute_callee_frequency( int caller_bci ) const { |
| 666 | int count = method()->interpreter_call_site_count(caller_bci); |
| 667 | int invcnt = method()->interpreter_invocation_count(); |
| 668 | float freq = (float)count/(float)invcnt; |
| 669 | // Call-site count / interpreter invocation count, scaled recursively. |
| 670 | // Always between 0.0 and 1.0.  Represents the fraction of the method's |
| 671 | // total execution time spent at this call site. |
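| | // For example: a site executed 25 times in a method entered 100 times |
| | // yields freq == 0.25. |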
| 672 | |
| 673 | return freq; |
| 674 | } |
| 675 | |
| 676 | //------------------------------build_inline_tree_for_callee------------------- |
| 677 | InlineTree *InlineTree::build_inline_tree_for_callee( ciMethod* callee_method, JVMState* caller_jvms, int caller_bci) { |
| 678 | float recur_frequency = _site_invoke_ratio * compute_callee_frequency(caller_bci); |
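| | // The product makes frequency decay multiplicatively down the inline tree: a |
| | // site taking 50% of its caller, which itself takes 50% of the root, gets a |
| | // recur_frequency of 0.25. |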
| 679 | // Attempt inlining. |
| 680 | InlineTree* old_ilt = callee_at(caller_bci, callee_method); |
| 681 | if (old_ilt != NULL) { |
| 682 | return old_ilt; |
| 683 | } |
| 684 | int max_inline_level_adjust = 0; |
| 685 | if (caller_jvms->method() != NULL) { |
| 686 | if (caller_jvms->method()->is_compiled_lambda_form()) { |
| 687 | max_inline_level_adjust += 1; // don't count actions in MH or indy adapter frames |
| 688 | } else if (callee_method->is_method_handle_intrinsic() || |
| 689 | callee_method->is_compiled_lambda_form()) { |
| 690 | max_inline_level_adjust += 1; // don't count method handle calls from java.lang.invoke implementation |
| 691 | } |
| 692 | if (max_inline_level_adjust != 0 && C->print_inlining() && (Verbose || WizardMode)) { |
| 693 | CompileTask::print_inline_indent(inline_level()); |
| 694 | tty->print_cr(" \\-> discounting inline depth" ); |
| 695 | } |
| 696 | if (max_inline_level_adjust != 0 && C->log()) { |
| 697 | int id1 = C->log()->identify(caller_jvms->method()); |
| 698 | int id2 = C->log()->identify(callee_method); |
| 699 | C->log()->elem("inline_level_discount caller='%d' callee='%d'" , id1, id2); |
| 700 | } |
| 701 | } |
| 702 | // Allocate in the comp_arena to make sure the InlineTree is live when dumping a replay compilation file |
| 703 | InlineTree* ilt = new (C->comp_arena()) InlineTree(C, this, callee_method, caller_jvms, caller_bci, recur_frequency, _max_inline_level + max_inline_level_adjust); |
| 704 | _subtrees.append(ilt); |
| 705 | |
| 706 | NOT_PRODUCT( _count_inlines += 1; ) |
| 707 | |
| 708 | return ilt; |
| 709 | } |
| 710 | |
| 711 | |
| 712 | //---------------------------------------callee_at----------------------------- |
| 713 | InlineTree *InlineTree::callee_at(int bci, ciMethod* callee) const { |
| 714 | for (int i = 0; i < _subtrees.length(); i++) { |
| 715 | InlineTree* sub = _subtrees.at(i); |
| 716 | if (sub->caller_bci() == bci && callee == sub->method()) { |
| 717 | return sub; |
| 718 | } |
| 719 | } |
| 720 | return NULL; |
| 721 | } |
| 722 | |
| 723 | |
| 724 | //------------------------------build_inline_tree_root------------------------- |
| 725 | InlineTree *InlineTree::build_inline_tree_root() { |
| 726 | Compile* C = Compile::current(); |
| 727 | |
| 728 | // Root of inline tree |
| 729 | InlineTree* ilt = new InlineTree(C, NULL, C->method(), NULL, -1, 1.0F, MaxInlineLevel); |
| 730 | |
| 731 | return ilt; |
| 732 | } |
| 733 | |
| 734 | |
| 735 | //-------------------------find_subtree_from_root----------------------------- |
| 736 | // Given a jvms, which determines a call chain from the root method, |
| 737 | // find the corresponding inline tree. |
| 738 | // Note: This method will be removed or replaced as InlineTree goes away. |
| 739 | InlineTree* InlineTree::find_subtree_from_root(InlineTree* root, JVMState* jvms, ciMethod* callee) { |
| 740 | InlineTree* iltp = root; |
| 741 | uint depth = jvms && jvms->has_method() ? jvms->depth() : 0; |
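| | // jvms->depth() counts the frames in the caller chain; walk them from the |
| | // root method (d == 1) towards the leaf, re-selecting a subtree at each bci. |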
| 742 | for (uint d = 1; d <= depth; d++) { |
| 743 | JVMState* jvmsp = jvms->of_depth(d); |
| 744 | // Select the corresponding subtree for this bci. |
| 745 | assert(jvmsp->method() == iltp->method(), "tree still in sync" ); |
| 746 | ciMethod* d_callee = (d == depth) ? callee : jvms->of_depth(d+1)->method(); |
| 747 | InlineTree* sub = iltp->callee_at(jvmsp->bci(), d_callee); |
| 748 | if (sub == NULL) { |
| 749 | if (d == depth) { |
| 750 | sub = iltp->build_inline_tree_for_callee(d_callee, jvmsp, jvmsp->bci()); |
| 751 | } |
| 752 | guarantee(sub != NULL, "should be a sub-ilt here" ); |
| 753 | return sub; |
| 754 | } |
| 755 | iltp = sub; |
| 756 | } |
| 757 | return iltp; |
| 758 | } |
| 759 | |
| 760 | // Count number of nodes in this subtree |
| 761 | int InlineTree::count() const { |
| 762 | int result = 1; |
| 763 | for (int i = 0 ; i < _subtrees.length(); i++) { |
| 764 | result += _subtrees.at(i)->count(); |
| 765 | } |
| 766 | return result; |
| 767 | } |
| 768 | |
| 769 | void InlineTree::dump_replay_data(outputStream* out) { |
| 770 | out->print(" %d %d " , inline_level(), caller_bci()); |
| 771 | method()->dump_name_as_ascii(out); |
| 772 | for (int i = 0 ; i < _subtrees.length(); i++) { |
| 773 | _subtrees.at(i)->dump_replay_data(out); |
| 774 | } |
| 775 | } |
| 776 | |
| 777 | |
| 778 | #ifndef PRODUCT |
| 779 | void InlineTree::print_impl(outputStream* st, int indent) const { |
| 780 | for (int i = 0; i < indent; i++) st->print(" " ); |
| 781 | st->print(" @ %d" , caller_bci()); |
| 782 | method()->print_short_name(st); |
| 783 | st->cr(); |
| 784 | |
| 785 | for (int i = 0 ; i < _subtrees.length(); i++) { |
| 786 | _subtrees.at(i)->print_impl(st, indent + 2); |
| 787 | } |
| 788 | } |
| 789 | |
| 790 | void InlineTree::print_value_on(outputStream* st) const { |
| 791 | print_impl(st, 2); |
| 792 | } |
| 793 | #endif |
| 794 | |