1 | /* |
2 | * Copyright (c) 2005, 2018, Oracle and/or its affiliates. All rights reserved. |
3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 | * |
5 | * This code is free software; you can redistribute it and/or modify it |
6 | * under the terms of the GNU General Public License version 2 only, as |
7 | * published by the Free Software Foundation. |
8 | * |
9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
12 | * version 2 for more details (a copy is included in the LICENSE file that |
13 | * accompanied this code). |
14 | * |
15 | * You should have received a copy of the GNU General Public License version |
16 | * 2 along with this work; if not, write to the Free Software Foundation, |
17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
18 | * |
19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
20 | * or visit www.oracle.com if you need additional information or have any |
21 | * questions. |
22 | * |
23 | */ |
24 | |
25 | #include "precompiled.hpp" |
26 | #include "ci/ciArrayKlass.hpp" |
27 | #include "ci/ciEnv.hpp" |
28 | #include "ci/ciKlass.hpp" |
29 | #include "ci/ciMethod.hpp" |
30 | #include "classfile/javaClasses.inline.hpp" |
31 | #include "code/dependencies.hpp" |
32 | #include "compiler/compileLog.hpp" |
33 | #include "compiler/compileBroker.hpp" |
34 | #include "compiler/compileTask.hpp" |
35 | #include "memory/resourceArea.hpp" |
36 | #include "oops/klass.hpp" |
37 | #include "oops/oop.inline.hpp" |
38 | #include "oops/objArrayKlass.hpp" |
39 | #include "runtime/flags/flagSetting.hpp" |
40 | #include "runtime/handles.hpp" |
41 | #include "runtime/handles.inline.hpp" |
42 | #include "runtime/jniHandles.inline.hpp" |
43 | #include "runtime/thread.inline.hpp" |
44 | #include "utilities/copy.hpp" |
45 | |
46 | |
47 | #ifdef ASSERT |
48 | static bool must_be_in_vm() { |
49 | Thread* thread = Thread::current(); |
50 | if (thread->is_Java_thread()) |
51 | return ((JavaThread*)thread)->thread_state() == _thread_in_vm; |
52 | else |
53 | return true; //something like this: thread->is_VM_thread(); |
54 | } |
55 | #endif //ASSERT |
56 | |
57 | void Dependencies::initialize(ciEnv* env) { |
58 | Arena* arena = env->arena(); |
59 | _oop_recorder = env->oop_recorder(); |
60 | _log = env->log(); |
61 | _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0); |
62 | #if INCLUDE_JVMCI |
63 | _using_dep_values = false; |
64 | #endif |
65 | DEBUG_ONLY(_deps[end_marker] = NULL); |
66 | for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) { |
67 | _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0); |
68 | } |
69 | _content_bytes = NULL; |
70 | _size_in_bytes = (size_t)-1; |
71 | |
72 | assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity" ); |
73 | } |
74 | |
75 | void Dependencies::assert_evol_method(ciMethod* m) { |
76 | assert_common_1(evol_method, m); |
77 | } |
78 | |
79 | void Dependencies::assert_leaf_type(ciKlass* ctxk) { |
80 | if (ctxk->is_array_klass()) { |
81 | // As a special case, support this assertion on an array type, |
82 | // which reduces to an assertion on its element type. |
83 | // Note that this cannot be done with assertions that |
84 | // relate to concreteness or abstractness. |
85 | ciType* elemt = ctxk->as_array_klass()->base_element_type(); |
86 | if (!elemt->is_instance_klass()) return; // Ex: int[][] |
87 | ctxk = elemt->as_instance_klass(); |
88 | //if (ctxk->is_final()) return; // Ex: String[][] |
89 | } |
90 | check_ctxk(ctxk); |
91 | assert_common_1(leaf_type, ctxk); |
92 | } |
93 | |
94 | void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) { |
95 | check_ctxk_abstract(ctxk); |
96 | assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck); |
97 | } |
98 | |
99 | void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) { |
100 | check_ctxk_abstract(ctxk); |
101 | assert_common_1(abstract_with_no_concrete_subtype, ctxk); |
102 | } |
103 | |
104 | void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) { |
105 | check_ctxk_concrete(ctxk); |
106 | assert_common_1(concrete_with_no_concrete_subtype, ctxk); |
107 | } |
108 | |
109 | void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) { |
110 | check_ctxk(ctxk); |
111 | check_unique_method(ctxk, uniqm); |
112 | assert_common_2(unique_concrete_method, ctxk, uniqm); |
113 | } |
114 | |
115 | void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) { |
116 | check_ctxk(ctxk); |
117 | assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2); |
118 | } |
119 | |
120 | void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) { |
121 | check_ctxk(ctxk); |
122 | assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2); |
123 | } |
124 | |
125 | void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) { |
126 | check_ctxk(ctxk); |
127 | assert_common_1(no_finalizable_subclasses, ctxk); |
128 | } |
129 | |
130 | void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) { |
131 | assert_common_2(call_site_target_value, call_site, method_handle); |
132 | } |
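
// Illustrative sketch (placeholder names, not a prescribed recipe): during a
// compilation the CI layer records assertions such as
//   deps->assert_leaf_type(receiver_klass);
//   deps->assert_unique_concrete_method(ctxk, target_method);
// Each call buffers its arguments in the per-type _deps[] arrays; the buffered
// assertions are later flattened into the nmethod via encode_content_bytes()
// and copy_to().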
133 | |
134 | #if INCLUDE_JVMCI |
135 | |
136 | Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) { |
137 | _oop_recorder = oop_recorder; |
138 | _log = log; |
139 | _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0); |
140 | _using_dep_values = true; |
141 | DEBUG_ONLY(_dep_values[end_marker] = NULL); |
142 | for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) { |
143 | _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue()); |
144 | } |
145 | _content_bytes = NULL; |
146 | _size_in_bytes = (size_t)-1; |
147 | |
148 | assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity" ); |
149 | } |
150 | |
151 | void Dependencies::assert_evol_method(Method* m) { |
152 | assert_common_1(evol_method, DepValue(_oop_recorder, m)); |
153 | } |
154 | |
155 | void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) { |
156 | check_ctxk(ctxk); |
157 | assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk)); |
158 | } |
159 | |
160 | void Dependencies::assert_leaf_type(Klass* ctxk) { |
161 | if (ctxk->is_array_klass()) { |
162 | // As a special case, support this assertion on an array type, |
163 | // which reduces to an assertion on its element type. |
164 | // Note that this cannot be done with assertions that |
165 | // relate to concreteness or abstractness. |
166 | BasicType elemt = ArrayKlass::cast(ctxk)->element_type(); |
167 | if (is_java_primitive(elemt)) return; // Ex: int[][] |
168 | ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass(); |
169 | //if (ctxk->is_final()) return; // Ex: String[][] |
170 | } |
171 | check_ctxk(ctxk); |
172 | assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk)); |
173 | } |
174 | |
175 | void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) { |
176 | check_ctxk_abstract(ctxk); |
177 | DepValue ctxk_dv(_oop_recorder, ctxk); |
178 | DepValue conck_dv(_oop_recorder, conck, &ctxk_dv); |
179 | assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv); |
180 | } |
181 | |
182 | void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) { |
183 | check_ctxk(ctxk); |
184 | check_unique_method(ctxk, uniqm); |
185 | assert_common_2(unique_concrete_method, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm)); |
186 | } |
187 | |
188 | void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) { |
189 | assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle))); |
190 | } |
191 | |
192 | #endif // INCLUDE_JVMCI |
193 | |
194 | |
// Helper function.  If we are adding a new dep. under ctxk2,
// try to find an old dep. under a broader ctxk1.  If there is one,
// the existing entry already covers the new dep. (or its context is
// widened to ctxk2), so the new dep. need not be recorded separately.
198 | bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps, |
199 | int ctxk_i, ciKlass* ctxk2) { |
200 | ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass(); |
201 | if (ctxk2->is_subtype_of(ctxk1)) { |
202 | return true; // success, and no need to change |
203 | } else if (ctxk1->is_subtype_of(ctxk2)) { |
204 | // new context class fully subsumes previous one |
205 | deps->at_put(ctxk_i, ctxk2); |
206 | return true; |
207 | } else { |
208 | return false; |
209 | } |
210 | } |
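
// Example: if the stored dep has ctxk1 = java.util.AbstractList and the new
// dep's ctxk2 = java.util.ArrayList (a subtype), the stored entry already
// covers the new one; if ctxk2 were instead the supertype, the stored context
// would be widened to ctxk2.  Only when the two contexts are unrelated does
// the caller go on to record a separate entry.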
211 | |
212 | void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) { |
213 | assert(dep_args(dept) == 1, "sanity" ); |
214 | log_dependency(dept, x); |
215 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
216 | |
217 | // see if the same (or a similar) dep is already recorded |
218 | if (note_dep_seen(dept, x)) { |
219 | assert(deps->find(x) >= 0, "sanity" ); |
220 | } else { |
221 | deps->append(x); |
222 | } |
223 | } |
224 | |
225 | void Dependencies::assert_common_2(DepType dept, |
226 | ciBaseObject* x0, ciBaseObject* x1) { |
227 | assert(dep_args(dept) == 2, "sanity" ); |
228 | log_dependency(dept, x0, x1); |
229 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
230 | |
231 | // see if the same (or a similar) dep is already recorded |
232 | bool has_ctxk = has_explicit_context_arg(dept); |
233 | if (has_ctxk) { |
234 | assert(dep_context_arg(dept) == 0, "sanity" ); |
235 | if (note_dep_seen(dept, x1)) { |
236 | // look in this bucket for redundant assertions |
237 | const int stride = 2; |
238 | for (int i = deps->length(); (i -= stride) >= 0; ) { |
239 | ciBaseObject* y1 = deps->at(i+1); |
240 | if (x1 == y1) { // same subject; check the context |
241 | if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) { |
242 | return; |
243 | } |
244 | } |
245 | } |
246 | } |
247 | } else { |
248 | if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) { |
249 | // look in this bucket for redundant assertions |
250 | const int stride = 2; |
251 | for (int i = deps->length(); (i -= stride) >= 0; ) { |
252 | ciBaseObject* y0 = deps->at(i+0); |
253 | ciBaseObject* y1 = deps->at(i+1); |
254 | if (x0 == y0 && x1 == y1) { |
255 | return; |
256 | } |
257 | } |
258 | } |
259 | } |
260 | |
261 | // append the assertion in the correct bucket: |
262 | deps->append(x0); |
263 | deps->append(x1); |
264 | } |
265 | |
266 | void Dependencies::assert_common_3(DepType dept, |
267 | ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) { |
268 | assert(dep_context_arg(dept) == 0, "sanity" ); |
269 | assert(dep_args(dept) == 3, "sanity" ); |
270 | log_dependency(dept, ctxk, x, x2); |
271 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
272 | |
273 | // try to normalize an unordered pair: |
274 | bool swap = false; |
275 | switch (dept) { |
276 | case abstract_with_exclusive_concrete_subtypes_2: |
277 | swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk); |
278 | break; |
279 | case exclusive_concrete_methods_2: |
280 | swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk); |
281 | break; |
282 | default: |
283 | break; |
284 | } |
285 | if (swap) { ciBaseObject* t = x; x = x2; x2 = t; } |
286 | |
287 | // see if the same (or a similar) dep is already recorded |
288 | if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) { |
289 | // look in this bucket for redundant assertions |
290 | const int stride = 3; |
291 | for (int i = deps->length(); (i -= stride) >= 0; ) { |
292 | ciBaseObject* y = deps->at(i+1); |
293 | ciBaseObject* y2 = deps->at(i+2); |
294 | if (x == y && x2 == y2) { // same subjects; check the context |
295 | if (maybe_merge_ctxk(deps, i+0, ctxk)) { |
296 | return; |
297 | } |
298 | } |
299 | } |
300 | } |
301 | // append the assertion in the correct bucket: |
302 | deps->append(ctxk); |
303 | deps->append(x); |
304 | deps->append(x2); |
305 | } |
306 | |
307 | #if INCLUDE_JVMCI |
308 | bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps, |
309 | int ctxk_i, DepValue ctxk2_dv) { |
310 | Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder); |
311 | Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder); |
312 | if (ctxk2->is_subtype_of(ctxk1)) { |
313 | return true; // success, and no need to change |
314 | } else if (ctxk1->is_subtype_of(ctxk2)) { |
315 | // new context class fully subsumes previous one |
316 | deps->at_put(ctxk_i, ctxk2_dv); |
317 | return true; |
318 | } else { |
319 | return false; |
320 | } |
321 | } |
322 | |
323 | void Dependencies::assert_common_1(DepType dept, DepValue x) { |
324 | assert(dep_args(dept) == 1, "sanity" ); |
325 | //log_dependency(dept, x); |
326 | GrowableArray<DepValue>* deps = _dep_values[dept]; |
327 | |
328 | // see if the same (or a similar) dep is already recorded |
329 | if (note_dep_seen(dept, x)) { |
330 | assert(deps->find(x) >= 0, "sanity" ); |
331 | } else { |
332 | deps->append(x); |
333 | } |
334 | } |
335 | |
336 | void Dependencies::assert_common_2(DepType dept, |
337 | DepValue x0, DepValue x1) { |
338 | assert(dep_args(dept) == 2, "sanity" ); |
339 | //log_dependency(dept, x0, x1); |
340 | GrowableArray<DepValue>* deps = _dep_values[dept]; |
341 | |
342 | // see if the same (or a similar) dep is already recorded |
343 | bool has_ctxk = has_explicit_context_arg(dept); |
344 | if (has_ctxk) { |
345 | assert(dep_context_arg(dept) == 0, "sanity" ); |
346 | if (note_dep_seen(dept, x1)) { |
347 | // look in this bucket for redundant assertions |
348 | const int stride = 2; |
349 | for (int i = deps->length(); (i -= stride) >= 0; ) { |
350 | DepValue y1 = deps->at(i+1); |
351 | if (x1 == y1) { // same subject; check the context |
352 | if (maybe_merge_ctxk(deps, i+0, x0)) { |
353 | return; |
354 | } |
355 | } |
356 | } |
357 | } |
358 | } else { |
359 | if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) { |
360 | // look in this bucket for redundant assertions |
361 | const int stride = 2; |
362 | for (int i = deps->length(); (i -= stride) >= 0; ) { |
363 | DepValue y0 = deps->at(i+0); |
364 | DepValue y1 = deps->at(i+1); |
365 | if (x0 == y0 && x1 == y1) { |
366 | return; |
367 | } |
368 | } |
369 | } |
370 | } |
371 | |
372 | // append the assertion in the correct bucket: |
373 | deps->append(x0); |
374 | deps->append(x1); |
375 | } |
376 | #endif // INCLUDE_JVMCI |
377 | |
378 | /// Support for encoding dependencies into an nmethod: |
379 | |
380 | void Dependencies::copy_to(nmethod* nm) { |
381 | address beg = nm->dependencies_begin(); |
382 | address end = nm->dependencies_end(); |
383 | guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing" ); |
384 | Copy::disjoint_words((HeapWord*) content_bytes(), |
385 | (HeapWord*) beg, |
386 | size_in_bytes() / sizeof(HeapWord)); |
387 | assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words" ); |
388 | } |
389 | |
390 | static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) { |
391 | for (int i = 0; i < narg; i++) { |
392 | int diff = p1[i]->ident() - p2[i]->ident(); |
393 | if (diff != 0) return diff; |
394 | } |
395 | return 0; |
396 | } |
397 | static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2) |
398 | { return sort_dep(p1, p2, 1); } |
399 | static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2) |
400 | { return sort_dep(p1, p2, 2); } |
401 | static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2) |
402 | { return sort_dep(p1, p2, 3); } |
403 | |
404 | #if INCLUDE_JVMCI |
405 | // metadata deps are sorted before object deps |
406 | static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) { |
407 | for (int i = 0; i < narg; i++) { |
408 | int diff = p1[i].sort_key() - p2[i].sort_key(); |
409 | if (diff != 0) return diff; |
410 | } |
411 | return 0; |
412 | } |
413 | static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2) |
414 | { return sort_dep_value(p1, p2, 1); } |
415 | static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2) |
416 | { return sort_dep_value(p1, p2, 2); } |
417 | static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2) |
418 | { return sort_dep_value(p1, p2, 3); } |
419 | #endif // INCLUDE_JVMCI |
420 | |
421 | void Dependencies::sort_all_deps() { |
422 | #if INCLUDE_JVMCI |
423 | if (_using_dep_values) { |
424 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
425 | DepType dept = (DepType)deptv; |
426 | GrowableArray<DepValue>* deps = _dep_values[dept]; |
427 | if (deps->length() <= 1) continue; |
428 | switch (dep_args(dept)) { |
429 | case 1: deps->sort(sort_dep_value_arg_1, 1); break; |
430 | case 2: deps->sort(sort_dep_value_arg_2, 2); break; |
431 | case 3: deps->sort(sort_dep_value_arg_3, 3); break; |
432 | default: ShouldNotReachHere(); break; |
433 | } |
434 | } |
435 | return; |
436 | } |
437 | #endif // INCLUDE_JVMCI |
438 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
439 | DepType dept = (DepType)deptv; |
440 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
441 | if (deps->length() <= 1) continue; |
442 | switch (dep_args(dept)) { |
443 | case 1: deps->sort(sort_dep_arg_1, 1); break; |
444 | case 2: deps->sort(sort_dep_arg_2, 2); break; |
445 | case 3: deps->sort(sort_dep_arg_3, 3); break; |
446 | default: ShouldNotReachHere(); break; |
447 | } |
448 | } |
449 | } |
450 | |
451 | size_t Dependencies::estimate_size_in_bytes() { |
452 | size_t est_size = 100; |
453 | #if INCLUDE_JVMCI |
454 | if (_using_dep_values) { |
455 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
456 | DepType dept = (DepType)deptv; |
457 | GrowableArray<DepValue>* deps = _dep_values[dept]; |
458 | est_size += deps->length() * 2; // tags and argument(s) |
459 | } |
460 | return est_size; |
461 | } |
462 | #endif // INCLUDE_JVMCI |
463 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
464 | DepType dept = (DepType)deptv; |
465 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
466 | est_size += deps->length()*2; // tags and argument(s) |
467 | } |
468 | return est_size; |
469 | } |
470 | |
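// For some dependency types the context klass is implied by the following
// argument (e.g. a method's holder).  When the recorded context equals this
// default, the encoder omits it and sets default_context_type_bit instead.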
471 | ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) { |
472 | switch (dept) { |
473 | case abstract_with_exclusive_concrete_subtypes_2: |
474 | return x->as_metadata()->as_klass(); |
475 | case unique_concrete_method: |
476 | case exclusive_concrete_methods_2: |
477 | return x->as_metadata()->as_method()->holder(); |
478 | default: |
479 | return NULL; // let NULL be NULL |
480 | } |
481 | } |
482 | |
483 | Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) { |
484 | assert(must_be_in_vm(), "raw oops here" ); |
485 | switch (dept) { |
486 | case abstract_with_exclusive_concrete_subtypes_2: |
487 | assert(x->is_klass(), "sanity" ); |
488 | return (Klass*) x; |
489 | case unique_concrete_method: |
490 | case exclusive_concrete_methods_2: |
491 | assert(x->is_method(), "sanity" ); |
492 | return ((Method*)x)->method_holder(); |
493 | default: |
494 | return NULL; // let NULL be NULL |
495 | } |
496 | } |
497 | |
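// The encoded form produced below is a sequence of records, one per dependency:
//   - a tag byte holding the DepType, OR'ed with default_context_type_bit when
//     the context argument is omitted because it matches the default implied
//     by the following argument (see ctxk_encoded_as_null above);
//   - one compressed int per remaining argument, each an index registered with
//     the OopRecorder.
// The stream is terminated by an end_marker byte and padded with further
// end_marker bytes to a HeapWord boundary, since copy_to() copies whole words.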
498 | void Dependencies::encode_content_bytes() { |
499 | sort_all_deps(); |
500 | |
501 | // cast is safe, no deps can overflow INT_MAX |
502 | CompressedWriteStream bytes((int)estimate_size_in_bytes()); |
503 | |
504 | #if INCLUDE_JVMCI |
505 | if (_using_dep_values) { |
506 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
507 | DepType dept = (DepType)deptv; |
508 | GrowableArray<DepValue>* deps = _dep_values[dept]; |
509 | if (deps->length() == 0) continue; |
510 | int stride = dep_args(dept); |
511 | int ctxkj = dep_context_arg(dept); // -1 if no context arg |
512 | assert(stride > 0, "sanity" ); |
513 | for (int i = 0; i < deps->length(); i += stride) { |
514 | jbyte code_byte = (jbyte)dept; |
515 | int skipj = -1; |
516 | if (ctxkj >= 0 && ctxkj+1 < stride) { |
517 | Klass* ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder); |
518 | DepValue x = deps->at(i+ctxkj+1); // following argument |
519 | if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) { |
520 | skipj = ctxkj; // we win: maybe one less oop to keep track of |
521 | code_byte |= default_context_type_bit; |
522 | } |
523 | } |
524 | bytes.write_byte(code_byte); |
525 | for (int j = 0; j < stride; j++) { |
526 | if (j == skipj) continue; |
527 | DepValue v = deps->at(i+j); |
528 | int idx = v.index(); |
529 | bytes.write_int(idx); |
530 | } |
531 | } |
532 | } |
533 | } else { |
534 | #endif // INCLUDE_JVMCI |
535 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
536 | DepType dept = (DepType)deptv; |
537 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
538 | if (deps->length() == 0) continue; |
539 | int stride = dep_args(dept); |
540 | int ctxkj = dep_context_arg(dept); // -1 if no context arg |
541 | assert(stride > 0, "sanity" ); |
542 | for (int i = 0; i < deps->length(); i += stride) { |
543 | jbyte code_byte = (jbyte)dept; |
544 | int skipj = -1; |
545 | if (ctxkj >= 0 && ctxkj+1 < stride) { |
546 | ciKlass* ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass(); |
547 | ciBaseObject* x = deps->at(i+ctxkj+1); // following argument |
548 | if (ctxk == ctxk_encoded_as_null(dept, x)) { |
549 | skipj = ctxkj; // we win: maybe one less oop to keep track of |
550 | code_byte |= default_context_type_bit; |
551 | } |
552 | } |
553 | bytes.write_byte(code_byte); |
554 | for (int j = 0; j < stride; j++) { |
555 | if (j == skipj) continue; |
556 | ciBaseObject* v = deps->at(i+j); |
557 | int idx; |
558 | if (v->is_object()) { |
559 | idx = _oop_recorder->find_index(v->as_object()->constant_encoding()); |
560 | } else { |
561 | ciMetadata* meta = v->as_metadata(); |
562 | idx = _oop_recorder->find_index(meta->constant_encoding()); |
563 | } |
564 | bytes.write_int(idx); |
565 | } |
566 | } |
567 | } |
568 | #if INCLUDE_JVMCI |
569 | } |
570 | #endif |
571 | |
572 | // write a sentinel byte to mark the end |
573 | bytes.write_byte(end_marker); |
574 | |
575 | // round it out to a word boundary |
576 | while (bytes.position() % sizeof(HeapWord) != 0) { |
577 | bytes.write_byte(end_marker); |
578 | } |
579 | |
580 | // check whether the dept byte encoding really works |
581 | assert((jbyte)default_context_type_bit != 0, "byte overflow" ); |
582 | |
583 | _content_bytes = bytes.buffer(); |
584 | _size_in_bytes = bytes.position(); |
585 | } |
586 | |
587 | |
588 | const char* Dependencies::_dep_name[TYPE_LIMIT] = { |
589 | "end_marker" , |
590 | "evol_method" , |
591 | "leaf_type" , |
592 | "abstract_with_unique_concrete_subtype" , |
593 | "abstract_with_no_concrete_subtype" , |
594 | "concrete_with_no_concrete_subtype" , |
595 | "unique_concrete_method" , |
596 | "abstract_with_exclusive_concrete_subtypes_2" , |
597 | "exclusive_concrete_methods_2" , |
598 | "no_finalizable_subclasses" , |
599 | "call_site_target_value" |
600 | }; |
601 | |
602 | int Dependencies::_dep_args[TYPE_LIMIT] = { |
603 | -1,// end_marker |
604 | 1, // evol_method m |
605 | 1, // leaf_type ctxk |
606 | 2, // abstract_with_unique_concrete_subtype ctxk, k |
607 | 1, // abstract_with_no_concrete_subtype ctxk |
608 | 1, // concrete_with_no_concrete_subtype ctxk |
609 | 2, // unique_concrete_method ctxk, m |
3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
3, // exclusive_concrete_methods_2 ctxk, m1, m2
612 | 1, // no_finalizable_subclasses ctxk |
613 | 2 // call_site_target_value call_site, method_handle |
614 | }; |
615 | |
616 | const char* Dependencies::dep_name(Dependencies::DepType dept) { |
617 | if (!dept_in_mask(dept, all_types)) return "?bad-dep?" ; |
618 | return _dep_name[dept]; |
619 | } |
620 | |
621 | int Dependencies::dep_args(Dependencies::DepType dept) { |
622 | if (!dept_in_mask(dept, all_types)) return -1; |
623 | return _dep_args[dept]; |
624 | } |
625 | |
626 | void Dependencies::check_valid_dependency_type(DepType dept) { |
627 | guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d" , (int) dept); |
628 | } |
629 | |
630 | Dependencies::DepType Dependencies::validate_dependencies(CompileTask* task, bool counter_changed, char** failure_detail) { |
631 | // First, check non-klass dependencies as we might return early and |
632 | // not check klass dependencies if the system dictionary |
633 | // modification counter hasn't changed (see below). |
634 | for (Dependencies::DepStream deps(this); deps.next(); ) { |
635 | if (deps.is_klass_type()) continue; // skip klass dependencies |
636 | Klass* witness = deps.check_dependency(); |
637 | if (witness != NULL) { |
638 | return deps.type(); |
639 | } |
640 | } |
641 | |
642 | // Klass dependencies must be checked when the system dictionary |
643 | // changes. If logging is enabled all violated dependences will be |
644 | // recorded in the log. In debug mode check dependencies even if |
645 | // the system dictionary hasn't changed to verify that no invalid |
646 | // dependencies were inserted. Any violated dependences in this |
647 | // case are dumped to the tty. |
648 | if (!counter_changed && !trueInDebug) { |
649 | return end_marker; |
650 | } |
651 | |
652 | int klass_violations = 0; |
653 | DepType result = end_marker; |
654 | for (Dependencies::DepStream deps(this); deps.next(); ) { |
655 | if (!deps.is_klass_type()) continue; // skip non-klass dependencies |
656 | Klass* witness = deps.check_dependency(); |
657 | if (witness != NULL) { |
658 | if (klass_violations == 0) { |
659 | result = deps.type(); |
if (failure_detail != NULL) {
661 | // Use a fixed size buffer to prevent the string stream from |
662 | // resizing in the context of an inner resource mark. |
663 | char* buffer = NEW_RESOURCE_ARRAY(char, O_BUFLEN); |
664 | stringStream st(buffer, O_BUFLEN); |
665 | deps.print_dependency(witness, true, &st); |
666 | *failure_detail = st.as_string(); |
667 | } |
668 | } |
669 | klass_violations++; |
670 | if (!counter_changed) { |
671 | // Dependence failed but counter didn't change. Log a message |
672 | // describing what failed and allow the assert at the end to |
673 | // trigger. |
674 | deps.print_dependency(witness); |
675 | } else if (xtty == NULL) { |
676 | // If we're not logging then a single violation is sufficient, |
677 | // otherwise we want to log all the dependences which were |
678 | // violated. |
679 | break; |
680 | } |
681 | } |
682 | } |
683 | |
684 | if (klass_violations != 0) { |
685 | #ifdef ASSERT |
686 | if (task != NULL && !counter_changed && !PrintCompilation) { |
687 | // Print out the compile task that failed |
688 | task->print_tty(); |
689 | } |
690 | #endif |
691 | assert(counter_changed, "failed dependencies, but counter didn't change" ); |
692 | } |
693 | return result; |
694 | } |
695 | |
696 | // for the sake of the compiler log, print out current dependencies: |
697 | void Dependencies::log_all_dependencies() { |
698 | if (log() == NULL) return; |
699 | ResourceMark rm; |
700 | for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) { |
701 | DepType dept = (DepType)deptv; |
702 | GrowableArray<ciBaseObject*>* deps = _deps[dept]; |
703 | int deplen = deps->length(); |
704 | if (deplen == 0) { |
705 | continue; |
706 | } |
707 | int stride = dep_args(dept); |
708 | GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride); |
709 | for (int i = 0; i < deps->length(); i += stride) { |
710 | for (int j = 0; j < stride; j++) { |
711 | // flush out the identities before printing |
712 | ciargs->push(deps->at(i+j)); |
713 | } |
714 | write_dependency_to(log(), dept, ciargs); |
715 | ciargs->clear(); |
716 | } |
guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResourceMark scope");
718 | } |
719 | } |
720 | |
721 | void Dependencies::write_dependency_to(CompileLog* log, |
722 | DepType dept, |
723 | GrowableArray<DepArgument>* args, |
724 | Klass* witness) { |
725 | if (log == NULL) { |
726 | return; |
727 | } |
728 | ResourceMark rm; |
729 | ciEnv* env = ciEnv::current(); |
730 | GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length()); |
731 | for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) { |
732 | DepArgument arg = *it; |
733 | if (arg.is_oop()) { |
734 | ciargs->push(env->get_object(arg.oop_value())); |
735 | } else { |
736 | ciargs->push(env->get_metadata(arg.metadata_value())); |
737 | } |
738 | } |
739 | int argslen = ciargs->length(); |
740 | Dependencies::write_dependency_to(log, dept, ciargs, witness); |
guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResourceMark scope");
742 | } |
743 | |
744 | void Dependencies::write_dependency_to(CompileLog* log, |
745 | DepType dept, |
746 | GrowableArray<ciBaseObject*>* args, |
747 | Klass* witness) { |
748 | if (log == NULL) { |
749 | return; |
750 | } |
751 | ResourceMark rm; |
752 | GrowableArray<int>* argids = new GrowableArray<int>(args->length()); |
753 | for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) { |
754 | ciBaseObject* obj = *it; |
755 | if (obj->is_object()) { |
756 | argids->push(log->identify(obj->as_object())); |
757 | } else { |
758 | argids->push(log->identify(obj->as_metadata())); |
759 | } |
760 | } |
761 | if (witness != NULL) { |
762 | log->begin_elem("dependency_failed" ); |
763 | } else { |
764 | log->begin_elem("dependency" ); |
765 | } |
766 | log->print(" type='%s'" , dep_name(dept)); |
767 | const int ctxkj = dep_context_arg(dept); // -1 if no context arg |
768 | if (ctxkj >= 0 && ctxkj < argids->length()) { |
769 | log->print(" ctxk='%d'" , argids->at(ctxkj)); |
770 | } |
771 | // write remaining arguments, if any. |
772 | for (int j = 0; j < argids->length(); j++) { |
773 | if (j == ctxkj) continue; // already logged |
774 | if (j == 1) { |
775 | log->print( " x='%d'" , argids->at(j)); |
776 | } else { |
777 | log->print(" x%d='%d'" , j, argids->at(j)); |
778 | } |
779 | } |
780 | if (witness != NULL) { |
781 | log->object("witness" , witness); |
782 | log->stamp(); |
783 | } |
784 | log->end_elem(); |
785 | } |
786 | |
787 | void Dependencies::write_dependency_to(xmlStream* xtty, |
788 | DepType dept, |
789 | GrowableArray<DepArgument>* args, |
790 | Klass* witness) { |
791 | if (xtty == NULL) { |
792 | return; |
793 | } |
794 | Thread* thread = Thread::current(); |
795 | HandleMark rm(thread); |
796 | ttyLocker ttyl; |
797 | int ctxkj = dep_context_arg(dept); // -1 if no context arg |
798 | if (witness != NULL) { |
799 | xtty->begin_elem("dependency_failed" ); |
800 | } else { |
801 | xtty->begin_elem("dependency" ); |
802 | } |
803 | xtty->print(" type='%s'" , dep_name(dept)); |
804 | if (ctxkj >= 0) { |
805 | xtty->object("ctxk" , args->at(ctxkj).metadata_value()); |
806 | } |
807 | // write remaining arguments, if any. |
808 | for (int j = 0; j < args->length(); j++) { |
809 | if (j == ctxkj) continue; // already logged |
810 | DepArgument arg = args->at(j); |
811 | if (j == 1) { |
812 | if (arg.is_oop()) { |
813 | xtty->object("x" , Handle(thread, arg.oop_value())); |
814 | } else { |
815 | xtty->object("x" , arg.metadata_value()); |
816 | } |
817 | } else { |
818 | char xn[12]; sprintf(xn, "x%d" , j); |
819 | if (arg.is_oop()) { |
820 | xtty->object(xn, Handle(thread, arg.oop_value())); |
821 | } else { |
822 | xtty->object(xn, arg.metadata_value()); |
823 | } |
824 | } |
825 | } |
826 | if (witness != NULL) { |
827 | xtty->object("witness" , witness); |
828 | xtty->stamp(); |
829 | } |
830 | xtty->end_elem(); |
831 | } |
832 | |
833 | void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args, |
834 | Klass* witness, outputStream* st) { |
835 | ResourceMark rm; |
836 | ttyLocker ttyl; // keep the following output all in one block |
837 | st->print_cr("%s of type %s" , |
838 | (witness == NULL)? "Dependency" : "Failed dependency" , |
839 | dep_name(dept)); |
840 | // print arguments |
841 | int ctxkj = dep_context_arg(dept); // -1 if no context arg |
842 | for (int j = 0; j < args->length(); j++) { |
843 | DepArgument arg = args->at(j); |
844 | bool put_star = false; |
845 | if (arg.is_null()) continue; |
846 | const char* what; |
847 | if (j == ctxkj) { |
848 | assert(arg.is_metadata(), "must be" ); |
849 | what = "context" ; |
850 | put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value()); |
851 | } else if (arg.is_method()) { |
852 | what = "method " ; |
853 | put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL); |
854 | } else if (arg.is_klass()) { |
855 | what = "class " ; |
856 | } else { |
857 | what = "object " ; |
858 | } |
859 | st->print(" %s = %s" , what, (put_star? "*" : "" )); |
860 | if (arg.is_klass()) { |
861 | st->print("%s" , ((Klass*)arg.metadata_value())->external_name()); |
862 | } else if (arg.is_method()) { |
863 | ((Method*)arg.metadata_value())->print_value_on(st); |
864 | } else if (arg.is_oop()) { |
865 | arg.oop_value()->print_value_on(st); |
866 | } else { |
867 | ShouldNotReachHere(); // Provide impl for this type. |
868 | } |
869 | |
870 | st->cr(); |
871 | } |
872 | if (witness != NULL) { |
873 | bool put_star = !Dependencies::is_concrete_klass(witness); |
874 | st->print_cr(" witness = %s%s" , |
875 | (put_star? "*" : "" ), |
876 | witness->external_name()); |
877 | } |
878 | } |
879 | |
880 | void Dependencies::DepStream::log_dependency(Klass* witness) { |
881 | if (_deps == NULL && xtty == NULL) return; // fast cutout for runtime |
882 | ResourceMark rm; |
883 | const int nargs = argument_count(); |
884 | GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs); |
885 | for (int j = 0; j < nargs; j++) { |
886 | if (is_oop_argument(j)) { |
887 | args->push(argument_oop(j)); |
888 | } else { |
889 | args->push(argument(j)); |
890 | } |
891 | } |
892 | int argslen = args->length(); |
893 | if (_deps != NULL && _deps->log() != NULL) { |
894 | if (ciEnv::current() != NULL) { |
895 | Dependencies::write_dependency_to(_deps->log(), type(), args, witness); |
896 | } else { |
897 | // Treat the CompileLog as an xmlstream instead |
898 | Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness); |
899 | } |
900 | } else { |
901 | Dependencies::write_dependency_to(xtty, type(), args, witness); |
902 | } |
guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
904 | } |
905 | |
906 | void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) { |
907 | ResourceMark rm; |
908 | int nargs = argument_count(); |
909 | GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs); |
910 | for (int j = 0; j < nargs; j++) { |
911 | if (is_oop_argument(j)) { |
912 | args->push(argument_oop(j)); |
913 | } else { |
914 | args->push(argument(j)); |
915 | } |
916 | } |
917 | int argslen = args->length(); |
918 | Dependencies::print_dependency(type(), args, witness, st); |
919 | if (verbose) { |
920 | if (_code != NULL) { |
921 | st->print(" code: " ); |
922 | _code->print_value_on(st); |
923 | st->cr(); |
924 | } |
925 | } |
guarantee(argslen == args->length(), "args array cannot grow inside nested ResourceMark scope");
927 | } |
928 | |
929 | |
930 | /// Dependency stream support (decodes dependencies from an nmethod): |
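//
// Typical decoding loop (cf. validate_dependencies() above):
//   for (Dependencies::DepStream deps(this); deps.next(); ) {
//     Klass* witness = deps.check_dependency();
//     if (witness != NULL) { /* the dependency of kind deps.type() is broken */ }
//   }
// A DepStream can also be constructed over an installed nmethod (the _code
// branch below), which is how dependencies recorded in code are re-checked.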
931 | |
932 | #ifdef ASSERT |
933 | void Dependencies::DepStream::initial_asserts(size_t byte_limit) { |
934 | assert(must_be_in_vm(), "raw oops here" ); |
935 | _byte_limit = byte_limit; |
936 | _type = (DepType)(end_marker-1); // defeat "already at end" assert |
937 | assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other" ); |
938 | } |
939 | #endif //ASSERT |
940 | |
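// next() is the inverse of encode_content_bytes(): the tag byte is split back
// into a DepType plus the optional default_context_type_bit, and one compressed
// int is read per remaining argument.  When the context was elided, its slot is
// left 0 and argument() recomputes the context klass from the following
// argument via ctxk_encoded_as_null().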
941 | bool Dependencies::DepStream::next() { |
942 | assert(_type != end_marker, "already at end" ); |
943 | if (_bytes.position() == 0 && _code != NULL |
944 | && _code->dependencies_size() == 0) { |
945 | // Method has no dependencies at all. |
946 | return false; |
947 | } |
948 | int code_byte = (_bytes.read_byte() & 0xFF); |
949 | if (code_byte == end_marker) { |
950 | DEBUG_ONLY(_type = end_marker); |
951 | return false; |
952 | } else { |
953 | int ctxk_bit = (code_byte & Dependencies::default_context_type_bit); |
954 | code_byte -= ctxk_bit; |
955 | DepType dept = (DepType)code_byte; |
956 | _type = dept; |
957 | Dependencies::check_valid_dependency_type(dept); |
958 | int stride = _dep_args[dept]; |
959 | assert(stride == dep_args(dept), "sanity" ); |
960 | int skipj = -1; |
961 | if (ctxk_bit != 0) { |
962 | skipj = 0; // currently the only context argument is at zero |
963 | assert(skipj == dep_context_arg(dept), "zero arg always ctxk" ); |
964 | } |
965 | for (int j = 0; j < stride; j++) { |
966 | _xi[j] = (j == skipj)? 0: _bytes.read_int(); |
967 | } |
968 | DEBUG_ONLY(_xi[stride] = -1); // help detect overruns |
969 | return true; |
970 | } |
971 | } |
972 | |
973 | inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) { |
974 | Metadata* o = NULL; |
975 | if (_code != NULL) { |
976 | o = _code->metadata_at(i); |
977 | } else { |
978 | o = _deps->oop_recorder()->metadata_at(i); |
979 | } |
980 | return o; |
981 | } |
982 | |
983 | inline oop Dependencies::DepStream::recorded_oop_at(int i) { |
984 | return (_code != NULL) |
985 | ? _code->oop_at(i) |
986 | : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i)); |
987 | } |
988 | |
989 | Metadata* Dependencies::DepStream::argument(int i) { |
990 | Metadata* result = recorded_metadata_at(argument_index(i)); |
991 | |
992 | if (result == NULL) { // Explicit context argument can be compressed |
993 | int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg |
994 | if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) { |
995 | result = ctxk_encoded_as_null(type(), argument(ctxkj+1)); |
996 | } |
997 | } |
998 | |
999 | assert(result == NULL || result->is_klass() || result->is_method(), "must be" ); |
1000 | return result; |
1001 | } |
1002 | |
1003 | /** |
1004 | * Returns a unique identifier for each dependency argument. |
1005 | */ |
1006 | uintptr_t Dependencies::DepStream::get_identifier(int i) { |
1007 | if (is_oop_argument(i)) { |
1008 | return (uintptr_t)(oopDesc*)argument_oop(i); |
1009 | } else { |
1010 | return (uintptr_t)argument(i); |
1011 | } |
1012 | } |
1013 | |
1014 | oop Dependencies::DepStream::argument_oop(int i) { |
1015 | oop result = recorded_oop_at(argument_index(i)); |
1016 | assert(oopDesc::is_oop_or_null(result), "must be" ); |
1017 | return result; |
1018 | } |
1019 | |
1020 | Klass* Dependencies::DepStream::context_type() { |
1021 | assert(must_be_in_vm(), "raw oops here" ); |
1022 | |
1023 | // Most dependencies have an explicit context type argument. |
1024 | { |
1025 | int ctxkj = dep_context_arg(type()); // -1 if no explicit context arg |
1026 | if (ctxkj >= 0) { |
1027 | Metadata* k = argument(ctxkj); |
1028 | assert(k != NULL && k->is_klass(), "type check" ); |
1029 | return (Klass*)k; |
1030 | } |
1031 | } |
1032 | |
1033 | // Some dependencies are using the klass of the first object |
1034 | // argument as implicit context type. |
1035 | { |
1036 | int ctxkj = dep_implicit_context_arg(type()); |
1037 | if (ctxkj >= 0) { |
1038 | Klass* k = argument_oop(ctxkj)->klass(); |
1039 | assert(k != NULL && k->is_klass(), "type check" ); |
1040 | return (Klass*) k; |
1041 | } |
1042 | } |
1043 | |
1044 | // And some dependencies don't have a context type at all, |
1045 | // e.g. evol_method. |
1046 | return NULL; |
1047 | } |
1048 | |
1049 | // ----------------- DependencySignature -------------------------------------- |
1050 | bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) { |
1051 | if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) { |
1052 | return false; |
1053 | } |
1054 | |
1055 | for (int i = 0; i < s1.args_count(); i++) { |
1056 | if (s1.arg(i) != s2.arg(i)) { |
1057 | return false; |
1058 | } |
1059 | } |
1060 | return true; |
1061 | } |
1062 | |
1063 | /// Checking dependencies: |
1064 | |
1065 | // This hierarchy walker inspects subtypes of a given type, |
1066 | // trying to find a "bad" class which breaks a dependency. |
1067 | // Such a class is called a "witness" to the broken dependency. |
1068 | // While searching around, we ignore "participants", which |
1069 | // are already known to the dependency. |
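// For example (roughly how unique_concrete_method is checked): the holder of
// the expected unique method is registered as a participant, and the walk over
// the subtypes of ctxk looks for any other concrete definer of that method;
// such a definer would be a witness that uniqueness no longer holds.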
1070 | class ClassHierarchyWalker { |
1071 | public: |
1072 | enum { PARTICIPANT_LIMIT = 3 }; |
1073 | |
1074 | private: |
1075 | // optional method descriptor to check for: |
1076 | Symbol* _name; |
1077 | Symbol* _signature; |
1078 | |
1079 | // special classes which are not allowed to be witnesses: |
1080 | Klass* _participants[PARTICIPANT_LIMIT+1]; |
1081 | int _num_participants; |
1082 | |
1083 | // cache of method lookups |
1084 | Method* _found_methods[PARTICIPANT_LIMIT+1]; |
1085 | |
1086 | // if non-zero, tells how many witnesses to convert to participants |
1087 | int _record_witnesses; |
1088 | |
1089 | void initialize(Klass* participant) { |
1090 | _record_witnesses = 0; |
1091 | _participants[0] = participant; |
1092 | _found_methods[0] = NULL; |
1093 | _num_participants = 0; |
1094 | if (participant != NULL) { |
1095 | // Terminating NULL. |
1096 | _participants[1] = NULL; |
1097 | _found_methods[1] = NULL; |
1098 | _num_participants = 1; |
1099 | } |
1100 | } |
1101 | |
1102 | void initialize_from_method(Method* m) { |
1103 | assert(m != NULL && m->is_method(), "sanity" ); |
1104 | _name = m->name(); |
1105 | _signature = m->signature(); |
1106 | } |
1107 | |
1108 | public: |
1109 | // The walker is initialized to recognize certain methods and/or types |
1110 | // as friendly participants. |
1111 | ClassHierarchyWalker(Klass* participant, Method* m) { |
1112 | initialize_from_method(m); |
1113 | initialize(participant); |
1114 | } |
1115 | ClassHierarchyWalker(Method* m) { |
1116 | initialize_from_method(m); |
1117 | initialize(NULL); |
1118 | } |
1119 | ClassHierarchyWalker(Klass* participant = NULL) { |
1120 | _name = NULL; |
1121 | _signature = NULL; |
1122 | initialize(participant); |
1123 | } |
1124 | ClassHierarchyWalker(Klass* participants[], int num_participants) { |
1125 | _name = NULL; |
1126 | _signature = NULL; |
1127 | initialize(NULL); |
1128 | for (int i = 0; i < num_participants; ++i) { |
1129 | add_participant(participants[i]); |
1130 | } |
1131 | } |
1132 | |
1133 | // This is common code for two searches: One for concrete subtypes, |
1134 | // the other for concrete method implementations and overrides. |
1135 | bool doing_subtype_search() { |
1136 | return _name == NULL; |
1137 | } |
1138 | |
1139 | int num_participants() { return _num_participants; } |
1140 | Klass* participant(int n) { |
1141 | assert((uint)n <= (uint)_num_participants, "oob" ); |
1142 | return _participants[n]; |
1143 | } |
1144 | |
1145 | // Note: If n==num_participants, returns NULL. |
1146 | Method* found_method(int n) { |
1147 | assert((uint)n <= (uint)_num_participants, "oob" ); |
1148 | Method* fm = _found_methods[n]; |
1149 | assert(n == _num_participants || fm != NULL, "proper usage" ); |
1150 | if (fm != NULL && fm->method_holder() != _participants[n]) { |
1151 | // Default methods from interfaces can be added to classes. In |
1152 | // that case the holder of the method is not the class but the |
1153 | // interface where it's defined. |
1154 | assert(fm->is_default_method(), "sanity" ); |
1155 | return NULL; |
1156 | } |
1157 | return fm; |
1158 | } |
1159 | |
1160 | #ifdef ASSERT |
1161 | // Assert that m is inherited into ctxk, without intervening overrides. |
1162 | // (May return true even if this is not true, in corner cases where we punt.) |
1163 | bool check_method_context(Klass* ctxk, Method* m) { |
1164 | if (m->method_holder() == ctxk) |
1165 | return true; // Quick win. |
1166 | if (m->is_private()) |
1167 | return false; // Quick lose. Should not happen. |
1168 | if (!(m->is_public() || m->is_protected())) |
1169 | // The override story is complex when packages get involved. |
1170 | return true; // Must punt the assertion to true. |
1171 | Method* lm = ctxk->lookup_method(m->name(), m->signature()); |
1172 | if (lm == NULL && ctxk->is_instance_klass()) { |
1173 | // It might be an interface method |
1174 | lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(), |
1175 | m->signature()); |
1176 | } |
1177 | if (lm == m) |
1178 | // Method m is inherited into ctxk. |
1179 | return true; |
1180 | if (lm != NULL) { |
1181 | if (!(lm->is_public() || lm->is_protected())) { |
1182 | // Method is [package-]private, so the override story is complex. |
1183 | return true; // Must punt the assertion to true. |
1184 | } |
1185 | if (lm->is_static()) { |
1186 | // Static methods don't override non-static so punt |
1187 | return true; |
1188 | } |
1189 | if (!Dependencies::is_concrete_method(lm, ctxk) && |
1190 | !Dependencies::is_concrete_method(m, ctxk)) { |
1191 | // They are both non-concrete |
1192 | if (lm->method_holder()->is_subtype_of(m->method_holder())) { |
1193 | // Method m is overridden by lm, but both are non-concrete. |
1194 | return true; |
1195 | } |
1196 | if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() && |
1197 | ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) { |
1198 | // Interface method defined in multiple super interfaces |
1199 | return true; |
1200 | } |
1201 | } |
1202 | } |
1203 | ResourceMark rm; |
1204 | tty->print_cr("Dependency method not found in the associated context:" ); |
1205 | tty->print_cr(" context = %s" , ctxk->external_name()); |
1206 | tty->print( " method = " ); m->print_short_name(tty); tty->cr(); |
1207 | if (lm != NULL) { |
1208 | tty->print( " found = " ); lm->print_short_name(tty); tty->cr(); |
1209 | } |
1210 | return false; |
1211 | } |
1212 | #endif |
1213 | |
1214 | void add_participant(Klass* participant) { |
1215 | assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob" ); |
1216 | int np = _num_participants++; |
1217 | _participants[np] = participant; |
1218 | _participants[np+1] = NULL; |
1219 | _found_methods[np+1] = NULL; |
1220 | } |
1221 | |
1222 | void record_witnesses(int add) { |
1223 | if (add > PARTICIPANT_LIMIT) add = PARTICIPANT_LIMIT; |
1224 | assert(_num_participants + add < PARTICIPANT_LIMIT, "oob" ); |
1225 | _record_witnesses = add; |
1226 | } |
1227 | |
1228 | bool is_witness(Klass* k) { |
1229 | if (doing_subtype_search()) { |
1230 | return Dependencies::is_concrete_klass(k); |
1231 | } else if (!k->is_instance_klass()) { |
1232 | return false; // no methods to find in an array type |
1233 | } else { |
1234 | // Search class hierarchy first, skipping private implementations |
1235 | // as they never override any inherited methods |
1236 | Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature, Klass::skip_private); |
1237 | if (!Dependencies::is_concrete_method(m, k)) { |
1238 | // Check for re-abstraction of method |
1239 | if (!k->is_interface() && m != NULL && m->is_abstract()) { |
1240 | // Found a matching abstract method 'm' in the class hierarchy. |
1241 | // This is fine iff 'k' is an abstract class and all concrete subtypes |
// of 'k' override 'm' and are participants in the current search.
1243 | ClassHierarchyWalker wf(_participants, _num_participants); |
1244 | Klass* w = wf.find_witness_subtype(k); |
1245 | if (w != NULL) { |
1246 | Method* wm = InstanceKlass::cast(w)->find_instance_method(_name, _signature); |
1247 | if (!Dependencies::is_concrete_method(wm, w)) { |
1248 | // Found a concrete subtype 'w' which does not override abstract method 'm'. |
1249 | // Bail out because 'm' could be called with 'w' as receiver (leading to an |
1250 | // AbstractMethodError) and thus the method we are looking for is not unique. |
1251 | _found_methods[_num_participants] = m; |
1252 | return true; |
1253 | } |
1254 | } |
1255 | } |
1256 | // Check interface defaults also, if any exist. |
1257 | Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods(); |
1258 | if (default_methods == NULL) |
1259 | return false; |
1260 | m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature); |
1261 | if (!Dependencies::is_concrete_method(m, NULL)) |
1262 | return false; |
1263 | } |
1264 | _found_methods[_num_participants] = m; |
1265 | // Note: If add_participant(k) is called, |
1266 | // the method m will already be memoized for it. |
1267 | return true; |
1268 | } |
1269 | } |
1270 | |
1271 | bool is_participant(Klass* k) { |
1272 | if (k == _participants[0]) { |
1273 | return true; |
1274 | } else if (_num_participants <= 1) { |
1275 | return false; |
1276 | } else { |
1277 | return in_list(k, &_participants[1]); |
1278 | } |
1279 | } |
1280 | bool ignore_witness(Klass* witness) { |
1281 | if (_record_witnesses == 0) { |
1282 | return false; |
1283 | } else { |
1284 | --_record_witnesses; |
1285 | add_participant(witness); |
1286 | return true; |
1287 | } |
1288 | } |
1289 | static bool in_list(Klass* x, Klass** list) { |
1290 | for (int i = 0; ; i++) { |
1291 | Klass* y = list[i]; |
1292 | if (y == NULL) break; |
1293 | if (y == x) return true; |
1294 | } |
1295 | return false; // not in list |
1296 | } |
1297 | |
1298 | private: |
1299 | // the actual search method: |
1300 | Klass* find_witness_anywhere(Klass* context_type, |
1301 | bool participants_hide_witnesses, |
1302 | bool top_level_call = true); |
1303 | // the spot-checking version: |
1304 | Klass* find_witness_in(KlassDepChange& changes, |
1305 | Klass* context_type, |
1306 | bool participants_hide_witnesses); |
1307 | public: |
1308 | Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) { |
1309 | assert(doing_subtype_search(), "must set up a subtype search" ); |
1310 | // When looking for unexpected concrete types, |
1311 | // do not look beneath expected ones. |
1312 | const bool participants_hide_witnesses = true; |
1313 | // CX > CC > C' is OK, even if C' is new. |
1314 | // CX > { CC, C' } is not OK if C' is new, and C' is the witness. |
1315 | if (changes != NULL) { |
1316 | return find_witness_in(*changes, context_type, participants_hide_witnesses); |
1317 | } else { |
1318 | return find_witness_anywhere(context_type, participants_hide_witnesses); |
1319 | } |
1320 | } |
1321 | Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) { |
1322 | assert(!doing_subtype_search(), "must set up a method definer search" ); |
1323 | // When looking for unexpected concrete methods, |
1324 | // look beneath expected ones, to see if there are overrides. |
1325 | const bool participants_hide_witnesses = true; |
1326 | // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness. |
1327 | if (changes != NULL) { |
1328 | return find_witness_in(*changes, context_type, !participants_hide_witnesses); |
1329 | } else { |
1330 | return find_witness_anywhere(context_type, !participants_hide_witnesses); |
1331 | } |
1332 | } |
1333 | }; |
1334 | |
1335 | #ifndef PRODUCT |
1336 | static int deps_find_witness_calls = 0; |
1337 | static int deps_find_witness_steps = 0; |
1338 | static int deps_find_witness_recursions = 0; |
1339 | static int deps_find_witness_singles = 0; |
1340 | static int deps_find_witness_print = 0; // set to -1 to force a final print |
1341 | static bool count_find_witness_calls() { |
1342 | if (TraceDependencies || LogCompilation) { |
1343 | int pcount = deps_find_witness_print + 1; |
1344 | bool final_stats = (pcount == 0); |
1345 | bool initial_call = (pcount == 1); |
1346 | bool occasional_print = ((pcount & ((1<<10) - 1)) == 0); |
1347 | if (pcount < 0) pcount = 1; // crude overflow protection |
1348 | deps_find_witness_print = pcount; |
1349 | if (VerifyDependencies && initial_call) { |
1350 | tty->print_cr("Warning: TraceDependencies results may be inflated by VerifyDependencies" ); |
1351 | } |
1352 | if (occasional_print || final_stats) { |
1353 | // Every now and then dump a little info about dependency searching. |
1354 | if (xtty != NULL) { |
1355 | ttyLocker ttyl; |
1356 | xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'" , |
1357 | deps_find_witness_calls, |
1358 | deps_find_witness_steps, |
1359 | deps_find_witness_recursions, |
1360 | deps_find_witness_singles); |
1361 | } |
1362 | if (final_stats || (TraceDependencies && WizardMode)) { |
1363 | ttyLocker ttyl; |
1364 | tty->print_cr("Dependency check (find_witness) " |
1365 | "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d" , |
1366 | deps_find_witness_calls, |
1367 | deps_find_witness_steps, |
1368 | (double)deps_find_witness_steps / deps_find_witness_calls, |
1369 | deps_find_witness_recursions, |
1370 | deps_find_witness_singles); |
1371 | } |
1372 | } |
1373 | return true; |
1374 | } |
1375 | return false; |
1376 | } |
1377 | #else |
1378 | #define count_find_witness_calls() (0) |
1379 | #endif //PRODUCT |
1380 | |
1381 | |
1382 | Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes, |
1383 | Klass* context_type, |
1384 | bool participants_hide_witnesses) { |
1385 | assert(changes.involves_context(context_type), "irrelevant dependency" ); |
1386 | Klass* new_type = changes.new_type(); |
1387 | |
1388 | (void)count_find_witness_calls(); |
1389 | NOT_PRODUCT(deps_find_witness_singles++); |
1390 | |
1391 | // Current thread must be in VM (not native mode, as in CI): |
1392 | assert(must_be_in_vm(), "raw oops here" ); |
1393 | // Must not move the class hierarchy during this check: |
1394 | assert_locked_or_safepoint(Compile_lock); |
1395 | |
1396 | int nof_impls = InstanceKlass::cast(context_type)->nof_implementors(); |
1397 | if (nof_impls > 1) { |
1398 | // Avoid this case: *I.m > { A.m, C }; B.m > C |
1399 | // %%% Until this is fixed more systematically, bail out. |
1400 | // See corresponding comment in find_witness_anywhere. |
1401 | return context_type; |
1402 | } |
1403 | |
1404 | assert(!is_participant(new_type), "only old classes are participants" ); |
1405 | if (participants_hide_witnesses) { |
1406 | // If the new type is a subtype of a participant, we are done. |
1407 | for (int i = 0; i < num_participants(); i++) { |
1408 | Klass* part = participant(i); |
1409 | if (part == NULL) continue; |
1410 | assert(changes.involves_context(part) == new_type->is_subtype_of(part), |
1411 | "correct marking of participants, b/c new_type is unique" ); |
1412 | if (changes.involves_context(part)) { |
1413 | // new guy is protected from this check by previous participant |
1414 | return NULL; |
1415 | } |
1416 | } |
1417 | } |
1418 | |
1419 | if (is_witness(new_type) && |
1420 | !ignore_witness(new_type)) { |
1421 | return new_type; |
1422 | } |
1423 | |
1424 | return NULL; |
1425 | } |
1426 | |
1427 | |
1428 | // Walk hierarchy under a context type, looking for unexpected types. |
1429 | // Do not report participant types, and recursively walk beneath |
1430 | // them only if participants_hide_witnesses is false. |
1431 | // If top_level_call is false, skip testing the context type, |
1432 | // because the caller has already considered it. |
1433 | Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type, |
1434 | bool participants_hide_witnesses, |
1435 | bool top_level_call) { |
1436 | // Current thread must be in VM (not native mode, as in CI): |
1437 | assert(must_be_in_vm(), "raw oops here" ); |
1438 | // Must not move the class hierarchy during this check: |
1439 | assert_locked_or_safepoint(Compile_lock); |
1440 | |
1441 | bool do_counts = count_find_witness_calls(); |
1442 | |
1443 | // Check the root of the sub-hierarchy first. |
1444 | if (top_level_call) { |
1445 | if (do_counts) { |
1446 | NOT_PRODUCT(deps_find_witness_calls++); |
1447 | NOT_PRODUCT(deps_find_witness_steps++); |
1448 | } |
1449 | if (is_participant(context_type)) { |
1450 | if (participants_hide_witnesses) return NULL; |
1451 | // else fall through to search loop... |
1452 | } else if (is_witness(context_type) && !ignore_witness(context_type)) { |
1453 | // The context is an abstract class or interface, to start with. |
1454 | return context_type; |
1455 | } |
1456 | } |
1457 | |
1458 | // Now we must check each implementor and each subclass. |
1459 | // Use a short worklist to avoid blowing the stack. |
1460 | // Each worklist entry is a *chain* of subklass siblings to process. |
1461 | const int CHAINMAX = 100; // >= 1 + InstanceKlass::implementors_limit |
1462 | Klass* chains[CHAINMAX]; |
1463 | int chaini = 0; // index into worklist |
1464 | Klass* chain; // scratch variable |
1465 | #define ADD_SUBCLASS_CHAIN(k) { \ |
1466 | assert(chaini < CHAINMAX, "oob"); \ |
1467 | chain = k->subklass(); \ |
1468 | if (chain != NULL) chains[chaini++] = chain; } |
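
  // Illustration of the worklist (hypothetical hierarchy, for exposition):
  // given context_type A with direct subclasses B1 and B2, where B1 in turn
  // has subclasses C1 and C2, the walk proceeds roughly as follows:
  //   ADD_SUBCLASS_CHAIN(A)    pushes the sibling chain B1 -> B2
  //   pop B1 -> B2             test B1, push its chain C1 -> C2, test B2, ...
  // This is why a small array suffices even for wide hierarchies.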
1469 | |
1470 | // Look for non-abstract subclasses. |
1471 | // (Note: Interfaces do not have subclasses.) |
1472 | ADD_SUBCLASS_CHAIN(context_type); |
1473 | |
1474 | // If it is an interface, search its direct implementors. |
1475 | // (Their subclasses are additional indirect implementors. |
1476 | // See InstanceKlass::add_implementor.) |
1477 | // (Note: nof_implementors is always zero for non-interfaces.) |
1478 | if (top_level_call) { |
1479 | int nof_impls = InstanceKlass::cast(context_type)->nof_implementors(); |
1480 | if (nof_impls > 1) { |
1481 | // Avoid this case: *I.m > { A.m, C }; B.m > C |
1482 | // Here, I.m has 2 concrete implementations, but m appears unique |
1483 | // as A.m, because the search misses B.m when checking C. |
1484 | // The inherited method B.m was getting missed by the walker |
1485 | // when interface 'I' was the starting point. |
1486 | // %%% Until this is fixed more systematically, bail out. |
1487 | // (Old CHA had the same limitation.) |
1488 | return context_type; |
1489 | } |
1490 | if (nof_impls > 0) { |
1491 | Klass* impl = InstanceKlass::cast(context_type)->implementor(); |
1492 | assert(impl != NULL, "just checking" ); |
1493 | // If impl is the same as the context_type, then more than one |
      // implementor has been seen. No exact info in this case.
1495 | if (impl == context_type) { |
1496 | return context_type; // report an inexact witness to this sad affair |
1497 | } |
      if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1500 | if (is_participant(impl)) { |
1501 | if (!participants_hide_witnesses) { |
1502 | ADD_SUBCLASS_CHAIN(impl); |
1503 | } |
1504 | } else if (is_witness(impl) && !ignore_witness(impl)) { |
1505 | return impl; |
1506 | } else { |
1507 | ADD_SUBCLASS_CHAIN(impl); |
1508 | } |
1509 | } |
1510 | } |
1511 | |
1512 | // Recursively process each non-trivial sibling chain. |
1513 | while (chaini > 0) { |
1514 | Klass* chain = chains[--chaini]; |
1515 | for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) { |
1516 | if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); } |
1517 | if (is_participant(sub)) { |
1518 | if (participants_hide_witnesses) continue; |
1519 | // else fall through to process this guy's subclasses |
1520 | } else if (is_witness(sub) && !ignore_witness(sub)) { |
1521 | return sub; |
1522 | } |
1523 | if (chaini < (VerifyDependencies? 2: CHAINMAX)) { |
1524 | // Fast path. (Partially disabled if VerifyDependencies.) |
1525 | ADD_SUBCLASS_CHAIN(sub); |
1526 | } else { |
1527 | // Worklist overflow. Do a recursive call. Should be rare. |
1528 | // The recursive call will have its own worklist, of course. |
1529 | // (Note that sub has already been tested, so that there is |
1530 | // no need for the recursive call to re-test. That's handy, |
1531 | // since the recursive call sees sub as the context_type.) |
1532 | if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); } |
1533 | Klass* witness = find_witness_anywhere(sub, |
1534 | participants_hide_witnesses, |
1535 | /*top_level_call=*/ false); |
1536 | if (witness != NULL) return witness; |
1537 | } |
1538 | } |
1539 | } |
1540 | |
1541 | // No witness found. The dependency remains unbroken. |
1542 | return NULL; |
1543 | #undef ADD_SUBCLASS_CHAIN |
1544 | } |
1545 | |
1546 | |
1547 | bool Dependencies::is_concrete_klass(Klass* k) { |
1548 | if (k->is_abstract()) return false; |
1549 | // %%% We could treat classes which are concrete but |
1550 | // have not yet been instantiated as virtually abstract. |
1551 | // This would require a deoptimization barrier on first instantiation. |
1552 | //if (k->is_not_instantiated()) return false; |
1553 | return true; |
1554 | } |
1555 | |
bool Dependencies::is_concrete_method(Method* m, Klass* k) {
1557 | // NULL is not a concrete method, |
1558 | // statics are irrelevant to virtual call sites, |
1559 | // abstract methods are not concrete, |
1560 | // overpass (error) methods are not concrete if k is abstract |
1561 | // |
1562 | // note "true" is conservative answer -- |
1563 | // overpass clause is false if k == NULL, implies return true if |
1564 | // answer depends on overpass clause. |
1565 | return ! ( m == NULL || m -> is_static() || m -> is_abstract() || |
1566 | (m->is_overpass() && k != NULL && k -> is_abstract()) ); |
1567 | } |
1568 | |
1569 | |
1570 | Klass* Dependencies::find_finalizable_subclass(Klass* k) { |
1571 | if (k->is_interface()) return NULL; |
1572 | if (k->has_finalizer()) return k; |
1573 | k = k->subklass(); |
1574 | while (k != NULL) { |
1575 | Klass* result = find_finalizable_subclass(k); |
1576 | if (result != NULL) return result; |
1577 | k = k->next_sibling(); |
1578 | } |
1579 | return NULL; |
1580 | } |
1581 | |
1582 | |
1583 | bool Dependencies::is_concrete_klass(ciInstanceKlass* k) { |
1584 | if (k->is_abstract()) return false; |
1585 | // We could also return false if k does not yet appear to be |
1586 | // instantiated, if the VM version supports this distinction also. |
1587 | //if (k->is_not_instantiated()) return false; |
1588 | return true; |
1589 | } |
1590 | |
1591 | bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) { |
1592 | return k->has_finalizable_subclass(); |
1593 | } |
1594 | |
1595 | |
1596 | // Any use of the contents (bytecodes) of a method must be |
1597 | // marked by an "evol_method" dependency, if those contents |
1598 | // can change. (Note: A method is always dependent on itself.) |
1599 | Klass* Dependencies::check_evol_method(Method* m) { |
1600 | assert(must_be_in_vm(), "raw oops here" ); |
1601 | // Did somebody do a JVMTI RedefineClasses while our backs were turned? |
  // Or is there now a breakpoint?
1603 | // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.) |
1604 | if (m->is_old() |
1605 | || m->number_of_breakpoints() > 0) { |
1606 | return m->method_holder(); |
1607 | } else { |
1608 | return NULL; |
1609 | } |
1610 | } |
1611 | |
1612 | // This is a strong assertion: It is that the given type |
1613 | // has no subtypes whatever. It is most useful for |
1614 | // optimizing checks on reflected types or on array types. |
1615 | // (Checks on types which are derived from real instances |
1616 | // can be optimized more strongly than this, because we |
1617 | // know that the checked type comes from a concrete type, |
1618 | // and therefore we can disregard abstract types.) |
1619 | Klass* Dependencies::check_leaf_type(Klass* ctxk) { |
1620 | assert(must_be_in_vm(), "raw oops here" ); |
1621 | assert_locked_or_safepoint(Compile_lock); |
1622 | InstanceKlass* ctx = InstanceKlass::cast(ctxk); |
1623 | Klass* sub = ctx->subklass(); |
1624 | if (sub != NULL) { |
1625 | return sub; |
1626 | } else if (ctx->nof_implementors() != 0) { |
    // ctxk is an interface here: for the leaf assertion to hold it would
    // have to be unimplemented, so any implementor serves as a witness.
    // (For non-interfaces, nof_implementors is always zero.)
1629 | Klass* impl = ctx->implementor(); |
1630 | assert(impl != NULL, "must be set" ); |
1631 | return impl; |
1632 | } else { |
1633 | return NULL; |
1634 | } |
1635 | } |
1636 | |
1637 | // Test the assertion that conck is the only concrete subtype* of ctxk. |
// The type conck itself is allowed to have further concrete subtypes.
1639 | // This allows the compiler to narrow occurrences of ctxk by conck, |
1640 | // when dealing with the types of actual instances. |
1641 | Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk, |
1642 | Klass* conck, |
1643 | KlassDepChange* changes) { |
1644 | ClassHierarchyWalker wf(conck); |
1645 | return wf.find_witness_subtype(ctxk, changes); |
1646 | } |
1647 | |
1648 | // If a non-concrete class has no concrete subtypes, it is not (yet) |
1649 | // instantiatable. This can allow the compiler to make some paths go |
1650 | // dead, if they are gated by a test of the type. |
1651 | Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk, |
1652 | KlassDepChange* changes) { |
1653 | // Find any concrete subtype, with no participants: |
1654 | ClassHierarchyWalker wf; |
1655 | return wf.find_witness_subtype(ctxk, changes); |
1656 | } |
1657 | |
1658 | |
1659 | // If a concrete class has no concrete subtypes, it can always be |
1660 | // exactly typed. This allows the use of a cheaper type test. |
1661 | Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk, |
1662 | KlassDepChange* changes) { |
1663 | // Find any concrete subtype, with only the ctxk as participant: |
1664 | ClassHierarchyWalker wf(ctxk); |
1665 | return wf.find_witness_subtype(ctxk, changes); |
1666 | } |
1667 | |
1668 | |
1669 | // Find the unique concrete proper subtype of ctxk, or NULL if there |
1670 | // is more than one concrete proper subtype. If there are no concrete |
1671 | // proper subtypes, return ctxk itself, whether it is concrete or not. |
// The returned subtype is allowed to have further concrete subtypes.
1673 | // That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }. |
1674 | Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) { |
1675 | ClassHierarchyWalker wf(ctxk); // Ignore ctxk when walking. |
1676 | wf.record_witnesses(1); // Record one other witness when walking. |
1677 | Klass* wit = wf.find_witness_subtype(ctxk); |
1678 | if (wit != NULL) return NULL; // Too many witnesses. |
1679 | Klass* conck = wf.participant(0); |
1680 | if (conck == NULL) { |
1681 | #ifndef PRODUCT |
1682 | // Make sure the dependency mechanism will pass this discovery: |
1683 | if (VerifyDependencies) { |
1684 | // Turn off dependency tracing while actually testing deps. |
1685 | FlagSetting fs(TraceDependencies, false); |
1686 | if (!Dependencies::is_concrete_klass(ctxk)) { |
1687 | guarantee(NULL == |
1688 | (void *)check_abstract_with_no_concrete_subtype(ctxk), |
1689 | "verify dep." ); |
1690 | } else { |
1691 | guarantee(NULL == |
1692 | (void *)check_concrete_with_no_concrete_subtype(ctxk), |
1693 | "verify dep." ); |
1694 | } |
1695 | } |
1696 | #endif //PRODUCT |
1697 | return ctxk; // Return ctxk as a flag for "no subtypes". |
1698 | } else { |
1699 | #ifndef PRODUCT |
1700 | // Make sure the dependency mechanism will pass this discovery: |
1701 | if (VerifyDependencies) { |
1702 | // Turn off dependency tracing while actually testing deps. |
1703 | FlagSetting fs(TraceDependencies, false); |
1704 | if (!Dependencies::is_concrete_klass(ctxk)) { |
1705 | guarantee(NULL == (void *) |
1706 | check_abstract_with_unique_concrete_subtype(ctxk, conck), |
1707 | "verify dep." ); |
1708 | } |
1709 | } |
1710 | #endif //PRODUCT |
1711 | return conck; |
1712 | } |
1713 | } |
1714 | |
// Test the assertion that k1 and k2 are the only concrete subtypes of ctxk,
// except possibly for further subtypes of k1 and k2 themselves.
1717 | // The context type must be abstract. The types k1 and k2 are themselves |
1718 | // allowed to have further concrete subtypes. |
1719 | Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes( |
1720 | Klass* ctxk, |
1721 | Klass* k1, |
1722 | Klass* k2, |
1723 | KlassDepChange* changes) { |
1724 | ClassHierarchyWalker wf; |
1725 | wf.add_participant(k1); |
1726 | wf.add_participant(k2); |
1727 | return wf.find_witness_subtype(ctxk, changes); |
1728 | } |
1729 | |
1730 | // Search ctxk for concrete implementations. If there are klen or fewer, |
1731 | // pack them into the given array and return the number. |
1732 | // Otherwise, return -1, meaning the given array would overflow. |
1733 | // (Note that a return of 0 means there are exactly no concrete subtypes.) |
1734 | // In this search, if ctxk is concrete, it will be reported alone. |
1735 | // For any type CC reported, no proper subtypes of CC will be reported. |
1736 | int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk, |
1737 | int klen, |
1738 | Klass* karray[]) { |
1739 | ClassHierarchyWalker wf; |
1740 | wf.record_witnesses(klen); |
1741 | Klass* wit = wf.find_witness_subtype(ctxk); |
1742 | if (wit != NULL) return -1; // Too many witnesses. |
1743 | int num = wf.num_participants(); |
1744 | assert(num <= klen, "oob" ); |
1745 | // Pack the result array with the good news. |
1746 | for (int i = 0; i < num; i++) |
1747 | karray[i] = wf.participant(i); |
1748 | #ifndef PRODUCT |
1749 | // Make sure the dependency mechanism will pass this discovery: |
1750 | if (VerifyDependencies) { |
1751 | // Turn off dependency tracing while actually testing deps. |
1752 | FlagSetting fs(TraceDependencies, false); |
1753 | switch (Dependencies::is_concrete_klass(ctxk)? -1: num) { |
1754 | case -1: // ctxk was itself concrete |
1755 | guarantee(num == 1 && karray[0] == ctxk, "verify dep." ); |
1756 | break; |
1757 | case 0: |
1758 | guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk), |
1759 | "verify dep." ); |
1760 | break; |
1761 | case 1: |
1762 | guarantee(NULL == (void *) |
1763 | check_abstract_with_unique_concrete_subtype(ctxk, karray[0]), |
1764 | "verify dep." ); |
1765 | break; |
1766 | case 2: |
1767 | guarantee(NULL == (void *) |
1768 | check_abstract_with_exclusive_concrete_subtypes(ctxk, |
1769 | karray[0], |
1770 | karray[1]), |
1771 | "verify dep." ); |
1772 | break; |
1773 | default: |
      ShouldNotReachHere(); // klen > 2 not yet supported
1775 | } |
1776 | } |
1777 | #endif //PRODUCT |
1778 | return num; |
1779 | } |
1780 | |
1781 | // If a class (or interface) has a unique concrete method uniqm, return NULL. |
1782 | // Otherwise, return a class that contains an interfering method. |
1783 | Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm, |
1784 | KlassDepChange* changes) { |
1785 | // Here is a missing optimization: If uniqm->is_final(), |
1786 | // we don't really need to search beneath it for overrides. |
1787 | // This is probably not important, since we don't use dependencies |
1788 | // to track final methods. (They can't be "definalized".) |
1789 | ClassHierarchyWalker wf(uniqm->method_holder(), uniqm); |
1790 | return wf.find_witness_definer(ctxk, changes); |
1791 | } |
1792 | |
1793 | // Find the set of all non-abstract methods under ctxk that match m. |
1794 | // (The method m must be defined or inherited in ctxk.) |
1795 | // Include m itself in the set, unless it is abstract. |
1796 | // If this set has exactly one element, return that element. |
1797 | Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) { |
1798 | // Return NULL if m is marked old; must have been a redefined method. |
1799 | if (m->is_old()) { |
1800 | return NULL; |
1801 | } |
1802 | ClassHierarchyWalker wf(m); |
1803 | assert(wf.check_method_context(ctxk, m), "proper context" ); |
1804 | wf.record_witnesses(1); |
1805 | Klass* wit = wf.find_witness_definer(ctxk); |
1806 | if (wit != NULL) return NULL; // Too many witnesses. |
1807 | Method* fm = wf.found_method(0); // Will be NULL if num_parts == 0. |
1808 | if (Dependencies::is_concrete_method(m, ctxk)) { |
1809 | if (fm == NULL) { |
1810 | // It turns out that m was always the only implementation. |
1811 | fm = m; |
1812 | } else if (fm != m) { |
1813 | // Two conflicting implementations after all. |
1814 | // (This can happen if m is inherited into ctxk and fm overrides it.) |
1815 | return NULL; |
1816 | } |
1817 | } |
1818 | #ifndef PRODUCT |
1819 | // Make sure the dependency mechanism will pass this discovery: |
1820 | if (VerifyDependencies && fm != NULL) { |
1821 | guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm), |
1822 | "verify dep." ); |
1823 | } |
1824 | #endif //PRODUCT |
1825 | return fm; |
1826 | } |
1827 | |
1828 | Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk, |
1829 | Method* m1, |
1830 | Method* m2, |
1831 | KlassDepChange* changes) { |
1832 | ClassHierarchyWalker wf(m1); |
1833 | wf.add_participant(m1->method_holder()); |
1834 | wf.add_participant(m2->method_holder()); |
1835 | return wf.find_witness_definer(ctxk, changes); |
1836 | } |
1837 | |
1838 | Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) { |
1839 | Klass* search_at = ctxk; |
1840 | if (changes != NULL) |
1841 | search_at = changes->new_type(); // just look at the new bit |
1842 | return find_finalizable_subclass(search_at); |
1843 | } |
1844 | |
1845 | Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) { |
1846 | assert(call_site != NULL, "sanity" ); |
1847 | assert(method_handle != NULL, "sanity" ); |
1848 | assert(call_site->is_a(SystemDictionary::CallSite_klass()), "sanity" ); |
1849 | |
1850 | if (changes == NULL) { |
1851 | // Validate all CallSites |
1852 | if (!oopDesc::equals(java_lang_invoke_CallSite::target(call_site), method_handle)) |
1853 | return call_site->klass(); // assertion failed |
1854 | } else { |
1855 | // Validate the given CallSite |
1856 | if (oopDesc::equals(call_site, changes->call_site()) && !oopDesc::equals(java_lang_invoke_CallSite::target(call_site), changes->method_handle())) { |
1857 | assert(!oopDesc::equals(method_handle, changes->method_handle()), "must be" ); |
1858 | return call_site->klass(); // assertion failed |
1859 | } |
1860 | } |
1861 | return NULL; // assertion still valid |
1862 | } |
1863 | |
1864 | void Dependencies::DepStream::trace_and_log_witness(Klass* witness) { |
1865 | if (witness != NULL) { |
1866 | if (TraceDependencies) { |
1867 | print_dependency(witness, /*verbose=*/ true); |
1868 | } |
1869 | // The following is a no-op unless logging is enabled: |
1870 | log_dependency(witness); |
1871 | } |
1872 | } |
1873 | |
1874 | |
1875 | Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) { |
1876 | assert_locked_or_safepoint(Compile_lock); |
1877 | Dependencies::check_valid_dependency_type(type()); |
1878 | |
1879 | Klass* witness = NULL; |
1880 | switch (type()) { |
1881 | case evol_method: |
1882 | witness = check_evol_method(method_argument(0)); |
1883 | break; |
1884 | case leaf_type: |
1885 | witness = check_leaf_type(context_type()); |
1886 | break; |
1887 | case abstract_with_unique_concrete_subtype: |
1888 | witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes); |
1889 | break; |
1890 | case abstract_with_no_concrete_subtype: |
1891 | witness = check_abstract_with_no_concrete_subtype(context_type(), changes); |
1892 | break; |
1893 | case concrete_with_no_concrete_subtype: |
1894 | witness = check_concrete_with_no_concrete_subtype(context_type(), changes); |
1895 | break; |
1896 | case unique_concrete_method: |
1897 | witness = check_unique_concrete_method(context_type(), method_argument(1), changes); |
1898 | break; |
1899 | case abstract_with_exclusive_concrete_subtypes_2: |
1900 | witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes); |
1901 | break; |
1902 | case exclusive_concrete_methods_2: |
1903 | witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes); |
1904 | break; |
1905 | case no_finalizable_subclasses: |
1906 | witness = check_has_no_finalizable_subclasses(context_type(), changes); |
1907 | break; |
1908 | default: |
1909 | witness = NULL; |
1910 | break; |
1911 | } |
1912 | trace_and_log_witness(witness); |
1913 | return witness; |
1914 | } |
1915 | |
1916 | |
1917 | Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) { |
1918 | assert_locked_or_safepoint(Compile_lock); |
1919 | Dependencies::check_valid_dependency_type(type()); |
1920 | |
1921 | Klass* witness = NULL; |
1922 | switch (type()) { |
1923 | case call_site_target_value: |
1924 | witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes); |
1925 | break; |
1926 | default: |
1927 | witness = NULL; |
1928 | break; |
1929 | } |
1930 | trace_and_log_witness(witness); |
1931 | return witness; |
1932 | } |
1933 | |
1934 | |
1935 | Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) { |
1936 | // Handle klass dependency |
1937 | if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type())) |
1938 | return check_klass_dependency(changes.as_klass_change()); |
1939 | |
1940 | // Handle CallSite dependency |
1941 | if (changes.is_call_site_change()) |
1942 | return check_call_site_dependency(changes.as_call_site_change()); |
1943 | |
1944 | // irrelevant dependency; skip it |
1945 | return NULL; |
1946 | } |
1947 | |
1948 | |
1949 | void DepChange::print() { |
1950 | int nsup = 0, nint = 0; |
1951 | for (ContextStream str(*this); str.next(); ) { |
1952 | Klass* k = str.klass(); |
1953 | switch (str.change_type()) { |
1954 | case Change_new_type: |
1955 | tty->print_cr(" dependee = %s" , k->external_name()); |
1956 | break; |
1957 | case Change_new_sub: |
1958 | if (!WizardMode) { |
1959 | ++nsup; |
1960 | } else { |
1961 | tty->print_cr(" context super = %s" , k->external_name()); |
1962 | } |
1963 | break; |
1964 | case Change_new_impl: |
1965 | if (!WizardMode) { |
1966 | ++nint; |
1967 | } else { |
1968 | tty->print_cr(" context interface = %s" , k->external_name()); |
1969 | } |
1970 | break; |
1971 | default: |
1972 | break; |
1973 | } |
1974 | } |
1975 | if (nsup + nint != 0) { |
1976 | tty->print_cr(" context supers = %d, interfaces = %d" , nsup, nint); |
1977 | } |
1978 | } |
1979 | |
1980 | void DepChange::ContextStream::start() { |
1981 | Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL; |
1982 | _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass); |
1983 | _klass = new_type; |
1984 | _ti_base = NULL; |
1985 | _ti_index = 0; |
1986 | _ti_limit = 0; |
1987 | } |
1988 | |
1989 | bool DepChange::ContextStream::next() { |
1990 | switch (_change_type) { |
1991 | case Start_Klass: // initial state; _klass is the new type |
1992 | _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces(); |
1993 | _ti_index = 0; |
1994 | _change_type = Change_new_type; |
1995 | return true; |
1996 | case Change_new_type: |
1997 | // fall through: |
1998 | _change_type = Change_new_sub; |
1999 | case Change_new_sub: |
2000 | // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277 |
2001 | { |
2002 | _klass = _klass->super(); |
2003 | if (_klass != NULL) { |
2004 | return true; |
2005 | } |
2006 | } |
2007 | // else set up _ti_limit and fall through: |
2008 | _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length(); |
2009 | _change_type = Change_new_impl; |
2010 | case Change_new_impl: |
2011 | if (_ti_index < _ti_limit) { |
2012 | _klass = _ti_base->at(_ti_index++); |
2013 | return true; |
2014 | } |
2015 | // fall through: |
2016 | _change_type = NO_CHANGE; // iterator is exhausted |
2017 | case NO_CHANGE: |
2018 | break; |
2019 | default: |
2020 | ShouldNotReachHere(); |
2021 | } |
2022 | return false; |
2023 | } |
2024 | |
2025 | void KlassDepChange::initialize() { |
2026 | // entire transaction must be under this lock: |
2027 | assert_lock_strong(Compile_lock); |
2028 | |
  // Mark the dependee and all of its superclasses,
  // as well as its transitive interfaces.
2031 | for (ContextStream str(*this); str.next(); ) { |
2032 | Klass* d = str.klass(); |
2033 | assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking" ); |
2034 | InstanceKlass::cast(d)->set_is_marked_dependent(true); |
2035 | } |
2036 | } |
2037 | |
2038 | KlassDepChange::~KlassDepChange() { |
  // Unmark the dependee and all of its superclasses,
  // as well as its transitive interfaces.
2041 | for (ContextStream str(*this); str.next(); ) { |
2042 | Klass* d = str.klass(); |
2043 | InstanceKlass::cast(d)->set_is_marked_dependent(false); |
2044 | } |
2045 | } |
2046 | |
2047 | bool KlassDepChange::involves_context(Klass* k) { |
2048 | if (k == NULL || !k->is_instance_klass()) { |
2049 | return false; |
2050 | } |
2051 | InstanceKlass* ik = InstanceKlass::cast(k); |
2052 | bool is_contained = ik->is_marked_dependent(); |
2053 | assert(is_contained == new_type()->is_subtype_of(k), |
2054 | "correct marking of potential context types" ); |
2055 | return is_contained; |
2056 | } |
2057 | |
2058 | #ifndef PRODUCT |
2059 | void Dependencies::print_statistics() { |
2060 | if (deps_find_witness_print != 0) { |
2061 | // Call one final time, to flush out the data. |
2062 | deps_find_witness_print = -1; |
2063 | count_find_witness_calls(); |
2064 | } |
2065 | } |
2066 | #endif |
2067 | |
2068 | CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) : |
2069 | _call_site(call_site), |
2070 | _method_handle(method_handle) { |
2071 | assert(_call_site()->is_a(SystemDictionary::CallSite_klass()), "must be" ); |
2072 | assert(_method_handle.is_null() || _method_handle()->is_a(SystemDictionary::MethodHandle_klass()), "must be" ); |
2073 | } |
2074 | |