1 | /* |
2 | * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved. |
3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 | * |
5 | * This code is free software; you can redistribute it and/or modify it |
6 | * under the terms of the GNU General Public License version 2 only, as |
7 | * published by the Free Software Foundation. |
8 | * |
9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
12 | * version 2 for more details (a copy is included in the LICENSE file that |
13 | * accompanied this code). |
14 | * |
15 | * You should have received a copy of the GNU General Public License version |
16 | * 2 along with this work; if not, write to the Free Software Foundation, |
17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
18 | * |
19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
20 | * or visit www.oracle.com if you need additional information or have any |
21 | * questions. |
22 | * |
23 | */ |
24 | |
25 | #include "precompiled.hpp" |
26 | #include "classfile/javaClasses.inline.hpp" |
27 | #include "code/codeCache.hpp" |
28 | #include "code/debugInfoRec.hpp" |
29 | #include "code/nmethod.hpp" |
30 | #include "code/pcDesc.hpp" |
31 | #include "code/scopeDesc.hpp" |
32 | #include "interpreter/interpreter.hpp" |
33 | #include "interpreter/oopMapCache.hpp" |
34 | #include "oops/instanceKlass.hpp" |
35 | #include "oops/oop.inline.hpp" |
36 | #include "runtime/basicLock.hpp" |
37 | #include "runtime/frame.inline.hpp" |
38 | #include "runtime/handles.inline.hpp" |
39 | #include "runtime/monitorChunk.hpp" |
40 | #include "runtime/signature.hpp" |
41 | #include "runtime/stubRoutines.hpp" |
42 | #include "runtime/vframeArray.hpp" |
43 | #include "runtime/vframe_hp.hpp" |
44 | #ifdef COMPILER2 |
45 | #include "opto/matcher.hpp" |
46 | #endif |
47 | |
48 | |
49 | // ------------- compiledVFrame -------------- |
50 | |
51 | StackValueCollection* compiledVFrame::locals() const { |
  // Native methods have no scope
53 | if (scope() == NULL) return new StackValueCollection(0); |
54 | GrowableArray<ScopeValue*>* scv_list = scope()->locals(); |
55 | if (scv_list == NULL) return new StackValueCollection(0); |
56 | |
  // scv_list is the list of ScopeValues describing the JVM local variables.
  // There is one scv_list entry for every local variable slot in use.
59 | int length = scv_list->length(); |
60 | StackValueCollection* result = new StackValueCollection(length); |
61 | for (int i = 0; i < length; i++) { |
62 | result->add(create_stack_value(scv_list->at(i))); |
63 | } |
64 | |
65 | // Replace the original values with any stores that have been |
66 | // performed through compiledVFrame::update_locals. |
67 | GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread()->deferred_locals(); |
  if (list != NULL) {
    // Deferred writes are rare; when present the list typically holds a single element.
70 | for (int i = 0; i < list->length(); i++) { |
71 | if (list->at(i)->matches(this)) { |
72 | list->at(i)->update_locals(result); |
73 | break; |
74 | } |
75 | } |
76 | } |
77 | |
78 | return result; |
79 | } |
80 | |
81 | |
void compiledVFrame::set_locals(StackValueCollection* values) const {
  fatal("Should use update_local for each local update");
}
86 | |
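// Deferred writes recorded by the update_* methods below share a single flat
// index space per frame:
//   [0, max_locals)                       -> local variable slots
//   [max_locals, max_locals + max_stack)  -> expression stack slots
//   [max_locals + max_stack, ...)         -> monitor owner slots
// jvmtiDeferredLocalVariableSet::update_locals/update_stack/update_monitors
// decode the index the same way when the values are read back.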
void compiledVFrame::update_local(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_locals(), "out of bounds");
  update_deferred_value(type, index, value);
}

void compiledVFrame::update_stack(BasicType type, int index, jvalue value) {
  assert(index >= 0 && index < method()->max_stack(), "out of bounds");
  update_deferred_value(type, index + method()->max_locals(), value);
}
96 | |
97 | void compiledVFrame::update_monitor(int index, MonitorInfo* val) { |
  assert(index >= 0, "out of bounds");
99 | jvalue value; |
100 | value.l = (jobject) val->owner(); |
101 | update_deferred_value(T_OBJECT, index + method()->max_locals() + method()->max_stack(), value); |
102 | } |
103 | |
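// Record a deferred write for this vframe. The value is stored in the
// per-thread list of jvmtiDeferredLocalVariableSet entries, keyed by frame id
// and vframe id, and is merged back into the results of locals(),
// expressions() and monitors(). The frame must already be scheduled for
// deoptimization (see the assert below).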
104 | void compiledVFrame::update_deferred_value(BasicType type, int index, jvalue value) { |
105 | assert(fr().is_deoptimized_frame() || thread()->must_deopt_id() == fr().id(), |
         "frame must be scheduled for deoptimization");
107 | GrowableArray<jvmtiDeferredLocalVariableSet*>* deferred = thread()->deferred_locals(); |
108 | jvmtiDeferredLocalVariableSet* locals = NULL; |
  if (deferred != NULL) {
    // See if this vframe already has deferred writes recorded for it.
    for (int f = 0; f < deferred->length(); f++) {
112 | if (deferred->at(f)->matches(this)) { |
113 | locals = deferred->at(f); |
114 | break; |
115 | } |
116 | } |
    // No matching entry was found; a new one is pushed below.
  } else {
    // No deferred updates pending for this thread.
    // Allocate the list in the C heap.
121 | deferred = new(ResourceObj::C_HEAP, mtCompiler) GrowableArray<jvmtiDeferredLocalVariableSet*> (1, true); |
122 | thread()->set_deferred_locals(deferred); |
123 | } |
124 | if (locals == NULL) { |
125 | locals = new jvmtiDeferredLocalVariableSet(method(), bci(), fr().id(), vframe_id()); |
126 | deferred->push(locals); |
    assert(locals->id() == fr().id(), "Huh? Must match");
128 | } |
129 | locals->set_value_at(index, type, value); |
130 | } |
131 | |
132 | StackValueCollection* compiledVFrame::expressions() const { |
  // Native methods have no scope
134 | if (scope() == NULL) return new StackValueCollection(0); |
135 | GrowableArray<ScopeValue*>* scv_list = scope()->expressions(); |
136 | if (scv_list == NULL) return new StackValueCollection(0); |
137 | |
  // scv_list is the list of ScopeValues describing the JVM expression stack.
  // There is one scv_list entry for every expression stack element in use.
140 | int length = scv_list->length(); |
141 | StackValueCollection* result = new StackValueCollection(length); |
142 | for (int i = 0; i < length; i++) { |
143 | result->add(create_stack_value(scv_list->at(i))); |
144 | } |
145 | |
146 | // Replace the original values with any stores that have been |
147 | // performed through compiledVFrame::update_stack. |
148 | GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread()->deferred_locals(); |
  if (list != NULL) {
    // Deferred writes are rare; when present the list typically holds a single element.
151 | for (int i = 0; i < list->length(); i++) { |
152 | if (list->at(i)->matches(this)) { |
153 | list->at(i)->update_stack(result); |
154 | break; |
155 | } |
156 | } |
157 | } |
158 | |
159 | return result; |
160 | } |
161 | |
162 | |
// The implementation of the following two methods was factored out into the
// class StackValue because it is also used from within deoptimization.cpp for
// rematerialization and relocking of non-escaping objects.
166 | |
167 | StackValue *compiledVFrame::create_stack_value(ScopeValue *sv) const { |
168 | return StackValue::create_stack_value(&_fr, register_map(), sv); |
169 | } |
170 | |
171 | BasicLock* compiledVFrame::resolve_monitor_lock(Location location) const { |
172 | return StackValue::resolve_monitor_lock(&_fr, location); |
173 | } |
174 | |
175 | |
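// Reconstruct the list of MonitorInfo objects for this frame from the debug
// information. For synchronized native methods the receiver and the native
// monitor are taken directly from the frame; monitors whose owner was scalar
// replaced are flagged accordingly.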
176 | GrowableArray<MonitorInfo*>* compiledVFrame::monitors() const { |
  // Native methods have no scope
178 | if (scope() == NULL) { |
179 | CompiledMethod* nm = code(); |
180 | Method* method = nm->method(); |
    assert(method->is_native() || nm->is_aot(), "Expect a native method or precompiled method");
182 | if (!method->is_synchronized()) { |
183 | return new GrowableArray<MonitorInfo*>(0); |
184 | } |
185 | // This monitor is really only needed for UseBiasedLocking, but |
186 | // return it in all cases for now as it might be useful for stack |
187 | // traces and tools as well |
188 | GrowableArray<MonitorInfo*> *monitors = new GrowableArray<MonitorInfo*>(1); |
189 | // Casting away const |
190 | frame& fr = (frame&) _fr; |
191 | MonitorInfo* info = new MonitorInfo( |
192 | fr.get_native_receiver(), fr.get_native_monitor(), false, false); |
193 | monitors->push(info); |
194 | return monitors; |
195 | } |
196 | GrowableArray<MonitorValue*>* monitors = scope()->monitors(); |
197 | if (monitors == NULL) { |
198 | return new GrowableArray<MonitorInfo*>(0); |
199 | } |
200 | GrowableArray<MonitorInfo*>* result = new GrowableArray<MonitorInfo*>(monitors->length()); |
201 | for (int index = 0; index < monitors->length(); index++) { |
202 | MonitorValue* mv = monitors->at(index); |
203 | ScopeValue* ov = mv->owner(); |
204 | StackValue *owner_sv = create_stack_value(ov); // it is an oop |
    if (ov->is_object() && owner_sv->obj_is_scalar_replaced()) { // The owner object was scalar replaced
      assert(mv->eliminated(), "monitor should be eliminated for scalar replaced object");
      // Use the klass mirror of the scalar-replaced object as a stand-in owner.
      ScopeValue* kv = ((ObjectValue *)ov)->klass();
      assert(kv->is_constant_oop(), "klass should be oop constant for scalar replaced object");
      Handle k(Thread::current(), ((ConstantOopReadValue*)kv)->value()());
      assert(java_lang_Class::is_instance(k()), "must be");
212 | result->push(new MonitorInfo(k(), resolve_monitor_lock(mv->basic_lock()), |
213 | mv->eliminated(), true)); |
214 | } else { |
215 | result->push(new MonitorInfo(owner_sv->get_obj()(), resolve_monitor_lock(mv->basic_lock()), |
216 | mv->eliminated(), false)); |
217 | } |
218 | } |
219 | |
220 | // Replace the original values with any stores that have been |
221 | // performed through compiledVFrame::update_monitors. |
222 | GrowableArray<jvmtiDeferredLocalVariableSet*>* list = thread()->deferred_locals(); |
  if (list != NULL) {
    // Deferred writes are rare; when present the list typically holds a single element.
225 | for (int i = 0; i < list->length(); i++) { |
226 | if (list->at(i)->matches(this)) { |
227 | list->at(i)->update_monitors(result); |
228 | break; |
229 | } |
230 | } |
231 | } |
232 | |
233 | return result; |
234 | } |
235 | |
236 | |
237 | compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, CompiledMethod* nm) |
238 | : javaVFrame(fr, reg_map, thread) { |
239 | _scope = NULL; |
240 | _vframe_id = 0; |
  // Compiled method (native stub or Java code).
  // Native wrappers have no scope data; it is implied.
243 | if (!nm->is_compiled() || !nm->as_compiled_method()->is_native_method()) { |
244 | _scope = nm->scope_desc_at(_fr.pc()); |
245 | } |
246 | } |
247 | |
248 | compiledVFrame::compiledVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread, ScopeDesc* scope, int vframe_id) |
249 | : javaVFrame(fr, reg_map, thread) { |
250 | _scope = scope; |
251 | _vframe_id = vframe_id; |
  guarantee(_scope != NULL, "scope must be present");
253 | } |
254 | |
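// Return the compiledVFrame for the scope with the given decode_offset within
// the same physical frame, creating a new vframe only if this one does not
// already match.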
255 | compiledVFrame* compiledVFrame::at_scope(int decode_offset, int vframe_id) { |
256 | if (scope()->decode_offset() != decode_offset) { |
257 | ScopeDesc* scope = this->scope()->at_offset(decode_offset); |
258 | return new compiledVFrame(frame_pointer(), register_map(), thread(), scope, vframe_id); |
259 | } |
  assert(_vframe_id == vframe_id, "wrong frame id");
261 | return this; |
262 | } |
263 | |
264 | bool compiledVFrame::is_top() const { |
265 | // FIX IT: Remove this when new native stubs are in place |
266 | if (scope() == NULL) return true; |
267 | return scope()->is_top(); |
268 | } |
269 | |
270 | |
271 | CompiledMethod* compiledVFrame::code() const { |
272 | return CodeCache::find_compiled(_fr.pc()); |
273 | } |
274 | |
275 | |
276 | Method* compiledVFrame::method() const { |
277 | if (scope() == NULL) { |
    // Native nmethods have no scope; the method is implied.
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
281 | return nm->method(); |
282 | } |
283 | return scope()->method(); |
284 | } |
285 | |
286 | |
287 | int compiledVFrame::bci() const { |
288 | int raw = raw_bci(); |
289 | return raw == SynchronizationEntryBCI ? 0 : raw; |
290 | } |
291 | |
292 | |
293 | int compiledVFrame::raw_bci() const { |
294 | if (scope() == NULL) { |
    // Native nmethods have no scope; the method and bci are implied.
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
298 | return 0; |
299 | } |
300 | return scope()->bci(); |
301 | } |
302 | |
303 | bool compiledVFrame::should_reexecute() const { |
304 | if (scope() == NULL) { |
    // Native nmethods have no scope; the method and bci are implied.
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
308 | return false; |
309 | } |
310 | return scope()->should_reexecute(); |
311 | } |
312 | |
313 | vframe* compiledVFrame::sender() const { |
314 | const frame f = fr(); |
315 | if (scope() == NULL) { |
    // Native nmethods have no scope; the method and bci are implied.
    nmethod* nm = code()->as_nmethod();
    assert(nm->is_native_method(), "must be native");
319 | return vframe::sender(); |
320 | } else { |
321 | return scope()->is_top() |
322 | ? vframe::sender() |
323 | : new compiledVFrame(&f, register_map(), thread(), scope()->sender(), vframe_id() + 1); |
324 | } |
325 | } |
326 | |
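// ------------- jvmtiDeferredLocalVariableSet --------------
//
// Collects the deferred writes (locals, expression stack slots and monitor
// owners) made through compiledVFrame::update_* for a single vframe, so they
// can be reapplied when the frame's state is queried again via locals(),
// expressions() or monitors().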
327 | jvmtiDeferredLocalVariableSet::jvmtiDeferredLocalVariableSet(Method* method, int bci, intptr_t* id, int vframe_id) { |
328 | _method = method; |
329 | _bci = bci; |
330 | _id = id; |
331 | _vframe_id = vframe_id; |
  // We always need at least one entry; it must live on the C heap.
333 | _locals = new(ResourceObj::C_HEAP, mtCompiler) GrowableArray<jvmtiDeferredLocalVariable*> (1, true); |
334 | } |
335 | |
jvmtiDeferredLocalVariableSet::~jvmtiDeferredLocalVariableSet() {
  for (int i = 0; i < _locals->length(); i++) {
    delete _locals->at(i);
  }
  // Free the GrowableArray itself and its C-heap backing storage.
  delete _locals;
}
343 | |
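// A deferred set matches a compiledVFrame when both the physical frame id and
// the vframe id (position within the frame's inlined scopes) agree; the method
// and bci must then agree as well.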
344 | bool jvmtiDeferredLocalVariableSet::matches(const vframe* vf) { |
345 | if (!vf->is_compiled_frame()) return false; |
346 | compiledVFrame* cvf = (compiledVFrame*)vf; |
347 | if (cvf->fr().id() == id() && cvf->vframe_id() == vframe_id()) { |
    assert(cvf->method() == method() && cvf->bci() == bci(), "must agree");
349 | return true; |
350 | } |
351 | return false; |
352 | } |
353 | |
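// Overwrite an existing entry for this index if present, otherwise append a
// new jvmtiDeferredLocalVariable.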
354 | void jvmtiDeferredLocalVariableSet::set_value_at(int idx, BasicType type, jvalue val) { |
355 | for (int i = 0; i < _locals->length(); i++) { |
356 | if (_locals->at(i)->index() == idx) { |
      assert(_locals->at(i)->type() == type, "Wrong type");
358 | _locals->at(i)->set_value(val); |
359 | return; |
360 | } |
361 | } |
362 | _locals->push(new jvmtiDeferredLocalVariable(idx, type, val)); |
363 | } |
364 | |
365 | void jvmtiDeferredLocalVariableSet::update_value(StackValueCollection* locals, BasicType type, int index, jvalue value) { |
366 | switch (type) { |
367 | case T_BOOLEAN: |
368 | locals->set_int_at(index, value.z); |
369 | break; |
370 | case T_CHAR: |
371 | locals->set_int_at(index, value.c); |
372 | break; |
373 | case T_FLOAT: |
374 | locals->set_float_at(index, value.f); |
375 | break; |
376 | case T_DOUBLE: |
377 | locals->set_double_at(index, value.d); |
378 | break; |
379 | case T_BYTE: |
380 | locals->set_int_at(index, value.b); |
381 | break; |
382 | case T_SHORT: |
383 | locals->set_int_at(index, value.s); |
384 | break; |
385 | case T_INT: |
386 | locals->set_int_at(index, value.i); |
387 | break; |
388 | case T_LONG: |
389 | locals->set_long_at(index, value.j); |
390 | break; |
391 | case T_OBJECT: |
392 | { |
393 | Handle obj(Thread::current(), (oop)value.l); |
394 | locals->set_obj_at(index, obj); |
395 | } |
396 | break; |
397 | default: |
398 | ShouldNotReachHere(); |
399 | } |
400 | } |
401 | |
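// update_locals, update_stack and update_monitors apply the recorded deferred
// writes to the freshly created collections, selecting entries by the flat
// index ranges used by compiledVFrame::update_local, update_stack and
// update_monitor (locals first, then expression stack, then monitors).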
402 | void jvmtiDeferredLocalVariableSet::update_locals(StackValueCollection* locals) { |
  for (int l = 0; l < _locals->length(); l++) {
404 | jvmtiDeferredLocalVariable* val = _locals->at(l); |
405 | if (val->index() >= 0 && val->index() < method()->max_locals()) { |
406 | update_value(locals, val->type(), val->index(), val->value()); |
407 | } |
408 | } |
409 | } |
410 | |
411 | |
412 | void jvmtiDeferredLocalVariableSet::update_stack(StackValueCollection* expressions) { |
  for (int l = 0; l < _locals->length(); l++) {
414 | jvmtiDeferredLocalVariable* val = _locals->at(l); |
415 | if (val->index() >= method()->max_locals() && val->index() < method()->max_locals() + method()->max_stack()) { |
416 | update_value(expressions, val->type(), val->index() - method()->max_locals(), val->value()); |
417 | } |
418 | } |
419 | } |
420 | |
421 | |
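// Monitor entries are stored past the locals and expression stack, so the
// monitor (lock) index is recovered by subtracting max_locals + max_stack.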
422 | void jvmtiDeferredLocalVariableSet::update_monitors(GrowableArray<MonitorInfo*>* monitors) { |
  for (int l = 0; l < _locals->length(); l++) {
424 | jvmtiDeferredLocalVariable* val = _locals->at(l); |
425 | if (val->index() >= method()->max_locals() + method()->max_stack()) { |
426 | int lock_index = val->index() - (method()->max_locals() + method()->max_stack()); |
427 | MonitorInfo* info = monitors->at(lock_index); |
428 | MonitorInfo* new_info = new MonitorInfo((oopDesc*)val->value().l, info->lock(), info->eliminated(), info->owner_is_scalar_replaced()); |
429 | monitors->at_put(lock_index, new_info); |
430 | } |
431 | } |
432 | } |
433 | |
434 | |
435 | void jvmtiDeferredLocalVariableSet::oops_do(OopClosure* f) { |
  // The Method* is kept alive by the live activation on the stack, either via
  // the mirror in the interpreter or via the code in compiled code, so it does
  // not need to be visited here; only the object values do.
438 | for (int i = 0; i < _locals->length(); i++) { |
439 | if (_locals->at(i)->type() == T_OBJECT) { |
440 | f->do_oop(_locals->at(i)->oop_addr()); |
441 | } |
442 | } |
443 | } |
444 | |
445 | jvmtiDeferredLocalVariable::jvmtiDeferredLocalVariable(int index, BasicType type, jvalue value) { |
446 | _index = index; |
447 | _type = type; |
448 | _value = value; |
449 | } |
450 | |
451 | |
452 | #ifndef PRODUCT |
453 | void compiledVFrame::verify() const { |
454 | Unimplemented(); |
455 | } |
456 | #endif // PRODUCT |
457 | |