/*
 * Copyright (c) 1998, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "interpreter/bytecodes.hpp"
#include "interpreter/interpreter.hpp"
#include "interpreter/rewriter.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/resourceArea.hpp"
#include "oops/constantPool.hpp"
#include "oops/generateOopMap.hpp"
#include "prims/methodHandles.hpp"
#include "runtime/fieldDescriptor.inline.hpp"
#include "runtime/handles.inline.hpp"

// Computes a CPC map (new_index -> original_index) for constant pool entries
// that are referred to by the interpreter at runtime via the constant pool cache.
// Also computes a CP map (original_index -> new_index).
// Marks entries in CP which require additional processing.
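//
// Illustrative sketch (indices are made up, not real data): for a pool whose
// interesting entries are
//   #3 Methodref, #5 Fieldref, #7 String
// the maps built here would relate roughly as
//   cp_cache_map  = [3, 5]          (new cache index -> original cp index)
//   cp_map        = {3 -> 0, 5 -> 1} (original cp index -> cache index)
//   reference_map = [7]             (resolved-references index -> cp index)
// The exact indices depend on pool layout; this only shows the direction of
// each mapping.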
void Rewriter::compute_index_maps() {
  const int length = _pool->length();
  init_maps(length);
  bool saw_mh_symbol = false;
  for (int i = 0; i < length; i++) {
    int tag = _pool->tag_at(i).value();
    switch (tag) {
      case JVM_CONSTANT_InterfaceMethodref: // fall through
      case JVM_CONSTANT_Fieldref          : // fall through
      case JVM_CONSTANT_Methodref         :
        add_cp_cache_entry(i);
        break;
      case JVM_CONSTANT_Dynamic:
        assert(_pool->has_dynamic_constant(), "constant pool's _has_dynamic_constant flag not set");
        add_resolved_references_entry(i);
        break;
      case JVM_CONSTANT_String      : // fall through
      case JVM_CONSTANT_MethodHandle: // fall through
      case JVM_CONSTANT_MethodType  :
        add_resolved_references_entry(i);
        break;
      case JVM_CONSTANT_Utf8:
        if (_pool->symbol_at(i) == vmSymbols::java_lang_invoke_MethodHandle() ||
            _pool->symbol_at(i) == vmSymbols::java_lang_invoke_VarHandle()) {
          saw_mh_symbol = true;
        }
        break;
    }
  }

  // Record limits of resolved reference map for constant pool cache indices
  record_map_limits();

  guarantee((int) _cp_cache_map.length() - 1 <= (int) ((u2)-1),
            "all cp cache indexes fit in a u2");

  if (saw_mh_symbol) {
    _method_handle_invokers.at_grow(length, 0);
  }
}

// Unrewrite the bytecodes if an error occurs.
void Rewriter::restore_bytecodes() {
  int len = _methods->length();
  bool invokespecial_error = false;

  for (int i = len-1; i >= 0; i--) {
    Method* method = _methods->at(i);
    scan_method(method, true, &invokespecial_error);
    assert(!invokespecial_error, "reversing should not get an invokespecial error");
  }
}

// Creates a constant pool cache given a CPC map
void Rewriter::make_constant_pool_cache(TRAPS) {
  ClassLoaderData* loader_data = _pool->pool_holder()->class_loader_data();
  ConstantPoolCache* cache =
      ConstantPoolCache::allocate(loader_data, _cp_cache_map,
                                  _invokedynamic_cp_cache_map,
                                  _invokedynamic_references_map, CHECK);

  // initialize object cache in constant pool
  _pool->set_cache(cache);
  cache->set_constant_pool(_pool());

  // _resolved_references is stored in pool->cache(), so this must be done
  // after the lines above.
  _pool->initialize_resolved_references(loader_data, _resolved_references_map,
                                        _resolved_reference_limit,
                                        THREAD);

  // Clean up constant pool cache if initialize_resolved_references() failed.
  if (HAS_PENDING_EXCEPTION) {
    MetadataFactory::free_metadata(loader_data, cache);
    _pool->set_cache(NULL);  // so the verifier isn't confused
  } else {
    DEBUG_ONLY(
    if (DumpSharedSpaces) {
      cache->verify_just_initialized();
    })
  }
}


// The new finalization semantics say that registration of finalizable objects
// must be performed on successful return from the Object.<init> constructor.
// We could implement this trivially if <init> were never rewritten, but since
// JVMTI allows that to occur, a more complicated solution is required. A
// special return bytecode is used only by Object.<init> to signal the
// finalization registration point. Additionally, local 0 must be preserved so
// it is available to pass to the registration function; for simplicity we
// require that local 0 is never overwritten in Object.<init>.
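//
// Illustrative sketch: java.lang.Object.<init> itself is essentially a bare
//   return
// and this pass turns that 'return' into '_return_register_finalizer'; the
// interpreter's handler for that bytecode registers the receiver (still in
// local 0) for finalization when needed. Subclass constructors are not
// touched here.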

void Rewriter::rewrite_Object_init(const methodHandle& method, TRAPS) {
  RawBytecodeStream bcs(method);
  while (!bcs.is_last_bytecode()) {
    Bytecodes::Code opcode = bcs.raw_next();
    switch (opcode) {
      case Bytecodes::_return: *bcs.bcp() = Bytecodes::_return_register_finalizer; break;

      case Bytecodes::_istore:
      case Bytecodes::_lstore:
      case Bytecodes::_fstore:
      case Bytecodes::_dstore:
      case Bytecodes::_astore:
        if (bcs.get_index() != 0) continue;

        // fall through
      case Bytecodes::_istore_0:
      case Bytecodes::_lstore_0:
      case Bytecodes::_fstore_0:
      case Bytecodes::_dstore_0:
      case Bytecodes::_astore_0:
        THROW_MSG(vmSymbols::java_lang_IncompatibleClassChangeError(),
                  "can't overwrite local 0 in Object.<init>");
        break;

      default:
        break;
    }
  }
}


// Rewrite a classfile-order CP index into a native-order CPC index.
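// Illustrative sketch (values are made up): for
//   getfield #4            // bytes ... b4 00 04 ... (Java-order u2 cp index)
// whose Fieldref was assigned cache entry 1, the operand becomes the
// native-order u2 value 1, e.g. on a little-endian machine
//   getfield [cache 1]     // bytes ... b4 01 00 ...
// The reverse pass maps the cache index back to the original cp index.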
void Rewriter::rewrite_member_reference(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  if (!reverse) {
    int cp_index = Bytes::get_Java_u2(p);
    int cache_index = cp_entry_to_cp_cache(cp_index);
    Bytes::put_native_u2(p, cache_index);
    if (!_method_handle_invokers.is_empty())
      maybe_rewrite_invokehandle(p - 1, cp_index, cache_index, reverse);
  } else {
    int cache_index = Bytes::get_native_u2(p);
    int pool_index = cp_cache_entry_pool_index(cache_index);
    Bytes::put_Java_u2(p, pool_index);
    if (!_method_handle_invokers.is_empty())
      maybe_rewrite_invokehandle(p - 1, pool_index, cache_index, reverse);
  }
}

// If the constant pool entry for invokespecial is an InterfaceMethodref,
// we need to add a separate cpCache entry for its resolution, because it is
// resolved differently from invokeinterface with the same InterfaceMethodref.
// These cannot share cpCache entries.
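// Illustrative sketch (indices are made up): if both
//   invokespecial   #9    // InterfaceMethodref I.m
//   invokeinterface #9
// appear in a class, the invokespecial gets its own cache entry via
// add_invokespecial_cp_cache_entry(9) instead of sharing the entry that the
// invokeinterface resolution will populate.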
void Rewriter::rewrite_invokespecial(address bcp, int offset, bool reverse, bool* invokespecial_error) {
  address p = bcp + offset;
  if (!reverse) {
    int cp_index = Bytes::get_Java_u2(p);
    if (_pool->tag_at(cp_index).is_interface_method()) {
      int cache_index = add_invokespecial_cp_cache_entry(cp_index);
      if (cache_index != (int)(jushort) cache_index) {
        *invokespecial_error = true;
      }
      Bytes::put_native_u2(p, cache_index);
    } else {
      rewrite_member_reference(bcp, offset, reverse);
    }
  } else {
    rewrite_member_reference(bcp, offset, reverse);
  }
}


// Adjust the invocation bytecode for a signature-polymorphic method (MethodHandle.invoke, etc.)
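// Illustrative sketch (constant pool shape is made up): a call such as
//   invokevirtual java/lang/invoke/MethodHandle.invokeExact(...)Ljava/lang/Object;
// is retagged as the internal _invokehandle bytecode and gets a
// resolved-references slot for the call site's appendix, while an
// invokevirtual on an ordinary method is left as a plain member reference.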
void Rewriter::maybe_rewrite_invokehandle(address opc, int cp_index, int cache_index, bool reverse) {
  if (!reverse) {
    if ((*opc) == (u1)Bytecodes::_invokevirtual ||
        // allow invokespecial as an alias, although it would be very odd:
        (*opc) == (u1)Bytecodes::_invokespecial) {
      assert(_pool->tag_at(cp_index).is_method(), "wrong index");
      // Determine whether this is a signature-polymorphic method.
      if (cp_index >= _method_handle_invokers.length()) return;
      int status = _method_handle_invokers.at(cp_index);
      assert(status >= -1 && status <= 1, "oob tri-state");
      if (status == 0) {
        if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_MethodHandle() &&
            MethodHandles::is_signature_polymorphic_name(SystemDictionary::MethodHandle_klass(),
                                                         _pool->name_ref_at(cp_index))) {
          // we may need a resolved_refs entry for the appendix
          add_invokedynamic_resolved_references_entry(cp_index, cache_index);
          status = +1;
        } else if (_pool->klass_ref_at_noresolve(cp_index) == vmSymbols::java_lang_invoke_VarHandle() &&
                   MethodHandles::is_signature_polymorphic_name(SystemDictionary::VarHandle_klass(),
                                                                _pool->name_ref_at(cp_index))) {
          // we may need a resolved_refs entry for the appendix
          add_invokedynamic_resolved_references_entry(cp_index, cache_index);
          status = +1;
        } else {
          status = -1;
        }
        _method_handle_invokers.at(cp_index) = status;
      }
      // We use a special internal bytecode for such methods (if non-static).
      // The basic reason for this is that such methods need an extra "appendix" argument
      // to transmit the call site's intended call type.
      if (status > 0) {
        (*opc) = (u1)Bytecodes::_invokehandle;
      }
    }
  } else {
    // Do not need to look at cp_index.
    if ((*opc) == (u1)Bytecodes::_invokehandle) {
      (*opc) = (u1)Bytecodes::_invokevirtual;
      // Ignore corner case of original _invokespecial instruction.
      // This is safe because (a) the signature polymorphic method was final, and
      // (b) the implementation of MethodHandle will not call invokespecial on it.
    }
  }
}


void Rewriter::rewrite_invokedynamic(address bcp, int offset, bool reverse) {
  address p = bcp + offset;
  assert(p[-1] == Bytecodes::_invokedynamic, "not invokedynamic bytecode");
  if (!reverse) {
    int cp_index = Bytes::get_Java_u2(p);
    int cache_index = add_invokedynamic_cp_cache_entry(cp_index);
    int resolved_index = add_invokedynamic_resolved_references_entry(cp_index, cache_index);
    // Replace the trailing four bytes with a CPC index for the dynamic
    // call site. Unlike other CPC entries, there is one per bytecode,
    // not just one per distinct CP entry. In other words, the
    // CPC-to-CP relation is many-to-one for invokedynamic entries.
    // This means we must use a larger index size than u2 to address
    // all these entries. That is the main reason invokedynamic
    // must have a five-byte instruction format. (Of course, other JVM
    // implementations can use the bytes for other purposes.)
    // Note: We use native_u4 format exclusively for 4-byte indexes.
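    //
    // Illustrative sketch (operand values are made up): the classfile form
    //   invokedynamic #12, 0, 0   // bytes ba 00 0c 00 00
    // has its four operand bytes overwritten with the encoded native u4 cache
    // index, e.g. encode_invokedynamic_index(5); the reverse branch below
    // decodes it and restores the original u2 cp index plus two zero bytes.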
    Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index));
    // Record the bcp in case we need to patch this bytecode if we also find an
    // invokespecial/InterfaceMethodref in the bytecode stream.
    _patch_invokedynamic_bcps->push(p);
    _patch_invokedynamic_refs->push(resolved_index);
  } else {
    int cache_index = ConstantPool::decode_invokedynamic_index(
                        Bytes::get_native_u4(p));
    // We will reverse the bytecode rewriting _after_ adjusting them.
    // Adjust the cache index by the offset to the invokedynamic entries in the
    // cpCache plus the delta if the invokedynamic bytecodes were adjusted.
    int adjustment = cp_cache_delta() + _first_iteration_cp_cache_limit;
    int cp_index = invokedynamic_cp_cache_entry_pool_index(cache_index - adjustment);
    assert(_pool->tag_at(cp_index).is_invoke_dynamic(), "wrong index");
    // zero out 4 bytes
    Bytes::put_Java_u4(p, 0);
    Bytes::put_Java_u2(p, cp_index);
  }
}

void Rewriter::patch_invokedynamic_bytecodes() {
  // If the end of the cp_cache is the same as after initializing with the
  // cpool, nothing needs to be done: the invokedynamic bytecodes already
  // refer to the correct offsets, i.e. no invokespecial entries were added.
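  //
  // Illustrative sketch (counts are made up): if two extra cache entries were
  // appended for invokespecial/InterfaceMethodref after the first pass
  // (delta == 2), every recorded invokedynamic operand and its
  // resolved-references slot is shifted from cache index N to N + 2.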
  int delta = cp_cache_delta();
  if (delta > 0) {
    int length = _patch_invokedynamic_bcps->length();
    assert(length == _patch_invokedynamic_refs->length(),
           "lengths should match");
    for (int i = 0; i < length; i++) {
      address p = _patch_invokedynamic_bcps->at(i);
      int cache_index = ConstantPool::decode_invokedynamic_index(
                          Bytes::get_native_u4(p));
      Bytes::put_native_u4(p, ConstantPool::encode_invokedynamic_index(cache_index + delta));

      // The invokedynamic resolved references map also points into the cp cache,
      // so the delta must be added to each of its entries as well.
      int resolved_index = _patch_invokedynamic_refs->at(i);
      assert(_invokedynamic_references_map.at(resolved_index) == cache_index,
             "should be the same index");
      _invokedynamic_references_map.at_put(resolved_index, cache_index + delta);
    }
  }
}


// Rewrite some ldc bytecodes to _fast_aldc
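// Illustrative sketch (index values are made up): an
//   ldc #7            // String, MethodHandle, MethodType, or reference condy
// becomes _fast_aldc with a u1 resolved-references index, and ldc_w becomes
// _fast_aldc_w with a u2 index, while an ldc of, e.g., an int, float, or
// primitive dynamic constant keeps the original bytecode.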
void Rewriter::maybe_rewrite_ldc(address bcp, int offset, bool is_wide,
                                 bool reverse) {
  if (!reverse) {
    assert((*bcp) == (is_wide ? Bytecodes::_ldc_w : Bytecodes::_ldc), "not ldc bytecode");
    address p = bcp + offset;
    int cp_index = is_wide ? Bytes::get_Java_u2(p) : (u1)(*p);
    constantTag tag = _pool->tag_at(cp_index).value();

    if (tag.is_method_handle() ||
        tag.is_method_type() ||
        tag.is_string() ||
        (tag.is_dynamic_constant() &&
         // keep regular ldc interpreter logic for condy primitives
         is_reference_type(FieldType::basic_type(_pool->uncached_signature_ref_at(cp_index))))
        ) {
      int ref_index = cp_entry_to_resolved_references(cp_index);
      if (is_wide) {
        (*bcp) = Bytecodes::_fast_aldc_w;
        assert(ref_index == (u2)ref_index, "index overflow");
        Bytes::put_native_u2(p, ref_index);
      } else {
        (*bcp) = Bytecodes::_fast_aldc;
        assert(ref_index == (u1)ref_index, "index overflow");
        (*p) = (u1)ref_index;
      }
    }
  } else {
    Bytecodes::Code rewritten_bc =
        (is_wide ? Bytecodes::_fast_aldc_w : Bytecodes::_fast_aldc);
    if ((*bcp) == rewritten_bc) {
      address p = bcp + offset;
      int ref_index = is_wide ? Bytes::get_native_u2(p) : (u1)(*p);
      int pool_index = resolved_references_entry_to_pool_index(ref_index);
      if (is_wide) {
        (*bcp) = Bytecodes::_ldc_w;
        assert(pool_index == (u2)pool_index, "index overflow");
        Bytes::put_Java_u2(p, pool_index);
      } else {
        (*bcp) = Bytecodes::_ldc;
        assert(pool_index == (u1)pool_index, "index overflow");
        (*p) = (u1)pool_index;
      }
    }
  }
}


// Rewrites a method given the index_map information
void Rewriter::scan_method(Method* method, bool reverse, bool* invokespecial_error) {

  int nof_jsrs = 0;
  bool has_monitor_bytecodes = false;
  Bytecodes::Code c;

  // Bytecodes and their length
  const address code_base = method->code_base();
  const int code_length = method->code_size();

  int bc_length;
  for (int bci = 0; bci < code_length; bci += bc_length) {
    address bcp = code_base + bci;
    int prefix_length = 0;
    c = (Bytecodes::Code)(*bcp);

    // Since we have the code, see if we can get the length
    // directly. Some more complicated bytecodes will report
    // a length of zero, meaning we need to make another method
    // call to calculate the length.
    bc_length = Bytecodes::length_for(c);
    if (bc_length == 0) {
      bc_length = Bytecodes::length_at(method, bcp);

      // length_at will put us at the bytecode after the one modified
      // by 'wide'. We don't currently examine any of the bytecodes
      // modified by wide, but in case we do in the future...
      if (c == Bytecodes::_wide) {
        prefix_length = 1;
        c = (Bytecodes::Code)bcp[1];
      }
    }

    // Continuing with an invalid bytecode will fail in the loop below.
    // So guarantee here.
    guarantee(bc_length > 0, "Verifier should have caught this invalid bytecode");

    switch (c) {
      case Bytecodes::_lookupswitch: {
#ifndef CC_INTERP
        Bytecode_lookupswitch bc(method, bcp);
        (*bcp) = (
          bc.number_of_pairs() < BinarySwitchThreshold
          ? Bytecodes::_fast_linearswitch
          : Bytecodes::_fast_binaryswitch
        );
#endif
        break;
      }
      case Bytecodes::_fast_linearswitch:
      case Bytecodes::_fast_binaryswitch: {
#ifndef CC_INTERP
        (*bcp) = Bytecodes::_lookupswitch;
#endif
        break;
      }

      case Bytecodes::_invokespecial: {
        rewrite_invokespecial(bcp, prefix_length+1, reverse, invokespecial_error);
        break;
      }

      case Bytecodes::_putstatic:
      case Bytecodes::_putfield : {
        if (!reverse) {
          // Check if any final field of the class given as parameter is modified
          // outside of initializer methods of the class. Fields that are modified
          // are marked with a flag. For marked fields, the compilers do not perform
          // constant folding (as the field can be changed after initialization).
          //
          // The check is performed after verification and only if verification has
          // succeeded. Therefore, the class is guaranteed to be well-formed.
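          //
          // Illustrative sketch (names are made up): a putstatic to a static
          // final field CONST from a helper method (not <clinit>) of the same
          // class sets has_initialized_final_update on that field, which later
          // disables constant folding of CONST in the JIT compilers.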
          InstanceKlass* klass = method->method_holder();
          u2 bc_index = Bytes::get_Java_u2(bcp + prefix_length + 1);
          constantPoolHandle cp(method->constants());
          Symbol* ref_class_name = cp->klass_name_at(cp->klass_ref_index_at(bc_index));

          if (klass->name() == ref_class_name) {
            Symbol* field_name = cp->name_ref_at(bc_index);
            Symbol* field_sig = cp->signature_ref_at(bc_index);

            fieldDescriptor fd;
            if (klass->find_field(field_name, field_sig, &fd) != NULL) {
              if (fd.access_flags().is_final()) {
                if (fd.access_flags().is_static()) {
                  if (!method->is_static_initializer()) {
                    fd.set_has_initialized_final_update(true);
                  }
                } else {
                  if (!method->is_object_initializer()) {
                    fd.set_has_initialized_final_update(true);
                  }
                }
              }
            }
          }
        }
      }
      // fall through
      case Bytecodes::_getstatic      : // fall through
      case Bytecodes::_getfield       : // fall through
      case Bytecodes::_invokevirtual  : // fall through
      case Bytecodes::_invokestatic   :
      case Bytecodes::_invokeinterface:
      case Bytecodes::_invokehandle   : // if reverse=true
        rewrite_member_reference(bcp, prefix_length+1, reverse);
        break;
      case Bytecodes::_invokedynamic:
        rewrite_invokedynamic(bcp, prefix_length+1, reverse);
        break;
      case Bytecodes::_ldc:
      case Bytecodes::_fast_aldc:   // if reverse=true
        maybe_rewrite_ldc(bcp, prefix_length+1, false, reverse);
        break;
      case Bytecodes::_ldc_w:
      case Bytecodes::_fast_aldc_w: // if reverse=true
        maybe_rewrite_ldc(bcp, prefix_length+1, true, reverse);
        break;
      case Bytecodes::_jsr          : // fall through
      case Bytecodes::_jsr_w        : nof_jsrs++;                   break;
      case Bytecodes::_monitorenter : // fall through
      case Bytecodes::_monitorexit  : has_monitor_bytecodes = true; break;

      default: break;
    }
  }

  // Update access flags
  if (has_monitor_bytecodes) {
    method->set_has_monitor_bytecodes();
  }

  // The presence of a jsr bytecode implies that the method might have to be
  // rewritten, so we run the oopMapGenerator on the method.
  if (nof_jsrs > 0) {
    method->set_has_jsrs();
    // Second pass will revisit this method.
    assert(method->has_jsrs(), "didn't we just set this?");
  }
}

// After constant pool is created, revisit methods containing jsrs.
methodHandle Rewriter::rewrite_jsrs(const methodHandle& method, TRAPS) {
  ResourceMark rm(THREAD);
  ResolveOopMapConflicts romc(method);
  methodHandle new_method = romc.do_potential_rewrite(CHECK_(methodHandle()));
  // Update monitor matching info.
  if (romc.monitor_safe()) {
    new_method->set_guaranteed_monitor_matching();
  }

  return new_method;
}

void Rewriter::rewrite_bytecodes(TRAPS) {
  assert(_pool->cache() == NULL, "constant pool cache must not be set yet");

  // determine index maps for Method* rewriting
  compute_index_maps();

  if (RegisterFinalizersAtInit && _klass->name() == vmSymbols::java_lang_Object()) {
    bool did_rewrite = false;
    int i = _methods->length();
    while (i-- > 0) {
      Method* method = _methods->at(i);
      if (method->intrinsic_id() == vmIntrinsics::_Object_init) {
        // rewrite the return bytecodes of Object.<init> to register the
        // object for finalization if needed.
        methodHandle m(THREAD, method);
        rewrite_Object_init(m, CHECK);
        did_rewrite = true;
        break;
      }
    }
    assert(did_rewrite, "must find Object::<init> to rewrite it");
  }

  // rewrite methods, in two passes
  int len = _methods->length();
  bool invokespecial_error = false;

  for (int i = len-1; i >= 0; i--) {
    Method* method = _methods->at(i);
    scan_method(method, false, &invokespecial_error);
    if (invokespecial_error) {
      // If an error occurs here, the bytecodes are not reversed.
      // This exception is stored for this class and no further attempt is
      // made at verifying or rewriting.
      THROW_MSG(vmSymbols::java_lang_InternalError(),
                "This classfile overflows invokespecial for interfaces "
                "and cannot be loaded");
      return;
    }
  }

  // May have to fix invokedynamic bytecodes if invokespecial/InterfaceMethodref
  // cp cache entries had to be added.
  patch_invokedynamic_bytecodes();
}

void Rewriter::rewrite(InstanceKlass* klass, TRAPS) {
  if (!DumpSharedSpaces) {
    assert(!klass->is_shared(), "archive methods must not be rewritten at run time");
  }
  ResourceMark rm(THREAD);
  Rewriter rw(klass, klass->constants(), klass->methods(), CHECK);
  // (That's all, folks.)
}

Rewriter::Rewriter(InstanceKlass* klass, const constantPoolHandle& cpool, Array<Method*>* methods, TRAPS)
  : _klass(klass),
    _pool(cpool),
    _methods(methods),
    _cp_map(cpool->length()),
    _cp_cache_map(cpool->length() / 2),
    _reference_map(cpool->length()),
    _resolved_references_map(cpool->length() / 2),
    _invokedynamic_references_map(cpool->length() / 2),
    _method_handle_invokers(cpool->length()),
    _invokedynamic_cp_cache_map(cpool->length() / 4)
{

  // Rewrite bytecodes - exception here exits.
  rewrite_bytecodes(CHECK);

  // Stress restoring bytecodes
  if (StressRewriter) {
    restore_bytecodes();
    rewrite_bytecodes(CHECK);
  }

  // allocate constant pool cache, now that we've seen all the bytecodes
  make_constant_pool_cache(THREAD);

  // Restore bytecodes to their unrewritten state if there are exceptions
  // rewriting bytecodes or allocating the cpCache
  if (HAS_PENDING_EXCEPTION) {
    restore_bytecodes();
    return;
  }

  // Relocate after everything, but still do this under the is_rewritten flag,
  // so methods with jsrs in custom class lists aren't rewritten in the RO
  // section of the shared archive.
  // Relocated bytecodes don't have to be restored, only the cp cache entries.
  int len = _methods->length();
  for (int i = len-1; i >= 0; i--) {
    methodHandle m(THREAD, _methods->at(i));

    if (m->has_jsrs()) {
      m = rewrite_jsrs(m, THREAD);
      // Restore bytecodes to their unrewritten state if there are exceptions
      // relocating bytecodes. If some are relocated, that is ok because that
      // doesn't affect constant pool to cpCache rewriting.
      if (HAS_PENDING_EXCEPTION) {
        restore_bytecodes();
        return;
      }
      // Method might have gotten rewritten.
      methods->at_put(i, m());
    }
  }
}