/*
 * Copyright (c) 1997, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "asm/macroAssembler.inline.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/icBuffer.hpp"
#include "code/nmethod.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/safepoint.hpp"

// ----------------------------------------------------------------------------

#define __ _masm.
address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark) {
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling interpreted code.
  // movq rbx, 0
  // jmp -5 # to self
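  // Once the call site is resolved, set_to_interpreted() below patches the
  // movq with the callee Method* (which the interpreter's c2i entry expects
  // in rbx) and redirects the jmp from itself to the interpreted entry point.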

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_interp_stub_size());
  if (base == NULL) {
    return NULL;  // CodeBuffer::expand failed.
  }
  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, false), Assembler::imm_operand);
  // Static stub relocation also tags the Method* in the code-stream.
  __ mov_metadata(rbx, (Metadata*) NULL);  // Method is zapped till fixup time.
  // This is recognized as unresolved by relocs/nativeinst/ic code.
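  // The __ pc() argument is evaluated before the jmp is emitted, so the jump
  // targets its own first byte, i.e. the "jmp -5 # to self" pattern above.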
  __ jump(RuntimeAddress(__ pc()));

  assert(__ pc() - base <= to_interp_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
  return base;
}
#undef __

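// Size of the stub emitted above.  On LP64, mov_metadata expands to a
// REX-prefixed movq with a 64-bit immediate (1 + 1 + 8 bytes) and the jump to
// a rel32 jmp (1 + 4 bytes); the 32-bit figure assumes a movl imm32 (1 + 4)
// followed by a rel32 jmp (1 + 4).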
int CompiledStaticCall::to_interp_stub_size() {
  return NOT_LP64(10)    // movl; jmp
         LP64_ONLY(15);  // movq (1+1+8); jmp (1+4)
}

int CompiledStaticCall::to_trampoline_stub_size() {
  // x86 doesn't use trampolines.
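  // A rel32 call or jmp can reach anywhere in the (at most 2GB) code cache,
  // so no trampoline islands are needed, unlike e.g. AArch64.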
  return 0;
}

// Relocation entries for call stub, compiled java to interpreter.
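// The three relocations in emit_to_interp_stub are the static_stub relocation,
// the metadata relocation tagging the Method* immediate, and the relocation on
// the self-jump; the fourth entry covers the call instruction itself.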
int CompiledStaticCall::reloc_to_interp_stub() {
  return 4; // 3 in emit_to_interp_stub + 1 in emit_call
}

#if INCLUDE_AOT
#define __ _masm.
void CompiledStaticCall::emit_to_aot_stub(CodeBuffer &cbuf, address mark) {
  if (!UseAOT) {
    return;
  }
  // Stub is fixed up when the corresponding call is converted from
  // calling compiled code to calling aot code.
  // movq rax, imm64_aot_code_address
  // jmp rax
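  // An indirect jump through rax is used here since AOT code, loaded as a
  // shared library, need not be within rel32 range of the code cache.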

  if (mark == NULL) {
    mark = cbuf.insts_mark();  // Get mark within main instrs section.
  }

  // Note that the code buffer's insts_mark is always relative to insts.
  // That's why we must use the macroassembler to generate a stub.
  MacroAssembler _masm(&cbuf);

  address base = __ start_a_stub(to_aot_stub_size());
  guarantee(base != NULL, "out of space");

  // Static stub relocation stores the instruction address of the call.
  __ relocate(static_stub_Relocation::spec(mark, true /* is_aot */), Assembler::imm_operand);
  // Load destination AOT code address.
#ifdef _LP64
  __ mov64(rax, CONST64(0));  // address is zapped till fixup time.
#else
  __ movl(rax, 0);  // address is zapped till fixup time.
#endif
  // This is recognized as unresolved by relocs/nativeinst/ic code.
  __ jmp(rax);

  assert(__ pc() - base <= to_aot_stub_size(), "wrong stub size");

  // Update current stubs pointer and restore insts_end.
  __ end_a_stub();
}
#undef __

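// Size of the stub emitted above: on LP64 a movq rax, imm64 (1 + 1 + 8 bytes)
// followed by a two-byte register-indirect jmp; the 32-bit figure assumes a
// movl imm32 (1 + 4) plus the same two-byte jmp.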
int CompiledStaticCall::to_aot_stub_size() {
  if (UseAOT) {
    return NOT_LP64(7)    // movl; jmp
           LP64_ONLY(12);  // movq (1+1+8); jmp (2)
  } else {
    return 0;
  }
}

// Relocation entries for call stub, compiled java to aot.
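// Only the static_stub relocation is recorded in the stub itself: the mov of
// the AOT entry address is emitted without a relocation and the register-
// indirect jmp needs none.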
int CompiledStaticCall::reloc_to_aot_stub() {
  if (UseAOT) {
    return 2; // 1 in emit_to_aot_stub + 1 in emit_call
  } else {
    return 0;
  }
}
#endif // INCLUDE_AOT

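// Patching order matters for MT-safety below: the stub's Method* and jump
// destination are filled in first, and only then is the call itself redirected
// to the stub via set_destination_mt_safe(), so a concurrently executing
// thread observes either the old call target or a fully initialized stub.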
void CompiledDirectStaticCall::set_to_interpreted(const methodHandle& callee, address entry) {
  address stub = find_stub(false /* is_aot */);
  guarantee(stub != NULL, "stub not found");

  if (TraceICs) {
    ResourceMark rm;
    tty->print_cr("CompiledDirectStaticCall@" INTPTR_FORMAT ": set_to_interpreted %s",
                  p2i(instruction_address()),
                  callee->name_and_sig_as_C_string());
  }

  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

#ifdef ASSERT
  Method* old_method = reinterpret_cast<Method*>(method_holder->data());
  address destination = jump->jump_destination();
  assert(old_method == NULL || old_method == callee() ||
         !old_method->method_holder()->is_loader_alive(),
         "a) MT-unsafe modification of inline cache");
  assert(destination == (address)-1 || destination == entry,
         "b) MT-unsafe modification of inline cache");
#endif

  // Update stub.
  method_holder->set_data((intptr_t)callee());
  jump->set_jump_destination(entry);

  // Update jump to call.
  set_destination_mt_safe(stub);
}

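// Resets the stub to the unresolved state produced by emit_to_interp_stub:
// a NULL Method* and a jump destination of -1, which NativeJump encodes as a
// jump to self.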
void CompiledDirectStaticCall::set_stub_to_clean(static_stub_Relocation* static_stub) {
  assert(CompiledICLocker::is_safe(static_stub->addr()), "mt unsafe call");
  // Reset stub.
  address stub = static_stub->addr();
  assert(stub != NULL, "stub not found");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  method_holder->set_data(0);
  if (!static_stub->is_aot()) {
    NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());
    jump->set_jump_destination((address)-1);
  }
}


//-----------------------------------------------------------------------------
// Non-product mode code
#ifndef PRODUCT

void CompiledDirectStaticCall::verify() {
  // Verify call.
  _call->verify();
  _call->verify_alignment();

#ifdef ASSERT
  CodeBlob *cb = CodeCache::find_blob_unsafe((address) _call);
  assert(cb && !cb->is_aot(), "CompiledDirectStaticCall cannot be used on AOTCompiledMethod");
#endif

  // Verify stub.
  address stub = find_stub(false /* is_aot */);
  assert(stub != NULL, "no stub found for static call");
  // Creation also verifies the object.
  NativeMovConstReg* method_holder = nativeMovConstReg_at(stub);
  NativeJump* jump = nativeJump_at(method_holder->next_instruction_address());

  // Verify state.
  assert(is_clean() || is_call_to_compiled() || is_call_to_interpreted(), "sanity check");
}
#endif // !PRODUCT