/*
 * Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "precompiled.hpp"
#include "c1/c1_LIR.hpp"
#include "c1/c1_LIRGenerator.hpp"
#include "c1/c1_CodeStubs.hpp"
#include "gc/z/c1/zBarrierSetC1.hpp"
#include "gc/z/zBarrierSet.hpp"
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "utilities/macros.hpp"

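// Slow-path stub for the C1 load barrier. It captures the loaded reference,
// the address it was loaded from, an optional temporary register and the
// out-of-line runtime stub to call when the fast-path test fails.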
ZLoadBarrierStubC1::ZLoadBarrierStubC1(LIRAccess& access, LIR_Opr ref, address runtime_stub) :
    _decorators(access.decorators()),
    _ref_addr(access.resolved_addr()),
    _ref(ref),
    _tmp(LIR_OprFact::illegalOpr),
    _runtime_stub(runtime_stub) {

  assert(_ref_addr->is_address(), "Must be an address");
  assert(_ref->is_register(), "Must be a register");

  // Allocate tmp register if needed
  if (_ref_addr->as_address_ptr()->index()->is_valid() ||
      _ref_addr->as_address_ptr()->disp() != 0) {
    // Has index or displacement, need tmp register to load address into
    _tmp = access.gen()->new_pointer_register();
  }
}

DecoratorSet ZLoadBarrierStubC1::decorators() const {
  return _decorators;
}

LIR_Opr ZLoadBarrierStubC1::ref() const {
  return _ref;
}

LIR_Opr ZLoadBarrierStubC1::ref_addr() const {
  return _ref_addr;
}

LIR_Opr ZLoadBarrierStubC1::tmp() const {
  return _tmp;
}

address ZLoadBarrierStubC1::runtime_stub() const {
  return _runtime_stub;
}

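// Report operands to the register allocator: the reference address is an
// input, the loaded reference is an output, and the optional tmp register
// may be clobbered by the stub.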
void ZLoadBarrierStubC1::visit(LIR_OpVisitState* visitor) {
  visitor->do_slow_case();
  visitor->do_input(_ref_addr);
  visitor->do_output(_ref);
  if (_tmp->is_valid()) {
    visitor->do_temp(_tmp);
  }
}

void ZLoadBarrierStubC1::emit_code(LIR_Assembler* ce) {
  ZBarrierSet::assembler()->generate_c1_load_barrier_stub(ce, this);
}

#ifndef PRODUCT
void ZLoadBarrierStubC1::print_name(outputStream* out) const {
  out->print("ZLoadBarrierStubC1");
}
#endif // PRODUCT

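// LIR operation emitting the fast-path check of a loaded reference against
// the thread-local bad mask. The platform-specific assembler sets the
// condition flags consumed by the conditional branch that follows this op.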
class LIR_OpZLoadBarrierTest : public LIR_Op {
private:
  LIR_Opr _opr;

public:
  LIR_OpZLoadBarrierTest(LIR_Opr opr) :
      LIR_Op(),
      _opr(opr) {}

  virtual void visit(LIR_OpVisitState* state) {
    state->do_input(_opr);
  }

  virtual void emit_code(LIR_Assembler* ce) {
    ZBarrierSet::assembler()->generate_c1_load_barrier_test(ce, _opr);
  }

  virtual void print_instr(outputStream* out) const {
    _opr->print(out);
    out->print(" ");
  }

#ifndef PRODUCT
  virtual const char* name() const {
    return "lir_z_load_barrier_test";
  }
#endif // PRODUCT
};

static bool barrier_needed(LIRAccess& access) {
  return ZBarrierSet::barrier_needed(access.decorators(), access.type());
}

ZBarrierSetC1::ZBarrierSetC1() :
    _load_barrier_on_oop_field_preloaded_runtime_stub(NULL),
    _load_barrier_on_weak_oop_field_preloaded_runtime_stub(NULL) {}

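// Select the pre-generated runtime stub matching the reference strength of
// the access: weak oop loads get the weak variant, all other loads get the
// strong variant.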
address ZBarrierSetC1::load_barrier_on_oop_field_preloaded_runtime_stub(DecoratorSet decorators) const {
  assert((decorators & ON_PHANTOM_OOP_REF) == 0, "Unsupported decorator");
  //assert((decorators & ON_UNKNOWN_OOP_REF) == 0, "Unsupported decorator");

  if ((decorators & ON_WEAK_OOP_REF) != 0) {
    return _load_barrier_on_weak_oop_field_preloaded_runtime_stub;
  } else {
    return _load_barrier_on_oop_field_preloaded_runtime_stub;
  }
}

#ifdef ASSERT
#define __ access.gen()->lir(__FILE__, __LINE__)->
#else
#define __ access.gen()->lir()->
#endif

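// Emit the load barrier: a fast-path test of the loaded reference followed
// by a conditional branch to a ZLoadBarrierStubC1 slow path, which calls
// into the ZGC runtime to heal the reference if the test fails.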
void ZBarrierSetC1::load_barrier(LIRAccess& access, LIR_Opr result) const {
  // Fast path
  __ append(new LIR_OpZLoadBarrierTest(result));

  // Slow path
  const address runtime_stub = load_barrier_on_oop_field_preloaded_runtime_stub(access.decorators());
  CodeStub* const stub = new ZLoadBarrierStubC1(access, result, runtime_stub);
  __ branch(lir_cond_notEqual, T_ADDRESS, stub);
  __ branch_destination(stub->continuation());
}

LIR_Opr ZBarrierSetC1::resolve_address(LIRAccess& access, bool resolve_in_register) {
  // We must resolve in register when patching. This is to avoid
  // having a patch area in the load barrier stub, since the call
  // into the runtime to patch will not have the proper oop map.
  const bool patch_before_barrier = barrier_needed(access) && (access.decorators() & C1_NEEDS_PATCHING) != 0;
  return BarrierSetC1::resolve_address(access, resolve_in_register || patch_before_barrier);
}

#undef __

void ZBarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
  BarrierSetC1::load_at_resolved(access, result);

  if (barrier_needed(access)) {
    load_barrier(access, result);
  }
}

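// Emit a synthetic load (with load barrier) of the field that is about to be
// operated on atomically. This heals the oop field up front, so the following
// xchg/cmpxchg does not operate on a stale reference. The loaded value itself
// is discarded into a fresh register.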
static void pre_load_barrier(LIRAccess& access) {
  DecoratorSet decorators = access.decorators();

  // Downgrade access to MO_UNORDERED
  decorators = (decorators & ~MO_DECORATOR_MASK) | MO_UNORDERED;

  // Remove ACCESS_WRITE
  decorators = (decorators & ~ACCESS_WRITE);

  // Generate synthetic load at
  access.gen()->access_load_at(decorators,
                               access.type(),
                               access.base().item(),
                               access.offset().opr(),
                               access.gen()->new_register(access.type()),
                               NULL /* patch_emit_info */,
                               NULL /* load_emit_info */);
}

LIR_Opr ZBarrierSetC1::atomic_xchg_at_resolved(LIRAccess& access, LIRItem& value) {
  if (barrier_needed(access)) {
    pre_load_barrier(access);
  }

  return BarrierSetC1::atomic_xchg_at_resolved(access, value);
}

LIR_Opr ZBarrierSetC1::atomic_cmpxchg_at_resolved(LIRAccess& access, LIRItem& cmp_value, LIRItem& new_value) {
  if (barrier_needed(access)) {
    pre_load_barrier(access);
  }

  return BarrierSetC1::atomic_cmpxchg_at_resolved(access, cmp_value, new_value);
}

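// Closure used with Runtime1::generate_blob() to assemble the out-of-line
// load barrier runtime stubs, one per reference strength.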
class ZLoadBarrierRuntimeStubCodeGenClosure : public StubAssemblerCodeGenClosure {
private:
  const DecoratorSet _decorators;

public:
  ZLoadBarrierRuntimeStubCodeGenClosure(DecoratorSet decorators) :
      _decorators(decorators) {}

  virtual OopMapSet* generate_code(StubAssembler* sasm) {
    ZBarrierSet::assembler()->generate_c1_load_barrier_runtime_stub(sasm, _decorators);
    return NULL;
  }
};

static address generate_c1_runtime_stub(BufferBlob* blob, DecoratorSet decorators, const char* name) {
  ZLoadBarrierRuntimeStubCodeGenClosure cl(decorators);
  CodeBlob* const code_blob = Runtime1::generate_blob(blob, -1 /* stub_id */, name, false /* expect_oop_map */, &cl);
  return code_blob->code_begin();
}

void ZBarrierSetC1::generate_c1_runtime_stubs(BufferBlob* blob) {
  _load_barrier_on_oop_field_preloaded_runtime_stub =
    generate_c1_runtime_stub(blob, ON_STRONG_OOP_REF, "load_barrier_on_oop_field_preloaded_runtime_stub");
  _load_barrier_on_weak_oop_field_preloaded_runtime_stub =
    generate_c1_runtime_stub(blob, ON_WEAK_OOP_REF, "load_barrier_on_weak_oop_field_preloaded_runtime_stub");
}