1 | /* |
2 | * Copyright (c) 2015, 2019, Oracle and/or its affiliates. All rights reserved. |
3 | * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. |
4 | * |
5 | * This code is free software; you can redistribute it and/or modify it |
6 | * under the terms of the GNU General Public License version 2 only, as |
7 | * published by the Free Software Foundation. |
8 | * |
9 | * This code is distributed in the hope that it will be useful, but WITHOUT |
10 | * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or |
11 | * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
12 | * version 2 for more details (a copy is included in the LICENSE file that |
13 | * accompanied this code). |
14 | * |
15 | * You should have received a copy of the GNU General Public License version |
16 | * 2 along with this work; if not, write to the Free Software Foundation, |
17 | * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. |
18 | * |
19 | * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA |
20 | * or visit www.oracle.com if you need additional information or have any |
21 | * questions. |
22 | */ |
23 | |
24 | #ifndef SHARE_GC_Z_C2_ZBARRIERSETC2_HPP |
25 | #define SHARE_GC_Z_C2_ZBARRIERSETC2_HPP |
26 | |
27 | #include "gc/shared/c2/barrierSetC2.hpp" |
28 | #include "memory/allocation.hpp" |
29 | #include "opto/node.hpp" |
30 | #include "utilities/growableArray.hpp" |
31 | |
32 | class ZCompareAndSwapPNode : public CompareAndSwapPNode { |
33 | public: |
34 | ZCompareAndSwapPNode(Node* c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : CompareAndSwapPNode(c, mem, adr, val, ex, mem_ord) { } |
35 | virtual int Opcode() const; |
36 | }; |
37 | |
38 | class ZWeakCompareAndSwapPNode : public WeakCompareAndSwapPNode { |
39 | public: |
40 | ZWeakCompareAndSwapPNode(Node* c, Node *mem, Node *adr, Node *val, Node *ex, MemNode::MemOrd mem_ord) : WeakCompareAndSwapPNode(c, mem, adr, val, ex, mem_ord) { } |
41 | virtual int Opcode() const; |
42 | }; |
43 | |
44 | class ZCompareAndExchangePNode : public CompareAndExchangePNode { |
45 | public: |
46 | ZCompareAndExchangePNode(Node* c, Node *mem, Node *adr, Node *val, Node *ex, const TypePtr* at, const Type* t, MemNode::MemOrd mem_ord) : CompareAndExchangePNode(c, mem, adr, val, ex, at, t, mem_ord) { } |
47 | virtual int Opcode() const; |
48 | }; |
49 | |
50 | class ZGetAndSetPNode : public GetAndSetPNode { |
51 | public: |
52 | ZGetAndSetPNode(Node* c, Node *mem, Node *adr, Node *val, const TypePtr* at, const Type* t) : GetAndSetPNode(c, mem, adr, val, at, t) { } |
53 | virtual int Opcode() const; |
54 | }; |
55 | |
56 | class LoadBarrierNode : public MultiNode { |
57 | private: |
58 | bool _weak; // On strong or weak oop reference |
59 | static bool is_dominator(PhaseIdealLoop* phase, bool linear_only, Node *d, Node *n); |
60 | void push_dominated_barriers(PhaseIterGVN* igvn) const; |
61 | |
62 | public: |
63 | enum { |
64 | Control, |
65 | Memory, |
66 | Oop, |
67 | Address, |
68 | Number_of_Outputs = Address, |
69 | Similar, |
70 | Number_of_Inputs |
71 | }; |
72 | |
73 | LoadBarrierNode(Compile* C, |
74 | Node* c, |
75 | Node* mem, |
76 | Node* val, |
77 | Node* adr, |
78 | bool weak); |
79 | |
80 | virtual int Opcode() const; |
81 | virtual uint size_of() const; |
82 | virtual bool cmp(const Node& n) const; |
83 | virtual const Type *bottom_type() const; |
84 | virtual const TypePtr* adr_type() const; |
85 | virtual const Type *Value(PhaseGVN *phase) const; |
86 | virtual Node *Identity(PhaseGVN *phase); |
87 | virtual Node *Ideal(PhaseGVN *phase, bool can_reshape); |
88 | virtual uint match_edge(uint idx) const; |
89 | |
90 | LoadBarrierNode* has_dominating_barrier(PhaseIdealLoop* phase, |
91 | bool linear_only, |
92 | bool look_for_similar); |
93 | |
94 | void fix_similar_in_uses(PhaseIterGVN* igvn); |
95 | |
96 | bool has_true_uses() const; |
97 | |
98 | bool can_be_eliminated() const { |
99 | return !in(Similar)->is_top(); |
100 | } |
101 | |
102 | bool is_weak() const { |
103 | return _weak; |
104 | } |
105 | }; |
106 | |
107 | class LoadBarrierSlowRegNode : public TypeNode { |
108 | private: |
109 | bool _is_weak; |
110 | public: |
111 | LoadBarrierSlowRegNode(Node *c, |
112 | Node *adr, |
113 | Node *src, |
114 | const TypePtr* t, |
115 | bool weak) : |
116 | TypeNode(t, 3), _is_weak(weak) { |
117 | init_req(1, adr); |
118 | init_req(2, src); |
119 | init_class_id(Class_LoadBarrierSlowReg); |
120 | } |
121 | |
122 | virtual uint size_of() const { |
123 | return sizeof(*this); |
124 | } |
125 | |
126 | virtual const char * name() { |
127 | return "LoadBarrierSlowRegNode" ; |
128 | } |
129 | |
130 | virtual Node *Ideal(PhaseGVN *phase, bool can_reshape) { |
131 | return NULL; |
132 | } |
133 | |
134 | virtual int Opcode() const; |
135 | |
136 | bool is_weak() { return _is_weak; } |
137 | }; |
138 | |
139 | class ZBarrierSetC2State : public ResourceObj { |
140 | private: |
141 | // List of load barrier nodes which need to be expanded before matching |
142 | GrowableArray<LoadBarrierNode*>* _load_barrier_nodes; |
143 | |
144 | public: |
145 | ZBarrierSetC2State(Arena* comp_arena); |
146 | int load_barrier_count() const; |
147 | void add_load_barrier_node(LoadBarrierNode* n); |
148 | void remove_load_barrier_node(LoadBarrierNode* n); |
149 | LoadBarrierNode* load_barrier_node(int idx) const; |
150 | }; |
151 | |
152 | class ZBarrierSetC2 : public BarrierSetC2 { |
153 | private: |
154 | ZBarrierSetC2State* state() const; |
155 | void expand_loadbarrier_node(PhaseMacroExpand* phase, LoadBarrierNode* barrier) const; |
156 | |
157 | #ifdef ASSERT |
158 | void verify_gc_barriers(bool post_parse) const; |
159 | #endif |
160 | |
161 | protected: |
162 | virtual Node* load_at_resolved(C2Access& access, const Type* val_type) const; |
163 | virtual Node* atomic_cmpxchg_val_at_resolved(C2AtomicParseAccess& access, |
164 | Node* expected_val, |
165 | Node* new_val, |
166 | const Type* val_type) const; |
167 | virtual Node* atomic_cmpxchg_bool_at_resolved(C2AtomicParseAccess& access, |
168 | Node* expected_val, |
169 | Node* new_val, |
170 | const Type* value_type) const; |
171 | virtual Node* atomic_xchg_at_resolved(C2AtomicParseAccess& access, |
172 | Node* new_val, |
173 | const Type* val_type) const; |
174 | |
175 | public: |
176 | virtual void* create_barrier_state(Arena* comp_arena) const; |
177 | |
178 | virtual bool has_load_barriers() const { return true; } |
179 | virtual bool is_gc_barrier_node(Node* node) const; |
180 | virtual Node* step_over_gc_barrier(Node* c) const; |
181 | virtual Node* step_over_gc_barrier_ctrl(Node* c) const; |
182 | |
183 | virtual void register_potential_barrier_node(Node* node) const; |
184 | virtual void unregister_potential_barrier_node(Node* node) const; |
185 | virtual void eliminate_gc_barrier(PhaseMacroExpand* macro, Node* node) const { } |
186 | virtual void enqueue_useful_gc_barrier(PhaseIterGVN* igvn, Node* node) const; |
187 | virtual void eliminate_useless_gc_barriers(Unique_Node_List &useful, Compile* C) const; |
188 | |
189 | virtual bool array_copy_requires_gc_barriers(bool tightly_coupled_alloc, BasicType type, bool is_clone, ArrayCopyPhase phase) const; |
190 | |
191 | virtual bool expand_barriers(Compile* C, PhaseIterGVN& igvn) const; |
192 | virtual bool final_graph_reshaping(Compile* compile, Node* n, uint opcode) const; |
193 | virtual bool matcher_find_shared_visit(Matcher* matcher, Matcher::MStack& mstack, Node* n, uint opcode, bool& mem_op, int& mem_addr_idx) const; |
194 | virtual bool matcher_find_shared_post_visit(Matcher* matcher, Node* n, uint opcode) const; |
195 | virtual bool needs_anti_dependence_check(const Node* node) const; |
196 | |
197 | #ifdef ASSERT |
198 | virtual void verify_gc_barriers(Compile* compile, CompilePhase phase) const; |
199 | #endif |
200 | |
201 | // Load barrier insertion and expansion external |
202 | virtual void barrier_insertion_phase(Compile* C, PhaseIterGVN &igvn) const; |
203 | virtual bool optimize_loops(PhaseIdealLoop* phase, LoopOptsMode mode, VectorSet& visited, Node_Stack& nstack, Node_List& worklist) const; |
204 | virtual bool is_gc_specific_loop_opts_pass(LoopOptsMode mode) const { return (mode == LoopOptsZBarrierInsertion); } |
205 | |
206 | private: |
207 | // Load barrier insertion and expansion internal |
208 | void insert_barriers_on_unsafe(PhaseIdealLoop* phase) const; |
209 | void clean_catch_blocks(PhaseIdealLoop* phase) const; |
210 | void insert_load_barriers(PhaseIdealLoop* phase) const; |
211 | LoadNode* insert_one_loadbarrier(PhaseIdealLoop* phase, LoadNode* load, Node* ctrl) const; |
212 | void insert_one_loadbarrier_inner(PhaseIdealLoop* phase, LoadNode* load, Node* ctrl, VectorSet visited) const; |
213 | }; |
214 | |
215 | #endif // SHARE_GC_Z_C2_ZBARRIERSETC2_HPP |
216 | |