/*
 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#ifndef SHARE_OPTO_LOCKNODE_HPP
#define SHARE_OPTO_LOCKNODE_HPP

#include "opto/node.hpp"
#include "opto/opcodes.hpp"
#include "opto/subnode.hpp"

class BiasedLockingCounters;
class RTMLockingCounters;

//------------------------------BoxLockNode------------------------------------
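// Models the stack slot that holds the lock record (BasicLock) of a
// synchronized region in compiled code.  Locks and unlocks of the same monitor
// refer to the same BoxLock, identified by its stack slot.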
class BoxLockNode : public Node {
  const int     _slot; // stack slot
  RegMask     _inmask; // OptoReg corresponding to stack slot
  bool _is_eliminated; // Associated locks were safely eliminated

public:
  BoxLockNode( int lock );
  virtual int Opcode() const;
  virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const;
  virtual uint size(PhaseRegAlloc *ra_) const;
  virtual const RegMask &in_RegMask(uint) const;
  virtual const RegMask &out_RegMask() const;
  virtual uint size_of() const;
  virtual uint hash() const;
  virtual bool cmp( const Node &n ) const;
  virtual const class Type *bottom_type() const { return TypeRawPtr::BOTTOM; }
  virtual uint ideal_reg() const { return Op_RegP; }

  static OptoReg::Name reg(Node* box_node);
  static BoxLockNode* box_node(Node* box_node);
  static bool same_slot(Node* box1, Node* box2) {
    return box1->as_BoxLock()->_slot == box2->as_BoxLock()->_slot;
  }
  int stack_slot() const { return _slot; }

  bool is_eliminated() const { return _is_eliminated; }
  // Mark lock as eliminated.
  void set_eliminated()      { _is_eliminated = true; }

  // Is BoxLock node used for one simple lock region?
  bool is_simple_lock_region(LockNode** unique_lock, Node* obj);

#ifndef PRODUCT
  virtual void format( PhaseRegAlloc *, outputStream *st ) const;
  virtual void dump_spec(outputStream *st) const { st->print(" Lock %d", _slot); }
#endif
};

//------------------------------FastLockNode-----------------------------------
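// Inlined fast path of monitor enter.  Modeled as a compare of the object with
// its BoxLock stack slot that produces a condition code; if the fast path
// fails, the generated code branches to the slow-path runtime lock call.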
class FastLockNode: public CmpNode {
private:
  BiasedLockingCounters*        _counters; // biased locking counters
  RTMLockingCounters*       _rtm_counters; // RTM lock counters for inflated locks
  RTMLockingCounters* _stack_rtm_counters; // RTM lock counters for stack locks

public:
  FastLockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
    init_req(0,ctrl);
    init_class_id(Class_FastLock);
    _counters = NULL;
    _rtm_counters = NULL;
    _stack_rtm_counters = NULL;
  }
  Node* obj_node() const { return in(1); }
  Node* box_node() const { return in(2); }
  void set_box_node(Node* box) { set_req(2, box); }

  // FastLock and FastUnlock nodes do not hash; we need a distinct node for each
  // corresponding LockNode/UnlockNode to avoid creating Phis.
  virtual uint hash() const;                // { return NO_HASH; }
  virtual uint size_of() const;
  virtual bool cmp( const Node &n ) const;  // Always fail, except on self
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
  const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC; }

  void create_lock_counter(JVMState* s);
  void create_rtm_lock_counter(JVMState* state);
  BiasedLockingCounters* counters() const { return _counters; }
  RTMLockingCounters* rtm_counters() const { return _rtm_counters; }
  RTMLockingCounters* stack_rtm_counters() const { return _stack_rtm_counters; }
};

//------------------------------FastUnlockNode---------------------------------
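// Inlined fast path of monitor exit; the unlock counterpart of FastLockNode.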
class FastUnlockNode: public CmpNode {
public:
  FastUnlockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
    init_req(0,ctrl);
    init_class_id(Class_FastUnlock);
  }
  Node* obj_node() const { return in(1); }
  Node* box_node() const { return in(2); }

  // FastLock and FastUnlock nodes do not hash; we need a distinct node for each
  // corresponding LockNode/UnlockNode to avoid creating Phis.
  virtual uint hash() const;                // { return NO_HASH; }
  virtual bool cmp( const Node &n ) const;  // Always fail, except on self
  virtual int Opcode() const;
  virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
  const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC; }

};

#endif // SHARE_OPTO_LOCKNODE_HPP