23 */
24
25 #ifndef SHARE_VM_OPTO_LOCKNODE_HPP
26 #define SHARE_VM_OPTO_LOCKNODE_HPP
27
28 #include "opto/node.hpp"
29 #include "opto/opcodes.hpp"
30 #include "opto/subnode.hpp"
31
32 class BiasedLockingCounters;
33 class RTMLockingCounters;
34
35 //------------------------------BoxLockNode------------------------------------
36 class BoxLockNode : public Node {
37 const int _slot; // stack slot
38 RegMask _inmask; // OptoReg corresponding to stack slot
39 bool _is_eliminated; // Associated locks were safely eliminated
40
41 public:
42 BoxLockNode( int lock );
43   virtual int Opcode() const; // int return type matches the base Node::Opcode() declaration
44 virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const;
45 virtual uint size(PhaseRegAlloc *ra_) const;
46 virtual const RegMask &in_RegMask(uint) const;
47 virtual const RegMask &out_RegMask() const;
48 virtual uint size_of() const;
49 virtual uint hash() const;
50 virtual uint cmp( const Node &n ) const;
51 virtual const class Type *bottom_type() const { return TypeRawPtr::BOTTOM; }
52 virtual uint ideal_reg() const { return Op_RegP; }
53
54 static OptoReg::Name reg(Node* box_node);
55 static BoxLockNode* box_node(Node* box_node);
56 static bool same_slot(Node* box1, Node* box2) {
57 return box1->as_BoxLock()->_slot == box2->as_BoxLock()->_slot;
58 }
59 int stack_slot() const { return _slot; }
60
61 bool is_eliminated() const { return _is_eliminated; }
62 // mark lock as eliminated.
63 void set_eliminated() { _is_eliminated = true; }
78 RTMLockingCounters* _rtm_counters; // RTM lock counters for inflated locks
79 RTMLockingCounters* _stack_rtm_counters; // RTM lock counters for stack locks
80
81 public:
82 FastLockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
83 init_req(0,ctrl);
84 init_class_id(Class_FastLock);
85 _counters = NULL;
86 _rtm_counters = NULL;
87 _stack_rtm_counters = NULL;
88 }
89 Node* obj_node() const { return in(1); }
90 Node* box_node() const { return in(2); }
91 void set_box_node(Node* box) { set_req(2, box); }
92
93   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
94 // LockNode/UnLockNode to avoid creating Phi's.
95 virtual uint hash() const ; // { return NO_HASH; }
96 virtual uint size_of() const;
97 virtual uint cmp( const Node &n ) const ; // Always fail, except on self
98 virtual int Opcode() const;
99 virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
100 const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
101
102 void create_lock_counter(JVMState* s);
103 void create_rtm_lock_counter(JVMState* state);
104 BiasedLockingCounters* counters() const { return _counters; }
105 RTMLockingCounters* rtm_counters() const { return _rtm_counters; }
106 RTMLockingCounters* stack_rtm_counters() const { return _stack_rtm_counters; }
107 };
108
109
110 //------------------------------FastUnlockNode---------------------------------
111 class FastUnlockNode: public CmpNode { // compare-style node; Value()/sub() always yield TypeInt::CC
112 public:
113   FastUnlockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
114     init_req(0,ctrl); // control input in slot 0; oop/box become in(1)/in(2) via CmpNode
115     init_class_id(Class_FastUnlock);
116   }
117   Node* obj_node() const { return in(1); } // the locked object (oop input)
118   Node* box_node() const { return in(2); } // the associated BoxLockNode (stack-slot) input
119
120
121   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
122   // LockNode/UnLockNode to avoid creating Phi's.
123   virtual uint hash() const ; // { return NO_HASH; }
124   virtual uint cmp( const Node &n ) const ; // Always fail, except on self
125   virtual int Opcode() const;
126   virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
127   const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
128
129 };
130
131 #endif // SHARE_VM_OPTO_LOCKNODE_HPP
|
23 */
24
25 #ifndef SHARE_VM_OPTO_LOCKNODE_HPP
26 #define SHARE_VM_OPTO_LOCKNODE_HPP
27
28 #include "opto/node.hpp"
29 #include "opto/opcodes.hpp"
30 #include "opto/subnode.hpp"
31
32 class BiasedLockingCounters;
33 class RTMLockingCounters;
34
35 //------------------------------BoxLockNode------------------------------------
36 class BoxLockNode : public Node {
37 const int _slot; // stack slot
38 RegMask _inmask; // OptoReg corresponding to stack slot
39 bool _is_eliminated; // Associated locks were safely eliminated
40
41 public:
42 BoxLockNode( int lock );
43   virtual int Opcode() const; // was 'uint': conflicts with Node::Opcode()'s int return type and fails to compile as an override
44 virtual void emit(CodeBuffer &cbuf, PhaseRegAlloc *ra_) const;
45 virtual uint size(PhaseRegAlloc *ra_) const;
46 virtual const RegMask &in_RegMask(uint) const;
47 virtual const RegMask &out_RegMask() const;
48 virtual uint size_of() const;
49 virtual uint hash() const;
50 virtual uint cmp( const Node &n ) const;
51 virtual const class Type *bottom_type() const { return TypeRawPtr::BOTTOM; }
52 virtual uint ideal_reg() const { return Op_RegP; }
53
54 static OptoReg::Name reg(Node* box_node);
55 static BoxLockNode* box_node(Node* box_node);
56 static bool same_slot(Node* box1, Node* box2) {
57 return box1->as_BoxLock()->_slot == box2->as_BoxLock()->_slot;
58 }
59 int stack_slot() const { return _slot; }
60
61 bool is_eliminated() const { return _is_eliminated; }
62 // mark lock as eliminated.
63 void set_eliminated() { _is_eliminated = true; }
78 RTMLockingCounters* _rtm_counters; // RTM lock counters for inflated locks
79 RTMLockingCounters* _stack_rtm_counters; // RTM lock counters for stack locks
80
81 public:
82 FastLockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
83 init_req(0,ctrl);
84 init_class_id(Class_FastLock);
85 _counters = NULL;
86 _rtm_counters = NULL;
87 _stack_rtm_counters = NULL;
88 }
89 Node* obj_node() const { return in(1); }
90 Node* box_node() const { return in(2); }
91 void set_box_node(Node* box) { set_req(2, box); }
92
93   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
94 // LockNode/UnLockNode to avoid creating Phi's.
95 virtual uint hash() const ; // { return NO_HASH; }
96 virtual uint size_of() const;
97 virtual uint cmp( const Node &n ) const ; // Always fail, except on self
98   virtual int Opcode() const; // was 'uint': conflicts with Node::Opcode()'s int return type and fails to compile as an override
99 virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
100 const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
101
102 void create_lock_counter(JVMState* s);
103 void create_rtm_lock_counter(JVMState* state);
104 BiasedLockingCounters* counters() const { return _counters; }
105 RTMLockingCounters* rtm_counters() const { return _rtm_counters; }
106 RTMLockingCounters* stack_rtm_counters() const { return _stack_rtm_counters; }
107 };
108
109
110 //------------------------------FastUnlockNode---------------------------------
111 class FastUnlockNode: public CmpNode { // compare-style node; Value()/sub() always yield TypeInt::CC
112 public:
113   FastUnlockNode(Node *ctrl, Node *oop, Node *box) : CmpNode(oop,box) {
114     init_req(0,ctrl); // control input in slot 0; oop/box become in(1)/in(2) via CmpNode
115     init_class_id(Class_FastUnlock);
116   }
117   Node* obj_node() const { return in(1); } // the locked object (oop input)
118   Node* box_node() const { return in(2); } // the associated BoxLockNode (stack-slot) input
119
120
121   // FastLock and FastUnlockNode do not hash, we need one for each corresponding
122   // LockNode/UnLockNode to avoid creating Phi's.
123   virtual uint hash() const ; // { return NO_HASH; }
124   virtual uint cmp( const Node &n ) const ; // Always fail, except on self
125   virtual int Opcode() const; // was 'uint': conflicts with Node::Opcode()'s int return type and fails to compile as an override
126   virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
127   const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
128
129 };
130
131 #endif // SHARE_VM_OPTO_LOCKNODE_HPP
|