55 class UnlockNode;
56 class JVMState;
57 class OopMap;
58 class State;
59 class StartNode;
60 class MachCallNode;
61 class FastLockNode;
62
63 //------------------------------StartNode--------------------------------------
64 // The method start node
class StartNode : public MultiNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  const TypeTuple *_domain;     // Type signature of the incoming parameters
  // req(0) is a self-loop anchoring this node as the CFG entry;
  // req(1) ties it to the Root node.
  StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) {
    init_class_id(Class_Start);
    init_req(0,this);
    init_req(1,root);
  }
  virtual int Opcode() const;
  virtual bool pinned() const { return true; };
  virtual const Type *bottom_type() const;
  virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // Map incoming argument types (sig_bt) to machine register pairs.
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const;
  virtual const RegMask &in_RegMask(uint) const;
  // Convert an outgoing projection into its machine-level equivalent.
  virtual Node *match( const ProjNode *proj, const Matcher *m );
  virtual uint ideal_reg() const { return 0; } // Start itself produces no register value
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};
90
91 //------------------------------StartOSRNode-----------------------------------
92 // The method start node for on stack replacement code
class StartOSRNode : public StartNode {
public:
  StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {}
  virtual int Opcode() const;
  // Parameter signature shared by all OSR entry points.
  static const TypeTuple *osr_domain();
};
99
100
101 //------------------------------ParmNode---------------------------------------
102 // Incoming parameters
class ParmNode : public ProjNode {
  static const char * const names[TypeFunc::Parms+1]; // Printable names for the fixed projections
public:
  // 'con' selects which output of the StartNode this parameter projects.
  ParmNode( StartNode *src, uint con ) : ProjNode(src,con) {
    init_class_id(Class_Parm);
  }
  virtual int Opcode() const;
  // Only the control projection participates in the CFG.
  virtual bool is_CFG() const { return (_con == TypeFunc::Control); }
  virtual uint ideal_reg() const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
  virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
#endif
};
118
119
120 //------------------------------ReturnNode-------------------------------------
121 // Return from subroutine node
class ReturnNode : public Node {
public:
  // 'edges' is the total input count; the fixed inputs are control, i_o,
  // memory, return address and frame pointer (subclasses append more).
  ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr );
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint ideal_reg() const { return NotAMachineReg; } // Produces no register value
  virtual uint match_edge(uint idx) const;
#ifndef PRODUCT
  virtual void dump_req(outputStream *st = tty) const;
#endif
};
137
138
139 //------------------------------RethrowNode------------------------------------
140 // Rethrow of exception at call site. Ends a procedure before rethrowing;
141 // ends the current basic block like a ReturnNode. Restores registers and
142 // unwinds stack. Rethrow happens in the caller's method.
class RethrowNode : public Node {
public:
  // Inputs mirror ReturnNode's plus the exception oop being rethrown.
  RethrowNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *ret_adr, Node *exception );
  virtual int Opcode() const;
  virtual bool is_CFG() const { return true; }
  virtual uint hash() const { return NO_HASH; }  // CFG nodes do not hash
  virtual bool depends_only_on_test() const { return false; }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  virtual const Type* Value(PhaseGVN* phase) const;
  virtual uint match_edge(uint idx) const;
  virtual uint ideal_reg() const { return NotAMachineReg; } // Produces no register value
#ifndef PRODUCT
  virtual void dump_req(outputStream *st = tty) const;
#endif
};
158
159
160 //------------------------------TailCallNode-----------------------------------
161 // Pop stack frame and jump indirect
class TailCallNode : public ReturnNode {
public:
  // Appends two extra inputs beyond the standard return inputs:
  // the jump target address and the method oop.
  // NOTE(review): (frameptr, retadr) order here differs from the declared
  // (retadr, frameptr) order of the ReturnNode constructor — confirm intended.
  TailCallNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr, Node *target, Node *moop )
    : ReturnNode( TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, retadr ) {
    init_req(TypeFunc::Parms, target);
    init_req(TypeFunc::Parms+1, moop);
  }

  virtual int Opcode() const;
  virtual uint match_edge(uint idx) const;
};
173
174 //------------------------------TailJumpNode-----------------------------------
175 // Pop stack frame and jump indirect
class TailJumpNode : public ReturnNode {
public:
  // Appends the jump target and the exception oop; the return-address
  // slot of the base ReturnNode is filled with top() (no return address).
  TailJumpNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *target, Node *ex_oop)
    : ReturnNode(TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, Compile::current()->top()) {
    init_req(TypeFunc::Parms, target);
    init_req(TypeFunc::Parms+1, ex_oop);
  }

  virtual int Opcode() const;
  virtual uint match_edge(uint idx) const;
};
187
188 //-------------------------------JVMState-------------------------------------
189 // A linked list of JVMState nodes captures the whole interpreter state,
190 // plus GC roots, for all active calls at some call site in this compilation
191 // unit. (If there is no inlining, then the list has exactly one link.)
192 // This provides a way to map the optimized program back into the interpreter,
193 // or to let the GC mark the stack.
194 class JVMState : public ResourceObj {
195 friend class VMStructs;
196 public:
197 typedef enum {
198 Reexecute_Undefined = -1, // not defined -- will be translated into false later
199 Reexecute_False = 0, // false -- do not reexecute
200 Reexecute_True = 1 // true -- reexecute the bytecode
201 } ReexecuteState; //Reexecute State
202
203 private:
204 JVMState* _caller; // List pointer for forming scope chains
446 void record_replaced_node(Node* initial, Node* improved) {
447 _replaced_nodes.record(initial, improved);
448 }
449 void transfer_replaced_nodes_from(SafePointNode* sfpt, uint idx = 0) {
450 _replaced_nodes.transfer_from(sfpt->_replaced_nodes, idx);
451 }
452 void delete_replaced_nodes() {
453 _replaced_nodes.reset();
454 }
455 void apply_replaced_nodes() {
456 _replaced_nodes.apply(this);
457 }
458 void merge_replaced_nodes_with(SafePointNode* sfpt) {
459 _replaced_nodes.merge_with(sfpt->_replaced_nodes);
460 }
461 bool has_replaced_nodes() const {
462 return !_replaced_nodes.is_empty();
463 }
464
465 // Standard Node stuff
466 virtual int Opcode() const;
467 virtual bool pinned() const { return true; }
468 virtual const Type* Value(PhaseGVN* phase) const;
469 virtual const Type *bottom_type() const { return Type::CONTROL; }
470 virtual const TypePtr *adr_type() const { return _adr_type; }
471 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
472 virtual Node* Identity(PhaseGVN* phase);
473 virtual uint ideal_reg() const { return 0; }
474 virtual const RegMask &in_RegMask(uint) const;
475 virtual const RegMask &out_RegMask() const;
476 virtual uint match_edge(uint idx) const;
477
478 static bool needs_polling_address_input();
479
480 #ifndef PRODUCT
481 virtual void dump_spec(outputStream *st) const;
482 virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
483 #endif
484 };
485
486 //------------------------------SafePointScalarObjectNode----------------------
// A SafePointScalarObjectNode holds the state of a scalarized object
// at a safepoint.
489
490 class SafePointScalarObjectNode: public TypeNode {
491 uint _first_index; // First input edge relative index of a SafePoint node where
492 // states of the scalarized object fields are collected.
493 // It is relative to the last (youngest) jvms->_scloff.
494 uint _n_fields; // Number of non-static fields of the scalarized object.
495 DEBUG_ONLY(AllocateNode* _alloc;)
496
497 virtual uint hash() const ; // { return NO_HASH; }
498 virtual uint cmp( const Node &n ) const;
499
500 uint first_index() const { return _first_index; }
501
502 public:
503 SafePointScalarObjectNode(const TypeOopPtr* tp,
504 #ifdef ASSERT
505 AllocateNode* alloc,
506 #endif
507 uint first_index, uint n_fields);
508 virtual int Opcode() const;
509 virtual uint ideal_reg() const;
510 virtual const RegMask &in_RegMask(uint) const;
511 virtual const RegMask &out_RegMask() const;
512 virtual uint match_edge(uint idx) const;
513
514 uint first_index(JVMState* jvms) const {
515 assert(jvms != NULL, "missed JVMS");
516 return jvms->scloff() + _first_index;
517 }
518 uint n_fields() const { return _n_fields; }
519
520 #ifdef ASSERT
521 AllocateNode* alloc() const { return _alloc; }
522 #endif
523
524 virtual uint size_of() const { return sizeof(*this); }
525
526 // Assumes that "this" is an argument to a safepoint node "s", and that
527 // "new_call" is being created to correspond to "s". But the difference
528 // between the start index of the jvmstates of "new_call" and "s" is
654 protected:
655 virtual uint cmp( const Node &n ) const;
656 virtual uint size_of() const; // Size is bigger
657
658 bool _optimized_virtual;
659 bool _method_handle_invoke;
660 bool _override_symbolic_info; // Override symbolic call site info from bytecode
661 ciMethod* _method; // Method being direct called
662 public:
663 const int _bci; // Byte Code Index of call byte code
664 CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci)
665 : CallNode(tf, addr, TypePtr::BOTTOM),
666 _method(method), _bci(bci),
667 _optimized_virtual(false),
668 _method_handle_invoke(false),
669 _override_symbolic_info(false)
670 {
671 init_class_id(Class_CallJava);
672 }
673
674 virtual int Opcode() const;
675 ciMethod* method() const { return _method; }
676 void set_method(ciMethod *m) { _method = m; }
677 void set_optimized_virtual(bool f) { _optimized_virtual = f; }
678 bool is_optimized_virtual() const { return _optimized_virtual; }
679 void set_method_handle_invoke(bool f) { _method_handle_invoke = f; }
680 bool is_method_handle_invoke() const { return _method_handle_invoke; }
681 void set_override_symbolic_info(bool f) { _override_symbolic_info = f; }
682 bool override_symbolic_info() const { return _override_symbolic_info; }
683
684 #ifndef PRODUCT
685 virtual void dump_spec(outputStream *st) const;
686 virtual void dump_compact_spec(outputStream *st) const;
687 #endif
688 };
689
690 //------------------------------CallStaticJavaNode-----------------------------
691 // Make a direct subroutine call using Java calling convention (for static
692 // calls and optimized virtual calls, plus calls to wrappers for run-time
693 // routines); generates static stub.
694 class CallStaticJavaNode : public CallJavaNode {
719 // Result of Escape Analysis
720 bool _is_scalar_replaceable;
721 bool _is_non_escaping;
722
723 // If this is an uncommon trap, return the request code, else zero.
724 int uncommon_trap_request() const;
725 static int extract_uncommon_trap_request(const Node* call);
726
727 bool is_boxing_method() const {
728 return is_macro() && (method() != NULL) && method()->is_boxing_method();
729 }
730 // Later inlining modifies the JVMState, so we need to clone it
731 // when the call node is cloned (because it is macro node).
732 virtual void clone_jvms(Compile* C) {
733 if ((jvms() != NULL) && is_boxing_method()) {
734 set_jvms(jvms()->clone_deep(C));
735 jvms()->set_map_deep(this);
736 }
737 }
738
739 virtual int Opcode() const;
740 #ifndef PRODUCT
741 virtual void dump_spec(outputStream *st) const;
742 virtual void dump_compact_spec(outputStream *st) const;
743 #endif
744 };
745
746 //------------------------------CallDynamicJavaNode----------------------------
747 // Make a dispatched call using Java calling convention.
class CallDynamicJavaNode : public CallJavaNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
    init_class_id(Class_CallDynamicJava);
  }

  int _vtable_index;  // Vtable slot used to dispatch the call
  virtual int Opcode() const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
762
763 //------------------------------CallRuntimeNode--------------------------------
764 // Make a direct subroutine call node into compiled C++ code.
class CallRuntimeNode : public CallNode {
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger
public:
  // 'name' is a printable label for the runtime entry; stored in the
  // inherited _name field for debug output.
  CallRuntimeNode(const TypeFunc* tf, address addr, const char* name,
                  const TypePtr* adr_type)
    : CallNode(tf, addr, adr_type)
  {
    init_class_id(Class_CallRuntime);
    _name = name;
  }

  virtual int Opcode() const;
  // Map argument types to machine registers per the C calling convention.
  virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
784
785 //------------------------------CallLeafNode-----------------------------------
786 // Make a direct subroutine call node into compiled C++ code, without
787 // safepoints
class CallLeafNode : public CallRuntimeNode {
public:
  CallLeafNode(const TypeFunc* tf, address addr, const char* name,
               const TypePtr* adr_type)
    : CallRuntimeNode(tf, addr, name, adr_type)
  {
    init_class_id(Class_CallLeaf);
  }
  virtual int Opcode() const;
  // Leaf calls never block for a safepoint.
  virtual bool guaranteed_safepoint() { return false; }
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
#endif
};
802
803 //------------------------------CallLeafNoFPNode-------------------------------
804 // CallLeafNode, not using floating point or using it in the same manner as
805 // the generated code
class CallLeafNoFPNode : public CallLeafNode {
public:
  CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name,
                   const TypePtr* adr_type)
    : CallLeafNode(tf, addr, name, adr_type)
  {
  }
  virtual int Opcode() const;
};
815
816
817 //------------------------------Allocate---------------------------------------
818 // High-level memory allocation
819 //
820 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
821 // get expanded into a code sequence containing a call. Unlike other CallNodes,
822 // they have 2 memory projections and 2 i_o projections (which are distinguished by
823 // the _is_io_use flag in the projection.) This is needed when expanding the node in
824 // order to differentiate the uses of the projection on the normal control path from
825 // those on the exception return path.
826 //
827 class AllocateNode : public CallNode {
828 public:
829 enum {
830 // Output:
831 RawAddress = TypeFunc::Parms, // the newly-allocated raw address
832 // Inputs:
833 AllocSize = TypeFunc::Parms, // size (in bytes) of the new object
854
855 return TypeFunc::make(domain, range);
856 }
857
858 // Result of Escape Analysis
859 bool _is_scalar_replaceable;
860 bool _is_non_escaping;
861 // True when MemBar for new is redundant with MemBar at initialzer exit
862 bool _is_allocation_MemBar_redundant;
863
864 virtual uint size_of() const; // Size is bigger
865 AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
866 Node *size, Node *klass_node, Node *initial_test);
867 // Expansion modifies the JVMState, so we need to clone it
868 virtual void clone_jvms(Compile* C) {
869 if (jvms() != NULL) {
870 set_jvms(jvms()->clone_deep(C));
871 jvms()->set_map_deep(this);
872 }
873 }
874 virtual int Opcode() const;
875 virtual uint ideal_reg() const { return Op_RegP; }
876 virtual bool guaranteed_safepoint() { return false; }
877
878 // allocations do not modify their arguments
879 virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}
880
881 // Pattern-match a possible usage of AllocateNode.
882 // Return null if no allocation is recognized.
883 // The operand is the pointer produced by the (possible) allocation.
884 // It must be a projection of the Allocate or its subsequent CastPP.
885 // (Note: This function is defined in file graphKit.cpp, near
886 // GraphKit::new_instance/new_array, whose output it recognizes.)
887 // The 'ptr' may not have an offset unless the 'offset' argument is given.
888 static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase);
889
890 // Fancy version which uses AddPNode::Ideal_base_and_offset to strip
891 // an offset, which is reported back to the caller.
892 // (Note: AllocateNode::Ideal_allocation is defined in graphKit.cpp.)
893 static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase,
894 intptr_t& offset);
933 void compute_MemBar_redundancy(ciMethod* initializer);
934 bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
935 };
936
937 //------------------------------AllocateArray---------------------------------
938 //
939 // High-level array allocation
940 //
class AllocateArrayNode : public AllocateNode {
public:
  // Same inputs as AllocateNode plus the element count, stored in the
  // ALength slot.
  AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
                    Node* size, Node* klass_node, Node* initial_test,
                    Node* count_val
                    )
    : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
                   initial_test)
  {
    init_class_id(Class_AllocateArray);
    set_req(AllocateNode::ALength,        count_val);
  }
  virtual int Opcode() const;
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);

  // Dig the length operand out of a array allocation site.
  Node* Ideal_length() {
    return in(AllocateNode::ALength);
  }

  // Dig the length operand out of a array allocation site and narrow the
  // type with a CastII, if necesssary
  Node* make_ideal_length(const TypeOopPtr* ary_type, PhaseTransform *phase, bool can_create = true);

  // Pattern-match a possible usage of AllocateArrayNode.
  // Return null if no allocation is recognized.
  static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) {
    AllocateNode* allo = Ideal_allocation(ptr, phase);
    return (allo == NULL || !allo->is_AllocateArray())
           ? NULL : allo->as_AllocateArray();
  }
};
973
992 bool find_matching_unlock(const Node* ctrl, LockNode* lock,
993 GrowableArray<AbstractLockNode*> &lock_ops);
994 bool find_lock_and_unlock_through_if(Node* node, LockNode* lock,
995 GrowableArray<AbstractLockNode*> &lock_ops);
996 bool find_unlocks_for_region(const RegionNode* region, LockNode* lock,
997 GrowableArray<AbstractLockNode*> &lock_ops);
998 LockNode *find_matching_lock(UnlockNode* unlock);
999
1000 // Update the counter to indicate that this lock was eliminated.
1001 void set_eliminated_lock_counter() PRODUCT_RETURN;
1002
1003 public:
1004 AbstractLockNode(const TypeFunc *tf)
1005 : CallNode(tf, NULL, TypeRawPtr::BOTTOM),
1006 _kind(Regular)
1007 {
1008 #ifndef PRODUCT
1009 _counter = NULL;
1010 #endif
1011 }
1012 virtual int Opcode() const = 0;
1013 Node * obj_node() const {return in(TypeFunc::Parms + 0); }
1014 Node * box_node() const {return in(TypeFunc::Parms + 1); }
1015 Node * fastlock_node() const {return in(TypeFunc::Parms + 2); }
1016 void set_box_node(Node* box) { set_req(TypeFunc::Parms + 1, box); }
1017
1018 const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}
1019
1020 virtual uint size_of() const { return sizeof(*this); }
1021
1022 bool is_eliminated() const { return (_kind != Regular); }
1023 bool is_non_esc_obj() const { return (_kind == NonEscObj); }
1024 bool is_coarsened() const { return (_kind == Coarsened); }
1025 bool is_nested() const { return (_kind == Nested); }
1026
1027 const char * kind_as_string() const;
1028 void log_lock_optimization(Compile* c, const char * tag) const;
1029
1030 void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
1031 void set_coarsened() { _kind = Coarsened; set_eliminated_lock_counter(); }
1032 void set_nested() { _kind = Nested; set_eliminated_lock_counter(); }
1054 //
class LockNode : public AbstractLockNode {
public:

  // Signature of a lock call: (object, box address, fastlock result) -> void.
  static const TypeFunc *lock_type() {
    // create input type (domain)
    const Type **fields = TypeTuple::fields(3);
    fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL;  // Object to be Locked
    fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM;    // Address of stack location for lock
    fields[TypeFunc::Parms+2] = TypeInt::BOOL;         // FastLock
    const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields);

    // create result type (range)
    fields = TypeTuple::fields(0);

    const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);

    return TypeFunc::make(domain,range);
  }

  virtual int Opcode() const;
  virtual uint size_of() const; // Size is bigger
  // Lock is a macro node: it is registered with the compile for later
  // expansion into the actual locking code sequence.
  LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
    init_class_id(Class_Lock);
    init_flags(Flag_is_macro);
    C->add_macro_node(this);
  }
  virtual bool guaranteed_safepoint() { return false; }

  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // Expansion modifies the JVMState, so we need to clone it
  virtual void clone_jvms(Compile* C) {
    if (jvms() != NULL) {
      set_jvms(jvms()->clone_deep(C));
      jvms()->set_map_deep(this);
    }
  }

  bool is_nested_lock_region(); // Is this Lock nested?
  bool is_nested_lock_region(Compile * c); // Why isn't this Lock nested?
};
1095
1096 //------------------------------Unlock---------------------------------------
1097 // High-level unlock operation
1098 class UnlockNode : public AbstractLockNode {
1099 private:
1100 #ifdef ASSERT
1101 JVMState* const _dbg_jvms; // Pointer to list of JVM State objects
1102 #endif
1103 public:
1104 virtual int Opcode() const;
1105 virtual uint size_of() const; // Size is bigger
1106 UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf )
1107 #ifdef ASSERT
1108 , _dbg_jvms(NULL)
1109 #endif
1110 {
1111 init_class_id(Class_Unlock);
1112 init_flags(Flag_is_macro);
1113 C->add_macro_node(this);
1114 }
1115 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1116 // unlock is never a safepoint
1117 virtual bool guaranteed_safepoint() { return false; }
1118 #ifdef ASSERT
1119 void set_dbg_jvms(JVMState* s) {
1120 *(JVMState**)&_dbg_jvms = s; // override const attribute in the accessor
1121 }
1122 JVMState* dbg_jvms() const { return _dbg_jvms; }
1123 #else
1124 JVMState* dbg_jvms() const { return NULL; }
|
55 class UnlockNode;
56 class JVMState;
57 class OopMap;
58 class State;
59 class StartNode;
60 class MachCallNode;
61 class FastLockNode;
62
63 //------------------------------StartNode--------------------------------------
64 // The method start node
65 class StartNode : public MultiNode {
66 virtual uint cmp( const Node &n ) const;
67 virtual uint size_of() const; // Size is bigger
68 public:
69 const TypeTuple *_domain;
70 StartNode( Node *root, const TypeTuple *domain ) : MultiNode(2), _domain(domain) {
71 init_class_id(Class_Start);
72 init_req(0,this);
73 init_req(1,root);
74 }
75 virtual uint Opcode() const;
76 virtual bool pinned() const { return true; };
77 virtual const Type *bottom_type() const;
78 virtual const TypePtr *adr_type() const { return TypePtr::BOTTOM; }
79 virtual const Type* Value(PhaseGVN* phase) const;
80 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
81 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_reg, uint length ) const;
82 virtual const RegMask &in_RegMask(uint) const;
83 virtual Node *match( const ProjNode *proj, const Matcher *m );
84 virtual uint ideal_reg() const { return 0; }
85 #ifndef PRODUCT
86 virtual void dump_spec(outputStream *st) const;
87 virtual void dump_compact_spec(outputStream *st) const;
88 #endif
89 };
90
91 //------------------------------StartOSRNode-----------------------------------
92 // The method start node for on stack replacement code
93 class StartOSRNode : public StartNode {
94 public:
95 StartOSRNode( Node *root, const TypeTuple *domain ) : StartNode(root, domain) {}
96 virtual uint Opcode() const;
97 static const TypeTuple *osr_domain();
98 };
99
100
101 //------------------------------ParmNode---------------------------------------
102 // Incoming parameters
103 class ParmNode : public ProjNode {
104 static const char * const names[TypeFunc::Parms+1];
105 public:
106 ParmNode( StartNode *src, uint con ) : ProjNode(src,con) {
107 init_class_id(Class_Parm);
108 }
109 virtual uint Opcode() const;
110 virtual bool is_CFG() const { return (_con == TypeFunc::Control); }
111 virtual uint ideal_reg() const;
112 #ifndef PRODUCT
113 virtual void dump_spec(outputStream *st) const;
114 virtual void dump_compact_spec(outputStream *st) const;
115 virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
116 #endif
117 };
118
119
120 //------------------------------ReturnNode-------------------------------------
121 // Return from subroutine node
122 class ReturnNode : public Node {
123 public:
124 ReturnNode( uint edges, Node *cntrl, Node *i_o, Node *memory, Node *retadr, Node *frameptr );
125 virtual uint Opcode() const;
126 virtual bool is_CFG() const { return true; }
127 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
128 virtual bool depends_only_on_test() const { return false; }
129 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
130 virtual const Type* Value(PhaseGVN* phase) const;
131 virtual uint ideal_reg() const { return NotAMachineReg; }
132 virtual uint match_edge(uint idx) const;
133 #ifndef PRODUCT
134 virtual void dump_req(outputStream *st = tty) const;
135 #endif
136 };
137
138
139 //------------------------------RethrowNode------------------------------------
140 // Rethrow of exception at call site. Ends a procedure before rethrowing;
141 // ends the current basic block like a ReturnNode. Restores registers and
142 // unwinds stack. Rethrow happens in the caller's method.
143 class RethrowNode : public Node {
144 public:
145 RethrowNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *ret_adr, Node *exception );
146 virtual uint Opcode() const;
147 virtual bool is_CFG() const { return true; }
148 virtual uint hash() const { return NO_HASH; } // CFG nodes do not hash
149 virtual bool depends_only_on_test() const { return false; }
150 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
151 virtual const Type* Value(PhaseGVN* phase) const;
152 virtual uint match_edge(uint idx) const;
153 virtual uint ideal_reg() const { return NotAMachineReg; }
154 #ifndef PRODUCT
155 virtual void dump_req(outputStream *st = tty) const;
156 #endif
157 };
158
159
160 //------------------------------TailCallNode-----------------------------------
161 // Pop stack frame and jump indirect
162 class TailCallNode : public ReturnNode {
163 public:
164 TailCallNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *retadr, Node *target, Node *moop )
165 : ReturnNode( TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, retadr ) {
166 init_req(TypeFunc::Parms, target);
167 init_req(TypeFunc::Parms+1, moop);
168 }
169
170 virtual uint Opcode() const;
171 virtual uint match_edge(uint idx) const;
172 };
173
174 //------------------------------TailJumpNode-----------------------------------
175 // Pop stack frame and jump indirect
176 class TailJumpNode : public ReturnNode {
177 public:
178 TailJumpNode( Node *cntrl, Node *i_o, Node *memory, Node *frameptr, Node *target, Node *ex_oop)
179 : ReturnNode(TypeFunc::Parms+2, cntrl, i_o, memory, frameptr, Compile::current()->top()) {
180 init_req(TypeFunc::Parms, target);
181 init_req(TypeFunc::Parms+1, ex_oop);
182 }
183
184 virtual uint Opcode() const;
185 virtual uint match_edge(uint idx) const;
186 };
187
188 //-------------------------------JVMState-------------------------------------
189 // A linked list of JVMState nodes captures the whole interpreter state,
190 // plus GC roots, for all active calls at some call site in this compilation
191 // unit. (If there is no inlining, then the list has exactly one link.)
192 // This provides a way to map the optimized program back into the interpreter,
193 // or to let the GC mark the stack.
194 class JVMState : public ResourceObj {
195 friend class VMStructs;
196 public:
197 typedef enum {
198 Reexecute_Undefined = -1, // not defined -- will be translated into false later
199 Reexecute_False = 0, // false -- do not reexecute
200 Reexecute_True = 1 // true -- reexecute the bytecode
201 } ReexecuteState; //Reexecute State
202
203 private:
204 JVMState* _caller; // List pointer for forming scope chains
446 void record_replaced_node(Node* initial, Node* improved) {
447 _replaced_nodes.record(initial, improved);
448 }
449 void transfer_replaced_nodes_from(SafePointNode* sfpt, uint idx = 0) {
450 _replaced_nodes.transfer_from(sfpt->_replaced_nodes, idx);
451 }
452 void delete_replaced_nodes() {
453 _replaced_nodes.reset();
454 }
455 void apply_replaced_nodes() {
456 _replaced_nodes.apply(this);
457 }
458 void merge_replaced_nodes_with(SafePointNode* sfpt) {
459 _replaced_nodes.merge_with(sfpt->_replaced_nodes);
460 }
461 bool has_replaced_nodes() const {
462 return !_replaced_nodes.is_empty();
463 }
464
465 // Standard Node stuff
466 virtual uint Opcode() const;
467 virtual bool pinned() const { return true; }
468 virtual const Type* Value(PhaseGVN* phase) const;
469 virtual const Type *bottom_type() const { return Type::CONTROL; }
470 virtual const TypePtr *adr_type() const { return _adr_type; }
471 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
472 virtual Node* Identity(PhaseGVN* phase);
473 virtual uint ideal_reg() const { return 0; }
474 virtual const RegMask &in_RegMask(uint) const;
475 virtual const RegMask &out_RegMask() const;
476 virtual uint match_edge(uint idx) const;
477
478 static bool needs_polling_address_input();
479
480 #ifndef PRODUCT
481 virtual void dump_spec(outputStream *st) const;
482 virtual void related(GrowableArray<Node*> *in_rel, GrowableArray<Node*> *out_rel, bool compact) const;
483 #endif
484 };
485
486 //------------------------------SafePointScalarObjectNode----------------------
// A SafePointScalarObjectNode holds the state of a scalarized object
// at a safepoint.
489
490 class SafePointScalarObjectNode: public TypeNode {
491 uint _first_index; // First input edge relative index of a SafePoint node where
492 // states of the scalarized object fields are collected.
493 // It is relative to the last (youngest) jvms->_scloff.
494 uint _n_fields; // Number of non-static fields of the scalarized object.
495 DEBUG_ONLY(AllocateNode* _alloc;)
496
497 virtual uint hash() const ; // { return NO_HASH; }
498 virtual uint cmp( const Node &n ) const;
499
500 uint first_index() const { return _first_index; }
501
502 public:
503 SafePointScalarObjectNode(const TypeOopPtr* tp,
504 #ifdef ASSERT
505 AllocateNode* alloc,
506 #endif
507 uint first_index, uint n_fields);
508 virtual uint Opcode() const;
509 virtual uint ideal_reg() const;
510 virtual const RegMask &in_RegMask(uint) const;
511 virtual const RegMask &out_RegMask() const;
512 virtual uint match_edge(uint idx) const;
513
514 uint first_index(JVMState* jvms) const {
515 assert(jvms != NULL, "missed JVMS");
516 return jvms->scloff() + _first_index;
517 }
518 uint n_fields() const { return _n_fields; }
519
520 #ifdef ASSERT
521 AllocateNode* alloc() const { return _alloc; }
522 #endif
523
524 virtual uint size_of() const { return sizeof(*this); }
525
526 // Assumes that "this" is an argument to a safepoint node "s", and that
527 // "new_call" is being created to correspond to "s". But the difference
528 // between the start index of the jvmstates of "new_call" and "s" is
protected:
  virtual uint cmp( const Node &n ) const;
  virtual uint size_of() const; // Size is bigger

  // Call-site state flags; queried and set via the public accessors below.
  bool _optimized_virtual;
  bool _method_handle_invoke;
  bool _override_symbolic_info; // Override symbolic call site info from bytecode
  ciMethod* _method; // Method being direct called
public:
  const int _bci; // Byte Code Index of call byte code
664 CallJavaNode(const TypeFunc* tf , address addr, ciMethod* method, int bci)
665 : CallNode(tf, addr, TypePtr::BOTTOM),
666 _method(method), _bci(bci),
667 _optimized_virtual(false),
668 _method_handle_invoke(false),
669 _override_symbolic_info(false)
670 {
671 init_class_id(Class_CallJava);
672 }

  // NOTE(review): StartNode declares 'virtual int Opcode() const'; confirm
  // this 'uint' return type matches the base declaration.
  virtual uint Opcode() const;
  // Accessors for the direct-call target and the call-site flags.
  ciMethod* method() const { return _method; }
  void set_method(ciMethod *m) { _method = m; }
  void set_optimized_virtual(bool f) { _optimized_virtual = f; }
  bool is_optimized_virtual() const { return _optimized_virtual; }
  void set_method_handle_invoke(bool f) { _method_handle_invoke = f; }
  bool is_method_handle_invoke() const { return _method_handle_invoke; }
  void set_override_symbolic_info(bool f) { _override_symbolic_info = f; }
  bool override_symbolic_info() const { return _override_symbolic_info; }

#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};
689
//------------------------------CallStaticJavaNode-----------------------------
// Make a direct subroutine call using Java calling convention (for static
// calls and optimized virtual calls, plus calls to wrappers for run-time
// routines); generates static stub.
class CallStaticJavaNode : public CallJavaNode {
  // Result of Escape Analysis
  bool _is_scalar_replaceable;
  bool _is_non_escaping;

  // If this is an uncommon trap, return the request code, else zero.
  int uncommon_trap_request() const;
  static int extract_uncommon_trap_request(const Node* call);

727 bool is_boxing_method() const {
728 return is_macro() && (method() != NULL) && method()->is_boxing_method();
729 }
730 // Later inlining modifies the JVMState, so we need to clone it
731 // when the call node is cloned (because it is macro node).
732 virtual void clone_jvms(Compile* C) {
733 if ((jvms() != NULL) && is_boxing_method()) {
734 set_jvms(jvms()->clone_deep(C));
735 jvms()->set_map_deep(this);
736 }
737 }

  // NOTE(review): StartNode declares 'virtual int Opcode() const'; confirm
  // this 'uint' return type matches the base declaration.
  virtual uint Opcode() const;
#ifndef PRODUCT
  virtual void dump_spec(outputStream *st) const;
  virtual void dump_compact_spec(outputStream *st) const;
#endif
};
745
746 //------------------------------CallDynamicJavaNode----------------------------
747 // Make a dispatched call using Java calling convention.
748 class CallDynamicJavaNode : public CallJavaNode {
749 virtual uint cmp( const Node &n ) const;
750 virtual uint size_of() const; // Size is bigger
751 public:
752 CallDynamicJavaNode( const TypeFunc *tf , address addr, ciMethod* method, int vtable_index, int bci ) : CallJavaNode(tf,addr,method,bci), _vtable_index(vtable_index) {
753 init_class_id(Class_CallDynamicJava);
754 }
755
756 int _vtable_index;
757 virtual uint Opcode() const;
758 #ifndef PRODUCT
759 virtual void dump_spec(outputStream *st) const;
760 #endif
761 };
762
763 //------------------------------CallRuntimeNode--------------------------------
764 // Make a direct subroutine call node into compiled C++ code.
765 class CallRuntimeNode : public CallNode {
766 virtual uint cmp( const Node &n ) const;
767 virtual uint size_of() const; // Size is bigger
768 public:
769 CallRuntimeNode(const TypeFunc* tf, address addr, const char* name,
770 const TypePtr* adr_type)
771 : CallNode(tf, addr, adr_type)
772 {
773 init_class_id(Class_CallRuntime);
774 _name = name;
775 }
776
777 virtual uint Opcode() const;
778 virtual void calling_convention( BasicType* sig_bt, VMRegPair *parm_regs, uint argcnt ) const;
779
780 #ifndef PRODUCT
781 virtual void dump_spec(outputStream *st) const;
782 #endif
783 };
784
785 //------------------------------CallLeafNode-----------------------------------
786 // Make a direct subroutine call node into compiled C++ code, without
787 // safepoints
788 class CallLeafNode : public CallRuntimeNode {
789 public:
790 CallLeafNode(const TypeFunc* tf, address addr, const char* name,
791 const TypePtr* adr_type)
792 : CallRuntimeNode(tf, addr, name, adr_type)
793 {
794 init_class_id(Class_CallLeaf);
795 }
796 virtual uint Opcode() const;
797 virtual bool guaranteed_safepoint() { return false; }
798 #ifndef PRODUCT
799 virtual void dump_spec(outputStream *st) const;
800 #endif
801 };
802
803 //------------------------------CallLeafNoFPNode-------------------------------
804 // CallLeafNode, not using floating point or using it in the same manner as
805 // the generated code
806 class CallLeafNoFPNode : public CallLeafNode {
807 public:
808 CallLeafNoFPNode(const TypeFunc* tf, address addr, const char* name,
809 const TypePtr* adr_type)
810 : CallLeafNode(tf, addr, name, adr_type)
811 {
812 }
813 virtual uint Opcode() const;
814 };
815
816
817 //------------------------------Allocate---------------------------------------
818 // High-level memory allocation
819 //
820 // AllocateNode and AllocateArrayNode are subclasses of CallNode because they will
821 // get expanded into a code sequence containing a call. Unlike other CallNodes,
822 // they have 2 memory projections and 2 i_o projections (which are distinguished by
823 // the _is_io_use flag in the projection.) This is needed when expanding the node in
824 // order to differentiate the uses of the projection on the normal control path from
825 // those on the exception return path.
826 //
827 class AllocateNode : public CallNode {
828 public:
829 enum {
830 // Output:
831 RawAddress = TypeFunc::Parms, // the newly-allocated raw address
832 // Inputs:
833 AllocSize = TypeFunc::Parms, // size (in bytes) of the new object
854
855 return TypeFunc::make(domain, range);
856 }
857
  // Result of Escape Analysis
  bool _is_scalar_replaceable;
  bool _is_non_escaping;
  // True when MemBar for new is redundant with MemBar at initializer exit
  bool _is_allocation_MemBar_redundant;

  virtual uint size_of() const; // Size is bigger
  AllocateNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
               Node *size, Node *klass_node, Node *initial_test);
  // Expansion modifies the JVMState, so we need to clone it
  virtual void clone_jvms(Compile* C) {
    if (jvms() != NULL) {
      set_jvms(jvms()->clone_deep(C));
      jvms()->set_map_deep(this);
    }
  }
  // NOTE(review): StartNode declares 'virtual int Opcode() const'; confirm
  // this 'uint' return type matches the base declaration.
  virtual uint Opcode() const;
  // Result is the newly-allocated raw address, i.e. a pointer register.
  virtual uint ideal_reg() const { return Op_RegP; }
  virtual bool guaranteed_safepoint() { return false; }

  // allocations do not modify their arguments
  virtual bool may_modify(const TypeOopPtr *t_oop, PhaseTransform *phase) { return false;}

  // Pattern-match a possible usage of AllocateNode.
  // Return null if no allocation is recognized.
  // The operand is the pointer produced by the (possible) allocation.
  // It must be a projection of the Allocate or its subsequent CastPP.
  // (Note: This function is defined in file graphKit.cpp, near
  // GraphKit::new_instance/new_array, whose output it recognizes.)
  // The 'ptr' may not have an offset unless the 'offset' argument is given.
  static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase);

  // Fancy version which uses AddPNode::Ideal_base_and_offset to strip
  // an offset, which is reported back to the caller.
  // (Note: AllocateNode::Ideal_allocation is defined in graphKit.cpp.)
  static AllocateNode* Ideal_allocation(Node* ptr, PhaseTransform* phase,
                                        intptr_t& offset);
  void compute_MemBar_redundancy(ciMethod* initializer);
  bool is_allocation_MemBar_redundant() { return _is_allocation_MemBar_redundant; }
};
936
937 //------------------------------AllocateArray---------------------------------
938 //
939 // High-level array allocation
940 //
941 class AllocateArrayNode : public AllocateNode {
942 public:
943 AllocateArrayNode(Compile* C, const TypeFunc *atype, Node *ctrl, Node *mem, Node *abio,
944 Node* size, Node* klass_node, Node* initial_test,
945 Node* count_val
946 )
947 : AllocateNode(C, atype, ctrl, mem, abio, size, klass_node,
948 initial_test)
949 {
950 init_class_id(Class_AllocateArray);
951 set_req(AllocateNode::ALength, count_val);
952 }
953 virtual uint Opcode() const;
954 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
955
956 // Dig the length operand out of a array allocation site.
957 Node* Ideal_length() {
958 return in(AllocateNode::ALength);
959 }
960
961 // Dig the length operand out of a array allocation site and narrow the
962 // type with a CastII, if necesssary
963 Node* make_ideal_length(const TypeOopPtr* ary_type, PhaseTransform *phase, bool can_create = true);
964
965 // Pattern-match a possible usage of AllocateArrayNode.
966 // Return null if no allocation is recognized.
967 static AllocateArrayNode* Ideal_array_allocation(Node* ptr, PhaseTransform* phase) {
968 AllocateNode* allo = Ideal_allocation(ptr, phase);
969 return (allo == NULL || !allo->is_AllocateArray())
970 ? NULL : allo->as_AllocateArray();
971 }
972 };
973
  // Helpers for lock elimination: locate the lock/unlock pairings that make
  // a lock region eliminable.
  bool find_matching_unlock(const Node* ctrl, LockNode* lock,
                            GrowableArray<AbstractLockNode*> &lock_ops);
  bool find_lock_and_unlock_through_if(Node* node, LockNode* lock,
                                       GrowableArray<AbstractLockNode*> &lock_ops);
  bool find_unlocks_for_region(const RegionNode* region, LockNode* lock,
                               GrowableArray<AbstractLockNode*> &lock_ops);
  LockNode *find_matching_lock(UnlockNode* unlock);

  // Update the counter to indicate that this lock was eliminated.
  void set_eliminated_lock_counter() PRODUCT_RETURN;

public:
  AbstractLockNode(const TypeFunc *tf)
    : CallNode(tf, NULL, TypeRawPtr::BOTTOM),
      _kind(Regular)
  {
#ifndef PRODUCT
    _counter = NULL;
#endif
  }
  // NOTE(review): StartNode declares 'virtual int Opcode() const'; confirm
  // this 'uint' return type matches the base declaration.
  virtual uint Opcode() const = 0;
  // Inputs (see LockNode::lock_type): object being locked, stack box slot,
  // and the FastLock result.
  Node * obj_node() const {return in(TypeFunc::Parms + 0); }
  Node * box_node() const {return in(TypeFunc::Parms + 1); }
  Node * fastlock_node() const {return in(TypeFunc::Parms + 2); }
  void set_box_node(Node* box) { set_req(TypeFunc::Parms + 1, box); }

  // Ignores its operands and always yields the condition-code type.
  const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}

  virtual uint size_of() const { return sizeof(*this); }

  // Elimination-kind predicates: any kind other than Regular means eliminated.
  bool is_eliminated() const { return (_kind != Regular); }
  bool is_non_esc_obj() const { return (_kind == NonEscObj); }
  bool is_coarsened() const { return (_kind == Coarsened); }
  bool is_nested() const { return (_kind == Nested); }

  const char * kind_as_string() const;
  void log_lock_optimization(Compile* c, const char * tag) const;

  // Each setter records the elimination kind and bumps the eliminated-lock
  // counter (no-op in PRODUCT builds).
  void set_non_esc_obj() { _kind = NonEscObj; set_eliminated_lock_counter(); }
  void set_coarsened() { _kind = Coarsened; set_eliminated_lock_counter(); }
  void set_nested() { _kind = Nested; set_eliminated_lock_counter(); }
1054 //
1055 class LockNode : public AbstractLockNode {
1056 public:
1057
1058 static const TypeFunc *lock_type() {
1059 // create input type (domain)
1060 const Type **fields = TypeTuple::fields(3);
1061 fields[TypeFunc::Parms+0] = TypeInstPtr::NOTNULL; // Object to be Locked
1062 fields[TypeFunc::Parms+1] = TypeRawPtr::BOTTOM; // Address of stack location for lock
1063 fields[TypeFunc::Parms+2] = TypeInt::BOOL; // FastLock
1064 const TypeTuple *domain = TypeTuple::make(TypeFunc::Parms+3,fields);
1065
1066 // create result type (range)
1067 fields = TypeTuple::fields(0);
1068
1069 const TypeTuple *range = TypeTuple::make(TypeFunc::Parms+0,fields);
1070
1071 return TypeFunc::make(domain,range);
1072 }
1073
1074 virtual uint Opcode() const;
1075 virtual uint size_of() const; // Size is bigger
1076 LockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf ) {
1077 init_class_id(Class_Lock);
1078 init_flags(Flag_is_macro);
1079 C->add_macro_node(this);
1080 }
1081 virtual bool guaranteed_safepoint() { return false; }
1082
1083 virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
1084 // Expansion modifies the JVMState, so we need to clone it
1085 virtual void clone_jvms(Compile* C) {
1086 if (jvms() != NULL) {
1087 set_jvms(jvms()->clone_deep(C));
1088 jvms()->set_map_deep(this);
1089 }
1090 }
1091
1092 bool is_nested_lock_region(); // Is this Lock nested?
1093 bool is_nested_lock_region(Compile * c); // Why isn't this Lock nested?
1094 };
1095
//------------------------------Unlock---------------------------------------
// High-level unlock operation
class UnlockNode : public AbstractLockNode {
private:
#ifdef ASSERT
  JVMState* const _dbg_jvms; // Pointer to list of JVM State objects
#endif
public:
  // NOTE(review): StartNode declares 'virtual int Opcode() const'; confirm
  // this 'uint' return type matches the base declaration.
  virtual uint Opcode() const;
  virtual uint size_of() const; // Size is bigger
  // Unlocks are macro nodes: registered with Compile for later expansion.
  UnlockNode(Compile* C, const TypeFunc *tf) : AbstractLockNode( tf )
#ifdef ASSERT
    , _dbg_jvms(NULL)
#endif
  {
    init_class_id(Class_Unlock);
    init_flags(Flag_is_macro);
    C->add_macro_node(this);
  }
  virtual Node *Ideal(PhaseGVN *phase, bool can_reshape);
  // unlock is never a safepoint
  virtual bool guaranteed_safepoint() { return false; }
#ifdef ASSERT
  // Debug-only JVMState hook, writable despite the const field declaration.
  void set_dbg_jvms(JVMState* s) {
    *(JVMState**)&_dbg_jvms = s; // override const attribute in the accessor
  }
  JVMState* dbg_jvms() const { return _dbg_jvms; }
#else
  JVMState* dbg_jvms() const { return NULL; }
|