1 /* 2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_C1_C1_INSTRUCTION_HPP 26 #define SHARE_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
47 48 class Instruction; 49 class Phi; 50 class Local; 51 class Constant; 52 class AccessField; 53 class LoadField; 54 class StoreField; 55 class AccessArray; 56 class ArrayLength; 57 class AccessIndexed; 58 class LoadIndexed; 59 class StoreIndexed; 60 class NegateOp; 61 class Op2; 62 class ArithmeticOp; 63 class ShiftOp; 64 class LogicOp; 65 class CompareOp; 66 class IfOp; 67 class Convert; 68 class NullCheck; 69 class TypeCast; 70 class OsrEntry; 71 class ExceptionObject; 72 class StateSplit; 73 class Invoke; 74 class NewInstance; 75 class NewValueTypeInstance; 76 class NewArray; 77 class NewTypeArray; 78 class NewObjectArray; 79 class NewMultiArray; 80 class TypeCheck; 81 class CheckCast; 82 class InstanceOf; 83 class AccessMonitor; 84 class MonitorEnter; 85 class MonitorExit; 86 class Intrinsic; 87 class BlockBegin; 88 class BlockEnd; 89 class Goto; 90 class If; 91 class IfInstanceOf; 92 class Switch; 93 class TableSwitch; 94 class LookupSwitch; 95 class Return; 96 class Throw; 97 class Base; 98 class RoundFP; 99 class UnsafeOp; 100 class UnsafeRawOp; 101 class UnsafeGetRaw; 102 class UnsafePutRaw; 103 class UnsafeObjectOp; 104 class UnsafeGetObject; 105 class UnsafePutObject; 106 class UnsafeGetAndSetObject; 107 class ProfileCall; 108 class ProfileReturnType; 109 class ProfileInvoke; 110 class RuntimeCall; 111 class MemBar; 112 class RangeCheckPredicate; 113 #ifdef ASSERT 114 class Assert; 115 #endif 116 117 // A Value is a reference to the instruction creating the value 118 typedef Instruction* Value; 119 typedef GrowableArray<Value> Values; 120 typedef GrowableArray<ValueStack*> ValueStackStack; 121 122 // BlockClosure is the base class for block traversal/iteration. 
class BlockClosure: public CompilationResourceObj {
 public:
  // Called once per block by whatever traversal drives this closure.
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // Receives the address of the Value slot so a visitor may substitute
  // the referenced instruction in place.
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with traversal and debug-print helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure to blocks in list order
  void iterate_backward(BlockClosure* closure);  // apply closure to blocks in reverse list order
  void blocks_do(void f(BlockBegin*));           // apply a plain function to every block
  void values_do(ValueVisitor* f);               // visit all values of all instructions in all blocks
  // Debug printing only; compiled away in PRODUCT builds.
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Visitor interface over the concrete HIR instruction classes; one pure
// virtual do_X per leaf class, dispatched via Instruction::visit.
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType*  x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: This hash functions affect the performance
//       of ValueMap - make changes carefully!

// Combine up to four values into one intx by shift-and-xor.
#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1    ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// Emit hash()/is_equal() over one field. 'enabled' may be a runtime
// condition; when false the instruction opts out of value numbering.
#define HASHING1(class_name, enabled, f1)             \
  virtual intx hash() const {                         \
    return (enabled) ? HASH2(name(), f1) : 0;         \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    return true;                                      \
  }                                                   \


// Same as HASHING1 but over two fields.
#define HASHING2(class_name, enabled, f1, f2)         \
  virtual intx hash() const {                         \
    return (enabled) ? HASH3(name(), f1, f2) : 0;     \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    return true;                                      \
  }                                                   \


// Same as HASHING1 but over three fields.
#define HASHING3(class_name, enabled, f1, f2, f3)     \
  virtual intx hash() const {                         \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0; \
  }                                                   \
  virtual bool is_equal(Value v) const {              \
    if (!(enabled)  ) return false;                   \
    class_name* _v = v->as_##class_name();            \
    if (_v == NULL  ) return false;                   \
    if (f1 != _v->f1) return false;                   \
    if (f2 != _v->f2) return false;                   \
    if (f3 != _v->f3) return false;                   \
    return true;                                      \
  }                                                   \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 NeverNullFlag, // For "Q" signatures 363 CanTrapFlag, 364 DirectCompareFlag, 365 IsEliminatedFlag, 366 IsSafepointFlag, 367 IsStaticFlag, 368 IsStrictfpFlag, 369 NeedsStoreCheckFlag, 370 NeedsWriteBarrierFlag, 371 PreservesStateFlag, 372 TargetIsFinalFlag, 373 TargetIsLoadedFlag, 374 TargetIsStrictfpFlag, 375 UnorderedIsTrueFlag, 376 NeedsPatchingFlag, 377 ThrowIncompatibleClassChangeErrorFlag, 378 InvokeSpecialReceiverCheckFlag, 379 ProfileMDOFlag, 380 IsLinkedInBlockFlag, 381 NeedsRangeCheckFlag, 382 InWorkListFlag, 383 DeoptimizeOnException, 384 InstructionLastFlag 385 }; 386 387 public: 388 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 389 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 390 391 // 'globally' used condition values 392 enum Condition { 393 eql, neq, lss, leq, gtr, geq, aeq, beq 394 }; 395 396 // Instructions may be pinned for many reasons and under certain conditions 397 // with enough knowledge it's possible to safely unpin them. 
398 enum PinReason { 399 PinUnknown = 1 << 0 400 , PinExplicitNullCheck = 1 << 3 401 , PinStackForStateSplit= 1 << 12 402 , PinStateSplitConstructor= 1 << 13 403 , PinGlobalValueNumbering= 1 << 14 404 }; 405 406 static Condition mirror(Condition cond); 407 static Condition negate(Condition cond); 408 409 // initialization 410 static int number_of_instructions() { 411 return Compilation::current()->number_of_instructions(); 412 } 413 414 // creation 415 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 416 : 417 #ifndef PRODUCT 418 _printable_bci(-99), 419 #endif 420 _use_count(0) 421 , _pin_state(0) 422 , _type(type) 423 , _next(NULL) 424 , _subst(NULL) 425 , _operand(LIR_OprFact::illegalOpr) 426 , _flags(0) 427 , _state_before(state_before) 428 , _exception_handlers(NULL) 429 , _block(NULL) 430 { 431 check_state(state_before); 432 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 433 update_exception_state(_state_before); 434 } 435 436 // accessors 437 int id() const { return _id; } 438 #ifndef PRODUCT 439 bool has_printable_bci() const { return _printable_bci != -99; } 440 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 441 void set_printable_bci(int bci) { _printable_bci = bci; } 442 #endif 443 int dominator_depth(); 444 int use_count() const { return _use_count; } 445 int pin_state() const { return _pin_state; } 446 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 447 ValueType* type() const { return _type; } 448 BlockBegin *block() const { return _block; } 449 Instruction* prev(); // use carefully, expensive operation 450 Instruction* next() const { return _next; } 451 bool has_subst() const { return _subst != NULL; } 452 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 453 LIR_Opr operand() const { return _operand; } 454 455 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 456 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 457 void set_never_null(bool f) { set_flag(NeverNullFlag, f); } 458 bool is_never_null() const { return check_flag(NeverNullFlag); } 459 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 460 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 461 462 bool has_uses() const { return use_count() > 0; } 463 ValueStack* state_before() const { return _state_before; } 464 ValueStack* exception_state() const { return _exception_state; } 465 virtual bool needs_exception_state() const { return true; } 466 XHandlers* exception_handlers() const { return _exception_handlers; } 467 ciKlass* as_loaded_klass_or_null() const; 468 469 // manipulation 470 void pin(PinReason reason) { _pin_state |= reason; } 471 void pin() { _pin_state |= PinUnknown; } 472 // DANGEROUS: only used by EliminateStores 473 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 474 475 Instruction* set_next(Instruction* next) { 476 assert(next->has_printable_bci(), "_printable_bci should have been set"); 477 assert(next != NULL, "must not be NULL"); 478 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 479 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 480 481 BlockBegin *block = this->block(); 482 next->_block = block; 483 484 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 485 _next = next; 486 return next; 487 } 488 489 Instruction* set_next(Instruction* next, int bci) { 490 #ifndef PRODUCT 491 next->set_printable_bci(bci); 492 #endif 493 return set_next(next); 494 } 495 496 // when blocks are merged 497 void fixup_block_pointers() { 498 Instruction *cur = next()->next(); // next()'s block is set in set_next 499 
while (cur && cur->_block != block()) { 500 cur->_block = block(); 501 cur = cur->next(); 502 } 503 } 504 505 Instruction *insert_after(Instruction *i) { 506 Instruction* n = _next; 507 set_next(i); 508 i->set_next(n); 509 return _next; 510 } 511 512 bool is_flattened_array() const; // FIXME -- remove it 513 514 bool is_loaded_flattened_array() const; 515 bool maybe_flattened_array(); 516 517 Instruction *insert_after_same_bci(Instruction *i) { 518 #ifndef PRODUCT 519 i->set_printable_bci(printable_bci()); 520 #endif 521 return insert_after(i); 522 } 523 524 void set_subst(Instruction* subst) { 525 assert(subst == NULL || 526 type()->base() == subst->type()->base() || 527 subst->type()->base() == illegalType, "type can't change"); 528 _subst = subst; 529 } 530 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 531 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 532 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 533 534 // machine-specifics 535 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 536 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 537 538 // generic 539 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 540 virtual Phi* as_Phi() { return NULL; } 541 virtual Local* as_Local() { return NULL; } 542 virtual Constant* as_Constant() { return NULL; } 543 virtual AccessField* as_AccessField() { return NULL; } 544 virtual LoadField* as_LoadField() { return NULL; } 545 virtual StoreField* as_StoreField() { return NULL; } 546 virtual AccessArray* as_AccessArray() { return NULL; } 547 virtual ArrayLength* as_ArrayLength() { return NULL; } 548 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 549 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 550 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 551 virtual NegateOp* 
as_NegateOp() { return NULL; } 552 virtual Op2* as_Op2() { return NULL; } 553 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 554 virtual ShiftOp* as_ShiftOp() { return NULL; } 555 virtual LogicOp* as_LogicOp() { return NULL; } 556 virtual CompareOp* as_CompareOp() { return NULL; } 557 virtual IfOp* as_IfOp() { return NULL; } 558 virtual Convert* as_Convert() { return NULL; } 559 virtual NullCheck* as_NullCheck() { return NULL; } 560 virtual OsrEntry* as_OsrEntry() { return NULL; } 561 virtual StateSplit* as_StateSplit() { return NULL; } 562 virtual Invoke* as_Invoke() { return NULL; } 563 virtual NewInstance* as_NewInstance() { return NULL; } 564 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 565 virtual NewArray* as_NewArray() { return NULL; } 566 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 567 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 568 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 569 virtual TypeCheck* as_TypeCheck() { return NULL; } 570 virtual CheckCast* as_CheckCast() { return NULL; } 571 virtual InstanceOf* as_InstanceOf() { return NULL; } 572 virtual TypeCast* as_TypeCast() { return NULL; } 573 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 574 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 575 virtual MonitorExit* as_MonitorExit() { return NULL; } 576 virtual Intrinsic* as_Intrinsic() { return NULL; } 577 virtual BlockBegin* as_BlockBegin() { return NULL; } 578 virtual BlockEnd* as_BlockEnd() { return NULL; } 579 virtual Goto* as_Goto() { return NULL; } 580 virtual If* as_If() { return NULL; } 581 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 582 virtual TableSwitch* as_TableSwitch() { return NULL; } 583 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 584 virtual Return* as_Return() { return NULL; } 585 virtual Throw* as_Throw() { return NULL; } 586 virtual Base* as_Base() { return NULL; } 587 virtual RoundFP* as_RoundFP() { return 
NULL; } 588 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 589 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 590 virtual ProfileInvoke* as_ProfileInvoke() { return NULL; } 591 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 592 593 #ifdef ASSERT 594 virtual Assert* as_Assert() { return NULL; } 595 #endif 596 597 virtual void visit(InstructionVisitor* v) = 0; 598 599 virtual bool can_trap() const { return false; } 600 601 virtual void input_values_do(ValueVisitor* f) = 0; 602 virtual void state_values_do(ValueVisitor* f); 603 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 604 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 605 606 virtual ciType* exact_type() const; 607 virtual ciType* declared_type() const { return NULL; } 608 609 // hashing 610 virtual const char* name() const = 0; 611 HASHING1(Instruction, false, id()) // hashing disabled by default 612 613 // debugging 614 static void check_state(ValueStack* state) PRODUCT_RETURN; 615 void print() PRODUCT_RETURN; 616 void print_line() PRODUCT_RETURN; 617 void print(InstructionPrinter& ip) PRODUCT_RETURN; 618 }; 619 620 621 // The following macros are used to define base (i.e., non-leaf) 622 // and leaf instruction classes. They define class-name related 623 // generic functionality in one place. 
// BASE defines a non-leaf instruction class: only the as_X downcast.
#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


// LEAF additionally wires up name() and visitor dispatch for a concrete class.
#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
   public:                                       \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \


// Debugging support


#ifdef ASSERT
class AssertValues: public ValueVisitor {
  void visit(Value* x)                           { assert((*x) != NULL, "value must exist"); }
};
  // Debug-only check that none of this instruction's values is NULL.
  #define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags;                         // the flags of the phi function
  int         _index;                            // to value on operand stack (index < 0) or to local
  ciType*     _exact_type;                       // currently is set only for flattened arrays, NULL otherwise.
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    // Value(b) casts the BlockBegin* to its Instruction base to read its bci.
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  bool  is_local() const          { return _index >= 0; }
  bool  is_on_stack() const       { return !is_local(); }
  int   local_index() const       { assert(is_local(), ""); return _index; }
  int   stack_index() const       { assert(is_on_stack(), ""); return -(_index+1); }

  Value operand_at(int i) const;                 // i-th incoming operand
  int   operand_count() const;                   // number of incoming branches

  void   set(Flag f)              { _pf_flags |=  f; }
  void   clear(Flag f)            { _pf_flags &= ~f; }
  bool   is_set(Flag f) const     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void   make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // Phi operands live in the predecessors' end states, not here.
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
721 LEAF(Local, Instruction) 722 private: 723 int _java_index; // the local index within the method to which the local belongs 724 bool _is_receiver; // if local variable holds the receiver: "this" for non-static methods 725 ciType* _declared_type; 726 public: 727 // creation 728 Local(ciType* declared, ValueType* type, int index, bool receiver, bool never_null) 729 : Instruction(type) 730 , _java_index(index) 731 , _is_receiver(receiver) 732 , _declared_type(declared) 733 { 734 set_never_null(never_null); 735 NOT_PRODUCT(set_printable_bci(-1)); 736 } 737 738 // accessors 739 int java_index() const { return _java_index; } 740 bool is_receiver() const { return _is_receiver; } 741 742 virtual ciType* declared_type() const { return _declared_type; } 743 744 // generic 745 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 746 }; 747 748 749 LEAF(Constant, Instruction) 750 public: 751 // creation 752 Constant(ValueType* type): 753 Instruction(type, NULL, /*type_is_constant*/ true) 754 { 755 assert(type->is_constant(), "must be a constant"); 756 } 757 758 Constant(ValueType* type, ValueStack* state_before): 759 Instruction(type, state_before, /*type_is_constant*/ true) 760 { 761 assert(state_before != NULL, "only used for constants which need patching"); 762 assert(type->is_constant(), "must be a constant"); 763 // since it's patching it needs to be pinned 764 pin(); 765 } 766 767 // generic 768 virtual bool can_trap() const { return state_before() != NULL; } 769 virtual void input_values_do(ValueVisitor* f) { /* no values */ } 770 771 virtual intx hash() const; 772 virtual bool is_equal(Value v) const; 773 774 virtual ciType* exact_type() const; 775 776 enum CompareResult { not_comparable = -1, cond_false, cond_true }; 777 778 virtual CompareResult compare(Instruction::Condition condition, Value right) const; 779 BlockBegin* compare(Instruction::Condition cond, Value right, 780 BlockBegin* true_sux, BlockBegin* false_sux) const { 781 switch 
(compare(cond, right)) { 782 case not_comparable: 783 return NULL; 784 case cond_false: 785 return false_sux; 786 case cond_true: 787 return true_sux; 788 default: 789 ShouldNotReachHere(); 790 return NULL; 791 } 792 } 793 }; 794 795 796 BASE(AccessField, Instruction) 797 private: 798 Value _obj; 799 int _offset; 800 ciField* _field; 801 NullCheck* _explicit_null_check; // For explicit null check elimination 802 803 public: 804 // creation 805 AccessField(Value obj, int offset, ciField* field, bool is_static, 806 ValueStack* state_before, bool needs_patching) 807 : Instruction(as_ValueType(field->type()->basic_type()), state_before) 808 , _obj(obj) 809 , _offset(offset) 810 , _field(field) 811 , _explicit_null_check(NULL) 812 { 813 set_needs_null_check(!is_static); 814 set_flag(IsStaticFlag, is_static); 815 set_flag(NeedsPatchingFlag, needs_patching); 816 ASSERT_VALUES 817 // pin of all instructions with memory access 818 pin(); 819 } 820 821 // accessors 822 Value obj() const { return _obj; } 823 int offset() const { return _offset; } 824 ciField* field() const { return _field; } 825 BasicType field_type() const { return _field->type()->basic_type(); } 826 bool is_static() const { return check_flag(IsStaticFlag); } 827 NullCheck* explicit_null_check() const { return _explicit_null_check; } 828 bool needs_patching() const { return check_flag(NeedsPatchingFlag); } 829 830 // Unresolved getstatic and putstatic can cause initialization. 831 // Technically it occurs at the Constant that materializes the base 832 // of the static fields but it's simpler to model it here. 833 bool is_init_point() const { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); } 834 835 // manipulation 836 837 // Under certain circumstances, if a previous NullCheck instruction 838 // proved the target object non-null, we can eliminate the explicit 839 // null check and do an implicit one, simply specifying the debug 840 // information from the NullCheck. 
This field should only be consulted 841 // if needs_null_check() is true. 842 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 843 844 // generic 845 virtual bool can_trap() const { return needs_null_check() || needs_patching(); } 846 virtual void input_values_do(ValueVisitor* f) { f->visit(&_obj); } 847 }; 848 849 850 LEAF(LoadField, AccessField) 851 public: 852 // creation 853 LoadField(Value obj, int offset, ciField* field, bool is_static, 854 ValueStack* state_before, bool needs_patching, 855 ciValueKlass* value_klass = NULL, Value default_value = NULL ) 856 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 857 {} 858 859 ciType* declared_type() const; 860 861 // generic 862 HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset()) // cannot be eliminated if needs patching or if volatile 863 }; 864 865 866 LEAF(StoreField, AccessField) 867 private: 868 Value _value; 869 870 public: 871 // creation 872 StoreField(Value obj, int offset, ciField* field, Value value, bool is_static, 873 ValueStack* state_before, bool needs_patching) 874 : AccessField(obj, offset, field, is_static, state_before, needs_patching) 875 , _value(value) 876 { 877 set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object()); 878 ASSERT_VALUES 879 pin(); 880 } 881 882 // accessors 883 Value value() const { return _value; } 884 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 885 886 // generic 887 virtual void input_values_do(ValueVisitor* f) { AccessField::input_values_do(f); f->visit(&_value); } 888 }; 889 890 891 BASE(AccessArray, Instruction) 892 private: 893 Value _array; 894 895 public: 896 // creation 897 AccessArray(ValueType* type, Value array, ValueStack* state_before) 898 : Instruction(type, state_before) 899 , _array(array) 900 { 901 set_needs_null_check(true); 902 ASSERT_VALUES 903 pin(); // instruction with side effect (null exception or range 
check throwing) 904 } 905 906 Value array() const { return _array; } 907 908 // generic 909 virtual bool can_trap() const { return needs_null_check(); } 910 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 911 }; 912 913 914 LEAF(ArrayLength, AccessArray) 915 private: 916 NullCheck* _explicit_null_check; // For explicit null check elimination 917 918 public: 919 // creation 920 ArrayLength(Value array, ValueStack* state_before) 921 : AccessArray(intType, array, state_before) 922 , _explicit_null_check(NULL) {} 923 924 // accessors 925 NullCheck* explicit_null_check() const { return _explicit_null_check; } 926 927 // setters 928 // See LoadField::set_explicit_null_check for documentation 929 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 930 931 // generic 932 HASHING1(ArrayLength, true, array()->subst()) 933 }; 934 935 936 BASE(AccessIndexed, AccessArray) 937 private: 938 Value _index; 939 Value _length; 940 BasicType _elt_type; 941 bool _mismatched; 942 943 public: 944 // creation 945 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 946 : AccessArray(as_ValueType(elt_type), array, state_before) 947 , _index(index) 948 , _length(length) 949 , _elt_type(elt_type) 950 , _mismatched(mismatched) 951 { 952 set_flag(Instruction::NeedsRangeCheckFlag, true); 953 ASSERT_VALUES 954 } 955 956 // accessors 957 Value index() const { return _index; } 958 Value length() const { return _length; } 959 BasicType elt_type() const { return _elt_type; } 960 bool mismatched() const { return _mismatched; } 961 962 void clear_length() { _length = NULL; } 963 // perform elimination of range checks involving constants 964 bool compute_needs_range_check(); 965 966 // generic 967 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 968 }; 969 970 971 LEAF(LoadIndexed, AccessIndexed) 
972 private: 973 NullCheck* _explicit_null_check; // For explicit null check elimination 974 NewValueTypeInstance* _vt; 975 976 public: 977 // creation 978 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 979 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 980 , _explicit_null_check(NULL) {} 981 982 // accessors 983 NullCheck* explicit_null_check() const { return _explicit_null_check; } 984 985 // setters 986 // See LoadField::set_explicit_null_check for documentation 987 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 988 989 ciType* exact_type() const; 990 ciType* declared_type() const; 991 992 NewValueTypeInstance* vt() { return _vt; } 993 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 994 995 // generic 996 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 997 }; 998 999 1000 LEAF(StoreIndexed, AccessIndexed) 1001 private: 1002 Value _value; 1003 1004 ciMethod* _profiled_method; 1005 int _profiled_bci; 1006 bool _check_boolean; 1007 1008 public: 1009 // creation 1010 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1011 bool check_boolean, bool mismatched = false) 1012 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1013 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1014 { 1015 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1016 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1017 ASSERT_VALUES 1018 pin(); 1019 } 1020 1021 // accessors 1022 Value value() const { return _value; } 1023 bool needs_write_barrier() const { return check_flag(NeedsWriteBarrierFlag); } 1024 bool needs_store_check() const { return check_flag(NeedsStoreCheckFlag); } 1025 bool check_boolean() const { return _check_boolean; } 1026 // Helpers for MethodData* 
profiling 1027 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1028 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1029 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1030 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1031 ciMethod* profiled_method() const { return _profiled_method; } 1032 int profiled_bci() const { return _profiled_bci; } 1033 // Flattened array support 1034 bool is_exact_flattened_array_store() const; 1035 // generic 1036 virtual void input_values_do(ValueVisitor* f) { AccessIndexed::input_values_do(f); f->visit(&_value); } 1037 }; 1038 1039 1040 LEAF(NegateOp, Instruction) 1041 private: 1042 Value _x; 1043 1044 public: 1045 // creation 1046 NegateOp(Value x) : Instruction(x->type()->base()), _x(x) { 1047 ASSERT_VALUES 1048 } 1049 1050 // accessors 1051 Value x() const { return _x; } 1052 1053 // generic 1054 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); } 1055 }; 1056 1057 1058 BASE(Op2, Instruction) 1059 private: 1060 Bytecodes::Code _op; 1061 Value _x; 1062 Value _y; 1063 1064 public: 1065 // creation 1066 Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL) 1067 : Instruction(type, state_before) 1068 , _op(op) 1069 , _x(x) 1070 , _y(y) 1071 { 1072 ASSERT_VALUES 1073 } 1074 1075 // accessors 1076 Bytecodes::Code op() const { return _op; } 1077 Value x() const { return _x; } 1078 Value y() const { return _y; } 1079 1080 // manipulators 1081 void swap_operands() { 1082 assert(is_commutative(), "operation must be commutative"); 1083 Value t = _x; _x = _y; _y = t; 1084 } 1085 1086 // generic 1087 virtual bool is_commutative() const { return false; } 1088 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 1089 }; 1090 1091 1092 LEAF(ArithmeticOp, Op2) 1093 public: 1094 // creation 1095 ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before) 1096 
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();                       // e.g. division may throw; keep the instruction in place
  }

  // accessors
  bool is_strictfp() const                       { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(IfOp, Op2)
 private:
  Value _tval;                                   // result if the condition holds
  Value _fval;                                   // result if the condition does not hold
  bool  _substituability_check;                  // NOTE: misspelling of "substitutability" kept — renaming would break external callers

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval, ValueStack* state_before, bool substituability_check)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  , _substituability_check(substituability_check)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match");
    set_state_before(state_before);
  }

  // accessors
  virtual bool is_commutative() const;
  // the Op2 bytecode slot is reused to store the condition, so op() must never be called on an IfOp
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }
  bool substituability_check() const             { return _substituability_check; }
  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;                           // the conversion bytecode (e.g. i2l, d2f, ...)
  Value           _value;                        // the value being converted

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


LEAF(NullCheck, Instruction)
 private:
  Value _obj;                                    // the object to null-check

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;                        // the more precise type asserted for _obj
  Value   _obj;                                  // the value whose type is being narrowed

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value obj() const                              { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;                            // the JVM state at this instruction; set at most once (see set_state)

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);               // state splits are pinned by default; subclasses may unpin
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;                         // the invoke bytecode (invokestatic, invokevirtual, ...)
  Value           _recv;                         // the receiver, or NULL for static calls
  Values*         _args;                         // the call arguments
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;                       // the method being invoked

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before, bool never_null);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;                       // the class being instantiated
  bool             _is_unresolved;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }
  bool is_unresolved() const                     { return _is_unresolved; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type()
                                                 const;
  ciType* declared_type() const;
};


LEAF(NewValueTypeInstance, StateSplit)
  bool _is_unresolved;
  ciValueKlass* _klass;                          // the value class being instantiated
  Value _depends_on;                             // link to the instance that withfield was called on

 public:

  // Default creation, always allocated for now
  NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL)
  : StateSplit(instanceType, state_before)
  , _is_unresolved(is_unresolved)
  , _klass(klass)
  {
    // with no explicit dependency, the instance depends on itself
    if (depends_on == NULL) {
      _depends_on = this;
    } else {
      _depends_on = depends_on;
    }
    set_never_null(true);
  }

  // accessors
  bool is_unresolved() const                     { return _is_unresolved; }
  Value depends_on();

  ciValueKlass* klass() const                    { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;

  // Only done in LIR Generator -> map everything to object
  void set_to_object_type()                      { set_type(instanceType); }
};


BASE(NewArray, StateSplit)
 private:
  Value _length;                                 // the array length; NULL for NewMultiArray (see below)

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};


LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;                           // the primitive element type

 public:
  // creation
  NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};


LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;                               // the element class

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};


LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;                               // the (multi-dimensional) array class
  Values*  _dims;                                // one length value per dimension

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }

  ciType* exact_type() const;
};


BASE(TypeCheck, StateSplit)
 private:
  ciKlass*  _klass;                              // the class checked against; NULL if not loaded
  Value     _obj;                                // the object whose type is checked

  ciMethod* _profiled_method;                    // for MethodData* profiling (see helpers below)
  int       _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; }
  int profiled_bci() const                       { return _profiled_bci; }
};


LEAF(CheckCast, TypeCheck)
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before, bool never_null = false)
  : TypeCheck(klass, obj, objectType, state_before) {
    set_never_null(never_null);
  }

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};


BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;                                    // the object being locked/unlocked
  int   _monitor_no;                             // the monitor slot number

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};


LEAF(MonitorEnter, AccessMonitor)
  bool _maybe_valuetype;                         // value types cannot be synchronized on; checked at runtime when this is set
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_valuetype)
  : AccessMonitor(obj, monitor_no, state_before)
  , _maybe_valuetype(maybe_valuetype)
  {
    ASSERT_VALUES
  }

  // accessors
  bool maybe_valuetype() const                   { return _maybe_valuetype; }

  // generic
  virtual bool can_trap() const                  { return true; }
};


LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;                          // which intrinsic this node represents
  Values*          _args;                        // all arguments, including the receiver (if any) at index 0
  Value            _recv;                        // the receiver, or NULL
  ArgsNonNullState _nonnull_state;               // per-argument null-check requirements

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic.  The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag,        cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool preserves_state() const                   { return check_flag(PreservesStateFlag); }

  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};


class LIR_List;

LEAF(BlockBegin, StateSplit)
 private:
  int        _block_id;                          // the unique block id
  int        _bci;                               // start-bci of block
  int        _depth_first_number;                // number of this block in a depth-first ordering
  int        _linear_scan_number;                // number of this block in linear-scan ordering
  int        _dominator_depth;
  int        _loop_depth;                        // the loop nesting level of this block
  int        _loop_index;                        // number of the innermost loop of this block
  int        _flags;                             // the flags associated with this block

  // fields used by BlockListBuilder
  int            _total_preds;                   // number of predecessors found by BlockListBuilder
  ResourceBitMap _stores_to_locals;              // bit is set when a local variable is stored in the block

  // SSA specific fields: (factor out later)
  BlockList   _successors;                       // the successors of this block
  BlockList   _predecessors;                     // the predecessors of this block
  BlockList   _dominates;                        // list of blocks that are dominated by this block
  BlockBegin* _dominator;                        // the dominator of this block
  // SSA specific ends
  BlockEnd*  _end;                               // the last instruction of this block
  BlockList  _exception_handlers;                // the exception handlers potentially invoked by this block
  ValueStackStack* _exception_states;            // only for xhandler entries: states of all instructions that have an edge to this xhandler
  int        _exception_handler_pco;             // if this block is the start of an exception handler, this records
                                                 // the PC offset in the assembly code of the
                                                 // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  ResourceBitMap _live_in;                       // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;                      // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;                      // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill;                     // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;               // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id;      // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;       // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // blocks are arena-allocated; ids are assigned here because the
  // constructor cannot reach the Compilation before base-class construction
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    ((BlockBegin*)res)->_id = c->get_next_id();
    ((BlockBegin*)res)->_block_id = c->get_next_block_id();
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _successors(2)
  , _predecessors(2)
  , _dominates(2)
  , _dominator(NULL)
  , _end(NULL)
  , _exception_handlers(1)
  , _exception_states(NULL)
  , _exception_handler_pco(-1)
  , _lir(NULL)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(NULL)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const                           { return _block_id; }
  int bci() const                                { return _bci; }
  BlockList* successors()                        { return &_successors; }
  BlockList* dominates()                         { return &_dominates; }
  BlockBegin* dominator() const                  { return _dominator; }
  int loop_depth() const                         { return _loop_depth; }
  int dominator_depth() const                    { return _dominator_depth; }
  int depth_first_number() const                 { return _depth_first_number; }
  int linear_scan_number() const                 { return _linear_scan_number; }
  BlockEnd* end() const                          { return _end; }
  Label* label()                                 { return &_label; }
  LIR_List* lir() const                          { return _lir; }
  int exception_handler_pco() const              { return _exception_handler_pco; }
  ResourceBitMap& live_in()                      { return _live_in; }
  ResourceBitMap& live_out()                     { return _live_out; }
  ResourceBitMap& live_gen()                     { return _live_gen; }
  ResourceBitMap& live_kill()                    { return _live_kill; }
  ResourceBitMap& fpu_register_usage()           { return _fpu_register_usage; }
  intArray* fpu_stack_state() const              { return _fpu_stack_state; }
  int first_lir_instruction_id() const           { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const            { return _last_lir_instruction_id; }
  int total_preds() const                        { return _total_preds; }
  BitMap& stores_to_locals()                     { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)            { _dominator = dom; }
  void set_loop_depth(int d)                     { _loop_depth = d; }
  void set_dominator_depth(int d)                { _dominator_depth = d; }
  void set_depth_first_number(int dfn)           { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)           { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void clear_end();
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                    { _lir = lir; }
  void set_exception_handler_pco(int pco)        { _exception_handler_pco = pco; }
  void set_live_in (const ResourceBitMap& map)   { _live_in = map; }
  void set_live_out(const ResourceBitMap& map)   { _live_out = map; }
  void set_live_gen(const ResourceBitMap& map)   { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map)  { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)      { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)      { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)       { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)          { _total_preds += n; }
  void init_stores_to_locals(int locals_count)   { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const       { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const    { return _predecessors.contains(pred); }
  int number_of_preds() const                    { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const               { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const      { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // do not try to eliminate range checks in this block
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder   (BlockClosure* closure);
  void iterate_postorder  (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix; }
  int  loop_index() const                        { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state)                  { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;                               // the successor blocks; NULL until set_sux() is called

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ?
_sux->length() : 0; } 1933 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1934 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1935 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1936 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1937 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1938 }; 1939 1940 1941 LEAF(Goto, BlockEnd) 1942 public: 1943 enum Direction { 1944 none, // Just a regular goto 1945 taken, not_taken // Goto produced from If 1946 }; 1947 private: 1948 ciMethod* _profiled_method; 1949 int _profiled_bci; 1950 Direction _direction; 1951 public: 1952 // creation 1953 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1954 : BlockEnd(illegalType, state_before, is_safepoint) 1955 , _profiled_method(NULL) 1956 , _profiled_bci(0) 1957 , _direction(none) { 1958 BlockList* s = new BlockList(1); 1959 s->append(sux); 1960 set_sux(s); 1961 } 1962 1963 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1964 , _profiled_method(NULL) 1965 , _profiled_bci(0) 1966 , _direction(none) { 1967 BlockList* s = new BlockList(1); 1968 s->append(sux); 1969 set_sux(s); 1970 } 1971 1972 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1973 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1974 int profiled_bci() const { return _profiled_bci; } 1975 Direction direction() const { return _direction; } 1976 1977 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1978 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1979 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1980 void set_direction(Direction d) { _direction = d; } 1981 }; 1982 1983 #ifdef ASSERT 1984 LEAF(Assert, Instruction) 1985 private: 1986 Value _x; 1987 Condition _cond; 1988 Value _y; 1989 char *_message; 1990 1991 public: 1992 // creation 1993 // 
unordered_is_true is valid for float/double compares only 1994 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1995 1996 // accessors 1997 Value x() const { return _x; } 1998 Condition cond() const { return _cond; } 1999 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2000 Value y() const { return _y; } 2001 const char *message() const { return _message; } 2002 2003 // generic 2004 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 2005 }; 2006 #endif 2007 2008 LEAF(RangeCheckPredicate, StateSplit) 2009 private: 2010 Value _x; 2011 Condition _cond; 2012 Value _y; 2013 2014 void check_state(); 2015 2016 public: 2017 // creation 2018 // unordered_is_true is valid for float/double compares only 2019 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 2020 , _x(x) 2021 , _cond(cond) 2022 , _y(y) 2023 { 2024 ASSERT_VALUES 2025 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2026 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2027 this->set_state(state); 2028 check_state(); 2029 } 2030 2031 // Always deoptimize 2032 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 2033 { 2034 this->set_state(state); 2035 _x = _y = NULL; 2036 check_state(); 2037 } 2038 2039 // accessors 2040 Value x() const { return _x; } 2041 Condition cond() const { return _cond; } 2042 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2043 Value y() const { return _y; } 2044 2045 void always_fail() { _x = _y = NULL; } 2046 2047 // generic 2048 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2049 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 2050 }; 2051 2052 LEAF(If, BlockEnd) 2053 private: 2054 Value _x; 2055 Condition _cond; 2056 Value _y; 2057 ciMethod* _profiled_method; 2058 int _profiled_bci; // 
Canonicalizer may alter bci of If node 2059 bool _swapped; // Is the order reversed with respect to the original If in the 2060 // bytecode stream? 2061 bool _substituability_check; 2062 public: 2063 // creation 2064 // unordered_is_true is valid for float/double compares only 2065 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint, bool substituability_check=false) 2066 : BlockEnd(illegalType, state_before, is_safepoint) 2067 , _x(x) 2068 , _cond(cond) 2069 , _y(y) 2070 , _profiled_method(NULL) 2071 , _profiled_bci(0) 2072 , _swapped(false) 2073 , _substituability_check(substituability_check) 2074 { 2075 ASSERT_VALUES 2076 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2077 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2078 BlockList* s = new BlockList(2); 2079 s->append(tsux); 2080 s->append(fsux); 2081 set_sux(s); 2082 } 2083 2084 // accessors 2085 Value x() const { return _x; } 2086 Condition cond() const { return _cond; } 2087 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2088 Value y() const { return _y; } 2089 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2090 BlockBegin* tsux() const { return sux_for(true); } 2091 BlockBegin* fsux() const { return sux_for(false); } 2092 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2093 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2094 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2095 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2096 bool is_swapped() const { return _swapped; } 2097 2098 // manipulation 2099 void swap_operands() { 2100 Value t = _x; _x = _y; _y = t; 2101 _cond = mirror(_cond); 2102 } 2103 2104 void swap_sux() { 2105 assert(number_of_sux() == 2, "wrong number of successors"); 2106 BlockList* s = sux(); 2107 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2108 _cond = negate(_cond); 2109 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2110 } 2111 2112 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2113 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2114 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2115 void set_swapped(bool value) { _swapped = value; } 2116 bool substituability_check() const { return _substituability_check; } 2117 // generic 2118 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2119 }; 2120 2121 2122 LEAF(IfInstanceOf, BlockEnd) 2123 private: 2124 ciKlass* _klass; 2125 Value _obj; 2126 bool _test_is_instance; // jump if instance 2127 int _instanceof_bci; 2128 2129 public: 2130 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2131 : BlockEnd(illegalType, NULL, false) // temporary set to false 2132 , _klass(klass) 2133 , _obj(obj) 2134 , _test_is_instance(test_is_instance) 2135 , _instanceof_bci(instanceof_bci) 2136 { 2137 ASSERT_VALUES 2138 assert(instanceof_bci >= 0, "illegal 
bci"); 2139 BlockList* s = new BlockList(2); 2140 s->append(tsux); 2141 s->append(fsux); 2142 set_sux(s); 2143 } 2144 2145 // accessors 2146 // 2147 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2148 // instance of klass; otherwise it tests if it is *not* and instance 2149 // of klass. 2150 // 2151 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2152 // and an If instruction. The IfInstanceOf bci() corresponds to the 2153 // bci that the If would have had; the (this->) instanceof_bci() is 2154 // the bci of the original InstanceOf instruction. 2155 ciKlass* klass() const { return _klass; } 2156 Value obj() const { return _obj; } 2157 int instanceof_bci() const { return _instanceof_bci; } 2158 bool test_is_instance() const { return _test_is_instance; } 2159 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2160 BlockBegin* tsux() const { return sux_for(true); } 2161 BlockBegin* fsux() const { return sux_for(false); } 2162 2163 // manipulation 2164 void swap_sux() { 2165 assert(number_of_sux() == 2, "wrong number of successors"); 2166 BlockList* s = sux(); 2167 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2168 _test_is_instance = !_test_is_instance; 2169 } 2170 2171 // generic 2172 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2173 }; 2174 2175 2176 BASE(Switch, BlockEnd) 2177 private: 2178 Value _tag; 2179 2180 public: 2181 // creation 2182 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2183 : BlockEnd(illegalType, state_before, is_safepoint) 2184 , _tag(tag) { 2185 ASSERT_VALUES 2186 set_sux(sux); 2187 } 2188 2189 // accessors 2190 Value tag() const { return _tag; } 2191 int length() const { return number_of_sux() - 1; } 2192 2193 virtual bool needs_exception_state() const { return false; } 2194 2195 // generic 2196 virtual void input_values_do(ValueVisitor* f) { 
BlockEnd::input_values_do(f); f->visit(&_tag); } 2197 }; 2198 2199 2200 LEAF(TableSwitch, Switch) 2201 private: 2202 int _lo_key; 2203 2204 public: 2205 // creation 2206 TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint) 2207 : Switch(tag, sux, state_before, is_safepoint) 2208 , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); } 2209 2210 // accessors 2211 int lo_key() const { return _lo_key; } 2212 int hi_key() const { return _lo_key + (length() - 1); } 2213 }; 2214 2215 2216 LEAF(LookupSwitch, Switch) 2217 private: 2218 intArray* _keys; 2219 2220 public: 2221 // creation 2222 LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint) 2223 : Switch(tag, sux, state_before, is_safepoint) 2224 , _keys(keys) { 2225 assert(keys != NULL, "keys must exist"); 2226 assert(keys->length() == length(), "sux & keys have incompatible lengths"); 2227 } 2228 2229 // accessors 2230 int key_at(int i) const { return _keys->at(i); } 2231 }; 2232 2233 2234 LEAF(Return, BlockEnd) 2235 private: 2236 Value _result; 2237 2238 public: 2239 // creation 2240 Return(Value result) : 2241 BlockEnd(result == NULL ? 
voidType : result->type()->base(), NULL, true), 2242 _result(result) {} 2243 2244 // accessors 2245 Value result() const { return _result; } 2246 bool has_result() const { return result() != NULL; } 2247 2248 // generic 2249 virtual void input_values_do(ValueVisitor* f) { 2250 BlockEnd::input_values_do(f); 2251 if (has_result()) f->visit(&_result); 2252 } 2253 }; 2254 2255 2256 LEAF(Throw, BlockEnd) 2257 private: 2258 Value _exception; 2259 2260 public: 2261 // creation 2262 Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) { 2263 ASSERT_VALUES 2264 } 2265 2266 // accessors 2267 Value exception() const { return _exception; } 2268 2269 // generic 2270 virtual bool can_trap() const { return true; } 2271 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_exception); } 2272 }; 2273 2274 2275 LEAF(Base, BlockEnd) 2276 public: 2277 // creation 2278 Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) { 2279 assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged"); 2280 assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged"); 2281 BlockList* s = new BlockList(2); 2282 if (osr_entry != NULL) s->append(osr_entry); 2283 s->append(std_entry); // must be default sux! 2284 set_sux(s); 2285 } 2286 2287 // accessors 2288 BlockBegin* std_entry() const { return default_sux(); } 2289 BlockBegin* osr_entry() const { return number_of_sux() < 2 ? 
NULL : sux_at(0); } 2290 }; 2291 2292 2293 LEAF(OsrEntry, Instruction) 2294 public: 2295 // creation 2296 #ifdef _LP64 2297 OsrEntry() : Instruction(longType) { pin(); } 2298 #else 2299 OsrEntry() : Instruction(intType) { pin(); } 2300 #endif 2301 2302 // generic 2303 virtual void input_values_do(ValueVisitor* f) { } 2304 }; 2305 2306 2307 // Models the incoming exception at a catch site 2308 LEAF(ExceptionObject, Instruction) 2309 public: 2310 // creation 2311 ExceptionObject() : Instruction(objectType) { 2312 pin(); 2313 } 2314 2315 // generic 2316 virtual void input_values_do(ValueVisitor* f) { } 2317 }; 2318 2319 2320 // Models needed rounding for floating-point values on Intel. 2321 // Currently only used to represent rounding of double-precision 2322 // values stored into local variables, but could be used to model 2323 // intermediate rounding of single-precision values as well. 2324 LEAF(RoundFP, Instruction) 2325 private: 2326 Value _input; // floating-point value to be rounded 2327 2328 public: 2329 RoundFP(Value input) 2330 : Instruction(input->type()) // Note: should not be used for constants 2331 , _input(input) 2332 { 2333 ASSERT_VALUES 2334 } 2335 2336 // accessors 2337 Value input() const { return _input; } 2338 2339 // generic 2340 virtual void input_values_do(ValueVisitor* f) { f->visit(&_input); } 2341 }; 2342 2343 2344 BASE(UnsafeOp, Instruction) 2345 private: 2346 BasicType _basic_type; // ValueType can not express byte-sized integers 2347 2348 protected: 2349 // creation 2350 UnsafeOp(BasicType basic_type, bool is_put) 2351 : Instruction(is_put ? voidType : as_ValueType(basic_type)) 2352 , _basic_type(basic_type) 2353 { 2354 //Note: Unsafe ops are not not guaranteed to throw NPE. 2355 // Convservatively, Unsafe operations must be pinned though we could be 2356 // looser about this if we wanted to.. 
2357 pin(); 2358 } 2359 2360 public: 2361 // accessors 2362 BasicType basic_type() { return _basic_type; } 2363 2364 // generic 2365 virtual void input_values_do(ValueVisitor* f) { } 2366 }; 2367 2368 2369 BASE(UnsafeRawOp, UnsafeOp) 2370 private: 2371 Value _base; // Base address (a Java long) 2372 Value _index; // Index if computed by optimizer; initialized to NULL 2373 int _log2_scale; // Scale factor: 0, 1, 2, or 3. 2374 // Indicates log2 of number of bytes (1, 2, 4, or 8) 2375 // to scale index by. 2376 2377 protected: 2378 UnsafeRawOp(BasicType basic_type, Value addr, bool is_put) 2379 : UnsafeOp(basic_type, is_put) 2380 , _base(addr) 2381 , _index(NULL) 2382 , _log2_scale(0) 2383 { 2384 // Can not use ASSERT_VALUES because index may be NULL 2385 assert(addr != NULL && addr->type()->is_long(), "just checking"); 2386 } 2387 2388 UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put) 2389 : UnsafeOp(basic_type, is_put) 2390 , _base(base) 2391 , _index(index) 2392 , _log2_scale(log2_scale) 2393 { 2394 } 2395 2396 public: 2397 // accessors 2398 Value base() { return _base; } 2399 Value index() { return _index; } 2400 bool has_index() { return (_index != NULL); } 2401 int log2_scale() { return _log2_scale; } 2402 2403 // setters 2404 void set_base (Value base) { _base = base; } 2405 void set_index(Value index) { _index = index; } 2406 void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; } 2407 2408 // generic 2409 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2410 f->visit(&_base); 2411 if (has_index()) f->visit(&_index); } 2412 }; 2413 2414 2415 LEAF(UnsafeGetRaw, UnsafeRawOp) 2416 private: 2417 bool _may_be_unaligned, _is_wide; // For OSREntry 2418 2419 public: 2420 UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false) 2421 : UnsafeRawOp(basic_type, addr, false) { 2422 _may_be_unaligned = may_be_unaligned; 2423 _is_wide = is_wide; 2424 } 2425 2426 
UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false) 2427 : UnsafeRawOp(basic_type, base, index, log2_scale, false) { 2428 _may_be_unaligned = may_be_unaligned; 2429 _is_wide = is_wide; 2430 } 2431 2432 bool may_be_unaligned() { return _may_be_unaligned; } 2433 bool is_wide() { return _is_wide; } 2434 }; 2435 2436 2437 LEAF(UnsafePutRaw, UnsafeRawOp) 2438 private: 2439 Value _value; // Value to be stored 2440 2441 public: 2442 UnsafePutRaw(BasicType basic_type, Value addr, Value value) 2443 : UnsafeRawOp(basic_type, addr, true) 2444 , _value(value) 2445 { 2446 assert(value != NULL, "just checking"); 2447 ASSERT_VALUES 2448 } 2449 2450 UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value) 2451 : UnsafeRawOp(basic_type, base, index, log2_scale, true) 2452 , _value(value) 2453 { 2454 assert(value != NULL, "just checking"); 2455 ASSERT_VALUES 2456 } 2457 2458 // accessors 2459 Value value() { return _value; } 2460 2461 // generic 2462 virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f); 2463 f->visit(&_value); } 2464 }; 2465 2466 2467 BASE(UnsafeObjectOp, UnsafeOp) 2468 private: 2469 Value _object; // Object to be fetched from or mutated 2470 Value _offset; // Offset within object 2471 bool _is_volatile; // true if volatile - dl/JSR166 2472 public: 2473 UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile) 2474 : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile) 2475 { 2476 } 2477 2478 // accessors 2479 Value object() { return _object; } 2480 Value offset() { return _offset; } 2481 bool is_volatile() { return _is_volatile; } 2482 // generic 2483 virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f); 2484 f->visit(&_object); 2485 f->visit(&_offset); } 2486 }; 2487 2488 2489 LEAF(UnsafeGetObject, UnsafeObjectOp) 2490 public: 2491 
UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile) 2492 : UnsafeObjectOp(basic_type, object, offset, false, is_volatile) 2493 { 2494 ASSERT_VALUES 2495 } 2496 }; 2497 2498 2499 LEAF(UnsafePutObject, UnsafeObjectOp) 2500 private: 2501 Value _value; // Value to be stored 2502 public: 2503 UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile) 2504 : UnsafeObjectOp(basic_type, object, offset, true, is_volatile) 2505 , _value(value) 2506 { 2507 ASSERT_VALUES 2508 } 2509 2510 // accessors 2511 Value value() { return _value; } 2512 2513 // generic 2514 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2515 f->visit(&_value); } 2516 }; 2517 2518 LEAF(UnsafeGetAndSetObject, UnsafeObjectOp) 2519 private: 2520 Value _value; // Value to be stored 2521 bool _is_add; 2522 public: 2523 UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add) 2524 : UnsafeObjectOp(basic_type, object, offset, false, false) 2525 , _value(value) 2526 , _is_add(is_add) 2527 { 2528 ASSERT_VALUES 2529 } 2530 2531 // accessors 2532 bool is_add() const { return _is_add; } 2533 Value value() { return _value; } 2534 2535 // generic 2536 virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f); 2537 f->visit(&_value); } 2538 }; 2539 2540 LEAF(ProfileCall, Instruction) 2541 private: 2542 ciMethod* _method; 2543 int _bci_of_invoke; 2544 ciMethod* _callee; // the method that is called at the given bci 2545 Value _recv; 2546 ciKlass* _known_holder; 2547 Values* _obj_args; // arguments for type profiling 2548 ArgsNonNullState _nonnull_state; // Do we know whether some arguments are never null? 
2549 bool _inlined; // Are we profiling a call that is inlined 2550 2551 public: 2552 ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined) 2553 : Instruction(voidType) 2554 , _method(method) 2555 , _bci_of_invoke(bci) 2556 , _callee(callee) 2557 , _recv(recv) 2558 , _known_holder(known_holder) 2559 , _obj_args(obj_args) 2560 , _inlined(inlined) 2561 { 2562 // The ProfileCall has side-effects and must occur precisely where located 2563 pin(); 2564 } 2565 2566 ciMethod* method() const { return _method; } 2567 int bci_of_invoke() const { return _bci_of_invoke; } 2568 ciMethod* callee() const { return _callee; } 2569 Value recv() const { return _recv; } 2570 ciKlass* known_holder() const { return _known_holder; } 2571 int nb_profiled_args() const { return _obj_args == NULL ? 0 : _obj_args->length(); } 2572 Value profiled_arg_at(int i) const { return _obj_args->at(i); } 2573 bool arg_needs_null_check(int i) const { 2574 return _nonnull_state.arg_needs_null_check(i); 2575 } 2576 bool inlined() const { return _inlined; } 2577 2578 void set_arg_needs_null_check(int i, bool check) { 2579 _nonnull_state.set_arg_needs_null_check(i, check); 2580 } 2581 2582 virtual void input_values_do(ValueVisitor* f) { 2583 if (_recv != NULL) { 2584 f->visit(&_recv); 2585 } 2586 for (int i = 0; i < nb_profiled_args(); i++) { 2587 f->visit(_obj_args->adr_at(i)); 2588 } 2589 } 2590 }; 2591 2592 LEAF(ProfileReturnType, Instruction) 2593 private: 2594 ciMethod* _method; 2595 ciMethod* _callee; 2596 int _bci_of_invoke; 2597 Value _ret; 2598 2599 public: 2600 ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret) 2601 : Instruction(voidType) 2602 , _method(method) 2603 , _callee(callee) 2604 , _bci_of_invoke(bci) 2605 , _ret(ret) 2606 { 2607 set_needs_null_check(true); 2608 // The ProfileType has side-effects and must occur precisely where located 2609 pin(); 2610 } 2611 2612 ciMethod* method() const { return 
_method; } 2613 ciMethod* callee() const { return _callee; } 2614 int bci_of_invoke() const { return _bci_of_invoke; } 2615 Value ret() const { return _ret; } 2616 2617 virtual void input_values_do(ValueVisitor* f) { 2618 if (_ret != NULL) { 2619 f->visit(&_ret); 2620 } 2621 } 2622 }; 2623 2624 // Call some C runtime function that doesn't safepoint, 2625 // optionally passing the current thread as the first argument. 2626 LEAF(RuntimeCall, Instruction) 2627 private: 2628 const char* _entry_name; 2629 address _entry; 2630 Values* _args; 2631 bool _pass_thread; // Pass the JavaThread* as an implicit first argument 2632 2633 public: 2634 RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true) 2635 : Instruction(type) 2636 , _entry_name(entry_name) 2637 , _entry(entry) 2638 , _args(args) 2639 , _pass_thread(pass_thread) { 2640 ASSERT_VALUES 2641 pin(); 2642 } 2643 2644 const char* entry_name() const { return _entry_name; } 2645 address entry() const { return _entry; } 2646 int number_of_arguments() const { return _args->length(); } 2647 Value argument_at(int i) const { return _args->at(i); } 2648 bool pass_thread() const { return _pass_thread; } 2649 2650 virtual void input_values_do(ValueVisitor* f) { 2651 for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i)); 2652 } 2653 }; 2654 2655 // Use to trip invocation counter of an inlined method 2656 2657 LEAF(ProfileInvoke, Instruction) 2658 private: 2659 ciMethod* _inlinee; 2660 ValueStack* _state; 2661 2662 public: 2663 ProfileInvoke(ciMethod* inlinee, ValueStack* state) 2664 : Instruction(voidType) 2665 , _inlinee(inlinee) 2666 , _state(state) 2667 { 2668 // The ProfileInvoke has side-effects and must occur precisely where located QQQ??? 
2669 pin(); 2670 } 2671 2672 ciMethod* inlinee() { return _inlinee; } 2673 ValueStack* state() { return _state; } 2674 virtual void input_values_do(ValueVisitor*) {} 2675 virtual void state_values_do(ValueVisitor*); 2676 }; 2677 2678 LEAF(MemBar, Instruction) 2679 private: 2680 LIR_Code _code; 2681 2682 public: 2683 MemBar(LIR_Code code) 2684 : Instruction(voidType) 2685 , _code(code) 2686 { 2687 pin(); 2688 } 2689 2690 LIR_Code code() { return _code; } 2691 2692 virtual void input_values_do(ValueVisitor*) {} 2693 }; 2694 2695 class BlockPair: public CompilationResourceObj { 2696 private: 2697 BlockBegin* _from; 2698 BlockBegin* _to; 2699 public: 2700 BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {} 2701 BlockBegin* from() const { return _from; } 2702 BlockBegin* to() const { return _to; } 2703 bool is_same(BlockBegin* from, BlockBegin* to) const { return _from == from && _to == to; } 2704 bool is_same(BlockPair* p) const { return _from == p->from() && _to == p->to(); } 2705 void set_to(BlockBegin* b) { _to = b; } 2706 void set_from(BlockBegin* b) { _from = b; } 2707 }; 2708 2709 typedef GrowableArray<BlockPair*> BlockPairList; 2710 2711 inline int BlockBegin::number_of_sux() const { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); } 2712 inline BlockBegin* BlockBegin::sux_at(int i) const { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch"); return _successors.at(i); } 2713 inline void BlockBegin::add_successor(BlockBegin* sux) { assert(_end == NULL, "Would create mismatch with successors of BlockEnd"); _successors.append(sux); } 2714 2715 #undef ASSERT_VALUES 2716 2717 #endif // SHARE_C1_C1_INSTRUCTION_HPP