1 /* 2 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 22 * 23 */ 24 25 #ifndef SHARE_C1_C1_INSTRUCTION_HPP 26 #define SHARE_C1_C1_INSTRUCTION_HPP 27 28 #include "c1/c1_Compilation.hpp" 29 #include "c1/c1_LIR.hpp" 30 #include "c1/c1_ValueType.hpp" 31 #include "ci/ciField.hpp" 32 33 // Predefined classes 34 class ciField; 35 class ValueStack; 36 class InstructionPrinter; 37 class IRScope; 38 class LIR_OprDesc; 39 typedef LIR_OprDesc* LIR_Opr; 40 41 42 // Instruction class hierarchy 43 // 44 // All leaf classes in the class hierarchy are concrete classes 45 // (i.e., are instantiated). All other classes are abstract and 46 // serve factoring. 
// Forward declarations of the instruction classes so that the Value
// typedefs and the visitor below can refer to them before their
// definitions appear.
class Instruction;
class Phi;
class Local;
class Constant;
class AccessField;
class LoadField;
class StoreField;
class AccessArray;
class ArrayLength;
class AccessIndexed;
class LoadIndexed;
class StoreIndexed;
class NegateOp;
class Op2;
class ArithmeticOp;
class ShiftOp;
class LogicOp;
class CompareOp;
class IfOp;
class Convert;
class NullCheck;
class TypeCast;
class OsrEntry;
class ExceptionObject;
class StateSplit;
class Invoke;
class NewInstance;
class NewValueTypeInstance;
class NewArray;
class NewTypeArray;
class NewObjectArray;
class NewMultiArray;
class TypeCheck;
class CheckCast;
class InstanceOf;
class AccessMonitor;
class MonitorEnter;
class MonitorExit;
class Intrinsic;
class BlockBegin;
class BlockEnd;
class Goto;
class If;
class IfInstanceOf;
class Switch;
class TableSwitch;
class LookupSwitch;
class Return;
class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
class RuntimeCall;
class MemBar;
class RangeCheckPredicate;
#ifdef ASSERT
class Assert;   // only exists in debug builds (see do_Assert / as_Assert below)
#endif

// A Value is a reference to the instruction creating the value
typedef Instruction* Value;
typedef GrowableArray<Value> Values;
typedef GrowableArray<ValueStack*> ValueStackStack;

// BlockClosure is the base class for block traversal/iteration.
// Abstract per-block callback: subclasses implement block_do and are
// handed every block by the BlockList iteration helpers below.
class BlockClosure: public CompilationResourceObj {
 public:
  virtual void block_do(BlockBegin* block) = 0;
};


// A simple closure class for visiting the values of an Instruction
class ValueVisitor: public StackObj {
 public:
  // v points at the Value slot itself, so a visitor may replace the
  // referenced instruction in place (used e.g. for substitution).
  virtual void visit(Value* v) = 0;
};


// Some array and list classes
typedef GrowableArray<BlockBegin*> BlockBeginArray;

// A growable list of basic blocks with traversal helpers.
class BlockList: public GrowableArray<BlockBegin*> {
 public:
  BlockList(): GrowableArray<BlockBegin*>() {}
  BlockList(const int size): GrowableArray<BlockBegin*>(size) {}
  // Pre-filled list: length == size, every slot initialized to init.
  BlockList(const int size, BlockBegin* init): GrowableArray<BlockBegin*>(size, size, init) {}

  void iterate_forward(BlockClosure* closure);   // apply closure from first to last block
  void iterate_backward(BlockClosure* closure);  // apply closure from last to first block
  void blocks_do(void f(BlockBegin*));           // apply a plain function to every block
  void values_do(ValueVisitor* f);               // visit the values of every block
  // Debug printing; compiled out in PRODUCT builds.
  void print(bool cfg_only = false, bool live_only = false) PRODUCT_RETURN;
};


// InstructionVisitors provide type-based dispatch for instructions.
// For each concrete Instruction class X, a virtual function do_X is
// provided. Functionality that needs to be implemented for all classes
// (e.g., printing, code generation) is factored out into a specialised
// visitor instead of added to the Instruction classes itself.
// Double-dispatch visitor over the instruction hierarchy: each concrete
// instruction's visit() calls back into the matching do_X hook (wired up
// by the LEAF macro below).
class InstructionVisitor: public StackObj {
 public:
  virtual void do_Phi            (Phi*             x) = 0;
  virtual void do_Local          (Local*           x) = 0;
  virtual void do_Constant       (Constant*        x) = 0;
  virtual void do_LoadField      (LoadField*       x) = 0;
  virtual void do_StoreField     (StoreField*      x) = 0;
  virtual void do_ArrayLength    (ArrayLength*     x) = 0;
  virtual void do_LoadIndexed    (LoadIndexed*     x) = 0;
  virtual void do_StoreIndexed   (StoreIndexed*    x) = 0;
  virtual void do_NegateOp       (NegateOp*        x) = 0;
  virtual void do_ArithmeticOp   (ArithmeticOp*    x) = 0;
  virtual void do_ShiftOp        (ShiftOp*         x) = 0;
  virtual void do_LogicOp        (LogicOp*         x) = 0;
  virtual void do_CompareOp      (CompareOp*       x) = 0;
  virtual void do_IfOp           (IfOp*            x) = 0;
  virtual void do_Convert        (Convert*         x) = 0;
  virtual void do_NullCheck      (NullCheck*       x) = 0;
  virtual void do_TypeCast       (TypeCast*        x) = 0;
  virtual void do_Invoke         (Invoke*          x) = 0;
  virtual void do_NewInstance    (NewInstance*     x) = 0;
  virtual void do_NewValueTypeInstance(NewValueTypeInstance* x) = 0;
  virtual void do_NewTypeArray   (NewTypeArray*    x) = 0;
  virtual void do_NewObjectArray (NewObjectArray*  x) = 0;
  virtual void do_NewMultiArray  (NewMultiArray*   x) = 0;
  virtual void do_CheckCast      (CheckCast*       x) = 0;
  virtual void do_InstanceOf     (InstanceOf*      x) = 0;
  virtual void do_MonitorEnter   (MonitorEnter*    x) = 0;
  virtual void do_MonitorExit    (MonitorExit*     x) = 0;
  virtual void do_Intrinsic      (Intrinsic*       x) = 0;
  virtual void do_BlockBegin     (BlockBegin*      x) = 0;
  virtual void do_Goto           (Goto*            x) = 0;
  virtual void do_If             (If*              x) = 0;
  virtual void do_IfInstanceOf   (IfInstanceOf*    x) = 0;
  virtual void do_TableSwitch    (TableSwitch*     x) = 0;
  virtual void do_LookupSwitch   (LookupSwitch*    x) = 0;
  virtual void do_Return         (Return*          x) = 0;
  virtual void do_Throw          (Throw*           x) = 0;
  virtual void do_Base           (Base*            x) = 0;
  virtual void do_OsrEntry       (OsrEntry*        x) = 0;
  virtual void do_ExceptionObject(ExceptionObject* x) = 0;
  virtual void do_RoundFP        (RoundFP*         x) = 0;
  virtual void do_UnsafeGetRaw   (UnsafeGetRaw*    x) = 0;
  virtual void do_UnsafePutRaw   (UnsafePutRaw*    x) = 0;
  virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
  virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
  virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
  virtual void do_ProfileCall    (ProfileCall*     x) = 0;
  virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
  virtual void do_ProfileInvoke  (ProfileInvoke*   x) = 0;
  virtual void do_RuntimeCall    (RuntimeCall*     x) = 0;
  virtual void do_MemBar         (MemBar*          x) = 0;
  virtual void do_RangeCheckPredicate(RangeCheckPredicate* x) = 0;
#ifdef ASSERT
  virtual void do_Assert         (Assert*          x) = 0;
#endif
};


// Hashing support
//
// Note: These hash functions affect the performance
//       of ValueMap - make changes carefully!

#define HASH1(x1            )                    ((intx)(x1))
#define HASH2(x1, x2        )                    ((HASH1(x1        ) << 7) ^ HASH1(x2))
#define HASH3(x1, x2, x3    )                    ((HASH2(x1, x2    ) << 7) ^ HASH1(x3))
#define HASH4(x1, x2, x3, x4)                    ((HASH3(x1, x2, x3) << 7) ^ HASH1(x4))


// The following macros are used to implement instruction-specific hashing.
// By default, each instruction implements hash() and is_equal(Value), used
// for value numbering/common subexpression elimination. The default imple-
// mentation disables value numbering. Each instruction which can be value-
// numbered, should define corresponding hash() and is_equal(Value) functions
// via the macros below. The f arguments specify all the values/op codes, etc.
// that need to be identical for two instructions to be identical.
//
// Note: The default implementation of hash() returns 0 in order to indicate
//       that the instruction should not be considered for value numbering.
//       The currently used hash functions do not guarantee that never a 0
//       is produced. While this is still correct, it may be a performance
//       bug (no value numbering for that node). However, this situation is
//       so unlikely, that we are not going to handle it specially.

// HASHINGn(class_name, enabled, f1..fn): emit a hash() combining the
// instruction name with the n given fields, and an is_equal(Value) that
// downcasts via as_##class_name and compares the same fields.  When
// 'enabled' is false, hash() returns 0, which disables value numbering
// for the instruction (see the note above).

#define HASHING1(class_name, enabled, f1)        \
  virtual intx hash() const {                    \
    return (enabled) ? HASH2(name(), f1) : 0;    \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    return true;                                 \
  }                                              \


#define HASHING2(class_name, enabled, f1, f2)    \
  virtual intx hash() const {                    \
    return (enabled) ? HASH3(name(), f1, f2) : 0;  \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    return true;                                 \
  }                                              \


#define HASHING3(class_name, enabled, f1, f2, f3)  \
  virtual intx hash() const {                    \
    return (enabled) ? HASH4(name(), f1, f2, f3) : 0;  \
  }                                              \
  virtual bool is_equal(Value v) const {         \
    if (!(enabled)  ) return false;              \
    class_name* _v = v->as_##class_name();       \
    if (_v == NULL  ) return false;              \
    if (f1 != _v->f1) return false;              \
    if (f2 != _v->f2) return false;              \
    if (f3 != _v->f3) return false;              \
    return true;                                 \
  }                                              \


// The mother of all instructions...
288 289 class Instruction: public CompilationResourceObj { 290 private: 291 int _id; // the unique instruction id 292 #ifndef PRODUCT 293 int _printable_bci; // the bci of the instruction for printing 294 #endif 295 int _use_count; // the number of instructions refering to this value (w/o prev/next); only roots can have use count = 0 or > 1 296 int _pin_state; // set of PinReason describing the reason for pinning 297 ValueType* _type; // the instruction value type 298 Instruction* _next; // the next instruction if any (NULL for BlockEnd instructions) 299 Instruction* _subst; // the substitution instruction if any 300 LIR_Opr _operand; // LIR specific information 301 unsigned int _flags; // Flag bits 302 303 ValueStack* _state_before; // Copy of state with input operands still on stack (or NULL) 304 ValueStack* _exception_state; // Copy of state for exception handling 305 XHandlers* _exception_handlers; // Flat list of exception handlers covering this instruction 306 307 friend class UseCountComputer; 308 friend class BlockBegin; 309 310 void update_exception_state(ValueStack* state); 311 312 protected: 313 BlockBegin* _block; // Block that contains this instruction 314 315 void set_type(ValueType* type) { 316 assert(type != NULL, "type must exist"); 317 _type = type; 318 } 319 320 // Helper class to keep track of which arguments need a null check 321 class ArgsNonNullState { 322 private: 323 int _nonnull_state; // mask identifying which args are nonnull 324 public: 325 ArgsNonNullState() 326 : _nonnull_state(AllBits) {} 327 328 // Does argument number i needs a null check? 329 bool arg_needs_null_check(int i) const { 330 // No data is kept for arguments starting at position 33 so 331 // conservatively assume that they need a null check. 
332 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 333 return is_set_nth_bit(_nonnull_state, i); 334 } 335 return true; 336 } 337 338 // Set whether argument number i needs a null check or not 339 void set_arg_needs_null_check(int i, bool check) { 340 if (i >= 0 && i < (int)sizeof(_nonnull_state) * BitsPerByte) { 341 if (check) { 342 _nonnull_state |= nth_bit(i); 343 } else { 344 _nonnull_state &= ~(nth_bit(i)); 345 } 346 } 347 } 348 }; 349 350 public: 351 void* operator new(size_t size) throw() { 352 Compilation* c = Compilation::current(); 353 void* res = c->arena()->Amalloc(size); 354 ((Instruction*)res)->_id = c->get_next_id(); 355 return res; 356 } 357 358 static const int no_bci = -99; 359 360 enum InstructionFlag { 361 NeedsNullCheckFlag = 0, 362 CanTrapFlag, 363 DirectCompareFlag, 364 IsEliminatedFlag, 365 IsSafepointFlag, 366 IsStaticFlag, 367 IsStrictfpFlag, 368 NeedsStoreCheckFlag, 369 NeedsWriteBarrierFlag, 370 PreservesStateFlag, 371 TargetIsFinalFlag, 372 TargetIsLoadedFlag, 373 TargetIsStrictfpFlag, 374 UnorderedIsTrueFlag, 375 NeedsPatchingFlag, 376 ThrowIncompatibleClassChangeErrorFlag, 377 InvokeSpecialReceiverCheckFlag, 378 ProfileMDOFlag, 379 IsLinkedInBlockFlag, 380 NeedsRangeCheckFlag, 381 InWorkListFlag, 382 DeoptimizeOnException, 383 InstructionLastFlag 384 }; 385 386 public: 387 bool check_flag(InstructionFlag id) const { return (_flags & (1 << id)) != 0; } 388 void set_flag(InstructionFlag id, bool f) { _flags = f ? (_flags | (1 << id)) : (_flags & ~(1 << id)); }; 389 390 // 'globally' used condition values 391 enum Condition { 392 eql, neq, lss, leq, gtr, geq, aeq, beq 393 }; 394 395 // Instructions may be pinned for many reasons and under certain conditions 396 // with enough knowledge it's possible to safely unpin them. 
397 enum PinReason { 398 PinUnknown = 1 << 0 399 , PinExplicitNullCheck = 1 << 3 400 , PinStackForStateSplit= 1 << 12 401 , PinStateSplitConstructor= 1 << 13 402 , PinGlobalValueNumbering= 1 << 14 403 }; 404 405 static Condition mirror(Condition cond); 406 static Condition negate(Condition cond); 407 408 // initialization 409 static int number_of_instructions() { 410 return Compilation::current()->number_of_instructions(); 411 } 412 413 // creation 414 Instruction(ValueType* type, ValueStack* state_before = NULL, bool type_is_constant = false) 415 : 416 #ifndef PRODUCT 417 _printable_bci(-99), 418 #endif 419 _use_count(0) 420 , _pin_state(0) 421 , _type(type) 422 , _next(NULL) 423 , _subst(NULL) 424 , _operand(LIR_OprFact::illegalOpr) 425 , _flags(0) 426 , _state_before(state_before) 427 , _exception_handlers(NULL) 428 , _block(NULL) 429 { 430 check_state(state_before); 431 assert(type != NULL && (!type->is_constant() || type_is_constant), "type must exist"); 432 update_exception_state(_state_before); 433 } 434 435 // accessors 436 int id() const { return _id; } 437 #ifndef PRODUCT 438 bool has_printable_bci() const { return _printable_bci != -99; } 439 int printable_bci() const { assert(has_printable_bci(), "_printable_bci should have been set"); return _printable_bci; } 440 void set_printable_bci(int bci) { _printable_bci = bci; } 441 #endif 442 int dominator_depth(); 443 int use_count() const { return _use_count; } 444 int pin_state() const { return _pin_state; } 445 bool is_pinned() const { return _pin_state != 0 || PinAllInstructions; } 446 ValueType* type() const { return _type; } 447 BlockBegin *block() const { return _block; } 448 Instruction* prev(); // use carefully, expensive operation 449 Instruction* next() const { return _next; } 450 bool has_subst() const { return _subst != NULL; } 451 Instruction* subst() { return _subst == NULL ? 
this : _subst->subst(); } 452 LIR_Opr operand() const { return _operand; } 453 454 void set_needs_null_check(bool f) { set_flag(NeedsNullCheckFlag, f); } 455 bool needs_null_check() const { return check_flag(NeedsNullCheckFlag); } 456 bool is_linked() const { return check_flag(IsLinkedInBlockFlag); } 457 bool can_be_linked() { return as_Local() == NULL && as_Phi() == NULL; } 458 459 bool has_uses() const { return use_count() > 0; } 460 ValueStack* state_before() const { return _state_before; } 461 ValueStack* exception_state() const { return _exception_state; } 462 virtual bool needs_exception_state() const { return true; } 463 XHandlers* exception_handlers() const { return _exception_handlers; } 464 465 // manipulation 466 void pin(PinReason reason) { _pin_state |= reason; } 467 void pin() { _pin_state |= PinUnknown; } 468 // DANGEROUS: only used by EliminateStores 469 void unpin(PinReason reason) { assert((reason & PinUnknown) == 0, "can't unpin unknown state"); _pin_state &= ~reason; } 470 471 Instruction* set_next(Instruction* next) { 472 assert(next->has_printable_bci(), "_printable_bci should have been set"); 473 assert(next != NULL, "must not be NULL"); 474 assert(as_BlockEnd() == NULL, "BlockEnd instructions must have no next"); 475 assert(next->can_be_linked(), "shouldn't link these instructions into list"); 476 477 BlockBegin *block = this->block(); 478 next->_block = block; 479 480 next->set_flag(Instruction::IsLinkedInBlockFlag, true); 481 _next = next; 482 return next; 483 } 484 485 Instruction* set_next(Instruction* next, int bci) { 486 #ifndef PRODUCT 487 next->set_printable_bci(bci); 488 #endif 489 return set_next(next); 490 } 491 492 // when blocks are merged 493 void fixup_block_pointers() { 494 Instruction *cur = next()->next(); // next()'s block is set in set_next 495 while (cur && cur->_block != block()) { 496 cur->_block = block(); 497 cur = cur->next(); 498 } 499 } 500 501 Instruction *insert_after(Instruction *i) { 502 Instruction* n = 
_next; 503 set_next(i); 504 i->set_next(n); 505 return _next; 506 } 507 508 bool is_flattened_array() const; // FIXME -- remove it 509 510 bool is_loaded_flattened_array() const; 511 bool maybe_flattened_array() const; 512 513 Instruction *insert_after_same_bci(Instruction *i) { 514 #ifndef PRODUCT 515 i->set_printable_bci(printable_bci()); 516 #endif 517 return insert_after(i); 518 } 519 520 void set_subst(Instruction* subst) { 521 assert(subst == NULL || 522 type()->base() == subst->type()->base() || 523 subst->type()->base() == illegalType, "type can't change"); 524 _subst = subst; 525 } 526 void set_exception_handlers(XHandlers *xhandlers) { _exception_handlers = xhandlers; } 527 void set_exception_state(ValueStack* s) { check_state(s); _exception_state = s; } 528 void set_state_before(ValueStack* s) { check_state(s); _state_before = s; } 529 530 // machine-specifics 531 void set_operand(LIR_Opr operand) { assert(operand != LIR_OprFact::illegalOpr, "operand must exist"); _operand = operand; } 532 void clear_operand() { _operand = LIR_OprFact::illegalOpr; } 533 534 // generic 535 virtual Instruction* as_Instruction() { return this; } // to satisfy HASHING1 macro 536 virtual Phi* as_Phi() { return NULL; } 537 virtual Local* as_Local() { return NULL; } 538 virtual Constant* as_Constant() { return NULL; } 539 virtual AccessField* as_AccessField() { return NULL; } 540 virtual LoadField* as_LoadField() { return NULL; } 541 virtual StoreField* as_StoreField() { return NULL; } 542 virtual AccessArray* as_AccessArray() { return NULL; } 543 virtual ArrayLength* as_ArrayLength() { return NULL; } 544 virtual AccessIndexed* as_AccessIndexed() { return NULL; } 545 virtual LoadIndexed* as_LoadIndexed() { return NULL; } 546 virtual StoreIndexed* as_StoreIndexed() { return NULL; } 547 virtual NegateOp* as_NegateOp() { return NULL; } 548 virtual Op2* as_Op2() { return NULL; } 549 virtual ArithmeticOp* as_ArithmeticOp() { return NULL; } 550 virtual ShiftOp* as_ShiftOp() { return 
NULL; } 551 virtual LogicOp* as_LogicOp() { return NULL; } 552 virtual CompareOp* as_CompareOp() { return NULL; } 553 virtual IfOp* as_IfOp() { return NULL; } 554 virtual Convert* as_Convert() { return NULL; } 555 virtual NullCheck* as_NullCheck() { return NULL; } 556 virtual OsrEntry* as_OsrEntry() { return NULL; } 557 virtual StateSplit* as_StateSplit() { return NULL; } 558 virtual Invoke* as_Invoke() { return NULL; } 559 virtual NewInstance* as_NewInstance() { return NULL; } 560 virtual NewValueTypeInstance* as_NewValueTypeInstance() { return NULL; } 561 virtual NewArray* as_NewArray() { return NULL; } 562 virtual NewTypeArray* as_NewTypeArray() { return NULL; } 563 virtual NewObjectArray* as_NewObjectArray() { return NULL; } 564 virtual NewMultiArray* as_NewMultiArray() { return NULL; } 565 virtual TypeCheck* as_TypeCheck() { return NULL; } 566 virtual CheckCast* as_CheckCast() { return NULL; } 567 virtual InstanceOf* as_InstanceOf() { return NULL; } 568 virtual TypeCast* as_TypeCast() { return NULL; } 569 virtual AccessMonitor* as_AccessMonitor() { return NULL; } 570 virtual MonitorEnter* as_MonitorEnter() { return NULL; } 571 virtual MonitorExit* as_MonitorExit() { return NULL; } 572 virtual Intrinsic* as_Intrinsic() { return NULL; } 573 virtual BlockBegin* as_BlockBegin() { return NULL; } 574 virtual BlockEnd* as_BlockEnd() { return NULL; } 575 virtual Goto* as_Goto() { return NULL; } 576 virtual If* as_If() { return NULL; } 577 virtual IfInstanceOf* as_IfInstanceOf() { return NULL; } 578 virtual TableSwitch* as_TableSwitch() { return NULL; } 579 virtual LookupSwitch* as_LookupSwitch() { return NULL; } 580 virtual Return* as_Return() { return NULL; } 581 virtual Throw* as_Throw() { return NULL; } 582 virtual Base* as_Base() { return NULL; } 583 virtual RoundFP* as_RoundFP() { return NULL; } 584 virtual ExceptionObject* as_ExceptionObject() { return NULL; } 585 virtual UnsafeOp* as_UnsafeOp() { return NULL; } 586 virtual ProfileInvoke* as_ProfileInvoke() { 
return NULL; } 587 virtual RangeCheckPredicate* as_RangeCheckPredicate() { return NULL; } 588 589 #ifdef ASSERT 590 virtual Assert* as_Assert() { return NULL; } 591 #endif 592 593 virtual void visit(InstructionVisitor* v) = 0; 594 595 virtual bool can_trap() const { return false; } 596 597 virtual void input_values_do(ValueVisitor* f) = 0; 598 virtual void state_values_do(ValueVisitor* f); 599 virtual void other_values_do(ValueVisitor* f) { /* usually no other - override on demand */ } 600 void values_do(ValueVisitor* f) { input_values_do(f); state_values_do(f); other_values_do(f); } 601 602 virtual ciType* exact_type() const; 603 virtual ciType* declared_type() const { return NULL; } 604 605 // hashing 606 virtual const char* name() const = 0; 607 HASHING1(Instruction, false, id()) // hashing disabled by default 608 609 // debugging 610 static void check_state(ValueStack* state) PRODUCT_RETURN; 611 void print() PRODUCT_RETURN; 612 void print_line() PRODUCT_RETURN; 613 void print(InstructionPrinter& ip) PRODUCT_RETURN; 614 }; 615 616 617 // The following macros are used to define base (i.e., non-leaf) 618 // and leaf instruction classes. They define class-name related 619 // generic functionality in one place. 
// BASE opens an abstract instruction class and overrides its as_X downcast.
#define BASE(class_name, super_class_name)       \
  class class_name: public super_class_name {    \
   public:                                       \
    virtual class_name* as_##class_name()        { return this; }              \


// LEAF additionally wires up name() and the visitor double dispatch,
// making the class concrete (instantiable).
#define LEAF(class_name, super_class_name)       \
  BASE(class_name, super_class_name)             \
   public:                                       \
    virtual const char* name() const             { return #class_name; }       \
    virtual void visit(InstructionVisitor* v)    { v->do_##class_name(this); } \


// Debugging support


#ifdef ASSERT
// Asserts that every visited value slot is non-NULL.
class AssertValues: public ValueVisitor {
  void visit(Value* x)                           { assert((*x) != NULL, "value must exist"); }
};
  #define ASSERT_VALUES                          { AssertValues assert_value; values_do(&assert_value); }
#else
  #define ASSERT_VALUES
#endif // ASSERT


// A Phi is a phi function in the sense of SSA form. It stands for
// the value of a local variable at the beginning of a join block.
// A Phi consists of n operands, one for every incoming branch.

LEAF(Phi, Instruction)
 private:
  int         _pf_flags;   // the flags of the phi function
  int         _index;      // to value on operand stack (index < 0) or to local
  ciType*     _exact_type; // currently is set only for flattened arrays, NULL otherwise.
 public:
  // creation
  Phi(ValueType* type, BlockBegin* b, int index, ciType* exact_type)
  : Instruction(type->base())
  , _pf_flags(0)
  , _index(index)
  , _exact_type(exact_type)
  {
    _block = b;
    // BlockBegin is itself an Instruction; borrow its printable bci.
    NOT_PRODUCT(set_printable_bci(Value(b)->printable_bci()));
    if (type->is_illegal()) {
      make_illegal();
    }
  }

  virtual ciType* exact_type() const {
    return _exact_type;
  }

  virtual ciType* declared_type() const {
    return _exact_type;
  }

  // flags
  enum Flag {
    no_flag         = 0,
    visited         = 1 << 0,
    cannot_simplify = 1 << 1
  };

  // accessors
  // _index encodes the location: >= 0 is a local slot, < 0 is an
  // operand-stack slot encoded as -(stack_index + 1).
  bool  is_local() const                         { return _index >= 0; }
  bool  is_on_stack() const                      { return !is_local(); }
  int   local_index() const                      { assert(is_local(), ""); return _index; }
  int   stack_index() const                      { assert(is_on_stack(), ""); return -(_index+1); }

  // operand access (defined out of line)
  Value operand_at(int i) const;
  int   operand_count() const;

  void  set(Flag f)                              { _pf_flags |= f; }
  void  clear(Flag f)                            { _pf_flags &= ~f; }
  bool  is_set(Flag f) const                     { return (_pf_flags & f) != 0; }

  // Invalidates phis corresponding to merges of locals of two different types
  // (these should never be referenced, otherwise the bytecodes are illegal)
  void make_illegal() {
    set(cannot_simplify);
    set_type(illegalType);
  }

  bool is_illegal() const {
    return type()->is_illegal();
  }

  // generic
  // Phi operands are not traversed here; inputs are intentionally empty.
  virtual void input_values_do(ValueVisitor* f) {
  }
};


// A local is a placeholder for an incoming argument to a function call.
LEAF(Local, Instruction)
 private:
  int      _java_index;                          // the local index within the method to which the local belongs
  bool     _is_receiver;                         // if local variable holds the receiver: "this" for non-static methods
  ciType*  _declared_type;
 public:
  // creation
  Local(ciType* declared, ValueType* type, int index, bool receiver)
    : Instruction(type)
    , _java_index(index)
    , _is_receiver(receiver)
    , _declared_type(declared)
  {
    NOT_PRODUCT(set_printable_bci(-1));
  }

  // accessors
  int java_index() const                         { return _java_index; }
  bool is_receiver() const                       { return _is_receiver; }

  virtual ciType* declared_type() const          { return _declared_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
};


// A compile-time constant value.  hash()/is_equal() are defined out of
// line, so constants participate in value numbering.
LEAF(Constant, Instruction)
 public:
  // creation
  Constant(ValueType* type):
      Instruction(type, NULL, /*type_is_constant*/ true)
  {
    assert(type->is_constant(), "must be a constant");
  }

  Constant(ValueType* type, ValueStack* state_before):
    Instruction(type, state_before, /*type_is_constant*/ true)
  {
    assert(state_before != NULL, "only used for constants which need patching");
    assert(type->is_constant(), "must be a constant");
    // since it's patching it needs to be pinned
    pin();
  }

  // generic
  virtual bool can_trap() const                  { return state_before() != NULL; }
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }

  virtual intx hash() const;
  virtual bool is_equal(Value v) const;

  virtual ciType* exact_type() const;

  enum CompareResult { not_comparable = -1, cond_false, cond_true };

  // Constant-folds 'this cond right'; not_comparable if it can't be decided.
  virtual CompareResult compare(Instruction::Condition condition, Value right) const;
  // Branch-folding helper: picks the successor selected by the folded
  // comparison, or NULL when the comparison cannot be folded.
  BlockBegin* compare(Instruction::Condition cond, Value right,
                      BlockBegin* true_sux, BlockBegin* false_sux) const {
    switch (compare(cond, right)) {
    case not_comparable:
      return NULL;
    case cond_false:
      return false_sux;
    case cond_true:
      return true_sux;
    default:
      ShouldNotReachHere();
      return NULL;
    }
  }
};


// Common base of field loads and stores.
BASE(AccessField, Instruction)
 private:
  Value       _obj;                              // receiver object (or holder mirror for statics)
  int         _offset;                           // field offset within the object
  ciField*    _field;
  NullCheck*  _explicit_null_check;              // For explicit null check elimination

 public:
  // creation
  AccessField(Value obj, int offset, ciField* field, bool is_static,
              ValueStack* state_before, bool needs_patching)
  : Instruction(as_ValueType(field->type()->basic_type()), state_before)
  , _obj(obj)
  , _offset(offset)
  , _field(field)
  , _explicit_null_check(NULL)
  {
    set_needs_null_check(!is_static);
    set_flag(IsStaticFlag, is_static);
    set_flag(NeedsPatchingFlag, needs_patching);
    ASSERT_VALUES
    // pin of all instructions with memory access
    pin();
  }

  // accessors
  Value obj() const                              { return _obj; }
  int offset() const                             { return _offset; }
  ciField* field() const                         { return _field; }
  BasicType field_type() const                   { return _field->type()->basic_type(); }
  bool is_static() const                         { return check_flag(IsStaticFlag); }
  NullCheck* explicit_null_check() const         { return _explicit_null_check; }
  bool needs_patching() const                    { return check_flag(NeedsPatchingFlag); }

  // Unresolved getstatic and putstatic can cause initialization.
  // Technically it occurs at the Constant that materializes the base
  // of the static fields but it's simpler to model it here.
  bool is_init_point() const                     { return is_static() && (needs_patching() || !_field->holder()->is_initialized()); }

  // manipulation

  // Under certain circumstances, if a previous NullCheck instruction
  // proved the target object non-null, we can eliminate the explicit
  // null check and do an implicit one, simply specifying the debug
  // information from the NullCheck. This field should only be consulted
  // if needs_null_check() is true.
  void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; }

  // generic
  virtual bool can_trap() const                  { return needs_null_check() || needs_patching(); }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


LEAF(LoadField, AccessField)
  ciValueKlass* _value_klass;                    // value klass for flattened-field loads (or NULL)
  Value _default_value;
 public:
  // creation
  LoadField(Value obj, int offset, ciField* field, bool is_static,
            ValueStack* state_before, bool needs_patching,
            ciValueKlass* value_klass = NULL, Value default_value = NULL )
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value_klass(value_klass), _default_value(default_value)
  {}

  ciType* declared_type() const;

  // generic
  HASHING2(LoadField, !needs_patching() && !field()->is_volatile(), obj()->subst(), offset())  // cannot be eliminated if needs patching or if volatile

  ciValueKlass* value_klass() const              { return _value_klass;}
  Value default_value() const                    { return _default_value; }
};


LEAF(StoreField, AccessField)
 private:
  Value _value;                                  // the value being stored

 public:
  // creation
  StoreField(Value obj, int offset, ciField* field, Value value, bool is_static,
             ValueStack* state_before, bool needs_patching)
  : AccessField(obj, offset, field, is_static, state_before, needs_patching)
  , _value(value)
  {
    set_flag(NeedsWriteBarrierFlag, as_ValueType(field_type())->is_object());
    ASSERT_VALUES
    pin();
  }

  // accessors
  Value value() const                            { return _value; }
  bool needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessField::input_values_do(f); f->visit(&_value); }
};


// Common base of array-accessing instructions.
BASE(AccessArray, Instruction)
 private:
  Value       _array;

 public:
  // creation
AccessArray(ValueType* type, Value array, ValueStack* state_before) 899 : Instruction(type, state_before) 900 , _array(array) 901 { 902 set_needs_null_check(true); 903 ASSERT_VALUES 904 pin(); // instruction with side effect (null exception or range check throwing) 905 } 906 907 Value array() const { return _array; } 908 909 // generic 910 virtual bool can_trap() const { return needs_null_check(); } 911 virtual void input_values_do(ValueVisitor* f) { f->visit(&_array); } 912 }; 913 914 915 LEAF(ArrayLength, AccessArray) 916 private: 917 NullCheck* _explicit_null_check; // For explicit null check elimination 918 919 public: 920 // creation 921 ArrayLength(Value array, ValueStack* state_before) 922 : AccessArray(intType, array, state_before) 923 , _explicit_null_check(NULL) {} 924 925 // accessors 926 NullCheck* explicit_null_check() const { return _explicit_null_check; } 927 928 // setters 929 // See LoadField::set_explicit_null_check for documentation 930 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 931 932 // generic 933 HASHING1(ArrayLength, true, array()->subst()) 934 }; 935 936 937 BASE(AccessIndexed, AccessArray) 938 private: 939 Value _index; 940 Value _length; 941 BasicType _elt_type; 942 bool _mismatched; 943 944 public: 945 // creation 946 AccessIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched) 947 : AccessArray(as_ValueType(elt_type), array, state_before) 948 , _index(index) 949 , _length(length) 950 , _elt_type(elt_type) 951 , _mismatched(mismatched) 952 { 953 set_flag(Instruction::NeedsRangeCheckFlag, true); 954 ASSERT_VALUES 955 } 956 957 // accessors 958 Value index() const { return _index; } 959 Value length() const { return _length; } 960 BasicType elt_type() const { return _elt_type; } 961 bool mismatched() const { return _mismatched; } 962 963 void clear_length() { _length = NULL; } 964 // perform elimination of range checks involving constants 965 
bool compute_needs_range_check(); 966 967 // generic 968 virtual void input_values_do(ValueVisitor* f) { AccessArray::input_values_do(f); f->visit(&_index); if (_length != NULL) f->visit(&_length); } 969 }; 970 971 972 LEAF(LoadIndexed, AccessIndexed) 973 private: 974 NullCheck* _explicit_null_check; // For explicit null check elimination 975 NewValueTypeInstance* _vt; 976 977 public: 978 // creation 979 LoadIndexed(Value array, Value index, Value length, BasicType elt_type, ValueStack* state_before, bool mismatched = false) 980 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 981 , _explicit_null_check(NULL) {} 982 983 // accessors 984 NullCheck* explicit_null_check() const { return _explicit_null_check; } 985 986 // setters 987 // See LoadField::set_explicit_null_check for documentation 988 void set_explicit_null_check(NullCheck* check) { _explicit_null_check = check; } 989 990 ciType* exact_type() const; 991 ciType* declared_type() const; 992 993 NewValueTypeInstance* vt() { return _vt; } 994 void set_vt(NewValueTypeInstance* vt) { _vt = vt; } 995 996 // generic 997 HASHING2(LoadIndexed, true, array()->subst(), index()->subst()) 998 }; 999 1000 1001 LEAF(StoreIndexed, AccessIndexed) 1002 private: 1003 Value _value; 1004 1005 ciMethod* _profiled_method; 1006 int _profiled_bci; 1007 bool _check_boolean; 1008 1009 public: 1010 // creation 1011 StoreIndexed(Value array, Value index, Value length, BasicType elt_type, Value value, ValueStack* state_before, 1012 bool check_boolean, bool mismatched = false) 1013 : AccessIndexed(array, index, length, elt_type, state_before, mismatched) 1014 , _value(value), _profiled_method(NULL), _profiled_bci(0), _check_boolean(check_boolean) 1015 { 1016 set_flag(NeedsWriteBarrierFlag, (as_ValueType(elt_type)->is_object())); 1017 set_flag(NeedsStoreCheckFlag, (as_ValueType(elt_type)->is_object())); 1018 ASSERT_VALUES 1019 pin(); 1020 } 1021 1022 // accessors 1023 Value value() const { return _value; } 1024 bool 
needs_write_barrier() const               { return check_flag(NeedsWriteBarrierFlag); }
  bool needs_store_check() const                 { return check_flag(NeedsStoreCheckFlag); }
  bool check_boolean() const                     { return _check_boolean; }
  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; }
  int profiled_bci() const                       { return _profiled_bci; }
  // Flattened array support
  bool is_exact_flattened_array_store() const;
  // generic
  virtual void input_values_do(ValueVisitor* f)  { AccessIndexed::input_values_do(f); f->visit(&_value); }
};


// Arithmetic negation of a single value.
LEAF(NegateOp, Instruction)
 private:
  Value _x;

 public:
  // creation
  NegateOp(Value x) : Instruction(x->type()->base()), _x(x) {
    ASSERT_VALUES
  }

  // accessors
  Value x() const                                { return _x; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); }
};


// Common base of all binary operations (carries bytecode and both operands).
BASE(Op2, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _x;
  Value           _y;

 public:
  // creation
  Op2(ValueType* type, Bytecodes::Code op, Value x, Value y, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _op(op)
  , _x(x)
  , _y(y)
  {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value x() const                                { return _x; }
  Value y() const                                { return _y; }

  // manipulators
  void swap_operands() {
    assert(is_commutative(), "operation must be commutative");
    Value t = _x; _x = _y; _y = t;
  }

  // generic
  virtual bool is_commutative() const            { return false; }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_x); f->visit(&_y); }
};


LEAF(ArithmeticOp, Op2)
 public:
  // creation
  ArithmeticOp(Bytecodes::Code op, Value x, Value y, bool is_strictfp, ValueStack* state_before)
  : Op2(x->type()->meet(y->type()), op, x, y, state_before)
  {
    set_flag(IsStrictfpFlag, is_strictfp);
    if (can_trap()) pin();  // e.g. division may throw and must stay in place
  }

  // accessors
  bool is_strictfp() const                       { return check_flag(IsStrictfpFlag); }

  // generic
  virtual bool is_commutative() const;
  virtual bool can_trap() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(ShiftOp, Op2)
 public:
  // creation
  ShiftOp(Bytecodes::Code op, Value x, Value s) : Op2(x->type()->base(), op, x, s) {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


LEAF(LogicOp, Op2)
 public:
  // creation
  LogicOp(Bytecodes::Code op, Value x, Value y) : Op2(x->type()->meet(y->type()), op, x, y) {}

  // generic
  virtual bool is_commutative() const;
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Three-way comparison (lcmp, fcmpl/fcmpg, dcmpl/dcmpg); always produces an int.
LEAF(CompareOp, Op2)
 public:
  // creation
  CompareOp(Bytecodes::Code op, Value x, Value y, ValueStack* state_before)
  : Op2(intType, op, x, y, state_before)
  {}

  // generic
  HASHING3(Op2, true, op(), x()->subst(), y()->subst())
};


// Conditional select: (x cond y) ? tval : fval. The condition is stored in
// the inherited Op2 bytecode slot, hence op() must not be called directly.
LEAF(IfOp, Op2)
 private:
  Value _tval;
  Value _fval;

 public:
  // creation
  IfOp(Value x, Condition cond, Value y, Value tval, Value fval)
  : Op2(tval->type()->meet(fval->type()), (Bytecodes::Code)cond, x, y)
  , _tval(tval)
  , _fval(fval)
  {
    ASSERT_VALUES
    assert(tval->type()->tag() == fval->type()->tag(), "types must match")
    ;
  }

  // accessors
  virtual bool is_commutative() const;
  Bytecodes::Code op() const                     { ShouldNotCallThis(); return Bytecodes::_illegal; }
  Condition cond() const                         { return (Condition)Op2::op(); }
  Value tval() const                             { return _tval; }
  Value fval() const                             { return _fval; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { Op2::input_values_do(f); f->visit(&_tval); f->visit(&_fval); }
};


// Primitive type conversion (i2l, f2d, ...).
LEAF(Convert, Instruction)
 private:
  Bytecodes::Code _op;
  Value           _value;

 public:
  // creation
  Convert(Bytecodes::Code op, Value value, ValueType* to_type) : Instruction(to_type), _op(op), _value(value) {
    ASSERT_VALUES
  }

  // accessors
  Bytecodes::Code op() const                     { return _op; }
  Value value() const                            { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_value); }
  HASHING2(Convert, true, op(), value()->subst())
};


// Explicit null check; pinned so it cannot float away from its bci.
LEAF(NullCheck, Instruction)
 private:
  Value _obj;

 public:
  // creation
  NullCheck(Value obj, ValueStack* state_before)
  : Instruction(obj->type()->base(), state_before)
  , _obj(obj)
  {
    ASSERT_VALUES
    set_can_trap(true);
    assert(_obj->type()->is_object(), "null check must be applied to objects only");
    pin(Instruction::PinExplicitNullCheck);
  }

  // accessors
  Value obj() const                              { return _obj; }

  // setters
  void set_can_trap(bool can_trap)               { set_flag(CanTrapFlag, can_trap); }

  // generic
  virtual bool can_trap() const                  { return check_flag(CanTrapFlag); /* null-check elimination sets to false */ }
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
  HASHING1(NullCheck, true, obj()->subst())
};


// This node is supposed to cast the type of another node to a more precise
// declared type.
LEAF(TypeCast, Instruction)
 private:
  ciType* _declared_type;
  Value   _obj;

 public:
  // The type of this node is the same type as the object type (and it might be constant).
  TypeCast(ciType* type, Value obj, ValueStack* state_before)
  : Instruction(obj->type(), state_before, obj->type()->is_constant()),
    _declared_type(type),
    _obj(obj) {}

  // accessors
  ciType* declared_type() const                  { return _declared_type; }
  Value   obj() const                            { return _obj; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_obj); }
};


// Common base of all instructions that carry a ValueStack (debug/deopt state);
// pinned by default in the constructor.
BASE(StateSplit, Instruction)
 private:
  ValueStack* _state;

 protected:
  static void substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block);

 public:
  // creation
  StateSplit(ValueType* type, ValueStack* state_before = NULL)
  : Instruction(type, state_before)
  , _state(NULL)
  {
    pin(PinStateSplitConstructor);
  }

  // accessors
  ValueStack* state() const                      { return _state; }
  IRScope* scope() const;                        // the state's scope

  // manipulation
  void set_state(ValueStack* state)              { assert(_state == NULL, "overwriting existing state"); check_state(state); _state = state; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { /* no values */ }
  virtual void state_values_do(ValueVisitor* f);
};


// Method invocation (all invoke bytecodes).
LEAF(Invoke, StateSplit)
 private:
  Bytecodes::Code _code;
  Value           _recv;
  Values*         _args;
  BasicTypeList*  _signature;
  int             _vtable_index;
  ciMethod*       _target;

 public:
  // creation
  Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
         int vtable_index, ciMethod* target, ValueStack* state_before);

  // accessors
  Bytecodes::Code code() const                   { return _code; }
  Value receiver() const                         { return _recv; }
  bool has_receiver() const                      { return receiver() != NULL; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }
  int vtable_index() const                       { return _vtable_index; }
  BasicTypeList* signature() const               { return _signature; }
  ciMethod* target() const                       { return _target; }

  ciType* declared_type() const;

  // Returns false if target is not loaded
  bool target_is_final() const                   { return check_flag(TargetIsFinalFlag); }
  bool target_is_loaded() const                  { return check_flag(TargetIsLoadedFlag); }
  // Returns false if target is not loaded
  bool target_is_strictfp() const                { return check_flag(TargetIsStrictfpFlag); }

  // JSR 292 support
  bool is_invokedynamic() const                  { return code() == Bytecodes::_invokedynamic; }
  bool is_method_handle_intrinsic() const        { return target()->is_method_handle_intrinsic(); }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f) {
    StateSplit::input_values_do(f);
    if (has_receiver()) f->visit(&_recv);
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
  virtual void state_values_do(ValueVisitor *f);
};


// Object allocation ('new' bytecode).
LEAF(NewInstance, StateSplit)
 private:
  ciInstanceKlass* _klass;
  bool _is_unresolved;

 public:
  // creation
  NewInstance(ciInstanceKlass* klass, ValueStack* state_before, bool is_unresolved)
  : StateSplit(instanceType, state_before)
  , _klass(klass), _is_unresolved(is_unresolved)
  {}

  // accessors
  ciInstanceKlass* klass() const                 { return _klass; }
  bool is_unresolved() const                     { return _is_unresolved; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;
};


// Allocation of a value type instance (Valhalla).
LEAF(NewValueTypeInstance, StateSplit)
  bool _is_unresolved;
  ciValueKlass* _klass;
  Value _depends_on;      // Link to the instance that withfield was called on

 public:

  // Default creation, always allocated for now
  NewValueTypeInstance(ciValueKlass* klass, ValueStack* state_before, bool is_unresolved, Value depends_on = NULL)
  : StateSplit(instanceType, state_before)
  , _is_unresolved(is_unresolved)
  , _klass(klass)
  {
    // With no explicit dependency, the instance depends on itself.
    if (depends_on == NULL) {
      _depends_on = this;
    } else {
      _depends_on = depends_on;
    }
  }

  // accessors
  bool is_unresolved() const                     { return _is_unresolved; }
  Value depends_on();

  ciValueKlass* klass() const                    { return _klass; }

  virtual bool needs_exception_state() const     { return false; }

  // generic
  virtual bool can_trap() const                  { return true; }
  ciType* exact_type() const;
  ciType* declared_type() const;

  // Only done in LIR Generator -> map everything to object
  void set_to_object_type()                      { set_type(instanceType); }
};


// Common base of all array allocations.
BASE(NewArray, StateSplit)
 private:
  Value _length;

 public:
  // creation
  NewArray(Value length, ValueStack* state_before)
  : StateSplit(objectType, state_before)
  , _length(length)
  {
    // Do not ASSERT_VALUES since length is NULL for NewMultiArray
  }

  // accessors
  Value length() const                           { return _length; }

  virtual bool needs_exception_state() const     { return false; }

  ciType* exact_type() const                     { return NULL; }
  ciType* declared_type() const;

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_length); }
};


// Allocation of a primitive-element array ('newarray' bytecode).
LEAF(NewTypeArray, NewArray)
 private:
  BasicType _elt_type;

 public:
  // creation
NewTypeArray(Value length, BasicType elt_type, ValueStack* state_before)
  : NewArray(length, state_before)
  , _elt_type(elt_type)
  {}

  // accessors
  BasicType elt_type() const                     { return _elt_type; }
  ciType* exact_type() const;
};


// Allocation of an object-element array ('anewarray' bytecode).
LEAF(NewObjectArray, NewArray)
 private:
  ciKlass* _klass;

 public:
  // creation
  NewObjectArray(ciKlass* klass, Value length, ValueStack* state_before) : NewArray(length, state_before), _klass(klass) {}

  // accessors
  ciKlass* klass() const                         { return _klass; }
  ciType* exact_type() const;
};


// Allocation of a multi-dimensional array ('multianewarray' bytecode).
LEAF(NewMultiArray, NewArray)
 private:
  ciKlass* _klass;
  Values*  _dims;

 public:
  // creation
  NewMultiArray(ciKlass* klass, Values* dims, ValueStack* state_before) : NewArray(NULL, state_before), _klass(klass), _dims(dims) {
    ASSERT_VALUES
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Values* dims() const                           { return _dims; }
  int rank() const                               { return dims()->length(); }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    // NOTE: we do not call NewArray::input_values_do since "length"
    // is meaningless for a multi-dimensional array; passing the
    // zeroth element down to NewArray as its length is a bad idea
    // since there will be a copy in the "dims" array which doesn't
    // get updated, and the value must not be traversed twice. Was bug
    // - kbr 4/10/2001
    StateSplit::input_values_do(f);
    for (int i = 0; i < _dims->length(); i++) f->visit(_dims->adr_at(i));
  }

  ciType* exact_type() const;
};


// Common base of checkcast/instanceof.
BASE(TypeCheck, StateSplit)
 private:
  ciKlass*    _klass;
  Value       _obj;

  ciMethod*   _profiled_method;
  int         _profiled_bci;

 public:
  // creation
  TypeCheck(ciKlass* klass, Value obj, ValueType* type, ValueStack* state_before)
  : StateSplit(type, state_before), _klass(klass), _obj(obj),
    _profiled_method(NULL), _profiled_bci(0) {
    ASSERT_VALUES
    set_direct_compare(false);
  }

  // accessors
  ciKlass* klass() const                         { return _klass; }
  Value obj() const                              { return _obj; }
  bool is_loaded() const                         { return klass() != NULL; }
  bool direct_compare() const                    { return check_flag(DirectCompareFlag); }

  // manipulation
  void set_direct_compare(bool flag)             { set_flag(DirectCompareFlag, flag); }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }

  // Helpers for MethodData* profiling
  void set_should_profile(bool value)            { set_flag(ProfileMDOFlag, value); }
  void set_profiled_method(ciMethod* method)     { _profiled_method = method; }
  void set_profiled_bci(int bci)                 { _profiled_bci = bci; }
  bool should_profile() const                    { return check_flag(ProfileMDOFlag); }
  ciMethod* profiled_method() const              { return _profiled_method; }
  int profiled_bci() const                       { return _profiled_bci; }
};


LEAF(CheckCast, TypeCheck)
  bool _is_never_null;
 public:
  // creation
  CheckCast(ciKlass* klass, Value obj, ValueStack* state_before, bool never_null = false)
  : TypeCheck(klass, obj, objectType, state_before), _is_never_null(never_null) {}

  void set_incompatible_class_change_check() {
    set_flag(ThrowIncompatibleClassChangeErrorFlag, true);
  }
  bool is_incompatible_class_change_check() const {
    return check_flag(ThrowIncompatibleClassChangeErrorFlag);
  }
  void set_invokespecial_receiver_check() {
    set_flag(InvokeSpecialReceiverCheckFlag, true);
  }
  bool is_invokespecial_receiver_check() const {
    return check_flag(InvokeSpecialReceiverCheckFlag);
  }
  bool is_never_null() const {
    return _is_never_null;
  }

  virtual bool needs_exception_state() const {
    return !is_invokespecial_receiver_check();
  }

  ciType* declared_type() const;
};


LEAF(InstanceOf, TypeCheck)
 public:
  // creation
  InstanceOf(ciKlass* klass, Value obj, ValueStack* state_before) : TypeCheck(klass, obj, intType, state_before) {}

  virtual bool needs_exception_state() const     { return false; }
};


// Common base of monitorenter/monitorexit (implicit null check on the object).
BASE(AccessMonitor, StateSplit)
 private:
  Value _obj;
  int   _monitor_no;

 public:
  // creation
  AccessMonitor(Value obj, int monitor_no, ValueStack* state_before = NULL)
  : StateSplit(illegalType, state_before)
  , _obj(obj)
  , _monitor_no(monitor_no)
  {
    set_needs_null_check(true);
    ASSERT_VALUES
  }

  // accessors
  Value obj() const                              { return _obj; }
  int monitor_no() const                         { return _monitor_no; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { StateSplit::input_values_do(f); f->visit(&_obj); }
};


LEAF(MonitorEnter, AccessMonitor)
  bool _maybe_valuetype;
 public:
  // creation
  MonitorEnter(Value obj, int monitor_no, ValueStack* state_before, bool maybe_valuetype)
  : AccessMonitor(obj, monitor_no, state_before)
  , _maybe_valuetype(maybe_valuetype)
  {
    ASSERT_VALUES
  }

  // accessors
  bool maybe_valuetype() const                   { return _maybe_valuetype; }

  // generic
  virtual bool can_trap() const                  {
return true; }
};


LEAF(MonitorExit, AccessMonitor)
 public:
  // creation
  MonitorExit(Value obj, int monitor_no)
  : AccessMonitor(obj, monitor_no, NULL)
  {
    ASSERT_VALUES
  }
};


// Intrinsified method call (e.g. Math/System intrinsics).
LEAF(Intrinsic, StateSplit)
 private:
  vmIntrinsics::ID _id;
  Values*          _args;
  Value            _recv;
  ArgsNonNullState _nonnull_state;

 public:
  // preserves_state can be set to true for Intrinsics
  // which are guaranteed to preserve register state across any slow
  // cases; setting it to true does not mean that the Intrinsic can
  // not trap, only that if we continue execution in the same basic
  // block after the Intrinsic, all of the registers are intact. This
  // allows load elimination and common expression elimination to be
  // performed across the Intrinsic. The default value is false.
  Intrinsic(ValueType* type,
            vmIntrinsics::ID id,
            Values* args,
            bool has_receiver,
            ValueStack* state_before,
            bool preserves_state,
            bool cantrap = true)
  : StateSplit(type, state_before)
  , _id(id)
  , _args(args)
  , _recv(NULL)
  {
    assert(args != NULL, "args must exist");
    ASSERT_VALUES
    set_flag(PreservesStateFlag, preserves_state);
    set_flag(CanTrapFlag, cantrap);
    if (has_receiver) {
      _recv = argument_at(0);
    }
    set_needs_null_check(has_receiver);

    // some intrinsics can't trap, so don't force them to be pinned
    if (!can_trap() && !vmIntrinsics::should_be_pinned(_id)) {
      unpin(PinStateSplitConstructor);
    }
  }

  // accessors
  vmIntrinsics::ID id() const                    { return _id; }
  int number_of_arguments() const                { return _args->length(); }
  Value argument_at(int i) const                 { return _args->at(i); }

  bool has_receiver() const                      { return (_recv != NULL); }
  Value receiver() const                         { assert(has_receiver(), "must have receiver"); return _recv; }
  bool
// block is the start of an exception handler,
  // this records the PC offset in the assembly code of the
  // first instruction in this block
  Label      _label;                             // the label associated with this block
  LIR_List*  _lir;                               // the low level intermediate representation for this block

  ResourceBitMap _live_in;                       // set of live LIR_Opr registers at entry to this block
  ResourceBitMap _live_out;                      // set of live LIR_Opr registers at exit from this block
  ResourceBitMap _live_gen;                      // set of registers used before any redefinition in this block
  ResourceBitMap _live_kill;                     // set of registers defined in this block

  ResourceBitMap _fpu_register_usage;
  intArray*      _fpu_stack_state;               // For x86 FPU code generation with UseLinearScan
  int            _first_lir_instruction_id;      // ID of first LIR instruction in this block
  int            _last_lir_instruction_id;       // ID of last LIR instruction in this block

  void iterate_preorder (boolArray& mark, BlockClosure* closure);
  void iterate_postorder(boolArray& mark, BlockClosure* closure);

  friend class SuxAndWeightAdjuster;

 public:
  // Arena allocation; also assigns the unique instruction and block ids.
  void* operator new(size_t size) throw() {
    Compilation* c = Compilation::current();
    void* res = c->arena()->Amalloc(size);
    ((BlockBegin*)res)->_id = c->get_next_id();
    ((BlockBegin*)res)->_block_id = c->get_next_block_id();
    return res;
  }

  // initialization/counting
  static int number_of_blocks() {
    return Compilation::current()->number_of_blocks();
  }

  // creation
  BlockBegin(int bci)
  : StateSplit(illegalType)
  , _bci(bci)
  , _depth_first_number(-1)
  , _linear_scan_number(-1)
  , _dominator_depth(-1)
  , _loop_depth(0)
  , _loop_index(-1)
  , _flags(0)
  , _total_preds(0)
  , _stores_to_locals()
  , _successors(2)
  , _predecessors(2)
  , _dominates(2)
  , _dominator(NULL)
  , _end(NULL)
  , _exception_handlers(1)
  , _exception_states(NULL)
  , _exception_handler_pco(-1)
  , _lir(NULL)
  , _live_in()
  , _live_out()
  , _live_gen()
  , _live_kill()
  , _fpu_register_usage()
  , _fpu_stack_state(NULL)
  , _first_lir_instruction_id(-1)
  , _last_lir_instruction_id(-1)
  {
    _block = this;
#ifndef PRODUCT
    set_printable_bci(bci);
#endif
  }

  // accessors
  int block_id() const                           { return _block_id; }
  int bci() const                                { return _bci; }
  BlockList* successors()                        { return &_successors; }
  BlockList* dominates()                         { return &_dominates; }
  BlockBegin* dominator() const                  { return _dominator; }
  int loop_depth() const                         { return _loop_depth; }
  int dominator_depth() const                    { return _dominator_depth; }
  int depth_first_number() const                 { return _depth_first_number; }
  int linear_scan_number() const                 { return _linear_scan_number; }
  BlockEnd* end() const                          { return _end; }
  Label* label()                                 { return &_label; }
  LIR_List* lir() const                          { return _lir; }
  int exception_handler_pco() const              { return _exception_handler_pco; }
  ResourceBitMap& live_in()                      { return _live_in; }
  ResourceBitMap& live_out()                     { return _live_out; }
  ResourceBitMap& live_gen()                     { return _live_gen; }
  ResourceBitMap& live_kill()                    { return _live_kill; }
  ResourceBitMap& fpu_register_usage()           { return _fpu_register_usage; }
  intArray* fpu_stack_state() const              { return _fpu_stack_state; }
  int first_lir_instruction_id() const           { return _first_lir_instruction_id; }
  int last_lir_instruction_id() const            { return _last_lir_instruction_id; }
  int total_preds() const                        { return _total_preds; }
  BitMap& stores_to_locals()                     { return _stores_to_locals; }

  // manipulation
  void set_dominator(BlockBegin* dom)            { _dominator = dom; }
  void set_loop_depth(int d)                     { _loop_depth = d; }
  void set_dominator_depth(int d)                { _dominator_depth = d; }
  void set_depth_first_number(int dfn)           { _depth_first_number = dfn; }
  void set_linear_scan_number(int lsn)           { _linear_scan_number = lsn; }
  void set_end(BlockEnd* end);
  void clear_end();
  void disconnect_from_graph();
  static void disconnect_edge(BlockBegin* from, BlockBegin* to);
  BlockBegin* insert_block_between(BlockBegin* sux);
  void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux);
  void set_lir(LIR_List* lir)                    { _lir = lir; }
  void set_exception_handler_pco(int pco)        { _exception_handler_pco = pco; }
  void set_live_in (const ResourceBitMap& map)   { _live_in = map; }
  void set_live_out(const ResourceBitMap& map)   { _live_out = map; }
  void set_live_gen(const ResourceBitMap& map)   { _live_gen = map; }
  void set_live_kill(const ResourceBitMap& map)  { _live_kill = map; }
  void set_fpu_register_usage(const ResourceBitMap& map) { _fpu_register_usage = map; }
  void set_fpu_stack_state(intArray* state)      { _fpu_stack_state = state; }
  void set_first_lir_instruction_id(int id)      { _first_lir_instruction_id = id; }
  void set_last_lir_instruction_id(int id)       { _last_lir_instruction_id = id; }
  void increment_total_preds(int n = 1)          { _total_preds += n; }
  void init_stores_to_locals(int locals_count)   { _stores_to_locals.initialize(locals_count); }

  // generic
  virtual void state_values_do(ValueVisitor* f);

  // successors and predecessors
  int number_of_sux() const;
  BlockBegin* sux_at(int i) const;
  void add_successor(BlockBegin* sux);
  void remove_successor(BlockBegin* pred);
  bool is_successor(BlockBegin* sux) const       { return _successors.contains(sux); }

  void add_predecessor(BlockBegin* pred);
  void remove_predecessor(BlockBegin* pred);
  bool is_predecessor(BlockBegin* pred) const    { return _predecessors.contains(pred); }
  int number_of_preds() const                    { return _predecessors.length(); }
  BlockBegin* pred_at(int i) const               { return _predecessors.at(i); }

  // exception handlers potentially invoked by this block
  void add_exception_handler(BlockBegin* b);
  bool is_exception_handler(BlockBegin* b) const { return _exception_handlers.contains(b); }
  int  number_of_exception_handlers() const      { return _exception_handlers.length(); }
  BlockBegin* exception_handler_at(int i) const  { return _exception_handlers.at(i); }

  // states of the instructions that have an edge to this exception handler
  int number_of_exception_states()               { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states == NULL ? 0 : _exception_states->length(); }
  ValueStack* exception_state_at(int idx) const  { assert(is_set(exception_entry_flag), "only for xhandlers"); return _exception_states->at(idx); }
  int add_exception_state(ValueStack* state);

  // flags
  enum Flag {
    no_flag                       = 0,
    std_entry_flag                = 1 << 0,
    osr_entry_flag                = 1 << 1,
    exception_entry_flag          = 1 << 2,
    subroutine_entry_flag         = 1 << 3,
    backward_branch_target_flag   = 1 << 4,
    is_on_work_list_flag          = 1 << 5,
    was_visited_flag              = 1 << 6,
    parser_loop_header_flag       = 1 << 7,  // set by parser to identify blocks where phi functions can not be created on demand
    critical_edge_split_flag      = 1 << 8,  // set for all blocks that are introduced when critical edges are split
    linear_scan_loop_header_flag  = 1 << 9,  // set during loop-detection for LinearScan
    linear_scan_loop_end_flag     = 1 << 10, // set during loop-detection for LinearScan
    donot_eliminate_range_checks  = 1 << 11  // Should we try to eliminate range checks in this block
  };

  void set(Flag f)                               { _flags |= f; }
  void clear(Flag f)                             { _flags &= ~f; }
  bool is_set(Flag f) const                      { return (_flags & f) != 0; }
  bool is_entry_block() const {
    const int entry_mask = std_entry_flag | osr_entry_flag | exception_entry_flag;
    return (_flags & entry_mask) != 0;
  }

  // iteration
  void iterate_preorder  (BlockClosure* closure);
  void iterate_postorder (BlockClosure* closure);

  void block_values_do(ValueVisitor* f);

  // loops
  void set_loop_index(int ix)                    { _loop_index = ix; }
  int  loop_index() const                        { return _loop_index; }

  // merging
  bool try_merge(ValueStack* state);             // try to merge states at block begin
  void merge(ValueStack* state)                  { bool b = try_merge(state); assert(b, "merge failed"); }

  // debugging
  void print_block()                             PRODUCT_RETURN;
  void print_block(InstructionPrinter& ip, bool live_only = false) PRODUCT_RETURN;
};


// Common base of all block-terminating instructions; owns the successor list.
BASE(BlockEnd, StateSplit)
 private:
  BlockList* _sux;

 protected:
  BlockList* sux() const                         { return _sux; }

  void set_sux(BlockList* sux) {
#ifdef ASSERT
    assert(sux != NULL, "sux must exist");
    for (int i = sux->length() - 1; i >= 0; i--) assert(sux->at(i) != NULL, "sux must exist");
#endif
    _sux = sux;
  }

 public:
  // creation
  BlockEnd(ValueType* type, ValueStack* state_before, bool is_safepoint)
  : StateSplit(type, state_before)
  , _sux(NULL)
  {
    set_flag(IsSafepointFlag, is_safepoint);
  }

  // accessors
  bool is_safepoint() const                      { return check_flag(IsSafepointFlag); }
  // For compatibility with old code, for new code use block()
  BlockBegin* begin() const                      { return _block; }

  // manipulation
  void set_begin(BlockBegin* begin);

  // successors
  int number_of_sux() const                      { return _sux != NULL ?
_sux->length() : 0; } 1932 BlockBegin* sux_at(int i) const { return _sux->at(i); } 1933 BlockBegin* default_sux() const { return sux_at(number_of_sux() - 1); } 1934 BlockBegin** addr_sux_at(int i) const { return _sux->adr_at(i); } 1935 int sux_index(BlockBegin* sux) const { return _sux->find(sux); } 1936 void substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux); 1937 }; 1938 1939 1940 LEAF(Goto, BlockEnd) 1941 public: 1942 enum Direction { 1943 none, // Just a regular goto 1944 taken, not_taken // Goto produced from If 1945 }; 1946 private: 1947 ciMethod* _profiled_method; 1948 int _profiled_bci; 1949 Direction _direction; 1950 public: 1951 // creation 1952 Goto(BlockBegin* sux, ValueStack* state_before, bool is_safepoint = false) 1953 : BlockEnd(illegalType, state_before, is_safepoint) 1954 , _profiled_method(NULL) 1955 , _profiled_bci(0) 1956 , _direction(none) { 1957 BlockList* s = new BlockList(1); 1958 s->append(sux); 1959 set_sux(s); 1960 } 1961 1962 Goto(BlockBegin* sux, bool is_safepoint) : BlockEnd(illegalType, NULL, is_safepoint) 1963 , _profiled_method(NULL) 1964 , _profiled_bci(0) 1965 , _direction(none) { 1966 BlockList* s = new BlockList(1); 1967 s->append(sux); 1968 set_sux(s); 1969 } 1970 1971 bool should_profile() const { return check_flag(ProfileMDOFlag); } 1972 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 1973 int profiled_bci() const { return _profiled_bci; } 1974 Direction direction() const { return _direction; } 1975 1976 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 1977 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 1978 void set_profiled_bci(int bci) { _profiled_bci = bci; } 1979 void set_direction(Direction d) { _direction = d; } 1980 }; 1981 1982 #ifdef ASSERT 1983 LEAF(Assert, Instruction) 1984 private: 1985 Value _x; 1986 Condition _cond; 1987 Value _y; 1988 char *_message; 1989 1990 public: 1991 // creation 1992 // 
unordered_is_true is valid for float/double compares only 1993 Assert(Value x, Condition cond, bool unordered_is_true, Value y); 1994 1995 // accessors 1996 Value x() const { return _x; } 1997 Condition cond() const { return _cond; } 1998 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 1999 Value y() const { return _y; } 2000 const char *message() const { return _message; } 2001 2002 // generic 2003 virtual void input_values_do(ValueVisitor* f) { f->visit(&_x); f->visit(&_y); } 2004 }; 2005 #endif 2006 2007 LEAF(RangeCheckPredicate, StateSplit) 2008 private: 2009 Value _x; 2010 Condition _cond; 2011 Value _y; 2012 2013 void check_state(); 2014 2015 public: 2016 // creation 2017 // unordered_is_true is valid for float/double compares only 2018 RangeCheckPredicate(Value x, Condition cond, bool unordered_is_true, Value y, ValueStack* state) : StateSplit(illegalType) 2019 , _x(x) 2020 , _cond(cond) 2021 , _y(y) 2022 { 2023 ASSERT_VALUES 2024 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2025 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2026 this->set_state(state); 2027 check_state(); 2028 } 2029 2030 // Always deoptimize 2031 RangeCheckPredicate(ValueStack* state) : StateSplit(illegalType) 2032 { 2033 this->set_state(state); 2034 _x = _y = NULL; 2035 check_state(); 2036 } 2037 2038 // accessors 2039 Value x() const { return _x; } 2040 Condition cond() const { return _cond; } 2041 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2042 Value y() const { return _y; } 2043 2044 void always_fail() { _x = _y = NULL; } 2045 2046 // generic 2047 virtual void input_values_do(ValueVisitor* f) { StateSplit::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2048 HASHING3(RangeCheckPredicate, true, x()->subst(), y()->subst(), cond()) 2049 }; 2050 2051 LEAF(If, BlockEnd) 2052 private: 2053 Value _x; 2054 Condition _cond; 2055 Value _y; 2056 ciMethod* _profiled_method; 2057 int _profiled_bci; // 
Canonicalizer may alter bci of If node 2058 bool _swapped; // Is the order reversed with respect to the original If in the 2059 // bytecode stream? 2060 public: 2061 // creation 2062 // unordered_is_true is valid for float/double compares only 2063 If(Value x, Condition cond, bool unordered_is_true, Value y, BlockBegin* tsux, BlockBegin* fsux, ValueStack* state_before, bool is_safepoint) 2064 : BlockEnd(illegalType, state_before, is_safepoint) 2065 , _x(x) 2066 , _cond(cond) 2067 , _y(y) 2068 , _profiled_method(NULL) 2069 , _profiled_bci(0) 2070 , _swapped(false) 2071 { 2072 ASSERT_VALUES 2073 set_flag(UnorderedIsTrueFlag, unordered_is_true); 2074 assert(x->type()->tag() == y->type()->tag(), "types must match"); 2075 BlockList* s = new BlockList(2); 2076 s->append(tsux); 2077 s->append(fsux); 2078 set_sux(s); 2079 } 2080 2081 // accessors 2082 Value x() const { return _x; } 2083 Condition cond() const { return _cond; } 2084 bool unordered_is_true() const { return check_flag(UnorderedIsTrueFlag); } 2085 Value y() const { return _y; } 2086 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 
0 : 1); } 2087 BlockBegin* tsux() const { return sux_for(true); } 2088 BlockBegin* fsux() const { return sux_for(false); } 2089 BlockBegin* usux() const { return sux_for(unordered_is_true()); } 2090 bool should_profile() const { return check_flag(ProfileMDOFlag); } 2091 ciMethod* profiled_method() const { return _profiled_method; } // set only for profiled branches 2092 int profiled_bci() const { return _profiled_bci; } // set for profiled branches and tiered 2093 bool is_swapped() const { return _swapped; } 2094 2095 // manipulation 2096 void swap_operands() { 2097 Value t = _x; _x = _y; _y = t; 2098 _cond = mirror(_cond); 2099 } 2100 2101 void swap_sux() { 2102 assert(number_of_sux() == 2, "wrong number of successors"); 2103 BlockList* s = sux(); 2104 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2105 _cond = negate(_cond); 2106 set_flag(UnorderedIsTrueFlag, !check_flag(UnorderedIsTrueFlag)); 2107 } 2108 2109 void set_should_profile(bool value) { set_flag(ProfileMDOFlag, value); } 2110 void set_profiled_method(ciMethod* method) { _profiled_method = method; } 2111 void set_profiled_bci(int bci) { _profiled_bci = bci; } 2112 void set_swapped(bool value) { _swapped = value; } 2113 // generic 2114 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_x); f->visit(&_y); } 2115 }; 2116 2117 2118 LEAF(IfInstanceOf, BlockEnd) 2119 private: 2120 ciKlass* _klass; 2121 Value _obj; 2122 bool _test_is_instance; // jump if instance 2123 int _instanceof_bci; 2124 2125 public: 2126 IfInstanceOf(ciKlass* klass, Value obj, bool test_is_instance, int instanceof_bci, BlockBegin* tsux, BlockBegin* fsux) 2127 : BlockEnd(illegalType, NULL, false) // temporary set to false 2128 , _klass(klass) 2129 , _obj(obj) 2130 , _test_is_instance(test_is_instance) 2131 , _instanceof_bci(instanceof_bci) 2132 { 2133 ASSERT_VALUES 2134 assert(instanceof_bci >= 0, "illegal bci"); 2135 BlockList* s = new BlockList(2); 2136 s->append(tsux); 2137 
s->append(fsux); 2138 set_sux(s); 2139 } 2140 2141 // accessors 2142 // 2143 // Note 1: If test_is_instance() is true, IfInstanceOf tests if obj *is* an 2144 // instance of klass; otherwise it tests if it is *not* and instance 2145 // of klass. 2146 // 2147 // Note 2: IfInstanceOf instructions are created by combining an InstanceOf 2148 // and an If instruction. The IfInstanceOf bci() corresponds to the 2149 // bci that the If would have had; the (this->) instanceof_bci() is 2150 // the bci of the original InstanceOf instruction. 2151 ciKlass* klass() const { return _klass; } 2152 Value obj() const { return _obj; } 2153 int instanceof_bci() const { return _instanceof_bci; } 2154 bool test_is_instance() const { return _test_is_instance; } 2155 BlockBegin* sux_for(bool is_true) const { return sux_at(is_true ? 0 : 1); } 2156 BlockBegin* tsux() const { return sux_for(true); } 2157 BlockBegin* fsux() const { return sux_for(false); } 2158 2159 // manipulation 2160 void swap_sux() { 2161 assert(number_of_sux() == 2, "wrong number of successors"); 2162 BlockList* s = sux(); 2163 BlockBegin* t = s->at(0); s->at_put(0, s->at(1)); s->at_put(1, t); 2164 _test_is_instance = !_test_is_instance; 2165 } 2166 2167 // generic 2168 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_obj); } 2169 }; 2170 2171 2172 BASE(Switch, BlockEnd) 2173 private: 2174 Value _tag; 2175 2176 public: 2177 // creation 2178 Switch(Value tag, BlockList* sux, ValueStack* state_before, bool is_safepoint) 2179 : BlockEnd(illegalType, state_before, is_safepoint) 2180 , _tag(tag) { 2181 ASSERT_VALUES 2182 set_sux(sux); 2183 } 2184 2185 // accessors 2186 Value tag() const { return _tag; } 2187 int length() const { return number_of_sux() - 1; } 2188 2189 virtual bool needs_exception_state() const { return false; } 2190 2191 // generic 2192 virtual void input_values_do(ValueVisitor* f) { BlockEnd::input_values_do(f); f->visit(&_tag); } 2193 }; 2194 2195 2196 
// Models a tableswitch bytecode: a contiguous key range [lo_key(), hi_key()]
// with one successor per key, plus the default successor.
LEAF(TableSwitch, Switch)
 private:
  int _lo_key;

 public:
  // creation
  TableSwitch(Value tag, BlockList* sux, int lo_key, ValueStack* state_before, bool is_safepoint)
    : Switch(tag, sux, state_before, is_safepoint)
  , _lo_key(lo_key) { assert(_lo_key <= hi_key(), "integer overflow"); }

  // accessors
  int lo_key() const                             { return _lo_key; }
  int hi_key() const                             { return _lo_key + (length() - 1); }
};


// Models a lookupswitch bytecode: an explicit list of keys, one successor
// per key, plus the default successor.
LEAF(LookupSwitch, Switch)
 private:
  intArray* _keys;

 public:
  // creation
  LookupSwitch(Value tag, BlockList* sux, intArray* keys, ValueStack* state_before, bool is_safepoint)
  : Switch(tag, sux, state_before, is_safepoint)
  , _keys(keys) {
    assert(keys != NULL, "keys must exist");
    assert(keys->length() == length(), "sux & keys have incompatible lengths");
  }

  // accessors
  int key_at(int i) const                        { return _keys->at(i); }
};


// Method return; _result is NULL for a void return.
LEAF(Return, BlockEnd)
 private:
  Value _result;

 public:
  // creation
  Return(Value result) :
    BlockEnd(result == NULL ? voidType : result->type()->base(), NULL, true),
    _result(result) {}

  // accessors
  Value result() const                           { return _result; }
  bool has_result() const                        { return result() != NULL; }

  // generic
  virtual void input_values_do(ValueVisitor* f) {
    BlockEnd::input_values_do(f);
    if (has_result()) f->visit(&_result);
  }
};


// Throws an exception object; always ends the block.
LEAF(Throw, BlockEnd)
 private:
  Value _exception;

 public:
  // creation
  Throw(Value exception, ValueStack* state_before) : BlockEnd(illegalType, state_before, true), _exception(exception) {
    ASSERT_VALUES
  }

  // accessors
  Value exception() const                        { return _exception; }

  // generic
  virtual bool can_trap() const                  { return true; }
  virtual void input_values_do(ValueVisitor* f)  { BlockEnd::input_values_do(f); f->visit(&_exception); }
};


// The start node of the CFG; its successors are the std entry (always the
// default successor) and, optionally, the OSR entry.
LEAF(Base, BlockEnd)
 public:
  // creation
  Base(BlockBegin* std_entry, BlockBegin* osr_entry) : BlockEnd(illegalType, NULL, false) {
    assert(std_entry->is_set(BlockBegin::std_entry_flag), "std entry must be flagged");
    assert(osr_entry == NULL || osr_entry->is_set(BlockBegin::osr_entry_flag), "osr entry must be flagged");
    BlockList* s = new BlockList(2);
    if (osr_entry != NULL) s->append(osr_entry);
    s->append(std_entry); // must be default sux!
    set_sux(s);
  }

  // accessors
  BlockBegin* std_entry() const                  { return default_sux(); }
  BlockBegin* osr_entry() const                  { return number_of_sux() < 2 ? NULL : sux_at(0); }
};


// Models the OSR buffer pointer at an on-stack-replacement entry
// (pointer-sized: long on 64-bit, int on 32-bit).
LEAF(OsrEntry, Instruction)
 public:
  // creation
#ifdef _LP64
  OsrEntry() : Instruction(longType) { pin(); }
#else
  OsrEntry() : Instruction(intType)  { pin(); }
#endif

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models the incoming exception at a catch site
LEAF(ExceptionObject, Instruction)
 public:
  // creation
  ExceptionObject() : Instruction(objectType) {
    pin();
  }

  // generic
  virtual void input_values_do(ValueVisitor* f)   { }
};


// Models needed rounding for floating-point values on Intel.
// Currently only used to represent rounding of double-precision
// values stored into local variables, but could be used to model
// intermediate rounding of single-precision values as well.
LEAF(RoundFP, Instruction)
 private:
  Value _input;             // floating-point value to be rounded

 public:
  RoundFP(Value input)
  : Instruction(input->type()) // Note: should not be used for constants
  , _input(input)
  {
    ASSERT_VALUES
  }

  // accessors
  Value input() const                            { return _input; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { f->visit(&_input); }
};


// Common base of all Unsafe memory-access instructions.
BASE(UnsafeOp, Instruction)
 private:
  BasicType _basic_type;    // ValueType can not express byte-sized integers

 protected:
  // creation
  UnsafeOp(BasicType basic_type, bool is_put)
  : Instruction(is_put ? voidType : as_ValueType(basic_type))
  , _basic_type(basic_type)
  {
    // Note: Unsafe ops are not guaranteed to throw NPE.
    // Conservatively, Unsafe operations must be pinned though we could be
    // looser about this if we wanted to.
    pin();
  }

 public:
  // accessors
  BasicType basic_type()                         { return _basic_type; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { }
};


// Unsafe access through a raw address: base (a Java long) plus an optional
// index scaled by 2^log2_scale.
BASE(UnsafeRawOp, UnsafeOp)
 private:
  Value _base;                                   // Base address (a Java long)
  Value _index;                                  // Index if computed by optimizer; initialized to NULL
  int   _log2_scale;                             // Scale factor: 0, 1, 2, or 3.
                                                 // Indicates log2 of number of bytes (1, 2, 4, or 8)
                                                 // to scale index by.

 protected:
  UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(addr)
  , _index(NULL)
  , _log2_scale(0)
  {
    // Can not use ASSERT_VALUES because index may be NULL
    assert(addr != NULL && addr->type()->is_long(), "just checking");
  }

  UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
  : UnsafeOp(basic_type, is_put)
  , _base(base)
  , _index(index)
  , _log2_scale(log2_scale)
  {
  }

 public:
  // accessors
  Value base()                                   { return _base; }
  Value index()                                  { return _index; }
  bool  has_index()                              { return (_index != NULL); }
  int   log2_scale()                             { return _log2_scale; }

  // setters
  void set_base (Value base)                     { _base = base; }
  void set_index(Value index)                    { _index = index; }
  void set_log2_scale(int log2_scale)            { _log2_scale = log2_scale; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_base);
                                                   if (has_index()) f->visit(&_index); }
};


// Raw-address Unsafe load.
LEAF(UnsafeGetRaw, UnsafeRawOp)
 private:
  bool _may_be_unaligned, _is_wide;  // For OSREntry

 public:
  UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, addr, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
  : UnsafeRawOp(basic_type, base, index, log2_scale, false) {
    _may_be_unaligned = may_be_unaligned;
    _is_wide = is_wide;
  }

  bool may_be_unaligned()                        { return _may_be_unaligned; }
  bool is_wide()                                 { return _is_wide; }
};


// Raw-address Unsafe store.
LEAF(UnsafePutRaw, UnsafeRawOp)
 private:
  Value _value;                                  // Value to be stored

 public:
  UnsafePutRaw(BasicType basic_type, Value addr, Value value)
  : UnsafeRawOp(basic_type, addr, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
  : UnsafeRawOp(basic_type, base, index, log2_scale, true)
  , _value(value)
  {
    assert(value != NULL, "just checking");
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeRawOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Unsafe access addressed as (object, offset).
BASE(UnsafeObjectOp, UnsafeOp)
 private:
  Value _object;                                 // Object to be fetched from or mutated
  Value _offset;                                 // Offset within object
  bool  _is_volatile;                            // true if volatile - dl/JSR166
 public:
  UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
    : UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
  {
  }

  // accessors
  Value object()                                 { return _object; }
  Value offset()                                 { return _offset; }
  bool  is_volatile()                            { return _is_volatile; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeOp::input_values_do(f);
                                                   f->visit(&_object);
                                                   f->visit(&_offset); }
};


// Object-relative Unsafe load.
LEAF(UnsafeGetObject, UnsafeObjectOp)
 public:
  UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
  {
    ASSERT_VALUES
  }
};


// Object-relative Unsafe store.
LEAF(UnsafePutObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
 public:
  UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
  : UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
  , _value(value)
  {
    ASSERT_VALUES
  }

  // accessors
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Atomic Unsafe get-and-set / get-and-add (is_add selects which); note the
// base ctor is passed is_put = false (the old value is the result) and
// is_volatile = false.
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
 private:
  Value _value;                                  // Value to be stored
  bool  _is_add;
 public:
  UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
  : UnsafeObjectOp(basic_type, object, offset, false, false)
  , _value(value)
  , _is_add(is_add)
  {
    ASSERT_VALUES
  }

  // accessors
  bool is_add() const                            { return _is_add; }
  Value value()                                  { return _value; }

  // generic
  virtual void input_values_do(ValueVisitor* f)  { UnsafeObjectOp::input_values_do(f);
                                                   f->visit(&_value); }
};


// Records receiver/argument type profile information at a call site.
LEAF(ProfileCall, Instruction)
 private:
  ciMethod*        _method;
  int              _bci_of_invoke;
  ciMethod*        _callee;         // the method that is called at the given bci
  Value            _recv;
  ciKlass*         _known_holder;
  Values*          _obj_args;       // arguments for type profiling
  ArgsNonNullState _nonnull_state;  // Do we know whether some arguments are never null?
  bool             _inlined;        // Are we profiling a call that is inlined

 public:
  ProfileCall(ciMethod* method, int bci, ciMethod* callee, Value recv, ciKlass* known_holder, Values* obj_args, bool inlined)
    : Instruction(voidType)
    , _method(method)
    , _bci_of_invoke(bci)
    , _callee(callee)
    , _recv(recv)
    , _known_holder(known_holder)
    , _obj_args(obj_args)
    , _inlined(inlined)
  {
    // The ProfileCall has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  ciMethod* callee()             const { return _callee; }
  Value recv()                   const { return _recv; }
  ciKlass* known_holder()        const { return _known_holder; }
  int nb_profiled_args()         const { return _obj_args == NULL ? 0 : _obj_args->length(); }
  Value profiled_arg_at(int i)   const { return _obj_args->at(i); }
  bool arg_needs_null_check(int i) const {
    return _nonnull_state.arg_needs_null_check(i);
  }
  bool inlined()                 const { return _inlined; }

  void set_arg_needs_null_check(int i, bool check) {
    _nonnull_state.set_arg_needs_null_check(i, check);
  }

  virtual void input_values_do(ValueVisitor* f) {
    if (_recv != NULL) {
      f->visit(&_recv);
    }
    for (int i = 0; i < nb_profiled_args(); i++) {
      f->visit(_obj_args->adr_at(i));
    }
  }
};


// Records return-value type profile information for a call.
LEAF(ProfileReturnType, Instruction)
 private:
  ciMethod* _method;
  ciMethod* _callee;
  int       _bci_of_invoke;
  Value     _ret;

 public:
  ProfileReturnType(ciMethod* method, int bci, ciMethod* callee, Value ret)
    : Instruction(voidType)
    , _method(method)
    , _callee(callee)
    , _bci_of_invoke(bci)
    , _ret(ret)
  {
    set_needs_null_check(true);
    // The ProfileType has side-effects and must occur precisely where located
    pin();
  }

  ciMethod* method()             const { return _method; }
  ciMethod* callee()             const { return _callee; }
  int bci_of_invoke()            const { return _bci_of_invoke; }
  Value ret()                    const { return _ret; }

  virtual void input_values_do(ValueVisitor* f) {
    if (_ret != NULL) {
      f->visit(&_ret);
    }
  }
};

// Call some C runtime function that doesn't safepoint,
// optionally passing the current thread as the first argument.
LEAF(RuntimeCall, Instruction)
 private:
  const char* _entry_name;
  address     _entry;
  Values*     _args;
  bool        _pass_thread;  // Pass the JavaThread* as an implicit first argument

 public:
  RuntimeCall(ValueType* type, const char* entry_name, address entry, Values* args, bool pass_thread = true)
    : Instruction(type)
    , _entry_name(entry_name)
    , _entry(entry)
    , _args(args)
    , _pass_thread(pass_thread) {
    ASSERT_VALUES
    pin();
  }

  const char* entry_name() const    { return _entry_name; }
  address entry() const             { return _entry; }
  int number_of_arguments() const   { return _args->length(); }
  Value argument_at(int i) const    { return _args->at(i); }
  bool pass_thread() const          { return _pass_thread; }

  virtual void input_values_do(ValueVisitor* f) {
    for (int i = 0; i < _args->length(); i++) f->visit(_args->adr_at(i));
  }
};

// Use to trip invocation counter of an inlined method

LEAF(ProfileInvoke, Instruction)
 private:
  ciMethod*   _inlinee;
  ValueStack* _state;

 public:
  ProfileInvoke(ciMethod* inlinee, ValueStack* state)
    : Instruction(voidType)
    , _inlinee(inlinee)
    , _state(state)
  {
    // The ProfileInvoke has side-effects and must occur precisely where located QQQ???
    pin();
  }

  ciMethod* inlinee()      { return _inlinee; }
  ValueStack* state()      { return _state; }
  virtual void input_values_do(ValueVisitor*)   {}
  virtual void state_values_do(ValueVisitor*);
};


// A memory barrier; _code selects the kind of barrier to emit in LIR.
LEAF(MemBar, Instruction)
 private:
  LIR_Code _code;

 public:
  MemBar(LIR_Code code)
    : Instruction(voidType)
    , _code(code)
  {
    pin();
  }

  LIR_Code code()          { return _code; }

  virtual void input_values_do(ValueVisitor*)   {}
};


// A (from, to) edge in the control-flow graph.
class BlockPair: public CompilationResourceObj {
 private:
  BlockBegin* _from;
  BlockBegin* _to;
 public:
  BlockPair(BlockBegin* from, BlockBegin* to): _from(from), _to(to) {}
  BlockBegin* from() const { return _from; }
  BlockBegin* to() const   { return _to;   }
  bool is_same(BlockBegin* from, BlockBegin* to) const { return  _from == from && _to == to; }
  bool is_same(BlockPair* p) const { return  _from == p->from() && _to == p->to(); }
  void set_to(BlockBegin* b)   { _to = b; }
  void set_from(BlockBegin* b) { _from = b; }
};

typedef GrowableArray<BlockPair*> BlockPairList;

// Successor accessors; the asserts cross-check BlockBegin::_successors
// against the successor list of the block's BlockEnd (once it is set).
inline int         BlockBegin::number_of_sux() const            { assert(_end == NULL || _end->number_of_sux() == _successors.length(), "mismatch"); return _successors.length(); }
inline BlockBegin* BlockBegin::sux_at(int i) const              { assert(_end == NULL || _end->sux_at(i) == _successors.at(i), "mismatch");          return _successors.at(i); }
inline void        BlockBegin::add_successor(BlockBegin* sux)   { assert(_end == NULL, "Would create mismatch with successors of BlockEnd");         _successors.append(sux); }

#undef ASSERT_VALUES

#endif // SHARE_C1_C1_INSTRUCTION_HPP