/*
 * Copyright (c) 1999, 2019, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "c1/c1_IR.hpp"
#include "c1/c1_Instruction.hpp"
#include "c1/c1_InstructionPrinter.hpp"
#include "c1/c1_ValueStack.hpp"
#include "ci/ciObjArrayKlass.hpp"
#include "ci/ciTypeArrayKlass.hpp"
#include "ci/ciValueArrayKlass.hpp"
#include "ci/ciValueKlass.hpp"


// Implementation of Instruction


int Instruction::dominator_depth() {
  int result = -1;
  if (block()) {
    result = block()->dominator_depth();
  }
  assert(result != -1 || this->as_Local(), "Only locals have dominator depth -1");
  return result;
}

Instruction::Condition Instruction::mirror(Condition cond) {
  switch (cond) {
    case eql: return eql;
    case neq: return neq;
    case lss: return gtr;
    case leq: return geq;
    case gtr: return lss;
    case geq: return leq;
    case aeq: return beq;
    case beq: return aeq;
  }
  ShouldNotReachHere();
  return eql;
}


Instruction::Condition Instruction::negate(Condition cond) {
  switch (cond) {
    case eql: return neq;
    case neq: return eql;
    case lss: return geq;
    case leq: return gtr;
    case gtr: return leq;
    case geq: return lss;
    case aeq: assert(false, "Above equal cannot be negated");
    case beq: assert(false, "Below equal cannot be negated");
  }
  ShouldNotReachHere();
  return eql;
}

void Instruction::update_exception_state(ValueStack* state) {
  if (state != NULL && (state->kind() == ValueStack::EmptyExceptionState || state->kind() == ValueStack::ExceptionState)) {
    assert(state->kind() == ValueStack::EmptyExceptionState || Compilation::current()->env()->should_retain_local_variables(), "unexpected state kind");
    _exception_state = state;
  } else {
    _exception_state = NULL;
  }
}

// Prev without need to have BlockBegin
Instruction* Instruction::prev() {
  Instruction* p = NULL;
  Instruction* q = block();
  while (q != this) {
    assert(q != NULL, "this is not in the block's instruction list");
    p = q; q = q->next();
  }
  return p;
}


void Instruction::state_values_do(ValueVisitor* f) {
  if (state_before() != NULL) {
    state_before()->values_do(f);
  }
  if (exception_state() != NULL){
    exception_state()->values_do(f);
  }
}

ciType* Instruction::exact_type() const {
  ciType* t = declared_type();
  if (t != NULL && t->is_klass()) {
    return t->as_klass()->exact_klass();
  }
  return NULL;
}


// FIXME -- make this obsolete. Use maybe_flattened_array() or check_flattened_array() instead.
bool Instruction::is_flattened_array() const {
  if (ValueArrayFlatten) {
    ciType* type = declared_type();
    if (type != NULL && type->is_value_array_klass()) {
      ciValueKlass* element_klass = type->as_value_array_klass()->element_klass()->as_value_klass();
      if (!element_klass->is_loaded() || element_klass->flatten_array()) {
        // Assume that all unloaded value arrays are not flattenable. If they
        // turn out to be flattenable, we deoptimize on aaload/aastore.
        // ^^^^ uugh -- this is ugly!
        return true;
      }
    }
  }

  return false;
}

bool Instruction::is_loaded_flattened_array() const {
  if (ValueArrayFlatten) {
    ciType* type = declared_type();
    if (type != NULL && type->is_value_array_klass()) {
      ciValueKlass* element_klass = type->as_value_array_klass()->element_klass()->as_value_klass();
      if (element_klass->is_loaded() && element_klass->flatten_array()) {
        return true;
      }
    }
  }

  return false;
}

bool Instruction::maybe_flattened_array() const {
  if (ValueArrayFlatten) {
    ciType* type = declared_type();
    if (type != NULL) {
      if (type->is_value_array_klass()) {
        ciValueKlass* element_klass = type->as_value_array_klass()->element_klass()->as_value_klass();
        if (!element_klass->is_loaded() || element_klass->flatten_array()) {
          // For unloaded value arrays, we will add a runtime check for flat-ness.
          return true;
        }
      } else if (type->is_obj_array_klass()) {
        ciKlass* element_klass = type->as_obj_array_klass()->element_klass();
        if (element_klass->is_java_lang_Object() || element_klass->is_interface()) {
          // Array covariance:
          //   (ValueType[] <: Object[])
          //   (ValueType[] <: <any interface>[])
          // We will add a runtime check for flat-ness.
          return true;
        }
      }
    }
  }

  return false;
}

#ifndef PRODUCT
void Instruction::check_state(ValueStack* state) {
  if (state != NULL) {
    state->verify();
  }
}


void Instruction::print() {
  InstructionPrinter ip;
  print(ip);
}


void Instruction::print_line() {
  InstructionPrinter ip;
  ip.print_line(this);
}


void Instruction::print(InstructionPrinter& ip) {
  ip.print_head();
  ip.print_line(this);
  tty->cr();
}
#endif // PRODUCT


// perform constant and interval tests on index value
bool AccessIndexed::compute_needs_range_check() {
  if (length()) {
    Constant* clength = length()->as_Constant();
    Constant* cindex = index()->as_Constant();
    if (clength && cindex) {
      IntConstant* l = clength->type()->as_IntConstant();
      IntConstant* i = cindex->type()->as_IntConstant();
      if (l && i && i->value() < l->value() && i->value() >= 0) {
        return false;
      }
    }
  }

  if (!this->check_flag(NeedsRangeCheckFlag)) {
    return false;
  }

  return true;
}


ciType* Constant::exact_type() const {
  if (type()->is_object() && type()->as_ObjectType()->is_loaded()) {
    return type()->as_ObjectType()->exact_type();
  }
  return NULL;
}

ciType* LoadIndexed::exact_type() const {
  ciType* array_type = array()->exact_type();
  if (array_type != NULL) {
    assert(array_type->is_array_klass(), "what else?");
    ciArrayKlass* ak = (ciArrayKlass*)array_type;

    if (ak->element_type()->is_instance_klass()) {
      ciInstanceKlass* ik = (ciInstanceKlass*)ak->element_type();
      if (ik->is_loaded() && ik->is_final()) {
        return ik;
      }
    }
  }
  return Instruction::exact_type();
}


ciType* LoadIndexed::declared_type() const {
  ciType* array_type = array()->declared_type();
  if (array_type == NULL || !array_type->is_loaded()) {
    return NULL;
  }
  assert(array_type->is_array_klass(), "what else?");
  ciArrayKlass* ak = (ciArrayKlass*)array_type;
  return ak->element_type();
}

bool StoreIndexed::is_exact_flattened_array_store() const {
  if (array()->is_loaded_flattened_array() && value()->as_Constant() == NULL) {
    ciKlass* element_klass = array()->declared_type()->as_value_array_klass()->element_klass();
    ciKlass* actual_klass = value()->declared_type()->as_klass();
    if (element_klass == actual_klass) {
      return true;
    }
  }
  return false;
}

ciType* LoadField::declared_type() const {
  return field()->type();
}


ciType* NewTypeArray::exact_type() const {
  return ciTypeArrayKlass::make(elt_type());
}

ciType* NewObjectArray::exact_type() const {
  ciKlass* element_klass = klass();
  if (element_klass->is_valuetype()) {
    return ciValueArrayKlass::make(element_klass);
  } else {
    return ciObjArrayKlass::make(element_klass);
  }
}

ciType* NewMultiArray::exact_type() const {
  return _klass;
}

ciType* NewArray::declared_type() const {
  return exact_type();
}

ciType* NewInstance::exact_type() const {
  return klass();
}

ciType* NewInstance::declared_type() const {
  return exact_type();
}

Value NewValueTypeInstance::depends_on() {
  if (_depends_on != this) {
    if (_depends_on->as_NewValueTypeInstance() != NULL) {
      return _depends_on->as_NewValueTypeInstance()->depends_on();
    }
  }
  return _depends_on;
}

ciType* NewValueTypeInstance::exact_type() const {
  return klass();
}

ciType* NewValueTypeInstance::declared_type() const {
  return exact_type();
}

ciType* CheckCast::declared_type() const {
  return klass();
}

// Implementation of ArithmeticOp

bool ArithmeticOp::is_commutative() const {
  switch (op()) {
    case Bytecodes::_iadd: // fall through
    case Bytecodes::_ladd: // fall through
    case Bytecodes::_fadd: // fall through
    case Bytecodes::_dadd: // fall through
    case Bytecodes::_imul: // fall through
    case Bytecodes::_lmul: // fall through
    case Bytecodes::_fmul: // fall through
    case Bytecodes::_dmul: return true;
    default              : return false;
  }
}


bool ArithmeticOp::can_trap() const {
  switch (op()) {
    case Bytecodes::_idiv: // fall through
    case Bytecodes::_ldiv: // fall through
    case Bytecodes::_irem: // fall through
    case Bytecodes::_lrem: return true;
    default              : return false;
  }
}


// Implementation of LogicOp

bool LogicOp::is_commutative() const {
#ifdef ASSERT
  switch (op()) {
    case Bytecodes::_iand: // fall through
    case Bytecodes::_land: // fall through
    case Bytecodes::_ior : // fall through
    case Bytecodes::_lor : // fall through
    case Bytecodes::_ixor: // fall through
    case Bytecodes::_lxor: break;
    default              : ShouldNotReachHere(); break;
  }
#endif
  // all LogicOps are commutative
  return true;
}


// Implementation of IfOp

bool IfOp::is_commutative() const {
  return cond() == eql || cond() == neq;
}


// Implementation of StateSplit

void StateSplit::substitute(BlockList& list, BlockBegin* old_block, BlockBegin* new_block) {
  NOT_PRODUCT(bool assigned = false;)
  for (int i = 0; i < list.length(); i++) {
    BlockBegin** b = list.adr_at(i);
    if (*b == old_block) {
      *b = new_block;
      NOT_PRODUCT(assigned = true;)
    }
  }
  assert(assigned == true, "should have assigned at least once");
}


IRScope* StateSplit::scope() const {
  return _state->scope();
}


void StateSplit::state_values_do(ValueVisitor* f) {
  Instruction::state_values_do(f);
  if (state() != NULL) state()->values_do(f);
}


void BlockBegin::state_values_do(ValueVisitor* f) {
  StateSplit::state_values_do(f);

  if (is_set(BlockBegin::exception_entry_flag)) {
    for (int i = 0; i < number_of_exception_states(); i++) {
      exception_state_at(i)->values_do(f);
    }
  }
}


// Implementation of Invoke


Invoke::Invoke(Bytecodes::Code code, ValueType* result_type, Value recv, Values* args,
               int vtable_index, ciMethod* target, ValueStack* state_before, bool never_null)
  : StateSplit(result_type, state_before)
  , _code(code)
  , _recv(recv)
  , _args(args)
  , _vtable_index(vtable_index)
  , _target(target)
{
  set_flag(TargetIsLoadedFlag,   target->is_loaded());
  set_flag(TargetIsFinalFlag,    target_is_loaded() && target->is_final_method());
  set_flag(TargetIsStrictfpFlag, target_is_loaded() && target->is_strict());
  set_never_null(never_null);

  assert(args != NULL, "args must exist");
#ifdef ASSERT
  AssertValues assert_value;
  values_do(&assert_value);
#endif

  // provide an initial guess of signature size.
  _signature = new BasicTypeList(number_of_arguments() + (has_receiver() ? 1 : 0));
  if (has_receiver()) {
    _signature->append(as_BasicType(receiver()->type()));
  }
  for (int i = 0; i < number_of_arguments(); i++) {
    ValueType* t = argument_at(i)->type();
    BasicType bt = as_BasicType(t);
    _signature->append(bt);
  }
}


void Invoke::state_values_do(ValueVisitor* f) {
  StateSplit::state_values_do(f);
  if (state_before() != NULL) state_before()->values_do(f);
  if (state() != NULL) state()->values_do(f);
}

ciType* Invoke::declared_type() const {
  ciSignature* declared_signature = state()->scope()->method()->get_declared_signature_at_bci(state()->bci());
  ciType *t = declared_signature->return_type();
  assert(t->basic_type() != T_VOID, "need return value of void method?");
  return t;
}

// Implementation of Constant
intx Constant::hash() const {
  if (state_before() == NULL) {
    switch (type()->tag()) {
    case intTag:
      return HASH2(name(), type()->as_IntConstant()->value());
    case addressTag:
      return HASH2(name(), type()->as_AddressConstant()->value());
    case longTag:
      {
        jlong temp = type()->as_LongConstant()->value();
        return HASH3(name(), high(temp), low(temp));
      }
    case floatTag:
      return HASH2(name(), jint_cast(type()->as_FloatConstant()->value()));
    case doubleTag:
      {
        jlong temp = jlong_cast(type()->as_DoubleConstant()->value());
        return HASH3(name(), high(temp), low(temp));
      }
    case objectTag:
      assert(type()->as_ObjectType()->is_loaded(), "can't handle unloaded values");
      return HASH2(name(), type()->as_ObjectType()->constant_value());
    case metaDataTag:
      assert(type()->as_MetadataType()->is_loaded(), "can't handle unloaded values");
      return HASH2(name(), type()->as_MetadataType()->constant_value());
    default:
      ShouldNotReachHere();
    }
  }
  return 0;
}

bool Constant::is_equal(Value v) const {
  if (v->as_Constant() == NULL) return false;

  switch (type()->tag()) {
    case intTag:
      {
        IntConstant* t1 = type()->as_IntConstant();
        IntConstant* t2 = v->type()->as_IntConstant();
        return (t1 != NULL && t2 != NULL &&
                t1->value() == t2->value());
      }
    case longTag:
      {
        LongConstant* t1 = type()->as_LongConstant();
        LongConstant* t2 = v->type()->as_LongConstant();
        return (t1 != NULL && t2 != NULL &&
                t1->value() == t2->value());
      }
    case floatTag:
      {
        FloatConstant* t1 = type()->as_FloatConstant();
        FloatConstant* t2 = v->type()->as_FloatConstant();
        return (t1 != NULL && t2 != NULL &&
                jint_cast(t1->value()) == jint_cast(t2->value()));
      }
    case doubleTag:
      {
        DoubleConstant* t1 = type()->as_DoubleConstant();
        DoubleConstant* t2 = v->type()->as_DoubleConstant();
        return (t1 != NULL && t2 != NULL &&
                jlong_cast(t1->value()) == jlong_cast(t2->value()));
      }
    case objectTag:
      {
        ObjectType* t1 = type()->as_ObjectType();
        ObjectType* t2 = v->type()->as_ObjectType();
        return (t1 != NULL && t2 != NULL &&
                t1->is_loaded() && t2->is_loaded() &&
                t1->constant_value() == t2->constant_value());
      }
    case metaDataTag:
      {
        MetadataType* t1 = type()->as_MetadataType();
        MetadataType* t2 = v->type()->as_MetadataType();
        return (t1 != NULL && t2 != NULL &&
                t1->is_loaded() && t2->is_loaded() &&
                t1->constant_value() == t2->constant_value());
      }
    default:
      return false;
  }
}

Constant::CompareResult Constant::compare(Instruction::Condition cond, Value right) const {
  Constant* rc = right->as_Constant();
  // other is not a constant
  if (rc == NULL) return not_comparable;

  ValueType* lt = type();
  ValueType* rt = rc->type();
  // different types
  if (lt->base() != rt->base()) return not_comparable;
  switch (lt->tag()) {
  case intTag: {
    int x = lt->as_IntConstant()->value();
    int y = rt->as_IntConstant()->value();
    switch (cond) {
    case If::eql: return x == y ? cond_true : cond_false;
    case If::neq: return x != y ? cond_true : cond_false;
    case If::lss: return x <  y ? cond_true : cond_false;
    case If::leq: return x <= y ? cond_true : cond_false;
    case If::gtr: return x >  y ? cond_true : cond_false;
    case If::geq: return x >= y ? cond_true : cond_false;
    default     : break;
    }
    break;
  }
  case longTag: {
    jlong x = lt->as_LongConstant()->value();
    jlong y = rt->as_LongConstant()->value();
    switch (cond) {
    case If::eql: return x == y ? cond_true : cond_false;
    case If::neq: return x != y ? cond_true : cond_false;
    case If::lss: return x <  y ? cond_true : cond_false;
    case If::leq: return x <= y ? cond_true : cond_false;
    case If::gtr: return x >  y ? cond_true : cond_false;
    case If::geq: return x >= y ? cond_true : cond_false;
    default     : break;
    }
    break;
  }
  case objectTag: {
    ciObject* xvalue = lt->as_ObjectType()->constant_value();
    ciObject* yvalue = rt->as_ObjectType()->constant_value();
    assert(xvalue != NULL && yvalue != NULL, "not constants");
    if (xvalue->is_loaded() && yvalue->is_loaded()) {
      switch (cond) {
      case If::eql: return xvalue == yvalue ? cond_true : cond_false;
      case If::neq: return xvalue != yvalue ? cond_true : cond_false;
      default     : break;
      }
    }
    break;
  }
  case metaDataTag: {
    ciMetadata* xvalue = lt->as_MetadataType()->constant_value();
    ciMetadata* yvalue = rt->as_MetadataType()->constant_value();
    assert(xvalue != NULL && yvalue != NULL, "not constants");
    if (xvalue->is_loaded() && yvalue->is_loaded()) {
      switch (cond) {
      case If::eql: return xvalue == yvalue ? cond_true : cond_false;
      case If::neq: return xvalue != yvalue ? cond_true : cond_false;
      default     : break;
      }
    }
    break;
  }
  default:
    break;
  }
  return not_comparable;
}


// Implementation of BlockBegin

void BlockBegin::set_end(BlockEnd* end) {
  assert(end != NULL, "should not reset block end to NULL");
  if (end == _end) {
    return;
  }
  clear_end();

  // Set the new end
  _end = end;

  _successors.clear();
  // Now reset successors list based on BlockEnd
  for (int i = 0; i < end->number_of_sux(); i++) {
    BlockBegin* sux = end->sux_at(i);
    _successors.append(sux);
    sux->_predecessors.append(this);
  }
  _end->set_begin(this);
}


void BlockBegin::clear_end() {
  // Must make the predecessors/successors match up with the
  // BlockEnd's notion.
  if (_end != NULL) {
    // disconnect from the old end
    _end->set_begin(NULL);

    // disconnect this block from its current successors
    for (int i = 0; i < _successors.length(); i++) {
      _successors.at(i)->remove_predecessor(this);
    }
    _end = NULL;
  }
}


void BlockBegin::disconnect_edge(BlockBegin* from, BlockBegin* to) {
  // disconnect any edges between from and to
#ifndef PRODUCT
  if (PrintIR && Verbose) {
    tty->print_cr("Disconnected edge B%d -> B%d", from->block_id(), to->block_id());
  }
#endif
  for (int s = 0; s < from->number_of_sux();) {
    BlockBegin* sux = from->sux_at(s);
    if (sux == to) {
      int index = sux->_predecessors.find(from);
      if (index >= 0) {
        sux->_predecessors.remove_at(index);
      }
      from->_successors.remove_at(s);
    } else {
      s++;
    }
  }
}


void BlockBegin::disconnect_from_graph() {
  // disconnect this block from all other blocks
  for (int p = 0; p < number_of_preds(); p++) {
    pred_at(p)->remove_successor(this);
  }
  for (int s = 0; s < number_of_sux(); s++) {
    sux_at(s)->remove_predecessor(this);
  }
}

void BlockBegin::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
  // modify predecessors before substituting successors
  for (int i = 0; i < number_of_sux(); i++) {
    if (sux_at(i) == old_sux) {
      // remove old predecessor before adding new predecessor
      // otherwise there is a dead predecessor in the list
      new_sux->remove_predecessor(old_sux);
      new_sux->add_predecessor(this);
    }
  }
  old_sux->remove_predecessor(this);
  end()->substitute_sux(old_sux, new_sux);
}



// In general it is not possible to calculate a value for the field "depth_first_number"
// of the inserted block, without recomputing the values of the other blocks
// in the CFG. Therefore the value of "depth_first_number" in BlockBegin becomes meaningless.
BlockBegin* BlockBegin::insert_block_between(BlockBegin* sux) {
  int bci = sux->bci();
  // critical edge splitting may introduce a goto after an if, and array
  // bound check elimination may insert a predicate between the if and
  // goto. The bci of the goto can't be the one of the if otherwise
  // the state and bci are inconsistent and a deoptimization triggered
  // by the predicate would lead to incorrect execution/a crash.
  BlockBegin* new_sux = new BlockBegin(bci);

  // mark this block (special treatment when block order is computed)
  new_sux->set(critical_edge_split_flag);

  // This goto is not a safepoint.
  Goto* e = new Goto(sux, false);
  new_sux->set_next(e, bci);
  new_sux->set_end(e);
  // setup states
  ValueStack* s = end()->state();
  new_sux->set_state(s->copy(s->kind(), bci));
  e->set_state(s->copy(s->kind(), bci));
  assert(new_sux->state()->locals_size() == s->locals_size(), "local size mismatch!");
  assert(new_sux->state()->stack_size() == s->stack_size(), "stack size mismatch!");
  assert(new_sux->state()->locks_size() == s->locks_size(), "locks size mismatch!");

  // link predecessor to new block
  end()->substitute_sux(sux, new_sux);

  // The ordering needs to be the same, so remove the link that the
  // set_end call above added and substitute the new_sux for this
  // block.
  sux->remove_predecessor(new_sux);

  // the successor could be the target of a switch so it might have
  // multiple copies of this predecessor, so substitute the new_sux
  // for the first and delete the rest.
  bool assigned = false;
  BlockList& list = sux->_predecessors;
  for (int i = 0; i < list.length(); i++) {
    BlockBegin** b = list.adr_at(i);
    if (*b == this) {
      if (assigned) {
        list.remove_at(i);
        // reprocess this index
        i--;
      } else {
        assigned = true;
        *b = new_sux;
      }
      // link the new block back to its predecessors.
      new_sux->add_predecessor(this);
    }
  }
  assert(assigned == true, "should have assigned at least once");
  return new_sux;
}


void BlockBegin::remove_successor(BlockBegin* pred) {
  int idx;
  while ((idx = _successors.find(pred)) >= 0) {
    _successors.remove_at(idx);
  }
}


void BlockBegin::add_predecessor(BlockBegin* pred) {
  _predecessors.append(pred);
}


void BlockBegin::remove_predecessor(BlockBegin* pred) {
  int idx;
  while ((idx = _predecessors.find(pred)) >= 0) {
    _predecessors.remove_at(idx);
  }
}


void BlockBegin::add_exception_handler(BlockBegin* b) {
  assert(b != NULL && (b->is_set(exception_entry_flag)), "exception handler must exist");
  // add only if not in the list already
  if (!_exception_handlers.contains(b)) _exception_handlers.append(b);
}

int BlockBegin::add_exception_state(ValueStack* state) {
  assert(is_set(exception_entry_flag), "only for xhandlers");
  if (_exception_states == NULL) {
    _exception_states = new ValueStackStack(4);
  }
  _exception_states->append(state);
  return _exception_states->length() - 1;
}


void BlockBegin::iterate_preorder(boolArray& mark, BlockClosure* closure) {
  if (!mark.at(block_id())) {
    mark.at_put(block_id(), true);
    closure->block_do(this);
    BlockEnd* e = end(); // must do this after block_do because block_do may change it!
    { for (int i = number_of_exception_handlers() - 1; i >= 0; i--) exception_handler_at(i)->iterate_preorder(mark, closure); }
    { for (int i = e->number_of_sux() - 1; i >= 0; i--) e->sux_at(i)->iterate_preorder(mark, closure); }
  }
}


void BlockBegin::iterate_postorder(boolArray& mark, BlockClosure* closure) {
  if (!mark.at(block_id())) {
    mark.at_put(block_id(), true);
    BlockEnd* e = end();
    { for (int i = number_of_exception_handlers() - 1; i >= 0; i--) exception_handler_at(i)->iterate_postorder(mark, closure); }
    { for (int i = e->number_of_sux() - 1; i >= 0; i--) e->sux_at(i)->iterate_postorder(mark, closure); }
    closure->block_do(this);
  }
}


void BlockBegin::iterate_preorder(BlockClosure* closure) {
  int mark_len = number_of_blocks();
  boolArray mark(mark_len, mark_len, false);
  iterate_preorder(mark, closure);
}


void BlockBegin::iterate_postorder(BlockClosure* closure) {
  int mark_len = number_of_blocks();
  boolArray mark(mark_len, mark_len, false);
  iterate_postorder(mark, closure);
}


void BlockBegin::block_values_do(ValueVisitor* f) {
  for (Instruction* n = this; n != NULL; n = n->next()) n->values_do(f);
}


#ifndef PRODUCT
  #define TRACE_PHI(code) if (PrintPhiFunctions) { code; }
#else
  #define TRACE_PHI(code)
#endif


bool BlockBegin::try_merge(ValueStack* new_state) {
  TRACE_PHI(tty->print_cr("********** try_merge for block B%d", block_id()));

  // local variables used for state iteration
  int index;
  Value new_value, existing_value;

  ValueStack* existing_state = state();
  if (existing_state == NULL) {
    TRACE_PHI(tty->print_cr("first call of try_merge for this block"));

    if (is_set(BlockBegin::was_visited_flag)) {
      // this actually happens for complicated jsr/ret structures
      return false; // BAILOUT in caller
    }

    // copy state because it is altered
    new_state = new_state->copy(ValueStack::BlockBeginState, bci());

    // Use method liveness to invalidate dead locals
    MethodLivenessResult liveness = new_state->scope()->method()->liveness_at_bci(bci());
    if (liveness.is_valid()) {
      assert((int)liveness.size() == new_state->locals_size(), "error in use of liveness");

      for_each_local_value(new_state, index, new_value) {
        if (!liveness.at(index) || new_value->type()->is_illegal()) {
          new_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating dead local %d", index));
        }
      }
    }

    if (is_set(BlockBegin::parser_loop_header_flag)) {
      TRACE_PHI(tty->print_cr("loop header block, initializing phi functions"));

      for_each_stack_value(new_state, index, new_value) {
        new_state->setup_phi_for_stack(this, index, NULL, new_value);
        TRACE_PHI(tty->print_cr("creating phi-function %c%d for stack %d", new_state->stack_at(index)->type()->tchar(), new_state->stack_at(index)->id(), index));
      }

      BitMap& requires_phi_function = new_state->scope()->requires_phi_function();

      for_each_local_value(new_state, index, new_value) {
        bool requires_phi = requires_phi_function.at(index) || (new_value->type()->is_double_word() && requires_phi_function.at(index + 1));
        if (requires_phi || !SelectivePhiFunctions) {
          new_state->setup_phi_for_local(this, index, NULL, new_value);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for local %d", new_state->local_at(index)->type()->tchar(), new_state->local_at(index)->id(), index));
        }
      }
    }

    // initialize state of block
    set_state(new_state);

  } else if (existing_state->is_same(new_state)) {
    TRACE_PHI(tty->print_cr("existing state found"));

    assert(existing_state->scope() == new_state->scope(), "not matching");
    assert(existing_state->locals_size() == new_state->locals_size(), "not matching");
    assert(existing_state->stack_size() == new_state->stack_size(), "not matching");

    if (is_set(BlockBegin::was_visited_flag)) {
      TRACE_PHI(tty->print_cr("loop header block, phis must be present"));

      if (!is_set(BlockBegin::parser_loop_header_flag)) {
        // this actually happens for complicated jsr/ret structures
        return false; // BAILOUT in caller
      }

      for_each_local_value(existing_state, index, existing_value) {
        Value new_value = new_state->local_at(index);
        if (new_value == NULL || new_value->type()->tag() != existing_value->type()->tag()) {
          Phi* existing_phi = existing_value->as_Phi();
          if (existing_phi == NULL) {
            return false; // BAILOUT in caller
          }
          // Invalidate the phi function here. This case is very rare except for
          // JVMTI capability "can_access_local_variables".
          // In really rare cases we will bail out in LIRGenerator::move_to_phi.
          existing_phi->make_illegal();
          existing_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating local %d because of type mismatch", index));
        }
      }

#ifdef ASSERT
      // check that all necessary phi functions are present
      for_each_stack_value(existing_state, index, existing_value) {
        assert(existing_value->as_Phi() != NULL && existing_value->as_Phi()->block() == this, "phi function required");
      }
      for_each_local_value(existing_state, index, existing_value) {
        assert(existing_value == new_state->local_at(index) || (existing_value->as_Phi() != NULL && existing_value->as_Phi()->block() == this), "phi function required");
      }
#endif

    } else {
      TRACE_PHI(tty->print_cr("creating phi functions on demand"));

      // create necessary phi functions for stack
      for_each_stack_value(existing_state, index, existing_value) {
        Value new_value = new_state->stack_at(index);
        Phi* existing_phi = existing_value->as_Phi();

        if (new_value != existing_value && (existing_phi == NULL || existing_phi->block() != this)) {
          existing_state->setup_phi_for_stack(this, index, existing_value, new_value);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for stack %d", existing_state->stack_at(index)->type()->tchar(), existing_state->stack_at(index)->id(), index));
        }
      }

      // create necessary phi functions for locals
      for_each_local_value(existing_state, index, existing_value) {
        Value new_value = new_state->local_at(index);
        Phi* existing_phi = existing_value->as_Phi();

        if (new_value == NULL || new_value->type()->tag() != existing_value->type()->tag()) {
          existing_state->invalidate_local(index);
          TRACE_PHI(tty->print_cr("invalidating local %d because of type mismatch", index));
        } else if (new_value != existing_value && (existing_phi == NULL || existing_phi->block() != this)) {
          existing_state->setup_phi_for_local(this, index, existing_value, new_value);
          TRACE_PHI(tty->print_cr("creating phi-function %c%d for local %d", existing_state->local_at(index)->type()->tchar(), existing_state->local_at(index)->id(), index));
        }
      }
    }

    assert(existing_state->caller_state() == new_state->caller_state(), "caller states must be equal");

  } else {
    assert(false, "stack or locks not matching (invalid bytecodes)");
    return false;
  }

  TRACE_PHI(tty->print_cr("********** try_merge for block B%d successful", block_id()));

  return true;
}


#ifndef PRODUCT
void BlockBegin::print_block() {
  InstructionPrinter ip;
  print_block(ip, false);
}


void BlockBegin::print_block(InstructionPrinter& ip, bool live_only) {
  ip.print_instr(this); tty->cr();
  ip.print_stack(this->state()); tty->cr();
  ip.print_inline_level(this);
  ip.print_head();
  for (Instruction* n = next(); n != NULL; n = n->next()) {
    if (!live_only || n->is_pinned() || n->use_count() > 0) {
      ip.print_line(n);
    }
  }
  tty->cr();
}
#endif // PRODUCT


// Implementation of BlockList

void BlockList::iterate_forward(BlockClosure* closure) {
  const int l = length();
  for (int i = 0; i < l; i++) closure->block_do(at(i));
}


void BlockList::iterate_backward(BlockClosure* closure) {
  for (int i = length() - 1; i >= 0; i--) closure->block_do(at(i));
}


void BlockList::blocks_do(void f(BlockBegin*)) {
  for (int i = length() - 1; i >= 0; i--) f(at(i));
}


void BlockList::values_do(ValueVisitor* f) {
  for (int i = length() - 1; i >= 0; i--) at(i)->block_values_do(f);
}


#ifndef PRODUCT
void BlockList::print(bool cfg_only, bool live_only) {
  InstructionPrinter ip;
  for (int i = 0; i < length(); i++) {
    BlockBegin* block = at(i);
    if (cfg_only) {
      ip.print_instr(block); tty->cr();
    } else {
      block->print_block(ip, live_only);
    }
  }
}
#endif // PRODUCT


// Implementation of BlockEnd

void BlockEnd::set_begin(BlockBegin* begin) {
  BlockList* sux = NULL;
  if (begin != NULL) {
    sux = begin->successors();
  } else if (this->begin() != NULL) {
    // copy our sux list
    BlockList* sux = new BlockList(this->begin()->number_of_sux());
    for (int i = 0; i < this->begin()->number_of_sux(); i++) {
      sux->append(this->begin()->sux_at(i));
    }
  }
  _sux = sux;
}


void BlockEnd::substitute_sux(BlockBegin* old_sux, BlockBegin* new_sux) {
  substitute(*_sux, old_sux, new_sux);
}


// Implementation of Phi

// Normal phi functions take their operands from the last instruction of the
// predecessor. Special handling is needed for xhandler entries because there
// the state of arbitrary instructions is needed.

Value Phi::operand_at(int i) const {
  ValueStack* state;
  if (_block->is_set(BlockBegin::exception_entry_flag)) {
    state = _block->exception_state_at(i);
  } else {
    state = _block->pred_at(i)->end()->state();
  }
  assert(state != NULL, "");

  if (is_local()) {
    return state->local_at(local_index());
  } else {
    return state->stack_at(stack_index());
  }
}


int Phi::operand_count() const {
  if (_block->is_set(BlockBegin::exception_entry_flag)) {
    return _block->number_of_exception_states();
  } else {
    return _block->number_of_preds();
  }
}

#ifdef ASSERT
// Constructor of Assert
Assert::Assert(Value x, Condition cond, bool unordered_is_true, Value y) : Instruction(illegalType)
  , _x(x)
  , _cond(cond)
  , _y(y)
{
  set_flag(UnorderedIsTrueFlag, unordered_is_true);
  assert(x->type()->tag() == y->type()->tag(), "types must match");
  pin();

  stringStream strStream;
  Compilation::current()->method()->print_name(&strStream);

  stringStream strStream1;
  InstructionPrinter ip1(1, &strStream1);
  ip1.print_instr(x);

  stringStream strStream2;
  InstructionPrinter ip2(1, &strStream2);
  ip2.print_instr(y);

  stringStream ss;
  ss.print("Assertion %s %s %s in method %s", strStream1.as_string(), ip2.cond_name(cond), strStream2.as_string(), strStream.as_string());

  _message = ss.as_string();
}
#endif

void RangeCheckPredicate::check_state() {
  assert(state()->kind() != ValueStack::EmptyExceptionState && state()->kind() != ValueStack::ExceptionState, "will deopt with empty state");
}

void ProfileInvoke::state_values_do(ValueVisitor* f) {
  if (state() != NULL) state()->values_do(f);
}