/*
 * Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"
#include "ci/ciValueKlass.hpp"
#include "opto/addnode.hpp"
#include "opto/castnode.hpp"
#include "opto/graphKit.hpp"
#include "opto/rootnode.hpp"
#include "opto/valuetypenode.hpp"
#include "opto/phaseX.hpp"

// Clones the value type to handle control flow merges involving multiple value types.
// The inputs are replaced by PhiNodes to represent the merged values for the given region.
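// Typical usage at a control flow merge (sketch, based on the methods below): the value type
// reaching the merge on the first path is cloned with phis for the region, further paths are
// merged in via merge_with() with their path number, and add_new_path() widens the phis when
// the region gains an additional input.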
ValueTypeBaseNode* ValueTypeBaseNode::clone_with_phis(PhaseGVN* gvn, Node* region) {
  assert(!has_phi_inputs(region), "already cloned with phis");
  ValueTypeBaseNode* vt = clone()->as_ValueTypeBase();

  // Create a PhiNode for merging the oop values
  const Type* phi_type = Type::get_const_type(value_klass());
  PhiNode* oop = PhiNode::make(region, vt->get_oop(), phi_type);
  gvn->set_type(oop, phi_type);
  vt->set_oop(oop);

  // Create a PhiNode for each field value to be merged
  for (uint i = 0; i < vt->field_count(); ++i) {
    ciType* type = vt->field_type(i);
    Node*  value = vt->field_value(i);
    if (type->is_valuetype() && value->isa_ValueType()) {
      // Handle flattened value type fields recursively
      value = value->as_ValueType()->clone_with_phis(gvn, region);
    } else {
      phi_type = Type::get_const_type(type);
      value = PhiNode::make(region, value, phi_type);
      gvn->set_type(value, phi_type);
    }
    vt->set_field_value(i, value);
  }
  gvn->set_type(vt, vt->bottom_type());
  return vt;
}

// Checks if the inputs of the ValueTypeBaseNode were replaced by PhiNodes
// for the given region (see ValueTypeBaseNode::clone_with_phis).
bool ValueTypeBaseNode::has_phi_inputs(Node* region) {
  // Check oop input
  bool result = get_oop()->is_Phi() && get_oop()->as_Phi()->region() == region;
#ifdef ASSERT
  if (result) {
    // Check all field value inputs for consistency
    for (uint i = Oop; i < field_count(); ++i) {
      Node* n = in(i);
      if (n->is_ValueTypeBase()) {
        assert(n->as_ValueTypeBase()->has_phi_inputs(region), "inconsistent phi inputs");
      } else {
        assert(n->is_Phi() && n->as_Phi()->region() == region, "inconsistent phi inputs");
      }
    }
  }
#endif
  return result;
}

// Merges 'this' with 'other' by updating the input PhiNodes added by 'clone_with_phis'
ValueTypeBaseNode* ValueTypeBaseNode::merge_with(PhaseGVN* gvn, const ValueTypeBaseNode* other, int pnum, bool transform) {
  // Merge oop inputs
  PhiNode* phi = get_oop()->as_Phi();
  phi->set_req(pnum, other->get_oop());
  if (transform) {
    set_oop(gvn->transform(phi));
    gvn->record_for_igvn(phi);
  }
  // Merge field values
  for (uint i = 0; i < field_count(); ++i) {
    Node* val1 =        field_value(i);
    Node* val2 = other->field_value(i);
    if (val1->is_ValueType()) {
      val1->as_ValueType()->merge_with(gvn, val2->as_ValueType(), pnum, transform);
    } else {
      assert(val1->is_Phi(), "must be a phi node");
      assert(!val2->is_ValueType(), "inconsistent merge values");
      val1->set_req(pnum, val2);
    }
    if (transform) {
      set_field_value(i, gvn->transform(val1));
      gvn->record_for_igvn(val1);
    }
  }
  return this;
}

// Adds a new merge path to a value type node with phi inputs
void ValueTypeBaseNode::add_new_path(Node* region) {
  assert(has_phi_inputs(region), "must have phi inputs");

  PhiNode* phi = get_oop()->as_Phi();
  phi->add_req(NULL);
  assert(phi->req() == region->req(), "must be same size as region");

  for (uint i = 0; i < field_count(); ++i) {
    Node* val = field_value(i);
    if (val->is_ValueType()) {
      val->as_ValueType()->add_new_path(region);
    } else {
      val->as_Phi()->add_req(NULL);
      assert(val->req() == region->req(), "must be same size as region");
    }
  }
}

Node* ValueTypeBaseNode::field_value(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return in(Values + index);
}

// Get the value of the field at the given offset.
// If 'recursive' is true, flattened value type fields will be resolved recursively.
Node* ValueTypeBaseNode::field_value_by_offset(int offset, bool recursive) const {
  // If the field at 'offset' belongs to a flattened value type field, 'index' refers to the
  // corresponding ValueTypeNode input and 'sub_offset' is the offset within the flattened value type.
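  // Example (hypothetical layout): for a flattened field at offset 24 whose value klass has
  // its first field at offset 16, a request for offset 28 gives sub_offset = 4; the recursion
  // below then continues with offset 4 + 16 = 20 inside that field's ValueTypeNode.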
  int index = value_klass()->field_index_by_offset(offset);
  int sub_offset = offset - field_offset(index);
  Node* value = field_value(index);
  assert(value != NULL, "field value not found");
  if (recursive && value->is_ValueType()) {
    ValueTypeNode* vt = value->as_ValueType();
    if (field_is_flattened(index)) {
      // Flattened value type field
      sub_offset += vt->value_klass()->first_field_offset(); // Add header size
      return vt->field_value_by_offset(sub_offset, recursive);
    } else {
      assert(sub_offset == 0, "should not have a sub offset");
      return vt;
    }
  }
  assert(!(recursive && value->is_ValueType()), "should not be a value type");
  assert(sub_offset == 0, "offset mismatch");
  return value;
}

void ValueTypeBaseNode::set_field_value(uint index, Node* value) {
  assert(index < field_count(), "index out of bounds");
  set_req(Values + index, value);
}

void ValueTypeBaseNode::set_field_value_by_offset(int offset, Node* value) {
  set_field_value(field_index(offset), value);
}

int ValueTypeBaseNode::field_offset(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return value_klass()->declared_nonstatic_field_at(index)->offset();
}

uint ValueTypeBaseNode::field_index(int offset) const {
  uint i = 0;
  for (; i < field_count() && field_offset(i) != offset; i++) { }
  assert(i < field_count(), "field not found");
  return i;
}

ciType* ValueTypeBaseNode::field_type(uint index) const {
  assert(index < field_count(), "index out of bounds");
  return value_klass()->declared_nonstatic_field_at(index)->type();
}

bool ValueTypeBaseNode::field_is_flattened(uint index) const {
  assert(index < field_count(), "index out of bounds");
  ciField* field = value_klass()->declared_nonstatic_field_at(index);
  assert(!field->is_flattened() || field->type()->is_valuetype(), "must be a value type");
  return field->is_flattened();
}

bool ValueTypeBaseNode::field_is_flattenable(uint index) const {
  assert(index < field_count(), "index out of bounds");
  ciField* field = value_klass()->declared_nonstatic_field_at(index);
  assert(!field->is_flattenable() || field->type()->is_valuetype(), "must be a value type");
  return field->is_flattenable();
}

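// Replaces the value type's debug edge in 'sfpt' by a SafePointScalarObjectNode and appends the
// field values to the safepoint, so that deoptimization can re-allocate the value type from the
// debug information. Non-flattened value type fields are pushed onto 'worklist' so that the
// caller can scalarize them in turn.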
int ValueTypeBaseNode::make_scalar_in_safepoint(PhaseIterGVN* igvn, Unique_Node_List& worklist, SafePointNode* sfpt) {
  ciValueKlass* vk = value_klass();
  uint nfields = vk->nof_nonstatic_fields();
  JVMState* jvms = sfpt->jvms();
  assert(jvms != NULL, "missing JVMS");
  int start = jvms->debug_start();
  int end   = jvms->debug_end();
  // Replace safepoint edge by SafePointScalarObjectNode and add field values
  uint first_ind = (sfpt->req() - jvms->scloff());
  SafePointScalarObjectNode* sobj = new SafePointScalarObjectNode(value_ptr(),
#ifdef ASSERT
                                                                  NULL,
#endif
                                                                  first_ind, nfields);
  sobj->init_req(0, igvn->C->root());
  // Iterate over the value type fields in order of increasing
  // offset and add the field values to the safepoint.
  for (uint j = 0; j < nfields; ++j) {
    int offset = vk->nonstatic_field_at(j)->offset();
    Node* value = field_value_by_offset(offset, true /* include flattened value type fields */);
    if (value->is_ValueType()) {
      // Add value type field to the worklist to process later
      worklist.push(value);
    }
    sfpt->add_req(value);
  }
  jvms->set_endoff(sfpt->req());
  sobj = igvn->transform(sobj)->as_SafePointScalarObject();
  igvn->rehash_node_delayed(sfpt);
  return sfpt->replace_edges_in_range(this, sobj, start, end);
}

void ValueTypeBaseNode::make_scalar_in_safepoints(PhaseIterGVN* igvn) {
  // Process all safepoint uses and scalarize value type
  Unique_Node_List worklist;
  for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
    SafePointNode* sfpt = fast_out(i)->isa_SafePoint();
    if (sfpt != NULL && !sfpt->is_CallLeaf() && (!sfpt->is_Call() || sfpt->as_Call()->has_debug_use(this))) {
      int nb = 0;
      if (is_allocated(igvn) && get_oop()->is_Con()) {
        // Value type is allocated with a constant oop, link it directly
        nb = sfpt->replace_edges_in_range(this, get_oop(), sfpt->jvms()->debug_start(), sfpt->jvms()->debug_end());
        igvn->rehash_node_delayed(sfpt);
      } else {
        nb = make_scalar_in_safepoint(igvn, worklist, sfpt);
      }
      --i; imax -= nb;
    }
  }
  // Now scalarize non-flattened fields
  for (uint i = 0; i < worklist.size(); ++i) {
    Node* vt = worklist.at(i);
    vt->as_ValueType()->make_scalar_in_safepoints(igvn);
  }
}

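// Returns the address type for accessing the field at 'offset': a raw pointer for mismatched
// accesses, a per-field slice for flattened value type arrays, or the alias type of the
// corresponding field in 'holder' otherwise.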
const TypePtr* ValueTypeBaseNode::field_adr_type(Node* base, int offset, ciInstanceKlass* holder, DecoratorSet decorators, PhaseGVN& gvn) const {
  const TypeAryPtr* ary_type = gvn.type(base)->isa_aryptr();
  const TypePtr* adr_type = NULL;
  bool is_array = ary_type != NULL;
  if ((decorators & C2_MISMATCHED) != 0) {
    adr_type = TypeRawPtr::BOTTOM;
  } else if (is_array) {
    // In the case of a flattened value type array, each field has its own slice
    adr_type = ary_type->with_field_offset(offset)->add_offset(Type::OffsetBot);
  } else {
    ciField* field = holder->get_field_by_offset(offset, false);
    assert(field != NULL, "field not found");
    adr_type = gvn.C->alias_type(field)->adr_type();
  }
  return adr_type;
}

void ValueTypeBaseNode::load(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) {
  // Initialize the value type by loading its field values from
  // memory and adding the values as input edges to the node.
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    Node* value = NULL;
    ciType* ft = field_type(i);
    bool is_flattenable = field_is_flattenable(i);
    if (field_is_flattened(i)) {
      // Recursively load the flattened value type field
      value = ValueTypeNode::make_from_flattened(kit, ft->as_value_klass(), base, ptr, holder, offset, decorators);
    } else {
      const TypeOopPtr* oop_ptr = kit->gvn().type(base)->isa_oopptr();
      bool is_array = (oop_ptr->isa_aryptr() != NULL);
      if (base->is_Con() && !is_array) {
        // If the oop to the value type is constant (static final field), we can
        // also treat the fields as constants because the value type is immutable.
        ciObject* constant_oop = oop_ptr->const_oop();
        ciField* field = holder->get_field_by_offset(offset, false);
        assert(field != NULL, "field not found");
        ciConstant constant = constant_oop->as_instance()->field_value(field);
        const Type* con_type = Type::make_from_constant(constant, /*require_const=*/ true);
        assert(con_type != NULL, "type not found");
        value = kit->gvn().transform(kit->makecon(con_type));
        if (ft->is_valuetype() && !constant.as_object()->is_null_object()) {
          // Null-free, treat as flattenable
          is_flattenable = true;
        }
      } else {
        // Load field value from memory
        const TypePtr* adr_type = field_adr_type(base, offset, holder, decorators, kit->gvn());
        Node* adr = kit->basic_plus_adr(base, ptr, offset);
        BasicType bt = type2field[ft->basic_type()];
        assert(is_java_primitive(bt) || adr->bottom_type()->is_ptr_to_narrowoop() == UseCompressedOops, "inconsistent");
        const Type* val_type = Type::get_const_type(ft);
        if (is_array) {
          decorators |= IS_ARRAY;
        }
        value = kit->access_load_at(base, adr, adr_type, val_type, bt, decorators);
      }
      if (is_flattenable) {
        // Loading a non-flattened but flattenable value type from memory
        if (ft->as_value_klass()->is_scalarizable()) {
          value = ValueTypeNode::make_from_oop(kit, value, ft->as_value_klass());
        } else {
          value = kit->null2default(value, ft->as_value_klass());
        }
      }
    }
    set_field_value(i, value);
  }
}

void ValueTypeBaseNode::store_flattened(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) const {
  // The value type is embedded into the object without an oop header. Subtract the
  // offset of the first field to account for the missing header when storing the values.
  if (holder == NULL) {
    holder = value_klass();
  }
  holder_offset -= value_klass()->first_field_offset();
  store(kit, base, ptr, holder, holder_offset, decorators);
}

void ValueTypeBaseNode::store(GraphKit* kit, Node* base, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) const {
  // Write field values to memory
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    Node* value = field_value(i);
    ciType* ft = field_type(i);
    if (field_is_flattened(i)) {
      // Recursively store the flattened value type field
      if (!value->is_ValueType()) {
        assert(!kit->gvn().type(value)->maybe_null(), "should never be null");
        value = ValueTypeNode::make_from_oop(kit, value, ft->as_value_klass());
      }
      value->as_ValueType()->store_flattened(kit, base, ptr, holder, offset, decorators);
    } else {
      // Store field value to memory
      const TypePtr* adr_type = field_adr_type(base, offset, holder, decorators, kit->gvn());
      Node* adr = kit->basic_plus_adr(base, ptr, offset);
      BasicType bt = type2field[ft->basic_type()];
      assert(is_java_primitive(bt) || adr->bottom_type()->is_ptr_to_narrowoop() == UseCompressedOops, "inconsistent");
      const Type* val_type = Type::get_const_type(ft);
      const TypeAryPtr* ary_type = kit->gvn().type(base)->isa_aryptr();
      if (ary_type != NULL) {
        decorators |= IS_ARRAY;
      }
      kit->access_store_at(base, adr, adr_type, value, val_type, bt, decorators);
    }
  }
}

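// Makes sure the value type is buffered on the heap. If the oop is already non-NULL, 'this' is
// returned unchanged. Otherwise, a buffer is allocated on the NULL path, initialized with the
// field values, and a clone carrying the merged oop is returned (and put into the map if
// 'safe_for_replace' is set).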
ValueTypeBaseNode* ValueTypeBaseNode::allocate(GraphKit* kit, bool safe_for_replace) {
  // Check if value type is already allocated
  Node* null_ctl = kit->top();
  Node* not_null_oop = kit->null_check_oop(get_oop(), &null_ctl);
  if (null_ctl->is_top()) {
    // Value type is allocated
    return this;
  }
  assert(!is_allocated(&kit->gvn()), "should not be allocated");
  RegionNode* region = new RegionNode(3);

  // Oop is non-NULL, use it
  region->init_req(1, kit->control());
  PhiNode* oop = PhiNode::make(region, not_null_oop, value_ptr());
  PhiNode* io  = PhiNode::make(region, kit->i_o(), Type::ABIO);
  PhiNode* mem = PhiNode::make(region, kit->merged_memory(), Type::MEMORY, TypePtr::BOTTOM);

  int bci = kit->bci();
  bool reexecute = kit->jvms()->should_reexecute();
  {
    // Oop is NULL, allocate and initialize buffer
    PreserveJVMState pjvms(kit);
    // Propagate re-execution state and bci
    kit->set_bci(bci);
    kit->jvms()->set_bci(bci);
    kit->jvms()->set_should_reexecute(reexecute);
    kit->set_control(null_ctl);
    kit->kill_dead_locals();
    ciValueKlass* vk = value_klass();
    Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
    Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, /* deoptimize_on_exception */ true, this);
    store(kit, alloc_oop, alloc_oop, vk, 0);
    region->init_req(2, kit->control());
    oop   ->init_req(2, alloc_oop);
    io    ->init_req(2, kit->i_o());
    mem   ->init_req(2, kit->merged_memory());
  }

  // Update GraphKit
  kit->set_control(kit->gvn().transform(region));
  kit->set_i_o(kit->gvn().transform(io));
  kit->set_all_memory(kit->gvn().transform(mem));
  kit->record_for_igvn(region);
  kit->record_for_igvn(oop);
  kit->record_for_igvn(io);
  kit->record_for_igvn(mem);

  // Use cloned ValueTypeNode to propagate oop from now on
  Node* res_oop = kit->gvn().transform(oop);
  ValueTypeBaseNode* vt = clone()->as_ValueTypeBase();
  vt->set_oop(res_oop);
  vt = kit->gvn().transform(vt)->as_ValueTypeBase();
  if (safe_for_replace) {
    kit->replace_in_map(this, vt);
  }
  return vt;
}

bool ValueTypeBaseNode::is_allocated(PhaseGVN* phase) const {
  Node* oop = get_oop();
  const Type* oop_type = (phase != NULL) ? phase->type(oop) : oop->bottom_type();
  return !oop_type->maybe_null();
}

// When a call returns multiple values, it has several result
// projections, one per field. Replacing the result of the call by a
// value type node (after late inlining) requires that for each result
// projection, we find the corresponding value type field.
void ValueTypeBaseNode::replace_call_results(GraphKit* kit, Node* call, Compile* C) {
  ciValueKlass* vk = value_klass();
  for (DUIterator_Fast imax, i = call->fast_outs(imax); i < imax; i++) {
    ProjNode* pn = call->fast_out(i)->as_Proj();
    uint con = pn->_con;
    if (con >= TypeFunc::Parms+1) {
      uint field_nb = con - (TypeFunc::Parms+1);
      int extra = 0;
      for (uint j = 0; j < field_nb - extra; j++) {
        ciField* f = vk->nonstatic_field_at(j);
        BasicType bt = f->type()->basic_type();
        if (bt == T_LONG || bt == T_DOUBLE) {
          extra++;
        }
      }
      ciField* f = vk->nonstatic_field_at(field_nb - extra);
      Node* field = field_value_by_offset(f->offset(), true);
      if (field->is_ValueType()) {
        assert(field->as_ValueType()->is_allocated(&kit->gvn()), "must be allocated");
        field = field->as_ValueType()->get_oop();
      }
      C->gvn_replace_by(pn, field);
      C->initial_gvn()->hash_delete(pn);
      pn->set_req(0, C->top());
      --i; --imax;
    }
  }
}

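// Returns a clone of this value type in which all value type fields are buffered: flattened
// fields are handled recursively and non-flattened value type fields are allocated.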
Node* ValueTypeBaseNode::allocate_fields(GraphKit* kit) {
  ValueTypeBaseNode* vt = clone()->as_ValueTypeBase();
  for (uint i = 0; i < field_count(); i++) {
    ValueTypeNode* value = field_value(i)->isa_ValueType();
    if (field_is_flattened(i)) {
      // Flattened value type field
      vt->set_field_value(i, value->allocate_fields(kit));
    } else if (value != NULL) {
      // Non-flattened value type field
      vt->set_field_value(i, value->allocate(kit));
    }
  }
  vt = kit->gvn().transform(vt)->as_ValueTypeBase();
  kit->replace_in_map(this, vt);
  return vt;
}

ValueTypeNode* ValueTypeNode::make_uninitialized(PhaseGVN& gvn, ciValueKlass* vk) {
  // Create a new ValueTypeNode with uninitialized values and NULL oop
  return new ValueTypeNode(vk, gvn.zerocon(T_VALUETYPE));
}

Node* ValueTypeNode::default_oop(PhaseGVN& gvn, ciValueKlass* vk) {
  // Returns the constant oop of the default value type allocation
  return gvn.makecon(TypeInstPtr::make(vk->default_value_instance()));
}

ValueTypeNode* ValueTypeNode::make_default(PhaseGVN& gvn, ciValueKlass* vk) {
  // Create a new ValueTypeNode with default values
  ValueTypeNode* vt = new ValueTypeNode(vk, default_oop(gvn, vk));
  for (uint i = 0; i < vt->field_count(); ++i) {
    ciType* field_type = vt->field_type(i);
    Node* value = NULL;
    if (field_type->is_valuetype() && vt->field_is_flattenable(i)) {
      ciValueKlass* field_klass = field_type->as_value_klass();
      if (field_klass->is_scalarizable() || vt->field_is_flattened(i)) {
        value = ValueTypeNode::make_default(gvn, field_klass);
      } else {
        value = default_oop(gvn, field_klass);
      }
    } else {
      value = gvn.zerocon(field_type->basic_type());
    }
    vt->set_field_value(i, value);
  }
  vt = gvn.transform(vt)->as_ValueType();
  assert(vt->is_default(gvn), "must be the default value type");
  return vt;
}

bool ValueTypeNode::is_default(PhaseGVN& gvn) const {
  for (uint i = 0; i < field_count(); ++i) {
    Node* value = field_value(i);
    if (!gvn.type(value)->is_zero_type() &&
        !(value->is_ValueType() && value->as_ValueType()->is_default(gvn)) &&
        !(field_type(i)->is_valuetype() && value == default_oop(gvn, field_type(i)->as_value_klass()))) {
      return false;
    }
  }
  return true;
}

ValueTypeNode* ValueTypeNode::make_from_oop(GraphKit* kit, Node* oop, ciValueKlass* vk) {
  PhaseGVN& gvn = kit->gvn();

  // Create and initialize a ValueTypeNode by loading all field
  // values from a heap-allocated version and also save the oop.
  ValueTypeNode* vt = new ValueTypeNode(vk, oop);

  if (oop->isa_ValueTypePtr()) {
    // Can happen with late inlining
    ValueTypePtrNode* vtptr = oop->as_ValueTypePtr();
    vt->set_oop(vtptr->get_oop());
    for (uint i = Oop+1; i < vtptr->req(); ++i) {
      vt->init_req(i, vtptr->in(i));
    }
  } else if (gvn.type(oop)->maybe_null()) {
    // Add a null check because the oop may be null
    Node* null_ctl = kit->top();
    Node* not_null_oop = kit->null_check_oop(oop, &null_ctl);
    if (kit->stopped()) {
      // Constant null
      kit->set_control(null_ctl);
      return make_default(gvn, vk);
    }
    vt->set_oop(not_null_oop);
    vt->load(kit, not_null_oop, not_null_oop, vk, /* holder_offset */ 0);

    if (null_ctl != kit->top()) {
      // Return default value type if oop is null
      ValueTypeNode* def = make_default(gvn, vk);
      Node* region = new RegionNode(3);
      region->init_req(1, kit->control());
      region->init_req(2, null_ctl);

      vt = vt->clone_with_phis(&gvn, region)->as_ValueType();
      vt->merge_with(&gvn, def, 2, true);
      kit->set_control(gvn.transform(region));
    }
  } else {
    // Oop can never be null
    Node* init_ctl = kit->control();
    vt->load(kit, oop, oop, vk, /* holder_offset */ 0);
    assert(init_ctl != kit->control() || !gvn.type(oop)->is_valuetypeptr() || oop->is_Con() || oop->Opcode() == Op_ValueTypePtr ||
           AllocateNode::Ideal_allocation(oop, &gvn) != NULL || vt->is_loaded(&gvn) == oop, "value type should be loaded");
  }

  assert(vt->is_allocated(&gvn), "value type should be allocated");
  return gvn.transform(vt)->as_ValueType();
}

// GraphKit helper that creates a ValueTypeNode from a flattened value type field or array element
ValueTypeNode* ValueTypeNode::make_from_flattened(GraphKit* kit, ciValueKlass* vk, Node* obj, Node* ptr, ciInstanceKlass* holder, int holder_offset, DecoratorSet decorators) {
  // Create and initialize a ValueTypeNode by loading all field values from
  // a flattened value type field at 'holder_offset' or from a value type array.
  ValueTypeNode* vt = make_uninitialized(kit->gvn(), vk);
  // The value type is flattened into the object without an oop header. Subtract the
  // offset of the first field to account for the missing header when loading the values.
  holder_offset -= vk->first_field_offset();
  vt->load(kit, obj, ptr, holder, holder_offset, decorators);
  assert(vt->is_loaded(&kit->gvn()) != obj, "holder oop should not be used as flattened value type oop");
  return kit->gvn().transform(vt)->as_ValueType();
}

ValueTypeNode* ValueTypeNode::make_from_multi(GraphKit* kit, MultiNode* multi, ExtendedSignature& sig, ciValueKlass* vk, uint& base_input, bool in) {
  ValueTypeNode* vt = ValueTypeNode::make_uninitialized(kit->gvn(), vk);
  vt->initialize_fields(kit, multi, sig, base_input, 0, in);
  return kit->gvn().transform(vt)->as_ValueType();
}

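// Returns a larval copy of this value type. If 'allocate' is true, a buffer marked as larval is
// allocated and initialized with the current field values, and its oop is attached to the copy.
// The larval state is cleared again by finish_larval() below.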
ValueTypeNode* ValueTypeNode::make_larval(GraphKit* kit, bool allocate) const {
  ciValueKlass* vk = value_klass();
  ValueTypeNode* res = clone()->as_ValueType();
  if (allocate) {
    // Re-execute if buffering triggers deoptimization
    PreserveReexecuteState preexecs(kit);
    kit->jvms()->set_should_reexecute(true);
    Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
    Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, true);
    AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
    alloc->_larval = true;

    store(kit, alloc_oop, alloc_oop, vk, 0);
    res->set_oop(alloc_oop);
  }
  res->set_type(TypeValueType::make(vk, true));
  res = kit->gvn().transform(res)->as_ValueType();
  return res;
}

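// Finishes a larval value type: clears the larval bits in the buffer's mark word and returns a
// copy whose node type is no longer marked as larval.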
ValueTypeNode* ValueTypeNode::finish_larval(GraphKit* kit) const {
  Node* obj = get_oop();
  Node* mark_addr = kit->basic_plus_adr(obj, oopDesc::mark_offset_in_bytes());
  Node* mark = kit->make_load(NULL, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
  mark = kit->gvn().transform(new AndXNode(mark, kit->MakeConX(~markWord::larval_mask_in_place)));
  kit->store_to_memory(kit->control(), mark_addr, mark, TypeX_X->basic_type(), kit->gvn().type(mark_addr)->is_ptr(), MemNode::unordered);

  ciValueKlass* vk = value_klass();
  ValueTypeNode* res = clone()->as_ValueType();
  res->set_type(TypeValueType::make(vk, false));
  res = kit->gvn().transform(res)->as_ValueType();
  return res;
}

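// Checks if all field values of this value type were loaded from the same base oop at offsets
// matching the layout of 'vk'. Returns that base oop if so (it can then serve as the buffered
// oop of this value type), NULL otherwise.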
Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) {
  if (vk == NULL) {
    vk = value_klass();
  }
  if (field_count() == 0) {
    assert(is_allocated(phase), "must be allocated");
    return get_oop();
  }
  for (uint i = 0; i < field_count(); ++i) {
    int offset = holder_offset + field_offset(i);
    Node* value = field_value(i);
    if (value->is_ValueType()) {
      ValueTypeNode* vt = value->as_ValueType();
      if (field_is_flattened(i)) {
        // Check value type field load recursively
        base = vt->is_loaded(phase, vk, base, offset - vt->value_klass()->first_field_offset());
        if (base == NULL) {
          return NULL;
        }
        continue;
      } else {
        value = vt->get_oop();
        if (value->Opcode() == Op_CastPP) {
          // Skip CastPP
          value = value->in(1);
        }
      }
    }
    if (value->isa_DecodeN()) {
      // Skip DecodeN
      value = value->in(1);
    }
    if (value->isa_Load()) {
      // Check if base and offset of the field load match the value type layout
      intptr_t loffset = 0;
      Node* lbase = AddPNode::Ideal_base_and_offset(value->in(MemNode::Address), phase, loffset);
      if (lbase == NULL || (lbase != base && base != NULL) || loffset != offset) {
        return NULL;
      } else if (base == NULL) {
        // Set base and check if pointer type matches
        base = lbase;
        const TypeInstPtr* vtptr = phase->type(base)->isa_instptr();
        if (vtptr == NULL || !vtptr->klass()->equals(vk)) {
          return NULL;
        }
      }
    } else {
      return NULL;
    }
  }
  return base;
}

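// Returns the klass pointer of 'vk' as a raw pointer constant with its lowest bit set. The tag
// presumably lets callees distinguish this marker from a regular oop when value type arguments
// are passed in scalarized form.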
Node* ValueTypeNode::tagged_klass(ciValueKlass* vk, PhaseGVN& gvn) {
  const TypeKlassPtr* tk = TypeKlassPtr::make(vk);
  intptr_t bits = tk->get_con();
  set_nth_bit(bits, 0);
  return gvn.makecon(TypeRawPtr::make((address)bits));
}

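// Passes the field values of this value type to the call or return node 'n', starting at edge
// 'base_input'. Flattened fields are passed recursively, non-flattened value type fields are
// buffered and passed as oops, and reserved entries and second halves of longs/doubles are
// filled with top.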
void ValueTypeNode::pass_fields(GraphKit* kit, Node* n, ExtendedSignature& sig, uint& base_input, int base_offset) {
  for (uint i = 0; i < field_count(); i++) {
    int sig_offset = (*sig)._offset;
    uint idx = field_index(sig_offset - base_offset);
    Node* arg = field_value(idx);

    if (field_is_flattened(idx)) {
      // Flattened value type field
      ValueTypeNode* vt = arg->as_ValueType();
      vt->pass_fields(kit, n, sig, base_input, sig_offset - vt->value_klass()->first_field_offset());
    } else {
      if (arg->is_ValueType()) {
        // Non-flattened value type field
        ValueTypeNode* vt = arg->as_ValueType();
        assert(n->Opcode() != Op_Return || vt->is_allocated(&kit->gvn()), "value type field should be allocated on return");
        arg = vt->allocate(kit)->get_oop();
      }
      // Initialize call/return arguments
      BasicType bt = field_type(i)->basic_type();
      n->init_req(base_input++, arg);
      if (type2size[bt] == 2) {
        n->init_req(base_input++, kit->top());
      }
      // Skip reserved arguments
      while (SigEntry::next_is_reserved(sig, bt)) {
        n->init_req(base_input++, kit->top());
        if (type2size[bt] == 2) {
          n->init_req(base_input++, kit->top());
        }
      }
    }
  }
}

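// Initializes the field values of this value type from the incoming arguments or result
// projections of 'multi' (a StartNode or CallNode), mirroring the layout used by pass_fields().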
void ValueTypeNode::initialize_fields(GraphKit* kit, MultiNode* multi, ExtendedSignature& sig, uint& base_input, int base_offset, bool in) {
  PhaseGVN& gvn = kit->gvn();
  for (uint i = 0; i < field_count(); i++) {
    int sig_offset = (*sig)._offset;
    uint idx = field_index(sig_offset - base_offset);
    ciType* type = field_type(idx);

    Node* parm = NULL;
    if (field_is_flattened(idx)) {
      // Flattened value type field
      ValueTypeNode* vt = ValueTypeNode::make_uninitialized(gvn, type->as_value_klass());
      vt->initialize_fields(kit, multi, sig, base_input, sig_offset - type->as_value_klass()->first_field_offset(), in);
      parm = gvn.transform(vt);
    } else {
      if (multi->is_Start()) {
        assert(in, "return from start?");
        parm = gvn.transform(new ParmNode(multi->as_Start(), base_input));
      } else if (in) {
        parm = multi->as_Call()->in(base_input);
      } else {
        parm = gvn.transform(new ProjNode(multi->as_Call(), base_input));
      }
      if (field_is_flattenable(idx)) {
        // Non-flattened but flattenable value type
        if (type->as_value_klass()->is_scalarizable()) {
          parm = ValueTypeNode::make_from_oop(kit, parm, type->as_value_klass());
        } else {
          parm = kit->null2default(parm, type->as_value_klass());
        }
      }
      base_input += type2size[type->basic_type()];
      // Skip reserved arguments
      BasicType bt = type->basic_type();
      while (SigEntry::next_is_reserved(sig, bt)) {
        base_input += type2size[bt];
      }
    }
    assert(parm != NULL, "should never be null");
    set_field_value(idx, parm);
    gvn.record_for_igvn(parm);
  }
}

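// Idealizations: use the pre-allocated oop for default value types, absorb ValueTypePtr inputs,
// recover the base oop if all fields were loaded from a single buffer and, during IGVN, remove
// allocations of the default value type and notify safepoint users of an available oop.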
Node* ValueTypeNode::Ideal(PhaseGVN* phase, bool can_reshape) {
  Node* oop = get_oop();
  if (is_default(*phase) && (!oop->is_Con() || phase->type(oop)->is_zero_type())) {
    // Use the pre-allocated oop for default value types
    set_oop(default_oop(*phase, value_klass()));
    return this;
  } else if (oop->isa_ValueTypePtr()) {
    // Can happen with late inlining
    ValueTypePtrNode* vtptr = oop->as_ValueTypePtr();
    set_oop(vtptr->get_oop());
    for (uint i = Oop+1; i < vtptr->req(); ++i) {
      set_req(i, vtptr->in(i));
    }
    return this;
  }

  if (!is_allocated(phase)) {
    // Save base oop if fields are loaded from memory and the value
    // type is not buffered (in this case we should not use the oop).
    Node* base = is_loaded(phase);
    if (base != NULL) {
      set_oop(base);
      assert(is_allocated(phase), "should now be allocated");
      return this;
    }
  }

  if (can_reshape) {
    PhaseIterGVN* igvn = phase->is_IterGVN();

    if (is_default(*phase)) {
      // Search for users of the default value type
      for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
        Node* user = fast_out(i);
        AllocateNode* alloc = user->isa_Allocate();
        if (alloc != NULL && alloc->result_cast() != NULL && alloc->in(AllocateNode::ValueNode) == this) {
          // Found an allocation of the default value type.
          // If the code in StoreNode::Identity() that removes useless stores was not yet
          // executed or ReduceFieldZeroing is disabled, there can still be initializing
          // stores (only zero-type or default value stores, because value types are immutable).
          Node* res = alloc->result_cast();
          for (DUIterator_Fast jmax, j = res->fast_outs(jmax); j < jmax; j++) {
            AddPNode* addp = res->fast_out(j)->isa_AddP();
            if (addp != NULL) {
              for (DUIterator_Fast kmax, k = addp->fast_outs(kmax); k < kmax; k++) {
                StoreNode* store = addp->fast_out(k)->isa_Store();
                if (store != NULL && store->outcnt() != 0) {
                  // Remove the useless store
                  igvn->replace_in_uses(store, store->in(MemNode::Memory));
                }
              }
            }
          }
          // Replace allocation by pre-allocated oop
          igvn->replace_node(res, default_oop(*phase, value_klass()));
        } else if (user->is_ValueType()) {
          // Add value type user to worklist to give it a chance to get optimized as well
          igvn->_worklist.push(user);
        }
      }
    }

    if (is_allocated(igvn)) {
      // Value type is heap allocated, search for safepoint uses
      for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
        Node* out = fast_out(i);
        if (out->is_SafePoint()) {
          // Let SafePointNode::Ideal() take care of re-wiring the
          // safepoint to the oop input instead of the value type node.
          igvn->rehash_node_delayed(out);
        }
      }
    }
  }
  return NULL;
}

// Search for multiple allocations of this value type
// and try to replace them by dominating allocations.
// Then unlink the value type node and remove it.
void ValueTypeNode::remove_redundant_allocations(PhaseIterGVN* igvn, PhaseIdealLoop* phase) {
  // Search for allocations of this value type
  for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
    AllocateNode* alloc = fast_out(i)->isa_Allocate();
    if (alloc != NULL && alloc->in(AllocateNode::ValueNode) == this) {
      assert(!is_default(*igvn), "default value type allocation");
      Node* res = alloc->result_cast();
      if (res == NULL || !res->is_CheckCastPP()) {
        break; // No unique CheckCastPP
      }
      Node* res_dom = res;
      if (is_allocated(igvn)) {
        // The value type is already allocated but still connected to an AllocateNode.
        // This can happen with late inlining when we first allocate a value type argument
        // but later decide to inline the call with the callee code also allocating.
        res_dom = get_oop();
      } else {
        // Search for a dominating allocation of the same value type
        for (DUIterator_Fast jmax, j = fast_outs(jmax); j < jmax; j++) {
          AllocateNode* alloc_other = fast_out(j)->isa_Allocate();
          if (alloc_other != NULL && alloc_other->in(AllocateNode::ValueNode) == this) {
            Node* res_other = alloc_other->result_cast();
            if (res_other != NULL && res_other->is_CheckCastPP() && res_other != res_dom &&
                phase->is_dominator(res_other->in(0), res_dom->in(0))) {
              res_dom = res_other;
            }
          }
        }
      }
      if (res_dom != res) {
        // Move users to dominating allocation
        igvn->replace_node(res, res_dom);
        // The result of the dominated allocation is now unused and will be
        // removed later in AllocateNode::Ideal() to not confuse loop opts.
        igvn->record_for_igvn(alloc);
#ifdef ASSERT
        if (PrintEliminateAllocations) {
          tty->print("++++ Eliminated: %d Allocate ", alloc->_idx);
          dump_spec(tty);
          tty->cr();
        }
#endif
      }
    }
  }

  // Process users
  for (DUIterator_Fast imax, i = fast_outs(imax); i < imax; i++) {
    Node* out = fast_out(i);
    if (out->is_ValueType()) {
      // Recursively process value type users
      out->as_ValueType()->remove_redundant_allocations(igvn, phase);
      --i; --imax;
    } else if (out->isa_Allocate() != NULL) {
      // Unlink AllocateNode
      assert(out->in(AllocateNode::ValueNode) == this, "should be linked");
      igvn->replace_input_of(out, AllocateNode::ValueNode, igvn->C->top());
      --i; --imax;
    } else {
#ifdef ASSERT
      // The value type should not have any other users at this time
      out->dump();
      assert(false, "unexpected user of value type");
#endif
    }
  }
  igvn->remove_dead_node(this);
}

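// Creates a ValueTypePtrNode from 'vt' by buffering it and copying over its field value inputs.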
ValueTypePtrNode* ValueTypePtrNode::make_from_value_type(GraphKit* kit, ValueTypeNode* vt) {
  Node* oop = vt->allocate(kit)->get_oop();
  ValueTypePtrNode* vtptr = new ValueTypePtrNode(vt->value_klass(), oop);
  for (uint i = Oop+1; i < vt->req(); i++) {
    vtptr->init_req(i, vt->in(i));
  }
  return kit->gvn().transform(vtptr)->as_ValueTypePtr();
}

ValueTypePtrNode* ValueTypePtrNode::make_from_oop(GraphKit* kit, Node* oop) {
  // Create and initialize a ValueTypePtrNode by loading all field
  // values from a heap-allocated version and also save the oop.
  ciValueKlass* vk = kit->gvn().type(oop)->value_klass();
  ValueTypePtrNode* vtptr = new ValueTypePtrNode(vk, oop);
  vtptr->load(kit, oop, oop, vk);
  return kit->gvn().transform(vtptr)->as_ValueTypePtr();
}