src/hotspot/share/opto/valuetypenode.cpp

rev 59083 : DRAFT 8236522: NonTearable marker interface for inline classes to enforce atomicity

Old version:
 382   PhiNode* mem = PhiNode::make(region, kit->merged_memory(), Type::MEMORY, TypePtr::BOTTOM);
 383 
 384   int bci = kit->bci();
 385   bool reexecute = kit->jvms()->should_reexecute();
 386   {
 387     // Oop is NULL, allocate and initialize buffer
 388     PreserveJVMState pjvms(kit);
 389     // Propagate re-execution state and bci
 390     kit->set_bci(bci);
 391     kit->jvms()->set_bci(bci);
 392     kit->jvms()->set_should_reexecute(reexecute);
 393     kit->set_control(null_ctl);
 394     kit->kill_dead_locals();
 395     ciValueKlass* vk = value_klass();
 396     Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
 397     Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, /* deoptimize_on_exception */ true, this);
 398     store(kit, alloc_oop, alloc_oop, vk, 0);
 399 
 400     // Do not let stores that initialize this buffer be reordered with a subsequent
 401     // store that would make this buffer accessible by other threads.
 402     AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
 403     assert(alloc != NULL, "must have an allocation node");
 404     kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
 405 
 406     region->init_req(2, kit->control());
 407     oop   ->init_req(2, alloc_oop);
 408     io    ->init_req(2, kit->i_o());
 409     mem   ->init_req(2, kit->merged_memory());
 410   }
 411 
 412   // Update GraphKit
 413   kit->set_control(kit->gvn().transform(region));
 414   kit->set_i_o(kit->gvn().transform(io));
 415   kit->set_all_memory(kit->gvn().transform(mem));
 416   kit->record_for_igvn(region);
 417   kit->record_for_igvn(oop);
 418   kit->record_for_igvn(io);
 419   kit->record_for_igvn(mem);
 420 
 421   // Use cloned ValueTypeNode to propagate oop from now on


 610     Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
 611     Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, true);
 612     AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
 613     alloc->_larval = true;
 614 
 615     store(kit, alloc_oop, alloc_oop, vk, 0);
 616     res->set_oop(alloc_oop);
 617   }
 618   res->set_type(TypeValueType::make(vk, true));
 619   res = kit->gvn().transform(res)->as_ValueType();
 620   assert(!allocate || res->is_allocated(&kit->gvn()), "must be allocated");
 621   return res;
 622 }
 623 
 624 ValueTypeNode* ValueTypeNode::finish_larval(GraphKit* kit) const {
 625   Node* obj = get_oop();
 626   Node* mark_addr = kit->basic_plus_adr(obj, oopDesc::mark_offset_in_bytes());
 627   Node* mark = kit->make_load(NULL, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
 628   mark = kit->gvn().transform(new AndXNode(mark, kit->MakeConX(~markWord::larval_mask_in_place)));
 629   kit->store_to_memory(kit->control(), mark_addr, mark, TypeX_X->basic_type(), kit->gvn().type(mark_addr)->is_ptr(), MemNode::unordered);
 630 
 631   // Do not let stores that initialize this buffer be reordered with a subsequent
 632   // store that would make this buffer accessible by other threads.
 633   AllocateNode* alloc = AllocateNode::Ideal_allocation(obj, &kit->gvn());
 634   assert(alloc != NULL, "must have an allocation node");
 635   kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
 636 
 637   ciValueKlass* vk = value_klass();
 638   ValueTypeNode* res = clone()->as_ValueType();
 639   res->set_type(TypeValueType::make(vk, false));
 640   res = kit->gvn().transform(res)->as_ValueType();
 641   return res;
 642 }
 643 
 644 Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) {
 645   if (vk == NULL) {
 646     vk = value_klass();
 647   }
 648   if (field_count() == 0) {
 649     assert(is_allocated(phase), "must be allocated");
 650     return get_oop();
 651   }
 652   for (uint i = 0; i < field_count(); ++i) {
 653     int offset = holder_offset + field_offset(i);
 654     Node* value = field_value(i);
 655     if (value->is_ValueType()) {
 656       ValueTypeNode* vt = value->as_ValueType();
 657       if (field_is_flattened(i)) {
 658         // Check value type field load recursively
 659         base = vt->is_loaded(phase, vk, base, offset - vt->value_klass()->first_field_offset());
 660         if (base == NULL) {
 661           return NULL;

New version (rev 59083):
 382   PhiNode* mem = PhiNode::make(region, kit->merged_memory(), Type::MEMORY, TypePtr::BOTTOM);
 383 
 384   int bci = kit->bci();
 385   bool reexecute = kit->jvms()->should_reexecute();
 386   {
 387     // Oop is NULL, allocate and initialize buffer
 388     PreserveJVMState pjvms(kit);
 389     // Propagate re-execution state and bci
 390     kit->set_bci(bci);
 391     kit->jvms()->set_bci(bci);
 392     kit->jvms()->set_should_reexecute(reexecute);
 393     kit->set_control(null_ctl);
 394     kit->kill_dead_locals();
 395     ciValueKlass* vk = value_klass();
 396     Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
 397     Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, /* deoptimize_on_exception */ true, this);
 398     store(kit, alloc_oop, alloc_oop, vk, 0);
 399 
 400     // Do not let stores that initialize this buffer be reordered with a subsequent
 401     // store that would make this buffer accessible by other threads.
 402     // FIXME: coordinate with ready_to_publish(kit, alloc_oop)
 403     AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
 404     assert(alloc != NULL, "must have an allocation node");
 405     kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
 406 
 407     region->init_req(2, kit->control());
 408     oop   ->init_req(2, alloc_oop);
 409     io    ->init_req(2, kit->i_o());
 410     mem   ->init_req(2, kit->merged_memory());
 411   }
 412 
 413   // Update GraphKit
 414   kit->set_control(kit->gvn().transform(region));
 415   kit->set_i_o(kit->gvn().transform(io));
 416   kit->set_all_memory(kit->gvn().transform(mem));
 417   kit->record_for_igvn(region);
 418   kit->record_for_igvn(oop);
 419   kit->record_for_igvn(io);
 420   kit->record_for_igvn(mem);
 421 
 422   // Use cloned ValueTypeNode to propagate oop from now on
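
The MemBarStoreStore above (and the FIXME at line 402 about coordinating with ready_to_publish) enforces release-style publication: every store that initializes the fresh buffer must be ordered before any later store that makes the buffer's address visible to other threads. A minimal stand-alone C++ analogy of that discipline; the Buffer type and g_slot are illustrative stand-ins, not HotSpot code:

    #include <atomic>

    struct Buffer { int f0; int f1; };        // stand-in for the value buffer

    std::atomic<Buffer*> g_slot{nullptr};     // stand-in for a heap field/array slot

    void publish() {
      Buffer* buf = new Buffer();             // allocate   (new_instance)
      buf->f0 = 1;                            // initialize (store(kit, ...))
      buf->f1 = 2;
      // The release store plays the role of MemBarStoreStore: the two
      // initializing stores above may not be reordered past it, so a reader
      // that observes buf in g_slot (via an acquire load) also observes
      // fully initialized fields.
      g_slot.store(buf, std::memory_order_release);
    }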


 611     Node* klass_node = kit->makecon(TypeKlassPtr::make(vk));
 612     Node* alloc_oop  = kit->new_instance(klass_node, NULL, NULL, true);
 613     AllocateNode* alloc = AllocateNode::Ideal_allocation(alloc_oop, &kit->gvn());
 614     alloc->_larval = true;
 615 
 616     store(kit, alloc_oop, alloc_oop, vk, 0);
 617     res->set_oop(alloc_oop);
 618   }
 619   res->set_type(TypeValueType::make(vk, true));
 620   res = kit->gvn().transform(res)->as_ValueType();
 621   assert(!allocate || res->is_allocated(&kit->gvn()), "must be allocated");
 622   return res;
 623 }
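
Setting alloc->_larval marks the buffer as a larval (still mutable) instance: field stores remain legal until finish_larval below strips that state. A toy sketch of the two-phase protocol, with illustrative names only:

    #include <cassert>

    struct LarvalBuffer {
      bool larval = true;                     // freshly allocated buffers start larval
      int  field  = 0;
      void store_field(int v) {
        assert(larval && "stores are only legal while larval");
        field = v;
      }
      void finish() { larval = false; }       // now immutable, safe to share
    };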
 624 
 625 ValueTypeNode* ValueTypeNode::finish_larval(GraphKit* kit) const {
 626   Node* obj = get_oop();
 627   Node* mark_addr = kit->basic_plus_adr(obj, oopDesc::mark_offset_in_bytes());
 628   Node* mark = kit->make_load(NULL, mark_addr, TypeX_X, TypeX_X->basic_type(), MemNode::unordered);
 629   mark = kit->gvn().transform(new AndXNode(mark, kit->MakeConX(~markWord::larval_mask_in_place)));
 630   kit->store_to_memory(kit->control(), mark_addr, mark, TypeX_X->basic_type(), kit->gvn().type(mark_addr)->is_ptr(), MemNode::unordered);
 631   ready_to_publish(kit, obj);
 632 
 633   // Do not let stores that initialize this buffer be reordered with a subsequent
 634   // store that would make this buffer accessible by other threads.
 635   AllocateNode* alloc = AllocateNode::Ideal_allocation(obj, &kit->gvn());
 636   assert(alloc != NULL, "must have an allocation node");
 637   kit->insert_mem_bar(Op_MemBarStoreStore, alloc->proj_out_or_null(AllocateNode::RawAddress));
 638 
 639   ciValueKlass* vk = value_klass();
 640   ValueTypeNode* res = clone()->as_ValueType();
 641   res->set_type(TypeValueType::make(vk, false));
 642   res = kit->gvn().transform(res)->as_ValueType();
 643   return res;
 644 }
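
The mark-word update in finish_larval is plain bit masking: load the header word, AND it with the complement of the larval mask, store it back. A self-contained sketch of the same arithmetic; the bit position is an assumption for illustration, not markWord's real layout:

    #include <cstdint>

    // Assumed header layout: a single larval flag bit somewhere in the mark word.
    constexpr uintptr_t larval_mask_in_place = uintptr_t(1) << 2;  // illustrative bit

    uintptr_t clear_larval(uintptr_t mark) {
      // AndX with ~mask clears only the larval bit and leaves the rest of the
      // header (hash, GC age, ...) untouched.
      return mark & ~larval_mask_in_place;
    }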
 645 
 646 void ValueTypeBaseNode::ready_to_publish(GraphKit* kit, Node* base) const {
 647   // Do not let stores that initialize this buffer be reordered with
 648   // a subsequent store that would make it accessible by other threads.
 649   // Required for correct non-flat array element publication.
 650   // (See jtreg test ValueTearing.java.)
 651   Node* raw_address_proj = NULL;  //FIXME
 652   kit->insert_mem_bar(Op_MemBarStoreStore, raw_address_proj);
 653   // Fails to prevent array element tearing:
 654   //kit->insert_mem_bar_volatile(Op_MemBarStoreStore, Compile::AliasIdxRaw, raw_address_proj);
 655 }
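
With raw_address_proj left NULL (per the FIXME), the StoreStore barrier is not attached to one allocation's raw-address projection, so it conservatively orders all earlier stores. In C++ terms that is closer to a free-standing release fence than a release store on a single location; a sketch under that analogy, with illustrative names:

    #include <atomic>

    std::atomic<int*> g_element{nullptr};     // stand-in for a non-flat array slot

    void publish_element(int* p) {
      // A free-standing fence orders *every* earlier store before the store
      // below -- the conservative analogue of inserting the barrier with a
      // NULL projection, as ready_to_publish does.
      std::atomic_thread_fence(std::memory_order_release);
      g_element.store(p, std::memory_order_relaxed);
    }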
 656 
 657 Node* ValueTypeNode::is_loaded(PhaseGVN* phase, ciValueKlass* vk, Node* base, int holder_offset) {
 658   if (vk == NULL) {
 659     vk = value_klass();
 660   }
 661   if (field_count() == 0) {
 662     assert(is_allocated(phase), "must be allocated");
 663     return get_oop();
 664   }
 665   for (uint i = 0; i < field_count(); ++i) {
 666     int offset = holder_offset + field_offset(i);
 667     Node* value = field_value(i);
 668     if (value->is_ValueType()) {
 669       ValueTypeNode* vt = value->as_ValueType();
 670       if (field_is_flattened(i)) {
 671         // Check value type field load recursively
 672         base = vt->is_loaded(phase, vk, base, offset - vt->value_klass()->first_field_offset());
 673         if (base == NULL) {
 674           return NULL;
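
The recursion in is_loaded re-bases offsets when it descends into a flattened field: the child's field offsets include its own first_field_offset, so the holder offset passed down is reduced by that amount. A stand-alone sketch of the same offset arithmetic over a toy layout; all types and numbers are illustrative:

    #include <cstdio>

    struct KlassDesc;
    struct FieldDesc { int offset; bool flattened; const KlassDesc* type; };
    struct KlassDesc {
      int first_field_offset;                 // where fields start past the header
      int field_count;
      const FieldDesc* fields;
    };

    // Print the absolute offset of each leaf field, mirroring
    // holder_offset + field_offset(i) - first_field_offset in is_loaded.
    void leaf_offsets(const KlassDesc* k, int holder_offset) {
      for (int i = 0; i < k->field_count; i++) {
        int offset = holder_offset + k->fields[i].offset;
        if (k->fields[i].flattened) {
          // Child offsets are relative to its own buffered layout, so re-base.
          leaf_offsets(k->fields[i].type, offset - k->fields[i].type->first_field_offset);
        } else {
          printf("leaf field at offset %d\n", offset);
        }
      }
    }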

