src/share/vm/memory/referenceProcessor.cpp
rev 6446 : [mq]: ref-write-new-fix

Old version (before the fix):

  79   }
  80 #endif // ASSERT
  81 
  82   // Someone could have modified the value of the static
  83   // field in the j.l.r.SoftReference class that holds the
  84   // soft reference timestamp clock using reflection or
  85   // Unsafe between GCs. Unconditionally update the static
  86   // field in ReferenceProcessor here so that we use the new
  87   // value during reference discovery.
  88 
  89   _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
  90   _discovering_refs = true;
  91 }
  92 
  93 ReferenceProcessor::ReferenceProcessor(MemRegion span,
  94                                        bool      mt_processing,
  95                                        uint      mt_processing_degree,
  96                                        bool      mt_discovery,
  97                                        uint      mt_discovery_degree,
  98                                        bool      atomic_discovery,
  99                                        BoolObjectClosure* is_alive_non_header,
 100                                        bool      discovered_list_needs_post_barrier)  :
 101   _discovering_refs(false),
 102   _enqueuing_is_done(false),
 103   _is_alive_non_header(is_alive_non_header),
 104   _discovered_list_needs_post_barrier(discovered_list_needs_post_barrier),
 105   _processing_is_mt(mt_processing),
 106   _next_id(0)
 107 {
 108   _span = span;
 109   _discovery_is_atomic = atomic_discovery;
 110   _discovery_is_mt     = mt_discovery;
 111   _num_q               = MAX2(1U, mt_processing_degree);
 112   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 113   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 114             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 115 
 116   if (_discovered_refs == NULL) {
  117     vm_exit_during_initialization("Could not allocate RefProc Array");
 118   }
 119   _discoveredSoftRefs    = &_discovered_refs[0];
 120   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 121   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 122   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 123 
 124   // Initialize all entries to NULL
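
The four list pointers above are strided views into the single _discovered_refs
allocation: the soft lists occupy the first _max_num_q slots, then weak, final
and phantom. A minimal sketch of the implied indexing (list_for is a
hypothetical helper, not part of the source):

    // Hypothetical helper: subclass s in {0=soft, 1=weak, 2=final, 3=phantom},
    // per-worker queue q in [0, _max_num_q).
    DiscoveredList* list_for(uint s, uint q) {
      assert(s < number_of_subclasses_of_ref() && q < _max_num_q, "out of bounds");
      return &_discovered_refs[s * _max_num_q + q];
    }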


 348     gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
 349                            INTPTR_FORMAT, (address)refs_list.head());
 350   }
 351 
 352   oop obj = NULL;
 353   oop next_d = refs_list.head();
 354   if (pending_list_uses_discovered_field()) { // New behavior
 355     // Walk down the list, self-looping the next field
 356     // so that the References are not considered active.
 357     while (obj != next_d) {
 358       obj = next_d;
 359       assert(obj->is_instanceRef(), "should be reference object");
 360       next_d = java_lang_ref_Reference::discovered(obj);
 361       if (TraceReferenceGC && PrintGCDetails) {
 362         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
 363                                (void *)obj, (void *)next_d);
 364       }
 365       assert(java_lang_ref_Reference::next(obj) == NULL,
 366              "Reference not active; should not be discovered");
 367       // Self-loop next, so as to make Ref not active.
 368       // Post-barrier not needed when looping to self.
 369       java_lang_ref_Reference::set_next_raw(obj, obj);
 370       if (next_d == obj) {  // obj is last
 371         // Swap refs_list into pending_list_addr and
 372         // set obj's discovered to what we read from pending_list_addr.
 373         oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
 374         // Need post-barrier on pending_list_addr above;
 375         // see special post-barrier code at the end of
 376         // enqueue_discovered_reflists() further below.
 377         java_lang_ref_Reference::set_discovered_raw(obj, old); // old may be NULL
 378         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), old);
 379       }
 380     }
 381   } else { // Old behavior
 382     // Walk down the list, copying the discovered field into
 383     // the next field and clearing the discovered field.
 384     while (obj != next_d) {
 385       obj = next_d;
 386       assert(obj->is_instanceRef(), "should be reference object");
 387       next_d = java_lang_ref_Reference::discovered(obj);
 388       if (TraceReferenceGC && PrintGCDetails) {
 389         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
 390                                (void *)obj, (void *)next_d);
 391       }
 392       assert(java_lang_ref_Reference::next(obj) == NULL,
 393              "The reference should not be enqueued");
 394       if (next_d == obj) {  // obj is last
 395         // Swap refs_list into pending_list_addr and
 396         // set obj's next to what we read from pending_list_addr.


 479 }
 480 
 481 void DiscoveredListIterator::remove() {
 482   assert(_ref->is_oop(), "Dropping a bad reference");
 483   oop_store_raw(_discovered_addr, NULL);
 484 
 485   // First _prev_next ref actually points into DiscoveredList (gross).
 486   oop new_next;
 487   if (_next == _ref) {
 488     // At the end of the list, we should make _prev point to itself.
 489     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 490     // and _prev will be NULL.
 491     new_next = _prev;
 492   } else {
 493     new_next = _next;
 494   }
 495   // Remove Reference object from discovered list. Note that G1 does not need a
 496   // pre-barrier here because we know the Reference has already been found/marked,
 497   // that's how it ended up in the discovered list in the first place.
 498   oop_store_raw(_prev_next, new_next);
 499   if (_discovered_list_needs_post_barrier && _prev_next != _refs_list.adr_head()) {
 500     // Needs post-barrier and this is not the list head (which is not on the heap)
 501     oopDesc::bs()->write_ref_field(_prev_next, new_next);
 502   }
 503   NOT_PRODUCT(_removed++);
 504   _refs_list.dec_length(1);
 505 }
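
remove() above uses the pointer-to-predecessor-link idiom: _prev_next aliases
either the DiscoveredList head or the previous Reference's discovered field,
so a single store covers both cases. The same shape on a plain singly linked
list, as a self-contained sketch (Node and unlink are illustrative names only):

    struct Node { Node* next; };  // next == this marks the last element,
                                  // mirroring the self-looped discovered lists

    // prev_next is the link that currently addresses ref: either &head or
    // &prev->next. prev is NULL when ref is the first element.
    void unlink(Node** prev_next, Node* ref, Node* prev) {
      // Tail case: re-loop the predecessor (or NULL the head), as in remove().
      Node* new_next = (ref->next == ref) ? prev : ref->next;
      *prev_next = new_next;
    }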
 506 
 507 // Make the Reference object active again.
 508 void DiscoveredListIterator::make_active() {
 509   // For G1 we don't want to use set_next - it
 510   // will dirty the card for the next field of
 511   // the reference object and will fail
 512   // CT verification.
 513   if (UseG1GC) {
 514     HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
 515     if (UseCompressedOops) {
 516       oopDesc::bs()->write_ref_field_pre((narrowOop*)next_addr, NULL);
 517     } else {
 518       oopDesc::bs()->write_ref_field_pre((oop*)next_addr, NULL);
 519     }
 520     java_lang_ref_Reference::set_next_raw(_ref, NULL);
 521   } else {
 522     java_lang_ref_Reference::set_next(_ref, NULL);
 523   }
 524 }
 525 
 526 void DiscoveredListIterator::clear_referent() {
 527   oop_store_raw(_referent_addr, NULL);
 528 }
 529 
 530 // NOTE: process_phase*() are largely similar, and at a high level
 531 // merely iterate over the extant list applying a predicate to
 532 // each of its elements and possibly removing that element from the
 533 // list and applying some further closures to that element.
 534 // We should consider the possibility of replacing these
 535 // process_phase*() methods by abstracting them into
 536 // a single general iterator invocation that receives appropriate
 537 // closures that accomplish this work.
 538 
 539 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
 540 // referents are not alive, but that should be kept alive for policy reasons.
 541 // Keep alive the transitive closure of all such referents.
 542 void
 543 ReferenceProcessor::process_phase1(DiscoveredList&    refs_list,
 544                                    ReferencePolicy*   policy,
 545                                    BoolObjectClosure* is_alive,
 546                                    OopClosure*        keep_alive,
 547                                    VoidClosure*       complete_gc) {
 548   assert(policy != NULL, "Must have a non-NULL policy");
 549   DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
 550   // Decide which softly reachable refs should be kept alive.
 551   while (iter.has_next()) {
 552     iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
 553     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
 554     if (referent_is_dead &&
 555         !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
 556       if (TraceReferenceGC) {
 557         gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s"  ") by policy",
 558                                (void *)iter.obj(), iter.obj()->klass()->internal_name());
 559       }
 560       // Remove Reference object from list
 561       iter.remove();
 562       // Make the Reference object active again
 563       iter.make_active();
 564       // keep the referent around
 565       iter.make_referent_alive();
 566       iter.move_to_next();
 567     } else {
 568       iter.next();
 569     }
 570   }
 571   // Close the reachable set
 572   complete_gc->do_void();
 573   NOT_PRODUCT(
 574     if (PrintGCDetails && TraceReferenceGC) {
 575       gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
 576         "discovered Refs by policy, from list " INTPTR_FORMAT,
 577         iter.removed(), iter.processed(), (address)refs_list.head());
 578     }
 579   )
 580 }
 581 
 582 // Traverse the list and remove any Refs that are not active, or
 583 // whose referents are either alive or NULL.
 584 void
 585 ReferenceProcessor::pp2_work(DiscoveredList&    refs_list,
 586                              BoolObjectClosure* is_alive,
 587                              OopClosure*        keep_alive) {
 588   assert(discovery_is_atomic(), "Error");
 589   DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
 590   while (iter.has_next()) {
 591     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 592     DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
 593     assert(next == NULL, "Should not discover inactive Reference");
 594     if (iter.is_referent_alive()) {
 595       if (TraceReferenceGC) {
 596         gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
 597                                (void *)iter.obj(), iter.obj()->klass()->internal_name());
 598       }
 599       // The referent is reachable after all.
 600       // Remove Reference object from list.
 601       iter.remove();
 602       // Update the referent pointer as necessary: Note that this
 603       // should not entail any recursive marking because the
 604       // referent must already have been traversed.
 605       iter.make_referent_alive();
 606       iter.move_to_next();
 607     } else {
 608       iter.next();
 609     }
 610   }
 611   NOT_PRODUCT(
 612     if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
 613       gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
 614         "Refs in discovered list " INTPTR_FORMAT,
 615         iter.removed(), iter.processed(), (address)refs_list.head());
 616     }
 617   )
 618 }
 619 
 620 void
 621 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList&    refs_list,
 622                                                   BoolObjectClosure* is_alive,
 623                                                   OopClosure*        keep_alive,
 624                                                   VoidClosure*       complete_gc) {
 625   assert(!discovery_is_atomic(), "Error");
 626   DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
 627   while (iter.has_next()) {
 628     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 629     HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
 630     oop next = java_lang_ref_Reference::next(iter.obj());
 631     if ((iter.referent() == NULL || iter.is_referent_alive() ||
 632          next != NULL)) {
 633       assert(next->is_oop_or_null(), "bad next field");
 634       // Remove Reference object from list
 635       iter.remove();
 636       // Trace the cohorts
 637       iter.make_referent_alive();
 638       if (UseCompressedOops) {
 639         keep_alive->do_oop((narrowOop*)next_addr);
 640       } else {
 641         keep_alive->do_oop((oop*)next_addr);
 642       }
 643       iter.move_to_next();
 644     } else {
 645       iter.next();
 646     }


 649   complete_gc->do_void();
 650   NOT_PRODUCT(
 651     if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
 652       gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
 653         "Refs in discovered list " INTPTR_FORMAT,
 654         iter.removed(), iter.processed(), (address)refs_list.head());
 655     }
 656   )
 657 }
 658 
 659 // Traverse the list and process the referents, by either
 660 // clearing them or keeping them (and their reachable
 661 // closure) alive.
 662 void
 663 ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
 664                                    bool               clear_referent,
 665                                    BoolObjectClosure* is_alive,
 666                                    OopClosure*        keep_alive,
 667                                    VoidClosure*       complete_gc) {
 668   ResourceMark rm;
 669   DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
 670   while (iter.has_next()) {
 671     iter.update_discovered();
 672     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 673     if (clear_referent) {
 674       // NULL out referent pointer
 675       iter.clear_referent();
 676     } else {
 677       // keep the referent around
 678       iter.make_referent_alive();
 679     }
 680     if (TraceReferenceGC) {
 681       gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 682                              clear_referent ? "cleared " : "",
 683                              (void *)iter.obj(), iter.obj()->klass()->internal_name());
 684     }
 685     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
 686     iter.next();
 687   }
 688   // Remember to update the next pointer of the last ref.
 689   iter.update_discovered();


 765                     bool                marks_oops_alive)
 766     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 767       _clear_referent(clear_referent)
 768   { }
 769   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 770                     OopClosure& keep_alive,
 771                     VoidClosure& complete_gc)
 772   {
 773     // Don't use "refs_list_index" calculated in this way because
 774     // balance_queues() has moved the Refs into the first n queues.
 775     // Thread* thr = Thread::current();
 776     // int refs_list_index = ((WorkerThread*)thr)->id();
 777     // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
 778     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 779                                   &is_alive, &keep_alive, &complete_gc);
 780   }
 781 private:
 782   bool _clear_referent;
 783 };
 784 
 785 void ReferenceProcessor::set_discovered(oop ref, oop value) {
 786   java_lang_ref_Reference::set_discovered_raw(ref, value);
 787   if (_discovered_list_needs_post_barrier) {
 788     oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(ref), value);
 789   }
 790 }
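
write_ref_field() here is the collector's post-write barrier. For a card-table
collector it amounts to dirtying the card that covers the updated field so a
later scan revisits that region. A generic sketch of the idea (512-byte cards
and 0 as the dirty value follow common HotSpot conventions; byte_map_base is an
assumption of this sketch, not this file's API):

    #include <cstdint>

    static const int card_shift = 9;  // 2^9 = 512-byte cards

    // Conceptual card-table post-barrier: mark the card covering 'field' dirty.
    inline void post_barrier(int8_t* byte_map_base, void* field) {
      byte_map_base[(uintptr_t)field >> card_shift] = 0;  // 0 == dirty card
    }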
 791 
 792 // Balances reference queues.
 793 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
 794 // queues[0, 1, ..., _num_q-1] because only the first _num_q
 795 // corresponding to the active workers will be processed.
 796 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 797 {
 798   // calculate total length
 799   size_t total_refs = 0;
 800   if (TraceReferenceGC && PrintGCDetails) {
 801     gclog_or_tty->print_cr("\nBalance ref_lists ");
 802   }
 803 
 804   for (uint i = 0; i < _max_num_q; ++i) {
 805     total_refs += ref_lists[i].length();
 806     if (TraceReferenceGC && PrintGCDetails) {
 807       gclog_or_tty->print("%d ", ref_lists[i].length());
 808     }
 809   }
 810   if (TraceReferenceGC && PrintGCDetails) {
 811     gclog_or_tty->print_cr(" = %d", total_refs);


 829                               avg_refs - ref_lists[to_idx].length());
 830         } else {
 831           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
 832                               avg_refs - ref_lists[to_idx].length());
 833         }
 834 
 835         assert(refs_to_move > 0, "otherwise the code below will fail");
 836 
 837         oop move_head = ref_lists[from_idx].head();
 838         oop move_tail = move_head;
 839         oop new_head  = move_head;
 840         // find an element to split the list on
 841         for (size_t j = 0; j < refs_to_move; ++j) {
 842           move_tail = new_head;
 843           new_head = java_lang_ref_Reference::discovered(new_head);
 844         }
 845 
 846         // Add the chain to the to list.
 847         if (ref_lists[to_idx].head() == NULL) {
 848           // to list is empty. Make a loop at the end.
 849           set_discovered(move_tail, move_tail);
 850         } else {
 851           set_discovered(move_tail, ref_lists[to_idx].head());
 852         }
 853         ref_lists[to_idx].set_head(move_head);
 854         ref_lists[to_idx].inc_length(refs_to_move);
 855 
 856         // Remove the chain from the from list.
 857         if (move_tail == new_head) {
 858           // We found the end of the from list.
 859           ref_lists[from_idx].set_head(NULL);
 860         } else {
 861           ref_lists[from_idx].set_head(new_head);
 862         }
 863         ref_lists[from_idx].dec_length(refs_to_move);
 864         if (ref_lists[from_idx].length() == 0) {
 865           break;
 866         }
 867       } else {
 868         to_idx = (to_idx + 1) % _num_q;
 869       }
 870     }
 871   }


 965     }
 966   }
 967 
 968   return total_list_count;
 969 }
 970 
 971 void ReferenceProcessor::clean_up_discovered_references() {
 972   // loop over the lists
 973   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 974     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 975       gclog_or_tty->print_cr(
 976         "\nScrubbing %s discovered list of Null referents",
 977         list_name(i));
 978     }
 979     clean_up_discovered_reflist(_discovered_refs[i]);
 980   }
 981 }
 982 
 983 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
 984   assert(!discovery_is_atomic(), "Else why call this method?");
 985   DiscoveredListIterator iter(refs_list, NULL, NULL, _discovered_list_needs_post_barrier);
 986   while (iter.has_next()) {
 987     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 988     oop next = java_lang_ref_Reference::next(iter.obj());
 989     assert(next->is_oop_or_null(), "bad next field");
 990     // If referent has been cleared or Reference is not active,
 991     // drop it.
 992     if (iter.referent() == NULL || next != NULL) {
 993       debug_only(
 994         if (PrintGCDetails && TraceReferenceGC) {
 995           gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
 996             INTPTR_FORMAT " with next field: " INTPTR_FORMAT
 997             " and referent: " INTPTR_FORMAT,
 998             (void *)iter.obj(), (void *)next, (void *)iter.referent());
 999         }
1000       )
1001       // Remove Reference object from list
1002       iter.remove();
1003       iter.move_to_next();
1004     } else {
1005       iter.next();


1054     default:
1055       ShouldNotReachHere();
1056   }
1057   if (TraceReferenceGC && PrintGCDetails) {
1058     gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
1059   }
1060   return list;
1061 }
1062 
1063 inline void
1064 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
1065                                               oop             obj,
1066                                               HeapWord*       discovered_addr) {
1067   assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1068   // First we must make sure this object is only enqueued once. CAS in a
1069   // non-NULL value at discovered_addr.
1070   oop current_head = refs_list.head();
1071   // The last ref must have its discovered field pointing to itself.
1072   oop next_discovered = (current_head != NULL) ? current_head : obj;
1073 
1074   // Note: In the case of G1, this specific pre-barrier is strictly
1075   // not necessary because the only case we are interested in
1076   // here is when *discovered_addr is NULL (see the CAS further below),
1077   // so this will expand to nothing. As a result, we have manually
1078   // elided this out for G1, but left in the test for some future
1079   // collector that might have need for a pre-barrier here, e.g.:-
1080   // oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1081   assert(!_discovered_list_needs_post_barrier || UseG1GC,
1082          "Need to check non-G1 collector: "
1083          "may need a pre-write-barrier for CAS from NULL below");
1084   oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1085                                                     NULL);
1086   if (retest == NULL) {
1087     // This thread just won the right to enqueue the object.
1088     // We have separate lists for enqueueing, so no synchronization
1089     // is necessary.
1090     refs_list.set_head(obj);
1091     refs_list.inc_length(1);
1092     if (_discovered_list_needs_post_barrier) {
1093       oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1094     }
1095 
1096     if (TraceReferenceGC) {
1097       gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
1098                              (void *)obj, obj->klass()->internal_name());
1099     }
1100   } else {
1101     // If retest was non-NULL, another thread beat us to it:
1102     // The reference has already been discovered...
1103     if (TraceReferenceGC) {
1104       gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1105                              (void *)obj, obj->klass()->internal_name());
1106     }
1107   }
1108 }
1109 
1110 #ifndef PRODUCT
1111 // Non-atomic (i.e. concurrent) discovery might allow us
1112 // to observe j.l.References with NULL referents, namely those
1113 // cleared concurrently by mutators during (or after) discovery.
1114 void ReferenceProcessor::verify_referent(oop obj) {


1225         (discovery_is_atomic() &&
1226          _span.contains(java_lang_ref_Reference::referent(obj)))) {
1227       // should_enqueue = true;
1228     } else {
1229       return false;
1230     }
1231   } else {
1232     assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1233            _span.contains(obj_addr), "code inconsistency");
1234   }
1235 
1236   // Get the right type of discovered queue head.
1237   DiscoveredList* list = get_discovered_list(rt);
1238   if (list == NULL) {
1239     return false;   // nothing special needs to be done
1240   }
1241 
1242   if (_discovery_is_mt) {
1243     add_to_discovered_list_mt(*list, obj, discovered_addr);
1244   } else {
1245     // If "_discovered_list_needs_post_barrier", we do write barriers when
1246     // updating the discovered reference list.  Otherwise, we do a raw store
1247     // here: the field will be visited later when processing the discovered
1248     // references.
1249     oop current_head = list->head();
1250     // The last ref must have its discovered field pointing to itself.
1251     oop next_discovered = (current_head != NULL) ? current_head : obj;
1252 
1253     // As in the case further above, since we are over-writing a NULL
1254     // pre-value, we can safely elide the pre-barrier here for the case of G1.
1255     // e.g.:- oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1256     assert(discovered == NULL, "control point invariant");
1257     assert(!_discovered_list_needs_post_barrier || UseG1GC,
1258            "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
1259     oop_store_raw(discovered_addr, next_discovered);
1260     if (_discovered_list_needs_post_barrier) {
1261       oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1262     }
1263     list->set_head(obj);
1264     list->inc_length(1);
1265 
1266     if (TraceReferenceGC) {
1267       gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
1268                                 (void *)obj, obj->klass()->internal_name());
1269     }
1270   }
1271   assert(obj->is_oop(), "Discovered a bad reference");
1272   verify_referent(obj);
1273   return true;
1274 }
1275 
1276 // Preclean the discovered references by removing those
1277 // whose referents are alive, and by marking from those that
1278 // are not active. These lists can be handled here
1279 // in any order and, indeed, concurrently.
1280 void ReferenceProcessor::preclean_discovered_references(
1281   BoolObjectClosure* is_alive,
1282   OopClosure* keep_alive,


1336       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1337                                   keep_alive, complete_gc, yield);
1338     }
1339   }
1340 }
1341 
1342 // Walk the given discovered ref list, and remove all reference objects
1343 // whose referents are still alive, whose referents are NULL or which
1344 // are not active (have a non-NULL next field). NOTE: When we are
1345 // thus precleaning the ref lists (which happens single-threaded today),
1346 // we do not disable refs discovery to honor the correct semantics of
1347 // java.lang.Reference. As a result, we need to be careful below
1348 // that ref removal steps interleave safely with ref discovery steps
1349 // (in this thread).
1350 void
1351 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1352                                                 BoolObjectClosure* is_alive,
1353                                                 OopClosure*        keep_alive,
1354                                                 VoidClosure*       complete_gc,
1355                                                 YieldClosure*      yield) {
1356   DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
1357   while (iter.has_next()) {
1358     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1359     oop obj = iter.obj();
1360     oop next = java_lang_ref_Reference::next(obj);
1361     if (iter.referent() == NULL || iter.is_referent_alive() ||
1362         next != NULL) {
1363       // The referent has been cleared, or is alive, or the Reference is not
1364       // active; we need to trace and mark its cohort.
1365       if (TraceReferenceGC) {
1366         gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1367                                (void *)iter.obj(), iter.obj()->klass()->internal_name());
1368       }
1369       // Remove Reference object from list
1370       iter.remove();
1371       // Keep alive its cohort.
1372       iter.make_referent_alive();
1373       if (UseCompressedOops) {
1374         narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
1375         keep_alive->do_oop(next_addr);
1376       } else {



New version (rev 6446, ref-write-new-fix):

  79   }
  80 #endif // ASSERT
  81 
  82   // Someone could have modified the value of the static
  83   // field in the j.l.r.SoftReference class that holds the
  84   // soft reference timestamp clock using reflection or
  85   // Unsafe between GCs. Unconditionally update the static
  86   // field in ReferenceProcessor here so that we use the new
  87   // value during reference discovery.
  88 
  89   _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
  90   _discovering_refs = true;
  91 }
  92 
  93 ReferenceProcessor::ReferenceProcessor(MemRegion span,
  94                                        bool      mt_processing,
  95                                        uint      mt_processing_degree,
  96                                        bool      mt_discovery,
  97                                        uint      mt_discovery_degree,
  98                                        bool      atomic_discovery,
  99                                        BoolObjectClosure* is_alive_non_header)  :
 100   _discovering_refs(false),
 101   _enqueuing_is_done(false),
 102   _is_alive_non_header(is_alive_non_header),
 103   _processing_is_mt(mt_processing),
 104   _next_id(0)
 105 {
 106   _span = span;
 107   _discovery_is_atomic = atomic_discovery;
 108   _discovery_is_mt     = mt_discovery;
 109   _num_q               = MAX2(1U, mt_processing_degree);
 110   _max_num_q           = MAX2(_num_q, mt_discovery_degree);
 111   _discovered_refs     = NEW_C_HEAP_ARRAY(DiscoveredList,
 112             _max_num_q * number_of_subclasses_of_ref(), mtGC);
 113 
 114   if (_discovered_refs == NULL) {
 115     vm_exit_during_initialization("Could not allocate RefProc Array");
 116   }
 117   _discoveredSoftRefs    = &_discovered_refs[0];
 118   _discoveredWeakRefs    = &_discoveredSoftRefs[_max_num_q];
 119   _discoveredFinalRefs   = &_discoveredWeakRefs[_max_num_q];
 120   _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
 121 
 122   // Initialize all entries to NULL


 346     gclog_or_tty->print_cr("ReferenceProcessor::enqueue_discovered_reflist list "
 347                            INTPTR_FORMAT, (address)refs_list.head());
 348   }
 349 
 350   oop obj = NULL;
 351   oop next_d = refs_list.head();
 352   if (pending_list_uses_discovered_field()) { // New behavior
 353     // Walk down the list, self-looping the next field
 354     // so that the References are not considered active.
 355     while (obj != next_d) {
 356       obj = next_d;
 357       assert(obj->is_instanceRef(), "should be reference object");
 358       next_d = java_lang_ref_Reference::discovered(obj);
 359       if (TraceReferenceGC && PrintGCDetails) {
 360         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
 361                                (void *)obj, (void *)next_d);
 362       }
 363       assert(java_lang_ref_Reference::next(obj) == NULL,
 364              "Reference not active; should not be discovered");
 365       // Self-loop next, so as to make Ref not active.
 366       java_lang_ref_Reference::set_next_raw(obj, obj);
 367       if (next_d != obj) {
 368         // The Java threads will see the Reference objects linked together through
 369         // the discovered field. Instead of trying to do the write barrier updates
 370         // in all places in the reference processor where we manipulate the discovered
 371         // field, we make sure to do the barrier here, where we iterate through
 372         // all linked Reference objects anyway.
 373         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), next_d);
 374       } else {
 375         // This is the last object.
 376         // Swap refs_list into pending_list_addr and
 377         // set obj's discovered to what we read from pending_list_addr.
 378         oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
 379         // Need post-barrier on pending_list_addr. See enqueue_discovered_ref_helper() above.
 380         java_lang_ref_Reference::set_discovered_raw(obj, old); // old may be NULL
 381         oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(obj), old);
 382       }
 383     }
 384   } else { // Old behavior
 385     // Walk down the list, copying the discovered field into
 386     // the next field and clearing the discovered field.
 387     while (obj != next_d) {
 388       obj = next_d;
 389       assert(obj->is_instanceRef(), "should be reference object");
 390       next_d = java_lang_ref_Reference::discovered(obj);
 391       if (TraceReferenceGC && PrintGCDetails) {
 392         gclog_or_tty->print_cr("        obj " INTPTR_FORMAT "/next_d " INTPTR_FORMAT,
 393                                (void *)obj, (void *)next_d);
 394       }
 395       assert(java_lang_ref_Reference::next(obj) == NULL,
 396              "The reference should not be enqueued");
 397       if (next_d == obj) {  // obj is last
 398         // Swap refs_list into pending_list_addr and
 399         // set obj's next to what we read from pending_list_addr.
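
The new-behavior walk above self-loops each Reference's next field to mark it
inactive and leaves the pending chain linked through the discovered field, with
the previous pending-list head (possibly NULL) at the tail. A minimal sketch of
the consumer-side traversal (pending_list_head is a placeholder for wherever
the chain head was published):

    oop obj = pending_list_head;
    while (obj != NULL) {
      // next self-loops (inactive), so the chain is linked via discovered.
      oop next_d = java_lang_ref_Reference::discovered(obj);
      // ... process obj ...
      obj = next_d;  // NULL once the end of the pending list is reached
    }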


 482 }
 483 
 484 void DiscoveredListIterator::remove() {
 485   assert(_ref->is_oop(), "Dropping a bad reference");
 486   oop_store_raw(_discovered_addr, NULL);
 487 
 488   // First _prev_next ref actually points into DiscoveredList (gross).
 489   oop new_next;
 490   if (_next == _ref) {
 491     // At the end of the list, we should make _prev point to itself.
 492     // If _ref is the first ref, then _prev_next will be in the DiscoveredList,
 493     // and _prev will be NULL.
 494     new_next = _prev;
 495   } else {
 496     new_next = _next;
 497   }
 498   // Remove Reference object from discovered list. Note that G1 does not need a
 499   // pre-barrier here because we know the Reference has already been found/marked,
 500   // that's how it ended up in the discovered list in the first place.
 501   oop_store_raw(_prev_next, new_next);
 502   NOT_PRODUCT(_removed++);
 503   _refs_list.dec_length(1);
 504 }
 505 
 506 // Make the Reference object active again.
 507 void DiscoveredListIterator::make_active() {
 508   // The pre-barrier for G1 is probably only needed for the old
 509   // reference processing behavior. Should we guard this with
 510   // ReferenceProcessor::pending_list_uses_discovered_field()?
 511   if (UseG1GC) {
 512     HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
 513     if (UseCompressedOops) {
 514       oopDesc::bs()->write_ref_field_pre((narrowOop*)next_addr, NULL);
 515     } else {
 516       oopDesc::bs()->write_ref_field_pre((oop*)next_addr, NULL);
 517     }
 518   }
 519   java_lang_ref_Reference::set_next_raw(_ref, NULL);
 520 }
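
The write_ref_field_pre() calls above are G1's SATB (snapshot-at-the-beginning)
pre-barrier: before a reference field is overwritten, its old value is logged
so concurrent marking still traverses it. A conceptual sketch (SATBQueue and
marking_active are illustrative stand-ins, not this file's API):

    struct SATBQueue { void enqueue(oop o); };  // stand-in for G1's per-thread buffer

    // Conceptual SATB pre-barrier: log the about-to-be-overwritten value.
    inline void pre_barrier(oop* field, bool marking_active, SATBQueue* q) {
      if (marking_active) {
        oop old = *field;
        if (old != NULL) q->enqueue(old);  // concurrent marker will visit old
      }
    }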
 521 
 522 void DiscoveredListIterator::clear_referent() {
 523   oop_store_raw(_referent_addr, NULL);
 524 }
 525 
 526 // NOTE: process_phase*() are largely similar, and at a high level
 527 // merely iterate over the extant list applying a predicate to
 528 // each of its elements and possibly removing that element from the
 529 // list and applying some further closures to that element.
 530 // We should consider the possibility of replacing these
 531 // process_phase*() methods by abstracting them into
 532 // a single general iterator invocation that receives appropriate
 533 // closures that accomplish this work.
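
A sketch of the consolidation this NOTE proposes: one loop skeleton
parameterized by a drop predicate and a phase-specific action, built from the
DiscoveredListIterator operations the phases already share (the template and
its parameter names are hypothetical):

    template <typename ShouldDrop, typename OnDrop>
    void iterate_discovered_list(DiscoveredList&    refs_list,
                                 BoolObjectClosure* is_alive,
                                 OopClosure*        keep_alive,
                                 ShouldDrop         should_drop,
                                 OnDrop             on_drop) {
      DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
      while (iter.has_next()) {
        iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
        if (should_drop(iter)) {
          iter.remove();
          on_drop(iter);       // e.g. make_active() and/or make_referent_alive()
          iter.move_to_next();
        } else {
          iter.next();
        }
      }
    }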
 534 
 535 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
 536 // referents are not alive, but that should be kept alive for policy reasons.
 537 // Keep alive the transitive closure of all such referents.
 538 void
 539 ReferenceProcessor::process_phase1(DiscoveredList&    refs_list,
 540                                    ReferencePolicy*   policy,
 541                                    BoolObjectClosure* is_alive,
 542                                    OopClosure*        keep_alive,
 543                                    VoidClosure*       complete_gc) {
 544   assert(policy != NULL, "Must have a non-NULL policy");
 545   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 546   // Decide which softly reachable refs should be kept alive.
 547   while (iter.has_next()) {
 548     iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
 549     bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
 550     if (referent_is_dead &&
 551         !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
 552       if (TraceReferenceGC) {
 553         gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s"  ") by policy",
 554                                (void *)iter.obj(), iter.obj()->klass()->internal_name());
 555       }
 556       // Remove Reference object from list
 557       iter.remove();
 558       // Make the Reference object active again
 559       iter.make_active();
 560       // keep the referent around
 561       iter.make_referent_alive();
 562       iter.move_to_next();
 563     } else {
 564       iter.next();
 565     }
 566   }
 567   // Close the reachable set
 568   complete_gc->do_void();
 569   NOT_PRODUCT(
 570     if (PrintGCDetails && TraceReferenceGC) {
 571       gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
 572         "discovered Refs by policy, from list " INTPTR_FORMAT,
 573         iter.removed(), iter.processed(), (address)refs_list.head());
 574     }
 575   )
 576 }
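
The policy consulted in phase 1 compares the reference's last-touch stamp
against the timestamp clock; HotSpot's LRU policies clear a SoftReference once
it has been idle longer than an interval scaled by free heap. A simplified
sketch of that shape (the interval math is illustrative, not the exact
LRUMaxHeapPolicy code):

    // Simplified LRU-style policy: clear a soft reference not touched for
    // longer than _interval_ms on the timestamp clock.
    class SimpleLRUPolicy : public ReferencePolicy {
      jlong _interval_ms;  // HotSpot scales this by free heap (SoftRefLRUPolicyMSPerMB)
     public:
      virtual bool should_clear_reference(oop ref, jlong timestamp_clock) {
        jlong last_touch = java_lang_ref_SoftReference::timestamp(ref);
        return timestamp_clock - last_touch > _interval_ms;
      }
    };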
 577 
 578 // Traverse the list and remove any Refs that are not active, or
 579 // whose referents are either alive or NULL.
 580 void
 581 ReferenceProcessor::pp2_work(DiscoveredList&    refs_list,
 582                              BoolObjectClosure* is_alive,
 583                              OopClosure*        keep_alive) {
 584   assert(discovery_is_atomic(), "Error");
 585   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 586   while (iter.has_next()) {
 587     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 588     DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
 589     assert(next == NULL, "Should not discover inactive Reference");
 590     if (iter.is_referent_alive()) {
 591       if (TraceReferenceGC) {
 592         gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
 593                                (void *)iter.obj(), iter.obj()->klass()->internal_name());
 594       }
 595       // The referent is reachable after all.
 596       // Remove Reference object from list.
 597       iter.remove();
 598       // Update the referent pointer as necessary: Note that this
 599       // should not entail any recursive marking because the
 600       // referent must already have been traversed.
 601       iter.make_referent_alive();
 602       iter.move_to_next();
 603     } else {
 604       iter.next();
 605     }
 606   }
 607   NOT_PRODUCT(
 608     if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
 609       gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
 610         "Refs in discovered list " INTPTR_FORMAT,
 611         iter.removed(), iter.processed(), (address)refs_list.head());
 612     }
 613   )
 614 }
 615 
 616 void
 617 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList&    refs_list,
 618                                                   BoolObjectClosure* is_alive,
 619                                                   OopClosure*        keep_alive,
 620                                                   VoidClosure*       complete_gc) {
 621   assert(!discovery_is_atomic(), "Error");
 622   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 623   while (iter.has_next()) {
 624     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 625     HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
 626     oop next = java_lang_ref_Reference::next(iter.obj());
 627     if ((iter.referent() == NULL || iter.is_referent_alive() ||
 628          next != NULL)) {
 629       assert(next->is_oop_or_null(), "bad next field");
 630       // Remove Reference object from list
 631       iter.remove();
 632       // Trace the cohorts
 633       iter.make_referent_alive();
 634       if (UseCompressedOops) {
 635         keep_alive->do_oop((narrowOop*)next_addr);
 636       } else {
 637         keep_alive->do_oop((oop*)next_addr);
 638       }
 639       iter.move_to_next();
 640     } else {
 641       iter.next();
 642     }


 645   complete_gc->do_void();
 646   NOT_PRODUCT(
 647     if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
 648       gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
 649         "Refs in discovered list " INTPTR_FORMAT,
 650         iter.removed(), iter.processed(), (address)refs_list.head());
 651     }
 652   )
 653 }
 654 
 655 // Traverse the list and process the referents, by either
 656 // clearing them or keeping them (and their reachable
 657 // closure) alive.
 658 void
 659 ReferenceProcessor::process_phase3(DiscoveredList&    refs_list,
 660                                    bool               clear_referent,
 661                                    BoolObjectClosure* is_alive,
 662                                    OopClosure*        keep_alive,
 663                                    VoidClosure*       complete_gc) {
 664   ResourceMark rm;
 665   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
 666   while (iter.has_next()) {
 667     iter.update_discovered();
 668     iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
 669     if (clear_referent) {
 670       // NULL out referent pointer
 671       iter.clear_referent();
 672     } else {
 673       // keep the referent around
 674       iter.make_referent_alive();
 675     }
 676     if (TraceReferenceGC) {
 677       gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
 678                              clear_referent ? "cleared " : "",
 679                              (void *)iter.obj(), iter.obj()->klass()->internal_name());
 680     }
 681     assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
 682     iter.next();
 683   }
 684   // Remember to update the next pointer of the last ref.
 685   iter.update_discovered();


 761                     bool                marks_oops_alive)
 762     : ProcessTask(ref_processor, refs_lists, marks_oops_alive),
 763       _clear_referent(clear_referent)
 764   { }
 765   virtual void work(unsigned int i, BoolObjectClosure& is_alive,
 766                     OopClosure& keep_alive,
 767                     VoidClosure& complete_gc)
 768   {
 769     // Don't use "refs_list_index" calculated in this way because
 770     // balance_queues() has moved the Refs into the first n queues.
 771     // Thread* thr = Thread::current();
 772     // int refs_list_index = ((WorkerThread*)thr)->id();
 773     // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
 774     _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
 775                                   &is_alive, &keep_alive, &complete_gc);
 776   }
 777 private:
 778   bool _clear_referent;
 779 };
 780 
 781 // Balances reference queues.
 782 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
 783 // queues[0, 1, ..., _num_q-1] because only the first _num_q
 784 // corresponding to the active workers will be processed.
 785 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
 786 {
 787   // calculate total length
 788   size_t total_refs = 0;
 789   if (TraceReferenceGC && PrintGCDetails) {
 790     gclog_or_tty->print_cr("\nBalance ref_lists ");
 791   }
 792 
 793   for (uint i = 0; i < _max_num_q; ++i) {
 794     total_refs += ref_lists[i].length();
 795     if (TraceReferenceGC && PrintGCDetails) {
 796       gclog_or_tty->print("%d ", ref_lists[i].length());
 797     }
 798   }
 799   if (TraceReferenceGC && PrintGCDetails) {
 800     gclog_or_tty->print_cr(" = %d", total_refs);


 818                               avg_refs - ref_lists[to_idx].length());
 819         } else {
 820           refs_to_move = MIN2(ref_lists[from_idx].length() - avg_refs,
 821                               avg_refs - ref_lists[to_idx].length());
 822         }
 823 
 824         assert(refs_to_move > 0, "otherwise the code below will fail");
 825 
 826         oop move_head = ref_lists[from_idx].head();
 827         oop move_tail = move_head;
 828         oop new_head  = move_head;
 829         // find an element to split the list on
 830         for (size_t j = 0; j < refs_to_move; ++j) {
 831           move_tail = new_head;
 832           new_head = java_lang_ref_Reference::discovered(new_head);
 833         }
 834 
 835         // Add the chain to the to list.
 836         if (ref_lists[to_idx].head() == NULL) {
 837           // to list is empty. Make a loop at the end.
 838           java_lang_ref_Reference::set_discovered_raw(move_tail, move_tail);
 839         } else {
 840           java_lang_ref_Reference::set_discovered_raw(move_tail, ref_lists[to_idx].head());
 841         }
 842         ref_lists[to_idx].set_head(move_head);
 843         ref_lists[to_idx].inc_length(refs_to_move);
 844 
 845         // Remove the chain from the from list.
 846         if (move_tail == new_head) {
 847           // We found the end of the from list.
 848           ref_lists[from_idx].set_head(NULL);
 849         } else {
 850           ref_lists[from_idx].set_head(new_head);
 851         }
 852         ref_lists[from_idx].dec_length(refs_to_move);
 853         if (ref_lists[from_idx].length() == 0) {
 854           break;
 855         }
 856       } else {
 857         to_idx = (to_idx + 1) % _num_q;
 858       }
 859     }
 860   }
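
The elided lines above pick from_idx/to_idx and a per-queue target; the MIN2
calls bound refs_to_move both by the surplus of the from queue and by the
deficit of the to queue. A hedged sketch of the target computation
(reconstructed, not copied from the elided code):

    // Balance toward roughly total_refs / _num_q entries per active queue.
    size_t avg_refs = total_refs / _num_q + 1;  // +1 biases toward draining the
                                                // queues beyond the first _num_q
    // Then, per transfer:
    //   refs_to_move = MIN2(from_len - avg_refs, avg_refs - to_len);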


 954     }
 955   }
 956 
 957   return total_list_count;
 958 }
 959 
 960 void ReferenceProcessor::clean_up_discovered_references() {
 961   // loop over the lists
 962   for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
 963     if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
 964       gclog_or_tty->print_cr(
 965         "\nScrubbing %s discovered list of Null referents",
 966         list_name(i));
 967     }
 968     clean_up_discovered_reflist(_discovered_refs[i]);
 969   }
 970 }
 971 
 972 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
 973   assert(!discovery_is_atomic(), "Else why call this method?");
 974   DiscoveredListIterator iter(refs_list, NULL, NULL);
 975   while (iter.has_next()) {
 976     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
 977     oop next = java_lang_ref_Reference::next(iter.obj());
 978     assert(next->is_oop_or_null(), "bad next field");
 979     // If referent has been cleared or Reference is not active,
 980     // drop it.
 981     if (iter.referent() == NULL || next != NULL) {
 982       debug_only(
 983         if (PrintGCDetails && TraceReferenceGC) {
 984           gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
 985             INTPTR_FORMAT " with next field: " INTPTR_FORMAT
 986             " and referent: " INTPTR_FORMAT,
 987             (void *)iter.obj(), (void *)next, (void *)iter.referent());
 988         }
 989       )
 990       // Remove Reference object from list
 991       iter.remove();
 992       iter.move_to_next();
 993     } else {
 994       iter.next();


1043     default:
1044       ShouldNotReachHere();
1045   }
1046   if (TraceReferenceGC && PrintGCDetails) {
1047     gclog_or_tty->print_cr("Thread %d gets list " INTPTR_FORMAT, id, list);
1048   }
1049   return list;
1050 }
1051 
1052 inline void
1053 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
1054                                               oop             obj,
1055                                               HeapWord*       discovered_addr) {
1056   assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1057   // First we must make sure this object is only enqueued once. CAS in a
1058   // non-NULL value at discovered_addr.
1059   oop current_head = refs_list.head();
1060   // The last ref must have its discovered field pointing to itself.
1061   oop next_discovered = (current_head != NULL) ? current_head : obj;
1062 
1063   oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1064                                                     NULL);
1065   if (retest == NULL) {
1066     // This thread just won the right to enqueue the object.
1067     // We have separate lists for enqueueing, so no synchronization
1068     // is necessary.
1069     refs_list.set_head(obj);
1070     refs_list.inc_length(1);
1071 
1072     if (TraceReferenceGC) {
1073       gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
1074                              (void *)obj, obj->klass()->internal_name());
1075     }
1076   } else {
1077     // If retest was non-NULL, another thread beat us to it:
1078     // The reference has already been discovered...
1079     if (TraceReferenceGC) {
1080       gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1081                              (void *)obj, obj->klass()->internal_name());
1082     }
1083   }
1084 }
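
The discovery CAS above is a claim-then-publish idiom: threads race to install
a non-NULL value in the per-object discovered field, and only the winner links
the object onto its list (per-thread lists make the publish itself race-free).
The same pattern in portable C++, with std::atomic standing in for
atomic_compare_exchange_oop (Ref and try_claim are illustrative):

    #include <atomic>

    struct Ref { std::atomic<Ref*> discovered{nullptr}; };

    // Returns true if this thread won the race to discover obj. next is the
    // current list head, or obj itself for an empty list (self-loop convention).
    bool try_claim(Ref* obj, Ref* next) {
      Ref* expected = nullptr;
      return obj->discovered.compare_exchange_strong(expected, next);
      // On failure, expected holds the non-NULL value another thread installed.
    }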
1085 
1086 #ifndef PRODUCT
1087 // Non-atomic (i.e. concurrent) discovery might allow us
1088 // to observe j.l.References with NULL referents, namely those
1089 // cleared concurrently by mutators during (or after) discovery.
1090 void ReferenceProcessor::verify_referent(oop obj) {


1201         (discovery_is_atomic() &&
1202          _span.contains(java_lang_ref_Reference::referent(obj)))) {
1203       // should_enqueue = true;
1204     } else {
1205       return false;
1206     }
1207   } else {
1208     assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1209            _span.contains(obj_addr), "code inconsistency");
1210   }
1211 
1212   // Get the right type of discovered queue head.
1213   DiscoveredList* list = get_discovered_list(rt);
1214   if (list == NULL) {
1215     return false;   // nothing special needs to be done
1216   }
1217 
1218   if (_discovery_is_mt) {
1219     add_to_discovered_list_mt(*list, obj, discovered_addr);
1220   } else {
1221     // We do a raw store here: the field will be visited later when processing
1222     // the discovered references.
1223     oop current_head = list->head();
1224     // The last ref must have its discovered field pointing to itself.
1225     oop next_discovered = (current_head != NULL) ? current_head : obj;
1226 
1227     assert(discovered == NULL, "control point invariant");
1228     oop_store_raw(discovered_addr, next_discovered);
1229     list->set_head(obj);
1230     list->inc_length(1);
1231 
1232     if (TraceReferenceGC) {
1233       gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
1234                                 (void *)obj, obj->klass()->internal_name());
1235     }
1236   }
1237   assert(obj->is_oop(), "Discovered a bad reference");
1238   verify_referent(obj);
1239   return true;
1240 }
1241 
1242 // Preclean the discovered references by removing those
1243 // whose referents are alive, and by marking from those that
1244 // are not active. These lists can be handled here
1245 // in any order and, indeed, concurrently.
1246 void ReferenceProcessor::preclean_discovered_references(
1247   BoolObjectClosure* is_alive,
1248   OopClosure* keep_alive,


1302       preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1303                                   keep_alive, complete_gc, yield);
1304     }
1305   }
1306 }
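
The loop above is repeated per subclass list; between lists (and, inside
preclean_discovered_reflist, between elements) the yield closure is polled so
a concurrent phase can back off. A sketch of that driver shape for one list
kind (hedged reconstruction; only the phantom-list calls are visible above):

    for (uint i = 0; i < _max_num_q; i++) {
      if (yield->should_return()) {
        return;  // give up the CPU to the requesting thread
      }
      preclean_discovered_reflist(_discoveredSoftRefs[i], is_alive,
                                  keep_alive, complete_gc, yield);
    }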
1307 
1308 // Walk the given discovered ref list, and remove all reference objects
1309 // whose referents are still alive, whose referents are NULL or which
1310 // are not active (have a non-NULL next field). NOTE: When we are
1311 // thus precleaning the ref lists (which happens single-threaded today),
1312 // we do not disable refs discovery to honor the correct semantics of
1313 // java.lang.Reference. As a result, we need to be careful below
1314 // that ref removal steps interleave safely with ref discovery steps
1315 // (in this thread).
1316 void
1317 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList&    refs_list,
1318                                                 BoolObjectClosure* is_alive,
1319                                                 OopClosure*        keep_alive,
1320                                                 VoidClosure*       complete_gc,
1321                                                 YieldClosure*      yield) {
1322   DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1323   while (iter.has_next()) {
1324     iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1325     oop obj = iter.obj();
1326     oop next = java_lang_ref_Reference::next(obj);
1327     if (iter.referent() == NULL || iter.is_referent_alive() ||
1328         next != NULL) {
1329       // The referent has been cleared, or is alive, or the Reference is not
1330       // active; we need to trace and mark its cohort.
1331       if (TraceReferenceGC) {
1332         gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1333                                (void *)iter.obj(), iter.obj()->klass()->internal_name());
1334       }
1335       // Remove Reference object from list
1336       iter.remove();
1337       // Keep alive its cohort.
1338       iter.make_referent_alive();
1339       if (UseCompressedOops) {
1340         narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
1341         keep_alive->do_oop(next_addr);
1342       } else {

