78 #endif // ASSERT
79
80 // Someone could have modified the value of the static
81 // field in the j.l.r.SoftReference class that holds the
82 // soft reference timestamp clock using reflection or
83 // Unsafe between GCs. Unconditionally update the static
84 // field in ReferenceProcessor here so that we use the new
85 // value during reference discovery.
86
87 _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
88 _discovering_refs = true;
89 }
90
91 ReferenceProcessor::ReferenceProcessor(MemRegion span,
92 bool mt_processing,
93 uint mt_processing_degree,
94 bool mt_discovery,
95 uint mt_discovery_degree,
96 bool atomic_discovery,
97 BoolObjectClosure* is_alive_non_header,
98 bool discovered_list_needs_barrier) :
99 _discovering_refs(false),
100 _enqueuing_is_done(false),
101 _is_alive_non_header(is_alive_non_header),
102 _discovered_list_needs_barrier(discovered_list_needs_barrier),
103 _processing_is_mt(mt_processing),
104 _next_id(0)
105 {
106 _span = span;
107 _discovery_is_atomic = atomic_discovery;
108 _discovery_is_mt = mt_discovery;
109 _num_q = MAX2(1U, mt_processing_degree);
110 _max_num_q = MAX2(_num_q, mt_discovery_degree);
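// Discovery may use up to _max_num_q lists, but only the first _num_q are
// processed by the active workers; balance_queues() later folds entries
// from the extra lists back into the first _num_q.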
111 _discovered_refs = NEW_C_HEAP_ARRAY(DiscoveredList,
112 _max_num_q * number_of_subclasses_of_ref(), mtGC);
113
114 if (_discovered_refs == NULL) {
115 vm_exit_during_initialization("Could not allocate RefProc Array");
116 }
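// The backing array holds one segment of _max_num_q lists per
// j.l.r.Reference subclass, laid out as [Soft | Weak | Final | Phantom].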
117 _discoveredSoftRefs = &_discovered_refs[0];
118 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
119 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
120 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
121
122 // Initialize all entries to NULL
473 assert(allow_null_referent ?
474 _referent->is_oop_or_null()
475 : _referent->is_oop(),
476 "bad referent");
477 }
478
479 void DiscoveredListIterator::remove() {
480 assert(_ref->is_oop(), "Dropping a bad reference");
481 oop_store_raw(_discovered_addr, NULL);
482
483 // First _prev_next ref actually points into DiscoveredList (gross).
484 oop new_next;
485 if (_next == _ref) {
486 // At the end of the list, the predecessor becomes the new self-looped
487 // tail, so store _prev. If _ref is the first ref, then _prev_next will
488 // be in the DiscoveredList, and _prev will be NULL.
489 new_next = _prev;
490 } else {
491 new_next = _next;
492 }
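// new_next is the value about to be stored into the predecessor's
// discovered field (or into the DiscoveredList head slot itself).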
493
494 if (UseCompressedOops) {
495 // Remove Reference object from list.
496 oopDesc::encode_store_heap_oop((narrowOop*)_prev_next, new_next);
497 } else {
498 // Remove Reference object from list.
499 oopDesc::store_heap_oop((oop*)_prev_next, new_next);
500 }
501 NOT_PRODUCT(_removed++);
502 _refs_list.dec_length(1);
503 }
504
505 // Make the Reference object active again.
506 void DiscoveredListIterator::make_active() {
507 // For G1 we don't want to use set_next - it
508 // will dirty the card for the next field of
509 // the reference object and will fail
510 // CT verification.
511 if (UseG1GC) {
512 HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
513 if (UseCompressedOops) {
514 oopDesc::bs()->write_ref_field_pre((narrowOop*)next_addr, NULL);
515 } else {
516 oopDesc::bs()->write_ref_field_pre((oop*)next_addr, NULL);
517 }
518 java_lang_ref_Reference::set_next_raw(_ref, NULL);
519 } else {
527
528 // NOTE: process_phase*() are largely similar, and at a high level
529 // merely iterate over the extant list applying a predicate to
530 // each of its elements and possibly removing that element from the
531 // list and applying some further closures to that element.
532 // We should consider the possibility of replacing these
533 // process_phase*() methods by abstracting them into
534 // a single general iterator invocation that receives appropriate
535 // closures that accomplish this work.
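// As a rough illustration only (a hypothetical helper, not part of this
// file), such a generalization might look like:
//
//   template <typename ShouldDrop, typename OnDrop>
//   void iterate_discovered_list(DiscoveredList& refs_list,
//                                BoolObjectClosure* is_alive,
//                                OopClosure* keep_alive,
//                                ShouldDrop should_drop,
//                                OnDrop on_drop) {
//     DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
//     while (iter.has_next()) {
//       // allow_null_referent would vary by phase, as it does today.
//       iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
//       if (should_drop(iter)) {
//         iter.remove();      // unlink from the discovered list
//         on_drop(iter);      // e.g. make_referent_alive()/make_active()
//         iter.move_to_next();
//       } else {
//         iter.next();
//       }
//     }
//   }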
536
537 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
538 // referents are not alive, but that should be kept alive for policy reasons.
539 // Keep alive the transitive closure of all such referents.
540 void
541 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
542 ReferencePolicy* policy,
543 BoolObjectClosure* is_alive,
544 OopClosure* keep_alive,
545 VoidClosure* complete_gc) {
546 assert(policy != NULL, "Must have a non-NULL policy");
547 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
548 // Decide which softly reachable refs should be kept alive.
549 while (iter.has_next()) {
550 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
551 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
552 if (referent_is_dead &&
553 !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
554 if (TraceReferenceGC) {
555 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
556 (void *)iter.obj(), iter.obj()->klass()->internal_name());
557 }
558 // Remove Reference object from list
559 iter.remove();
560 // Make the Reference object active again
561 iter.make_active();
562 // keep the referent around
563 iter.make_referent_alive();
564 iter.move_to_next();
565 } else {
566 iter.next();
567 }
568 }
569 // Close the reachable set
570 complete_gc->do_void();
571 NOT_PRODUCT(
572 if (PrintGCDetails && TraceReferenceGC) {
573 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
574 "discovered Refs by policy, from list " INTPTR_FORMAT,
575 iter.removed(), iter.processed(), (address)refs_list.head());
576 }
577 )
578 }
579
580 // Traverse the list and remove any Refs that are not active, or
581 // whose referents are either alive or NULL.
582 void
583 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
584 BoolObjectClosure* is_alive,
585 OopClosure* keep_alive) {
586 assert(discovery_is_atomic(), "Error");
587 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
588 while (iter.has_next()) {
589 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
590 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
591 assert(next == NULL, "Should not discover inactive Reference");
592 if (iter.is_referent_alive()) {
593 if (TraceReferenceGC) {
594 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
595 (void *)iter.obj(), iter.obj()->klass()->internal_name());
596 }
597 // The referent is reachable after all.
598 // Remove Reference object from list.
599 iter.remove();
600 // Update the referent pointer as necessary: Note that this
601 // should not entail any recursive marking because the
602 // referent must already have been traversed.
603 iter.make_referent_alive();
604 iter.move_to_next();
605 } else {
606 iter.next();
607 }
608 }
609 NOT_PRODUCT(
610 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
611 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
612 "Refs in discovered list " INTPTR_FORMAT,
613 iter.removed(), iter.processed(), (address)refs_list.head());
614 }
615 )
616 }
617
618 void
619 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
620 BoolObjectClosure* is_alive,
621 OopClosure* keep_alive,
622 VoidClosure* complete_gc) {
623 assert(!discovery_is_atomic(), "Error");
624 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
625 while (iter.has_next()) {
626 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
627 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
628 oop next = java_lang_ref_Reference::next(iter.obj());
629 if (iter.referent() == NULL || iter.is_referent_alive() ||
630 next != NULL) {
631 assert(next->is_oop_or_null(), "bad next field");
632 // Remove Reference object from list
633 iter.remove();
634 // Trace the cohorts
635 iter.make_referent_alive();
636 if (UseCompressedOops) {
637 keep_alive->do_oop((narrowOop*)next_addr);
638 } else {
639 keep_alive->do_oop((oop*)next_addr);
640 }
641 iter.move_to_next();
642 } else {
643 iter.next();
644 }
647 complete_gc->do_void();
648 NOT_PRODUCT(
649 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
650 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
651 "Refs in discovered list " INTPTR_FORMAT,
652 iter.removed(), iter.processed(), (address)refs_list.head());
653 }
654 )
655 }
656
657 // Traverse the list and process the referents, by either
658 // clearing them or keeping them (and their reachable
659 // closure) alive.
660 void
661 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
662 bool clear_referent,
663 BoolObjectClosure* is_alive,
664 OopClosure* keep_alive,
665 VoidClosure* complete_gc) {
666 ResourceMark rm;
667 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
668 while (iter.has_next()) {
669 iter.update_discovered();
670 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
671 if (clear_referent) {
672 // NULL out referent pointer
673 iter.clear_referent();
674 } else {
675 // keep the referent around
676 iter.make_referent_alive();
677 }
678 if (TraceReferenceGC) {
679 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
680 clear_referent ? "cleared " : "",
681 (void *)iter.obj(), iter.obj()->klass()->internal_name());
682 }
683 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
684 iter.next();
685 }
686 // Remember to update the next pointer of the last ref.
687 iter.update_discovered();
765 _clear_referent(clear_referent)
766 { }
767 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
768 OopClosure& keep_alive,
769 VoidClosure& complete_gc)
770 {
771 // Don't use a "refs_list_index" calculated from the worker thread id (as
772 // below) because balance_queues() has moved the Refs into the first _num_q queues.
773 // Thread* thr = Thread::current();
774 // int refs_list_index = ((WorkerThread*)thr)->id();
775 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
776 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
777 &is_alive, &keep_alive, &complete_gc);
778 }
779 private:
780 bool _clear_referent;
781 };
782
783 void ReferenceProcessor::set_discovered(oop ref, oop value) {
784 java_lang_ref_Reference::set_discovered_raw(ref, value);
785 if (_discovered_list_needs_barrier) {
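// NOTE: write_ref_field() is handed `ref`, the Reference object itself,
// rather than the address of its discovered field; the updated version
// below passes java_lang_ref_Reference::discovered_addr(ref) instead.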
786 oopDesc::bs()->write_ref_field(ref, value);
787 }
788 }
789
790 // Balances reference queues.
791 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
792 // queues[0, 1, ..., _num_q-1], because only the first _num_q queues
793 // (those corresponding to the active workers) will be processed.
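// For example, with _max_num_q == 4 but only _num_q == 2 active workers,
// the entries of queues 2 and 3 are redistributed into queues 0 and 1 so
// that no discovered Reference is left on an unprocessed list.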
794 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
795 {
796 // calculate total length
797 size_t total_refs = 0;
798 if (TraceReferenceGC && PrintGCDetails) {
799 gclog_or_tty->print_cr("\nBalance ref_lists ");
800 }
801
802 for (uint i = 0; i < _max_num_q; ++i) {
803 total_refs += ref_lists[i].length();
804 if (TraceReferenceGC && PrintGCDetails) {
805 gclog_or_tty->print("%d ", ref_lists[i].length());
806 }
963 }
964 }
965
966 return total_list_count;
967 }
968
969 void ReferenceProcessor::clean_up_discovered_references() {
970 // loop over the lists
971 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
972 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
973 gclog_or_tty->print_cr(
974 "\nScrubbing %s discovered list of Null referents",
975 list_name(i));
976 }
977 clean_up_discovered_reflist(_discovered_refs[i]);
978 }
979 }
980
981 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
982 assert(!discovery_is_atomic(), "Else why call this method?");
983 DiscoveredListIterator iter(refs_list, NULL, NULL);
984 while (iter.has_next()) {
985 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
986 oop next = java_lang_ref_Reference::next(iter.obj());
987 assert(next->is_oop_or_null(), "bad next field");
988 // If referent has been cleared or Reference is not active,
989 // drop it.
990 if (iter.referent() == NULL || next != NULL) {
991 debug_only(
992 if (PrintGCDetails && TraceReferenceGC) {
993 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
994 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
995 " and referent: " INTPTR_FORMAT,
996 (void *)iter.obj(), (void *)next, (void *)iter.referent());
997 }
998 )
999 // Remove Reference object from list
1000 iter.remove();
1001 iter.move_to_next();
1002 } else {
1003 iter.next();
1059 }
1060
1061 inline void
1062 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
1063 oop obj,
1064 HeapWord* discovered_addr) {
1065 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1066 // First we must make sure this object is only enqueued once: CAS a
1067 // non-NULL value into the field at discovered_addr to claim it.
1068 oop current_head = refs_list.head();
1069 // The last ref must have its discovered field pointing to itself.
1070 oop next_discovered = (current_head != NULL) ? current_head : obj;
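// (A self-loop, rather than NULL, marks the tail: a NULL discovered field
// means "not on any discovered list", which is exactly what the CAS below
// tests in order to claim this object.)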
1071
1072 // Note: In the case of G1, this specific pre-barrier is strictly
1073 // not necessary because the only case we are interested in
1074 // here is when *discovered_addr is NULL (see the CAS further below),
1075 // so this will expand to nothing. As a result, we have manually
1076 // elided it for G1, but left in this check for some future
1077 // collector that might need a pre-barrier here, e.g.:
1078 // oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1079 assert(!_discovered_list_needs_barrier || UseG1GC,
1080 "Need to check non-G1 collector: "
1081 "may need a pre-write-barrier for CAS from NULL below");
1082 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1083 NULL);
1084 if (retest == NULL) {
1085 // This thread just won the right to enqueue the object.
1086 // We have separate lists for enqueueing, so no synchronization
1087 // is necessary.
1088 refs_list.set_head(obj);
1089 refs_list.inc_length(1);
1090 if (_discovered_list_needs_barrier) {
1091 oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1092 }
1093
1094 if (TraceReferenceGC) {
1095 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
1096 (void *)obj, obj->klass()->internal_name());
1097 }
1098 } else {
1099 // If retest was non-NULL, another thread beat us to it:
1100 // The reference has already been discovered...
1101 if (TraceReferenceGC) {
1102 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1103 (void *)obj, obj->klass()->internal_name());
1104 }
1105 }
1106 }
1107
1108 #ifndef PRODUCT
1109 // Non-atomic (i.e. concurrent) discovery might allow us
1110 // to observe j.l.References with NULL referents, being those
1223 (discovery_is_atomic() &&
1224 _span.contains(java_lang_ref_Reference::referent(obj)))) {
1225 // should_enqueue = true;
1226 } else {
1227 return false;
1228 }
1229 } else {
1230 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1231 _span.contains(obj_addr), "code inconsistency");
1232 }
1233
1234 // Get the right type of discovered queue head.
1235 DiscoveredList* list = get_discovered_list(rt);
1236 if (list == NULL) {
1237 return false; // nothing special needs to be done
1238 }
1239
1240 if (_discovery_is_mt) {
1241 add_to_discovered_list_mt(*list, obj, discovered_addr);
1242 } else {
1243 // If "_discovered_list_needs_barrier", we do write barriers when
1244 // updating the discovered reference list. Otherwise, we do a raw store
1245 // here: the field will be visited later when processing the discovered
1246 // references.
1247 oop current_head = list->head();
1248 // The last ref must have its discovered field pointing to itself.
1249 oop next_discovered = (current_head != NULL) ? current_head : obj;
1250
1251 // As in the case further above, since we are overwriting a NULL
1252 // pre-value, we can safely elide the pre-barrier here for the case of G1.
1253 // e.g.: oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1254 assert(discovered == NULL, "control point invariant");
1255 assert(!_discovered_list_needs_barrier || UseG1GC,
1256 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
1257 oop_store_raw(discovered_addr, next_discovered);
1258 if (_discovered_list_needs_barrier) {
1259 oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1260 }
1261 list->set_head(obj);
1262 list->inc_length(1);
1263
1264 if (TraceReferenceGC) {
1265 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
1266 (void *)obj, obj->klass()->internal_name());
1267 }
1268 }
1269 assert(obj->is_oop(), "Discovered a bad reference");
1270 verify_referent(obj);
1271 return true;
1272 }
1273
1274 // Preclean the discovered references by removing those
1275 // whose referents are alive, and by marking from those that
1276 // are not active. These lists can be handled here
1277 // in any order and, indeed, concurrently.
1278 void ReferenceProcessor::preclean_discovered_references(
1334 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1335 keep_alive, complete_gc, yield);
1336 }
1337 }
1338 }
1339
1340 // Walk the given discovered ref list, and remove all reference objects
1341 // whose referents are still alive, whose referents are NULL or which
1342 // are not active (have a non-NULL next field). NOTE: When we are
1343 // thus precleaning the ref lists (which happens single-threaded today),
1344 // we do not disable refs discovery, so as to honor the correct semantics of
1345 // java.lang.Reference. As a result, we need to be careful below
1346 // that ref removal steps interleave safely with ref discovery steps
1347 // (in this thread).
1348 void
1349 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
1350 BoolObjectClosure* is_alive,
1351 OopClosure* keep_alive,
1352 VoidClosure* complete_gc,
1353 YieldClosure* yield) {
1354 DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
1355 while (iter.has_next()) {
1356 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1357 oop obj = iter.obj();
1358 oop next = java_lang_ref_Reference::next(obj);
1359 if (iter.referent() == NULL || iter.is_referent_alive() ||
1360 next != NULL) {
1361 // The referent has been cleared, or is alive, or the Reference is not
1362 // active; we need to trace and mark its cohort.
1363 if (TraceReferenceGC) {
1364 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1365 (void *)iter.obj(), iter.obj()->klass()->internal_name());
1366 }
1367 // Remove Reference object from list
1368 iter.remove();
1369 // Keep alive its cohort.
1370 iter.make_referent_alive();
1371 if (UseCompressedOops) {
1372 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
1373 keep_alive->do_oop(next_addr);
1374 } else {
// =====================================================================
// Updated version of the same code follows (line numbering restarts at
// 78): _discovered_list_needs_barrier is renamed to
// _discovered_list_needs_post_barrier, DiscoveredListIterator::remove()
// now posts a write barrier when unlinking (except on the off-heap list
// head), and set_discovered() posts the barrier on the discovered
// field's address rather than on the Reference object itself.
// =====================================================================
78 #endif // ASSERT
79
80 // Someone could have modified the value of the static
81 // field in the j.l.r.SoftReference class that holds the
82 // soft reference timestamp clock using reflection or
83 // Unsafe between GCs. Unconditionally update the static
84 // field in ReferenceProcessor here so that we use the new
85 // value during reference discovery.
86
87 _soft_ref_timestamp_clock = java_lang_ref_SoftReference::clock();
88 _discovering_refs = true;
89 }
90
91 ReferenceProcessor::ReferenceProcessor(MemRegion span,
92 bool mt_processing,
93 uint mt_processing_degree,
94 bool mt_discovery,
95 uint mt_discovery_degree,
96 bool atomic_discovery,
97 BoolObjectClosure* is_alive_non_header,
98 bool discovered_list_needs_post_barrier) :
99 _discovering_refs(false),
100 _enqueuing_is_done(false),
101 _is_alive_non_header(is_alive_non_header),
102 _discovered_list_needs_post_barrier(discovered_list_needs_post_barrier),
103 _processing_is_mt(mt_processing),
104 _next_id(0)
105 {
106 _span = span;
107 _discovery_is_atomic = atomic_discovery;
108 _discovery_is_mt = mt_discovery;
109 _num_q = MAX2(1U, mt_processing_degree);
110 _max_num_q = MAX2(_num_q, mt_discovery_degree);
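// Discovery may use up to _max_num_q lists, but only the first _num_q are
// processed by the active workers; balance_queues() later folds entries
// from the extra lists back into the first _num_q.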
111 _discovered_refs = NEW_C_HEAP_ARRAY(DiscoveredList,
112 _max_num_q * number_of_subclasses_of_ref(), mtGC);
113
114 if (_discovered_refs == NULL) {
115 vm_exit_during_initialization("Could not allocate RefProc Array");
116 }
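// The backing array holds one segment of _max_num_q lists per
// j.l.r.Reference subclass, laid out as [Soft | Weak | Final | Phantom].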
117 _discoveredSoftRefs = &_discovered_refs[0];
118 _discoveredWeakRefs = &_discoveredSoftRefs[_max_num_q];
119 _discoveredFinalRefs = &_discoveredWeakRefs[_max_num_q];
120 _discoveredPhantomRefs = &_discoveredFinalRefs[_max_num_q];
121
122 // Initialize all entries to NULL
473 assert(allow_null_referent ?
474 _referent->is_oop_or_null()
475 : _referent->is_oop(),
476 "bad referent");
477 }
478
479 void DiscoveredListIterator::remove() {
480 assert(_ref->is_oop(), "Dropping a bad reference");
481 oop_store_raw(_discovered_addr, NULL);
482
483 // First _prev_next ref actually points into DiscoveredList (gross).
484 oop new_next;
485 if (_next == _ref) {
486 // At the end of the list, the predecessor becomes the new self-looped
487 // tail, so store _prev. If _ref is the first ref, then _prev_next will
488 // be in the DiscoveredList, and _prev will be NULL.
489 new_next = _prev;
490 } else {
491 new_next = _next;
492 }
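// new_next is the value about to be stored into the predecessor's
// discovered field (or into the DiscoveredList head slot itself).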
493 // Remove Reference object from discovered list. Note that G1 does not need a
494 // pre-barrier here because we know the Reference has already been found/marked,
495 // that's how it ended up in the discovered list in the first place.
496 oop_store_raw(_prev_next, new_next);
497 if (_discovered_list_needs_post_barrier && _prev_next != _refs_list.adr_head()) {
498 // Needs post-barrier and this is not the list head (which is not on the heap)
499 oopDesc::bs()->write_ref_field(_prev_next, new_next);
500 }
501 NOT_PRODUCT(_removed++);
502 _refs_list.dec_length(1);
503 }
504
505 // Make the Reference object active again.
506 void DiscoveredListIterator::make_active() {
507 // For G1 we don't want to use set_next - it
508 // will dirty the card for the next field of
509 // the reference object and will fail
510 // CT verification.
511 if (UseG1GC) {
512 HeapWord* next_addr = java_lang_ref_Reference::next_addr(_ref);
513 if (UseCompressedOops) {
514 oopDesc::bs()->write_ref_field_pre((narrowOop*)next_addr, NULL);
515 } else {
516 oopDesc::bs()->write_ref_field_pre((oop*)next_addr, NULL);
517 }
518 java_lang_ref_Reference::set_next_raw(_ref, NULL);
519 } else {
527
528 // NOTE: process_phase*() are largely similar, and at a high level
529 // merely iterate over the extant list applying a predicate to
530 // each of its elements and possibly removing that element from the
531 // list and applying some further closures to that element.
532 // We should consider the possibility of replacing these
533 // process_phase*() methods by abstracting them into
534 // a single general iterator invocation that receives appropriate
535 // closures that accomplish this work.
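// As a rough illustration only (a hypothetical helper, not part of this
// file), such a generalization might look like:
//
//   template <typename ShouldDrop, typename OnDrop>
//   void iterate_discovered_list(DiscoveredList& refs_list,
//                                BoolObjectClosure* is_alive,
//                                OopClosure* keep_alive,
//                                bool needs_post_barrier,
//                                ShouldDrop should_drop,
//                                OnDrop on_drop) {
//     DiscoveredListIterator iter(refs_list, keep_alive, is_alive,
//                                 needs_post_barrier);
//     while (iter.has_next()) {
//       // allow_null_referent would vary by phase, as it does today.
//       iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
//       if (should_drop(iter)) {
//         iter.remove();      // unlink from the discovered list
//         on_drop(iter);      // e.g. make_referent_alive()/make_active()
//         iter.move_to_next();
//       } else {
//         iter.next();
//       }
//     }
//   }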
536
537 // (SoftReferences only) Traverse the list and remove any SoftReferences whose
538 // referents are not alive, but that should be kept alive for policy reasons.
539 // Keep alive the transitive closure of all such referents.
540 void
541 ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
542 ReferencePolicy* policy,
543 BoolObjectClosure* is_alive,
544 OopClosure* keep_alive,
545 VoidClosure* complete_gc) {
546 assert(policy != NULL, "Must have a non-NULL policy");
547 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
548 // Decide which softly reachable refs should be kept alive.
549 while (iter.has_next()) {
550 iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
551 bool referent_is_dead = (iter.referent() != NULL) && !iter.is_referent_alive();
552 if (referent_is_dead &&
553 !policy->should_clear_reference(iter.obj(), _soft_ref_timestamp_clock)) {
554 if (TraceReferenceGC) {
555 gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
556 (void *)iter.obj(), iter.obj()->klass()->internal_name());
557 }
558 // Remove Reference object from list
559 iter.remove();
560 // Make the Reference object active again
561 iter.make_active();
562 // keep the referent around
563 iter.make_referent_alive();
564 iter.move_to_next();
565 } else {
566 iter.next();
567 }
568 }
569 // Close the reachable set
570 complete_gc->do_void();
571 NOT_PRODUCT(
572 if (PrintGCDetails && TraceReferenceGC) {
573 gclog_or_tty->print_cr(" Dropped %d dead Refs out of %d "
574 "discovered Refs by policy, from list " INTPTR_FORMAT,
575 iter.removed(), iter.processed(), (address)refs_list.head());
576 }
577 )
578 }
579
580 // Traverse the list and remove any Refs that are not active, or
581 // whose referents are either alive or NULL.
582 void
583 ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
584 BoolObjectClosure* is_alive,
585 OopClosure* keep_alive) {
586 assert(discovery_is_atomic(), "Error");
587 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
588 while (iter.has_next()) {
589 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
590 DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
591 assert(next == NULL, "Should not discover inactive Reference");
592 if (iter.is_referent_alive()) {
593 if (TraceReferenceGC) {
594 gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
595 (void *)iter.obj(), iter.obj()->klass()->internal_name());
596 }
597 // The referent is reachable after all.
598 // Remove Reference object from list.
599 iter.remove();
600 // Update the referent pointer as necessary: Note that this
601 // should not entail any recursive marking because the
602 // referent must already have been traversed.
603 iter.make_referent_alive();
604 iter.move_to_next();
605 } else {
606 iter.next();
607 }
608 }
609 NOT_PRODUCT(
610 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
611 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
612 "Refs in discovered list " INTPTR_FORMAT,
613 iter.removed(), iter.processed(), (address)refs_list.head());
614 }
615 )
616 }
617
618 void
619 ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
620 BoolObjectClosure* is_alive,
621 OopClosure* keep_alive,
622 VoidClosure* complete_gc) {
623 assert(!discovery_is_atomic(), "Error");
624 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
625 while (iter.has_next()) {
626 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
627 HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
628 oop next = java_lang_ref_Reference::next(iter.obj());
629 if (iter.referent() == NULL || iter.is_referent_alive() ||
630 next != NULL) {
631 assert(next->is_oop_or_null(), "bad next field");
632 // Remove Reference object from list
633 iter.remove();
634 // Trace the cohorts
635 iter.make_referent_alive();
636 if (UseCompressedOops) {
637 keep_alive->do_oop((narrowOop*)next_addr);
638 } else {
639 keep_alive->do_oop((oop*)next_addr);
640 }
641 iter.move_to_next();
642 } else {
643 iter.next();
644 }
647 complete_gc->do_void();
648 NOT_PRODUCT(
649 if (PrintGCDetails && TraceReferenceGC && (iter.processed() > 0)) {
650 gclog_or_tty->print_cr(" Dropped %d active Refs out of %d "
651 "Refs in discovered list " INTPTR_FORMAT,
652 iter.removed(), iter.processed(), (address)refs_list.head());
653 }
654 )
655 }
656
657 // Traverse the list and process the referents, by either
658 // clearing them or keeping them (and their reachable
659 // closure) alive.
660 void
661 ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
662 bool clear_referent,
663 BoolObjectClosure* is_alive,
664 OopClosure* keep_alive,
665 VoidClosure* complete_gc) {
666 ResourceMark rm;
667 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
668 while (iter.has_next()) {
669 iter.update_discovered();
670 iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
671 if (clear_referent) {
672 // NULL out referent pointer
673 iter.clear_referent();
674 } else {
675 // keep the referent around
676 iter.make_referent_alive();
677 }
678 if (TraceReferenceGC) {
679 gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
680 clear_referent ? "cleared " : "",
681 (void *)iter.obj(), iter.obj()->klass()->internal_name());
682 }
683 assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
684 iter.next();
685 }
686 // Remember to update the next pointer of the last ref.
687 iter.update_discovered();
765 _clear_referent(clear_referent)
766 { }
767 virtual void work(unsigned int i, BoolObjectClosure& is_alive,
768 OopClosure& keep_alive,
769 VoidClosure& complete_gc)
770 {
771 // Don't use a "refs_list_index" calculated from the worker thread id (as
772 // below) because balance_queues() has moved the Refs into the first _num_q queues.
773 // Thread* thr = Thread::current();
774 // int refs_list_index = ((WorkerThread*)thr)->id();
775 // _ref_processor.process_phase3(_refs_lists[refs_list_index], _clear_referent,
776 _ref_processor.process_phase3(_refs_lists[i], _clear_referent,
777 &is_alive, &keep_alive, &complete_gc);
778 }
779 private:
780 bool _clear_referent;
781 };
782
783 void ReferenceProcessor::set_discovered(oop ref, oop value) {
784 java_lang_ref_Reference::set_discovered_raw(ref, value);
785 if (_discovered_list_needs_post_barrier) {
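// Post the barrier on the discovered field's address, not on the
// Reference object itself (this fixes the older version above).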
786 oopDesc::bs()->write_ref_field(java_lang_ref_Reference::discovered_addr(ref), value);
787 }
788 }
789
790 // Balances reference queues.
791 // Move entries from all queues[0, 1, ..., _max_num_q-1] to
792 // queues[0, 1, ..., _num_q-1], because only the first _num_q queues
793 // (those corresponding to the active workers) will be processed.
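// For example, with _max_num_q == 4 but only _num_q == 2 active workers,
// the entries of queues 2 and 3 are redistributed into queues 0 and 1 so
// that no discovered Reference is left on an unprocessed list.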
794 void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
795 {
796 // calculate total length
797 size_t total_refs = 0;
798 if (TraceReferenceGC && PrintGCDetails) {
799 gclog_or_tty->print_cr("\nBalance ref_lists ");
800 }
801
802 for (uint i = 0; i < _max_num_q; ++i) {
803 total_refs += ref_lists[i].length();
804 if (TraceReferenceGC && PrintGCDetails) {
805 gclog_or_tty->print("%d ", ref_lists[i].length());
806 }
963 }
964 }
965
966 return total_list_count;
967 }
968
969 void ReferenceProcessor::clean_up_discovered_references() {
970 // loop over the lists
971 for (uint i = 0; i < _max_num_q * number_of_subclasses_of_ref(); i++) {
972 if (TraceReferenceGC && PrintGCDetails && ((i % _max_num_q) == 0)) {
973 gclog_or_tty->print_cr(
974 "\nScrubbing %s discovered list of Null referents",
975 list_name(i));
976 }
977 clean_up_discovered_reflist(_discovered_refs[i]);
978 }
979 }
980
981 void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list) {
982 assert(!discovery_is_atomic(), "Else why call this method?");
983 DiscoveredListIterator iter(refs_list, NULL, NULL, _discovered_list_needs_post_barrier);
984 while (iter.has_next()) {
985 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
986 oop next = java_lang_ref_Reference::next(iter.obj());
987 assert(next->is_oop_or_null(), "bad next field");
988 // If referent has been cleared or Reference is not active,
989 // drop it.
990 if (iter.referent() == NULL || next != NULL) {
991 debug_only(
992 if (PrintGCDetails && TraceReferenceGC) {
993 gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
994 INTPTR_FORMAT " with next field: " INTPTR_FORMAT
995 " and referent: " INTPTR_FORMAT,
996 (void *)iter.obj(), (void *)next, (void *)iter.referent());
997 }
998 )
999 // Remove Reference object from list
1000 iter.remove();
1001 iter.move_to_next();
1002 } else {
1003 iter.next();
1059 }
1060
1061 inline void
1062 ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
1063 oop obj,
1064 HeapWord* discovered_addr) {
1065 assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
1066 // First we must make sure this object is only enqueued once: CAS a
1067 // non-NULL value into the field at discovered_addr to claim it.
1068 oop current_head = refs_list.head();
1069 // The last ref must have its discovered field pointing to itself.
1070 oop next_discovered = (current_head != NULL) ? current_head : obj;
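// (A self-loop, rather than NULL, marks the tail: a NULL discovered field
// means "not on any discovered list", which is exactly what the CAS below
// tests in order to claim this object.)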
1071
1072 // Note: In the case of G1, this specific pre-barrier is strictly
1073 // not necessary because the only case we are interested in
1074 // here is when *discovered_addr is NULL (see the CAS further below),
1075 // so this will expand to nothing. As a result, we have manually
1076 // elided it for G1, but left in this check for some future
1077 // collector that might need a pre-barrier here, e.g.:
1078 // oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1079 assert(!_discovered_list_needs_post_barrier || UseG1GC,
1080 "Need to check non-G1 collector: "
1081 "may need a pre-write-barrier for CAS from NULL below");
1082 oop retest = oopDesc::atomic_compare_exchange_oop(next_discovered, discovered_addr,
1083 NULL);
1084 if (retest == NULL) {
1085 // This thread just won the right to enqueue the object.
1086 // We have separate lists for enqueueing, so no synchronization
1087 // is necessary.
1088 refs_list.set_head(obj);
1089 refs_list.inc_length(1);
1090 if (_discovered_list_needs_post_barrier) {
1091 oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1092 }
1093
1094 if (TraceReferenceGC) {
1095 gclog_or_tty->print_cr("Discovered reference (mt) (" INTPTR_FORMAT ": %s)",
1096 (void *)obj, obj->klass()->internal_name());
1097 }
1098 } else {
1099 // If retest was non-NULL, another thread beat us to it:
1100 // The reference has already been discovered...
1101 if (TraceReferenceGC) {
1102 gclog_or_tty->print_cr("Already discovered reference (" INTPTR_FORMAT ": %s)",
1103 (void *)obj, obj->klass()->internal_name());
1104 }
1105 }
1106 }
1107
1108 #ifndef PRODUCT
1109 // Non-atomic (i.e. concurrent) discovery might allow us
1110 // to observe j.l.References with NULL referents, being those
1223 (discovery_is_atomic() &&
1224 _span.contains(java_lang_ref_Reference::referent(obj)))) {
1225 // should_enqueue = true;
1226 } else {
1227 return false;
1228 }
1229 } else {
1230 assert(RefDiscoveryPolicy == ReferenceBasedDiscovery &&
1231 _span.contains(obj_addr), "code inconsistency");
1232 }
1233
1234 // Get the right type of discovered queue head.
1235 DiscoveredList* list = get_discovered_list(rt);
1236 if (list == NULL) {
1237 return false; // nothing special needs to be done
1238 }
1239
1240 if (_discovery_is_mt) {
1241 add_to_discovered_list_mt(*list, obj, discovered_addr);
1242 } else {
1243 // If "_discovered_list_needs_post_barrier", we do write barriers when
1244 // updating the discovered reference list. Otherwise, we do a raw store
1245 // here: the field will be visited later when processing the discovered
1246 // references.
1247 oop current_head = list->head();
1248 // The last ref must have its discovered field pointing to itself.
1249 oop next_discovered = (current_head != NULL) ? current_head : obj;
1250
1251 // As in the case further above, since we are overwriting a NULL
1252 // pre-value, we can safely elide the pre-barrier here for the case of G1.
1253 // e.g.: oopDesc::bs()->write_ref_field_pre((oop* or narrowOop*)discovered_addr, next_discovered);
1254 assert(discovered == NULL, "control point invariant");
1255 assert(!_discovered_list_needs_post_barrier || UseG1GC,
1256 "For non-G1 collector, may need a pre-write-barrier for CAS from NULL below");
1257 oop_store_raw(discovered_addr, next_discovered);
1258 if (_discovered_list_needs_post_barrier) {
1259 oopDesc::bs()->write_ref_field((void*)discovered_addr, next_discovered);
1260 }
1261 list->set_head(obj);
1262 list->inc_length(1);
1263
1264 if (TraceReferenceGC) {
1265 gclog_or_tty->print_cr("Discovered reference (" INTPTR_FORMAT ": %s)",
1266 (void *)obj, obj->klass()->internal_name());
1267 }
1268 }
1269 assert(obj->is_oop(), "Discovered a bad reference");
1270 verify_referent(obj);
1271 return true;
1272 }
1273
1274 // Preclean the discovered references by removing those
1275 // whose referents are alive, and by marking from those that
1276 // are not active. These lists can be handled here
1277 // in any order and, indeed, concurrently.
1278 void ReferenceProcessor::preclean_discovered_references(
1334 preclean_discovered_reflist(_discoveredPhantomRefs[i], is_alive,
1335 keep_alive, complete_gc, yield);
1336 }
1337 }
1338 }
1339
1340 // Walk the given discovered ref list, and remove all reference objects
1341 // whose referents are still alive, whose referents are NULL or which
1342 // are not active (have a non-NULL next field). NOTE: When we are
1343 // thus precleaning the ref lists (which happens single-threaded today),
1344 // we do not disable refs discovery, so as to honor the correct semantics of
1345 // java.lang.Reference. As a result, we need to be careful below
1346 // that ref removal steps interleave safely with ref discovery steps
1347 // (in this thread).
1348 void
1349 ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
1350 BoolObjectClosure* is_alive,
1351 OopClosure* keep_alive,
1352 VoidClosure* complete_gc,
1353 YieldClosure* yield) {
1354 DiscoveredListIterator iter(refs_list, keep_alive, is_alive, _discovered_list_needs_post_barrier);
1355 while (iter.has_next()) {
1356 iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
1357 oop obj = iter.obj();
1358 oop next = java_lang_ref_Reference::next(obj);
1359 if (iter.referent() == NULL || iter.is_referent_alive() ||
1360 next != NULL) {
1361 // The referent has been cleared, or is alive, or the Reference is not
1362 // active; we need to trace and mark its cohort.
1363 if (TraceReferenceGC) {
1364 gclog_or_tty->print_cr("Precleaning Reference (" INTPTR_FORMAT ": %s)",
1365 (void *)iter.obj(), iter.obj()->klass()->internal_name());
1366 }
1367 // Remove Reference object from list
1368 iter.remove();
1369 // Keep alive its cohort.
1370 iter.make_referent_alive();
1371 if (UseCompressedOops) {
1372 narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
1373 keep_alive->do_oop(next_addr);
1374 } else {
|