
src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp

rev 7993 : [mq]: fix
rev 7994 : [mq]: filter

*** 1776,1786 ****
    _refine_cte_cl(NULL),
    _full_collection(false),
    _secondary_free_list("Secondary Free List", new SecondaryFreeRegionListMtSafeChecker()),
    _old_set("Old Set", false /* humongous */, new OldRegionSetMtSafeChecker()),
    _humongous_set("Master Humongous Set", true /* humongous */, new HumongousRegionSetMtSafeChecker()),
!   _humongous_is_live(),
    _has_humongous_reclaim_candidates(false),
    _free_regions_coming(false),
    _young_list(new YoungList(this)),
    _gc_time_stamp(0),
    _survivor_plab_stats(YoungPLABSize, PLABWeight),
--- 1776,1786 ----
    _refine_cte_cl(NULL),
    _full_collection(false),
    _secondary_free_list("Secondary Free List", new SecondaryFreeRegionListMtSafeChecker()),
    _old_set("Old Set", false /* humongous */, new OldRegionSetMtSafeChecker()),
    _humongous_set("Master Humongous Set", true /* humongous */, new HumongousRegionSetMtSafeChecker()),
!   _humongous_reclaim_candidates(),
    _has_humongous_reclaim_candidates(false),
    _free_regions_coming(false),
    _young_list(new YoungList(this)),
    _gc_time_stamp(0),
    _survivor_plab_stats(YoungPLABSize, PLABWeight),
*** 1970,1981 ****
    _bot_shared = new G1BlockOffsetSharedArray(reserved_region(), bot_storage);

    _g1h = this;

!   _in_cset_fast_test.initialize(_hrm.reserved().start(), _hrm.reserved().end(), HeapRegion::GrainBytes);
!   _humongous_is_live.initialize(_hrm.reserved().start(), _hrm.reserved().end(), HeapRegion::GrainBytes);

    // Create the ConcurrentMark data structure and thread.
    // (Must do this late, so that "max_regions" is defined.)
    _cm = new ConcurrentMark(this, prev_bitmap_storage, next_bitmap_storage);
    if (_cm == NULL || !_cm->completed_initialization()) {
--- 1970,1983 ----
    _bot_shared = new G1BlockOffsetSharedArray(reserved_region(), bot_storage);

    _g1h = this;

!   _in_cset_fast_test.initialize(
!     _hrm.reserved().start(), _hrm.reserved().end(), HeapRegion::GrainBytes);
!   _humongous_reclaim_candidates.initialize(
!     _hrm.reserved().start(), _hrm.reserved().end(), HeapRegion::GrainBytes);

    // Create the ConcurrentMark data structure and thread.
    // (Must do this late, so that "max_regions" is defined.)
    _cm = new ConcurrentMark(this, prev_bitmap_storage, next_bitmap_storage);
    if (_cm == NULL || !_cm->completed_initialization()) {
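Both _in_cset_fast_test and _humongous_reclaim_candidates are per-region tables mapped over the reserved heap range at HeapRegion::GrainBytes granularity, so any heap address can be translated directly to its region's slot. A minimal standalone sketch of that idea, assuming a simplified flag table rather than the actual G1BiasedMappedArray machinery (all names below are illustrative only):

    #include <algorithm>
    #include <cassert>
    #include <cstddef>
    #include <vector>

    // Illustrative per-region flag table biased to the heap base. The real
    // G1BiasedMappedArray reserves backing storage and handles biasing; this
    // sketch only shows the address -> region-slot translation.
    class RegionFlagTable {
      const char*       _base;        // start of the reserved heap range
      size_t            _granularity; // bytes per region (e.g. HeapRegion::GrainBytes)
      std::vector<bool> _flags;       // one flag per region

     public:
      RegionFlagTable(const char* start, const char* end, size_t granularity)
        : _base(start), _granularity(granularity),
          _flags(static_cast<size_t>(end - start) / granularity, false) {}

      size_t index_for(const void* addr) const {
        assert(static_cast<const char*>(addr) >= _base && "address below heap base");
        return static_cast<size_t>(static_cast<const char*>(addr) - _base) / _granularity;
      }

      void set_by_index(size_t i, bool v)         { _flags[i] = v; }
      bool get_by_address(const void* addr) const { return _flags[index_for(addr)]; }
      void clear()                                { std::fill(_flags.begin(), _flags.end(), false); }
    };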
*** 2059,2073 ****
    if (G1StringDedup::is_enabled()) {
      G1StringDedup::stop();
    }
  }

- void G1CollectedHeap::clear_humongous_is_live_table() {
-   guarantee(G1EagerReclaimHumongousObjects, "Should only be called if true");
-   _humongous_is_live.clear();
- }
-
  size_t G1CollectedHeap::conservative_max_heap_alignment() {
    return HeapRegion::max_region_size();
  }

  void G1CollectedHeap::ref_processing_init() {
--- 2061,2070 ----
*** 3466,3496 ****
  size_t G1CollectedHeap::cards_scanned() {
    return g1_rem_set()->cardsScanned();
  }

- bool G1CollectedHeap::humongous_region_is_always_live(uint index) {
-   HeapRegion* region = region_at(index);
-   assert(region->is_starts_humongous(), "Must start a humongous object");
-   return oop(region->bottom())->is_objArray() || !region->rem_set()->is_empty();
- }
-
  class RegisterHumongousWithInCSetFastTestClosure : public HeapRegionClosure {
   private:
    size_t _total_humongous;
    size_t _candidate_humongous;

    DirtyCardQueue _dcq;

!   bool humongous_region_is_candidate(uint index) {
!     HeapRegion* region = G1CollectedHeap::heap()->region_at(index);
!     assert(region->is_starts_humongous(), "Must start a humongous object");
      HeapRegionRemSet* const rset = region->rem_set();
!     bool const allow_stale_refs = G1EagerReclaimHumongousObjectsWithStaleRefs;
!     return !oop(region->bottom())->is_objArray() &&
!            ((allow_stale_refs && rset->occupancy_less_or_equal_than(G1RSetSparseRegionEntries)) ||
!             (!allow_stale_refs && rset->is_empty()));
    }

   public:
    RegisterHumongousWithInCSetFastTestClosure()
    : _total_humongous(0),
--- 3463,3524 ----
  size_t G1CollectedHeap::cards_scanned() {
    return g1_rem_set()->cardsScanned();
  }

  class RegisterHumongousWithInCSetFastTestClosure : public HeapRegionClosure {
   private:
    size_t _total_humongous;
    size_t _candidate_humongous;

    DirtyCardQueue _dcq;

!   // We don't nominate objects with many remembered set entries, on
!   // the assumption that such objects are likely still live.
!   bool is_remset_small(HeapRegion* region) const {
      HeapRegionRemSet* const rset = region->rem_set();
!     return G1EagerReclaimHumongousObjectsWithStaleRefs
!       ? rset->occupancy_less_or_equal_than(G1RSetSparseRegionEntries)
!       : rset->is_empty();
!   }
!
!   bool is_typeArray_region(HeapRegion* region) const {
!     return oop(region->bottom())->is_typeArray();
!   }
!
!   bool humongous_region_is_candidate(G1CollectedHeap* heap, HeapRegion* region) const {
!     assert(region->is_starts_humongous(), "Must start a humongous object");
!
!     if (!heap->mark_in_progress()
!         || (region->bottom() >= region->next_top_at_mark_start())) {
!       // In order to maintain SATB invariants, during concurrent mark
!       // we should only nominate an object containing references if it
!       // was allocated after the start of marking, as such an object
!       // doesn't need to have its references scanned.
!       //
!       // Also, we must not reclaim an object that is in the concurrent
!       // mark stack. Objects allocated since the start of marking are
!       // never added to the mark stack.
!       // However, we presently only nominate is_typeArray() objects.
!       // A humongous object containing references induces remembered
!       // set entries on other regions. In order to reclaim such an
!       // object, those remembered sets would need to be cleaned up.
!       return is_typeArray_region(region) && is_remset_small(region);
!
!     } else {
!       // We may allow nomination of is_typeArray() objects that were
!       // allocated before the start of concurrent marking. For this
!       // we rely on mark stack insertion to exclude is_typeArray()
!       // objects, preventing reclaiming an object that is in the mark
!       // stack. Frequent allocation and drop of large binary blobs is
!       // an important use case for eager reclaim, and this special
!       // handling may reduce needed headroom.
!       return G1EagerReclaimHumongousPreSnapshotTypeArrays
!         && is_typeArray_region(region)
!         && is_remset_small(region);
!     }
    }

   public:
    RegisterHumongousWithInCSetFastTestClosure()
    : _total_humongous(0),
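Condensed, the nomination policy is: only humongous typeArray objects with a small remembered set (empty, unless G1EagerReclaimHumongousObjectsWithStaleRefs allows up to G1RSetSparseRegionEntries entries) are candidates, and an object allocated before the start of an in-progress concurrent mark additionally requires G1EagerReclaimHumongousPreSnapshotTypeArrays. A standalone restatement of that decision as a pure function, with the heap and region queries reduced to booleans (illustrative only, not HotSpot code):

    // Illustrative restatement of humongous_region_is_candidate(); each
    // parameter stands in for one of the heap/region queries made above.
    static bool is_reclaim_candidate(bool is_type_array,
                                     bool remset_small,
                                     bool mark_in_progress,
                                     bool allocated_since_mark_start,
                                     bool allow_pre_snapshot_type_arrays) {
      if (!is_type_array || !remset_small) {
        return false;  // never nominate reference-containing objects or large remsets
      }
      if (!mark_in_progress || allocated_since_mark_start) {
        return true;   // cannot be on the concurrent mark stack
      }
      // Allocated before the marking snapshot: nominate only with the extra
      // flag, relying on typeArrays never being pushed onto the mark stack.
      return allow_pre_snapshot_type_arrays;
    }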
*** 3502,3519 ****
      if (!r->is_starts_humongous()) {
        return false;
      }
      G1CollectedHeap* g1h = G1CollectedHeap::heap();

!     uint region_idx = r->hrm_index();
!     bool is_candidate = humongous_region_is_candidate(region_idx);
      // Is_candidate already filters out humongous object with large remembered sets.
      // If we have a humongous object with a few remembered sets, we simply flush these
      // remembered set entries into the DCQS. That will result in automatic
      // re-evaluation of their remembered set entries during the following evacuation
      // phase.
-     if (is_candidate) {
      if (!r->rem_set()->is_empty()) {
        guarantee(r->rem_set()->occupancy_less_or_equal_than(G1RSetSparseRegionEntries),
                  "Found a not-small remembered set here. This is inconsistent with previous assumptions.");
        G1SATBCardTableLoggingModRefBS* bs = g1h->g1_barrier_set();
        HeapRegionRemSetIterator hrrs(r->rem_set());
--- 3530,3547 ----
      if (!r->is_starts_humongous()) {
        return false;
      }
      G1CollectedHeap* g1h = G1CollectedHeap::heap();

!     if (!humongous_region_is_candidate(g1h, r)) {
!       g1h->remove_humongous_reclaim_candidate(r->hrm_index());
!     } else {
      // Is_candidate already filters out humongous object with large remembered sets.
      // If we have a humongous object with a few remembered sets, we simply flush these
      // remembered set entries into the DCQS. That will result in automatic
      // re-evaluation of their remembered set entries during the following evacuation
      // phase.
      if (!r->rem_set()->is_empty()) {
        guarantee(r->rem_set()->occupancy_less_or_equal_than(G1RSetSparseRegionEntries),
                  "Found a not-small remembered set here. This is inconsistent with previous assumptions.");
        G1SATBCardTableLoggingModRefBS* bs = g1h->g1_barrier_set();
        HeapRegionRemSetIterator hrrs(r->rem_set());
*** 3531,3541 ****
          }
        }
        r->rem_set()->clear_locked();
      }
      assert(r->rem_set()->is_empty(), "At this point any humongous candidate remembered set must be empty.");
!     g1h->register_humongous_region_with_cset(region_idx);
      _candidate_humongous++;
    }
    _total_humongous++;

    return false;
--- 3559,3571 ----
          }
        }
        r->rem_set()->clear_locked();
      }
      assert(r->rem_set()->is_empty(), "At this point any humongous candidate remembered set must be empty.");
!     uint rindex = r->hrm_index();
!     g1h->add_humongous_reclaim_candidate(rindex);
!     g1h->register_humongous_region_with_cset(rindex);
      _candidate_humongous++;
    }
    _total_humongous++;

    return false;
*** 3552,3574 ****
      g1_policy()->phase_times()->record_fast_reclaim_humongous_stats(0.0, 0, 0);
      return;
    }

    double time = os::elapsed_counter();

    RegisterHumongousWithInCSetFastTestClosure cl;
    heap_region_iterate(&cl);

    time = ((double)(os::elapsed_counter() - time) / os::elapsed_frequency()) * 1000.0;
    g1_policy()->phase_times()->record_fast_reclaim_humongous_stats(time, cl.total_humongous(), cl.candidate_humongous());
    _has_humongous_reclaim_candidates = cl.candidate_humongous() > 0;

-   if (_has_humongous_reclaim_candidates || G1TraceEagerReclaimHumongousObjects) {
-     clear_humongous_is_live_table();
-   }
-
    // Finally flush all remembered set entries to re-check into the global DCQS.
    cl.flush_rem_set_entries();
  }

  void
--- 3582,3601 ----
      g1_policy()->phase_times()->record_fast_reclaim_humongous_stats(0.0, 0, 0);
      return;
    }

    double time = os::elapsed_counter();

+   // Collect reclaim candidate information and register candidates with cset.
    RegisterHumongousWithInCSetFastTestClosure cl;
    heap_region_iterate(&cl);

    time = ((double)(os::elapsed_counter() - time) / os::elapsed_frequency()) * 1000.0;
    g1_policy()->phase_times()->record_fast_reclaim_humongous_stats(time, cl.total_humongous(), cl.candidate_humongous());
    _has_humongous_reclaim_candidates = cl.candidate_humongous() > 0;

    // Finally flush all remembered set entries to re-check into the global DCQS.
    cl.flush_rem_set_entries();
  }

  void
*** 6181,6225 ****
      // - never consider object arrays at this time because they would pose
      // considerable effort for cleaning up the the remembered sets. This is
      // required because stale remembered sets might reference locations that
      // are currently allocated into.
      uint region_idx = r->hrm_index();
!     if (g1h->humongous_is_live(region_idx) ||
!         g1h->humongous_region_is_always_live(region_idx)) {

        if (G1TraceEagerReclaimHumongousObjects) {
!         gclog_or_tty->print_cr("Live humongous region %u size "SIZE_FORMAT" start "PTR_FORMAT" length "UINT32_FORMAT" with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is marked %d live-other %d obj array %d",
                                 region_idx,
                                 obj->size()*HeapWordSize,
                                 r->bottom(),
                                 r->region_num(),
                                 r->rem_set()->occupied(),
                                 r->rem_set()->strong_code_roots_list_length(),
                                 next_bitmap->isMarked(r->bottom()),
!                                g1h->humongous_is_live(region_idx),
!                                obj->is_objArray()
                                );
        }

        return false;
      }

!     guarantee(!obj->is_objArray(),
!               err_msg("Eagerly reclaiming object arrays is not supported, but the object "PTR_FORMAT" is.", r->bottom()));

      if (G1TraceEagerReclaimHumongousObjects) {
!       gclog_or_tty->print_cr("Dead humongous region %u size "SIZE_FORMAT" start "PTR_FORMAT" length "UINT32_FORMAT" with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is marked %d live-other %d obj array %d",
                               region_idx,
                               obj->size()*HeapWordSize,
                               r->bottom(),
                               r->region_num(),
                               r->rem_set()->occupied(),
                               r->rem_set()->strong_code_roots_list_length(),
                               next_bitmap->isMarked(r->bottom()),
!                              g1h->humongous_is_live(region_idx),
!                              obj->is_objArray()
                              );
      }
      // Need to clear mark bit of the humongous object if already set.
      if (next_bitmap->isMarked(r->bottom())) {
        next_bitmap->clear(r->bottom());
--- 6208,6253 ----
      // - never consider object arrays at this time because they would pose
      // considerable effort for cleaning up the the remembered sets. This is
      // required because stale remembered sets might reference locations that
      // are currently allocated into.
      uint region_idx = r->hrm_index();
!     if (!g1h->is_humongous_reclaim_candidate(region_idx) ||
!         !r->rem_set()->is_empty()) {

        if (G1TraceEagerReclaimHumongousObjects) {
!         gclog_or_tty->print_cr("Live humongous region %u size "SIZE_FORMAT" start "PTR_FORMAT" length "UINT32_FORMAT" with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is marked %d reclaim candidate %d type array %d",
                                 region_idx,
                                 obj->size()*HeapWordSize,
                                 r->bottom(),
                                 r->region_num(),
                                 r->rem_set()->occupied(),
                                 r->rem_set()->strong_code_roots_list_length(),
                                 next_bitmap->isMarked(r->bottom()),
!                                g1h->is_humongous_reclaim_candidate(region_idx),
!                                obj->is_typeArray()
                                );
        }

        return false;
      }

!     guarantee(obj->is_typeArray(),
!               err_msg("Only eagerly reclaiming type arrays is supported, but the object "
!                       PTR_FORMAT " is not.", r->bottom()));

      if (G1TraceEagerReclaimHumongousObjects) {
!       gclog_or_tty->print_cr("Dead humongous region %u size "SIZE_FORMAT" start "PTR_FORMAT" length "UINT32_FORMAT" with remset "SIZE_FORMAT" code roots "SIZE_FORMAT" is marked %d reclaim candidate %d type array %d",
                               region_idx,
                               obj->size()*HeapWordSize,
                               r->bottom(),
                               r->region_num(),
                               r->rem_set()->occupied(),
                               r->rem_set()->strong_code_roots_list_length(),
                               next_bitmap->isMarked(r->bottom()),
!                              g1h->is_humongous_reclaim_candidate(region_idx),
!                              obj->is_typeArray()
                              );
      }
      // Need to clear mark bit of the humongous object if already set.
      if (next_bitmap->isMarked(r->bottom())) {
        next_bitmap->clear(r->bottom());
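At the actual reclaim point the test is now table-driven: a region is freed only if it was nominated at the start of the pause and its remembered set is still empty, i.e. no references to it turned up during evacuation; everything else is treated as live. A condensed illustrative restatement, with the queries above reduced to booleans (not HotSpot code):

    // Illustrative condensation of the reclaim-time check; nominated regions
    // contain typeArrays by construction, which the guarantee above enforces.
    static bool can_reclaim_now(bool nominated_at_pause_start,
                                bool remset_still_empty) {
      return nominated_at_pause_start && remset_still_empty;
    }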