src/share/vm/gc/g1/g1CollectedHeap.cpp
rev 13070 : imported patch 8177544-full-gc-scope
rev 13071 : [mq]: 8177544-full-gc-scope-tschatzl-rev1
*** 49,59 ****
#include "gc/g1/g1Policy.hpp"
#include "gc/g1/g1RegionToSpaceMapper.hpp"
#include "gc/g1/g1RemSet.inline.hpp"
#include "gc/g1/g1RootClosures.hpp"
#include "gc/g1/g1RootProcessor.hpp"
! #include "gc/g1/g1SerialCollector.hpp"
#include "gc/g1/g1StringDedup.hpp"
#include "gc/g1/g1YCTypes.hpp"
#include "gc/g1/heapRegion.inline.hpp"
#include "gc/g1/heapRegionRemSet.hpp"
#include "gc/g1/heapRegionSet.inline.hpp"
--- 49,59 ----
#include "gc/g1/g1Policy.hpp"
#include "gc/g1/g1RegionToSpaceMapper.hpp"
#include "gc/g1/g1RemSet.inline.hpp"
#include "gc/g1/g1RootClosures.hpp"
#include "gc/g1/g1RootProcessor.hpp"
! #include "gc/g1/g1SerialFullCollector.hpp"
#include "gc/g1/g1StringDedup.hpp"
#include "gc/g1/g1YCTypes.hpp"
#include "gc/g1/heapRegion.inline.hpp"
#include "gc/g1/heapRegionRemSet.hpp"
#include "gc/g1/heapRegionSet.inline.hpp"
*** 1134,1162 ****
tear_down_region_sets(false /* free_list_only */);
collector_state()->set_gcs_are_young(true);
}
! void G1CollectedHeap::reset_card_cache_and_queue() {
if (_hot_card_cache->use_cache()) {
_hot_card_cache->reset_card_counts();
_hot_card_cache->reset_hot_cache();
}
! // Discard all stale remembered set updates.
JavaThread::dirty_card_queue_set().abandon_logs();
assert(dirty_card_queue_set().completed_buffers_num() == 0, "DCQS should be empty");
}
- void G1CollectedHeap::verify_before_full_collection(bool explicit_gc) {
- assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
- assert(used() == recalculate_used(), "Should be equal");
- _verifier->verify_region_sets_optional();
- _verifier->verify_before_gc();
- _verifier->check_bitmaps("Full GC Start");
- }
-
void G1CollectedHeap::verify_after_full_collection() {
check_gc_time_stamps();
_hrm.verify_optional();
_verifier->verify_region_sets_optional();
_verifier->verify_after_gc();
--- 1134,1185 ----
tear_down_region_sets(false /* free_list_only */);
collector_state()->set_gcs_are_young(true);
}
! void G1CollectedHeap::verify_before_full_collection(bool explicit_gc) {
! assert(!GCCause::is_user_requested_gc(gc_cause()) || explicit_gc, "invariant");
! assert(used() == recalculate_used(), "Should be equal");
! _verifier->verify_region_sets_optional();
! _verifier->verify_before_gc();
! _verifier->check_bitmaps("Full GC Start");
! }
!
! void G1CollectedHeap::prepare_heap_for_mutators() {
! // Delete metaspaces for unloaded class loaders and clean up loader_data graph
! ClassLoaderDataGraph::purge();
! MetaspaceAux::verify_metrics();
!
! // Prepare heap for normal collections.
! assert(num_free_regions() == 0, "we should not have added any free regions");
! rebuild_region_sets(false /* free_list_only */);
! abort_refinement();
! resize_if_necessary_after_full_collection();
!
! // Rebuild the strong code root lists for each region
! rebuild_strong_code_roots();
!
! // Start a new incremental collection set for the next pause
! start_new_collection_set();
!
! _allocator->init_mutator_alloc_region();
!
! // Post collection state updates.
! MetaspaceGC::compute_new_size();
! }
!
! void G1CollectedHeap::abort_refinement() {
if (_hot_card_cache->use_cache()) {
_hot_card_cache->reset_card_counts();
_hot_card_cache->reset_hot_cache();
}
! // Discard all remembered set updates.
JavaThread::dirty_card_queue_set().abandon_logs();
assert(dirty_card_queue_set().completed_buffers_num() == 0, "DCQS should be empty");
}
void G1CollectedHeap::verify_after_full_collection() {
check_gc_time_stamps();
_hrm.verify_optional();
_verifier->verify_region_sets_optional();
_verifier->verify_after_gc();
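The new side of the hunk above factors the post-compaction cleanup into two helpers, abort_refinement() and prepare_heap_for_mutators(), alongside the relocated verify_before_full_collection(). As a rough orientation, the matching declarations on the g1CollectedHeap.hpp side would look roughly as sketched below; the header is not part of this page, so the access level and placement are assumptions, and only the names and signatures follow from the definitions above:

  // Reset the hot card cache and discard all pending remembered set
  // updates (replaces the old reset_card_cache_and_queue()).
  void abort_refinement();

  // Purge class loader data and rebuild the heap data structures
  // (region sets, code roots, collection set, mutator allocation
  // region) so that Java threads can run again after a full GC.
  void prepare_heap_for_mutators();

  // Verification bracketing a full collection.
  void verify_before_full_collection(bool explicit_gc);
  void verify_after_full_collection();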
*** 1187,1199 ****
assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
ref_processor_stw()->verify_no_references_recorded();
ref_processor_cm()->verify_no_references_recorded();
}
void G1CollectedHeap::do_full_collection_inner(G1FullGCScope* scope) {
GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause(), true);
- G1HeapTransition heap_transition(this);
g1_policy()->record_full_collection_start();
print_heap_before_gc();
print_heap_regions();
--- 1210,1231 ----
assert(!ref_processor_cm()->discovery_enabled(), "Postcondition");
ref_processor_stw()->verify_no_references_recorded();
ref_processor_cm()->verify_no_references_recorded();
}
+ void G1CollectedHeap::print_heap_after_full_collection(G1HeapTransition* heap_transition) {
+ print_hrm_post_compaction();
+ heap_transition->print();
+ print_heap_after_gc();
+ print_heap_regions();
+ #ifdef TRACESPINNING
+ ParallelTaskTerminator::print_termination_counts();
+ #endif
+ }
+
void G1CollectedHeap::do_full_collection_inner(G1FullGCScope* scope) {
GCTraceTime(Info, gc) tm("Pause Full", NULL, gc_cause(), true);
g1_policy()->record_full_collection_start();
print_heap_before_gc();
print_heap_regions();
*** 1201,1254 ****
verify_before_full_collection(scope->is_explicit_gc());
gc_prologue(true);
prepare_heap_for_full_collection();
! G1SerialCollector serial(scope, ref_processor_stw());
serial.prepare_collection();
serial.collect();
serial.complete_collection();
! assert(num_free_regions() == 0, "we should not have added any free regions");
! MemoryService::track_memory_usage();
!
! // Delete metaspaces for unloaded class loaders and clean up loader_data graph
! ClassLoaderDataGraph::purge();
! MetaspaceAux::verify_metrics();
!
! // Prepare heap for normal collections.
! rebuild_region_sets(false /* free_list_only */);
! reset_card_cache_and_queue();
! resize_if_necessary_after_full_collection();
!
! // Rebuild the strong code root lists for each region
! rebuild_strong_code_roots();
!
! // Start a new incremental collection set for the next pause
! start_new_collection_set();
!
! _allocator->init_mutator_alloc_region();
- // Post collection state updates.
- MetaspaceGC::compute_new_size();
- gc_epilogue(true);
g1_policy()->record_full_collection_end();
// Post collection verification.
verify_after_full_collection();
// Post collection logging.
// We should do this after we potentially resize the heap so
// that all the COMMIT / UNCOMMIT events are generated before
// the compaction events.
! print_hrm_post_compaction();
! heap_transition.print();
! print_heap_after_gc();
! print_heap_regions();
! #ifdef TRACESPINNING
! ParallelTaskTerminator::print_termination_counts();
! #endif
}
bool G1CollectedHeap::do_full_collection(bool explicit_gc,
bool clear_all_soft_refs) {
assert_at_safepoint(true /* should_be_vm_thread */);
--- 1233,1260 ----
verify_before_full_collection(scope->is_explicit_gc());
gc_prologue(true);
prepare_heap_for_full_collection();
! G1SerialFullCollector serial(scope, ref_processor_stw());
serial.prepare_collection();
serial.collect();
serial.complete_collection();
! prepare_heap_for_mutators();
g1_policy()->record_full_collection_end();
+ gc_epilogue(true);
// Post collection verification.
verify_after_full_collection();
// Post collection logging.
// We should do this after we potentially resize the heap so
// that all the COMMIT / UNCOMMIT events are generated before
// the compaction events.
! print_heap_after_full_collection(scope->heap_transition());
}
bool G1CollectedHeap::do_full_collection(bool explicit_gc,
bool clear_all_soft_refs) {
assert_at_safepoint(true /* should_be_vm_thread */);
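In the restructured do_full_collection_inner() above, the scope object is consulted in two places: scope->is_explicit_gc() for the pre-GC verification and scope->heap_transition() for the post-GC logging (the local G1HeapTransition the old code created is gone), and it is also handed to G1SerialFullCollector. A minimal sketch of the interface G1FullGCScope has to provide for this code to compile follows; the base class, constructor and any further members (timers, tracer, soft reference policy) are assumptions, since g1FullGCScope.hpp is not shown on this page:

  class G1FullGCScope : public StackObj {
    bool             _explicit_gc;
    G1HeapTransition _heap_transition;  // printed via heap_transition()->print()
    // constructor and other bookkeeping (timers, tracer, ...) omitted in this sketch
   public:
    bool is_explicit_gc() const         { return _explicit_gc; }
    G1HeapTransition* heap_transition() { return &_heap_transition; }
  };

A plausible caller shape is then for do_full_collection() to create the scope on the stack for the duration of the pause and pass it down, e.g. G1FullGCScope gc_scope(explicit_gc, clear_all_soft_refs); do_full_collection_inner(&gc_scope); the exact constructor arguments are likewise an assumption here.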
*** 2605,2614 ****
--- 2611,2621 ----
resize_all_tlabs();
g1_policy()->phase_times()->record_resize_tlab_time_ms((os::elapsedTime() - start) * 1000.0);
allocation_context_stats().update(full);
+ MemoryService::track_memory_usage();
// We have just completed a GC. Update the soft reference
// policy with the new heap occupancy
Universe::update_heap_info_at_gc();
}
*** 3155,3166 ****
g1_policy()->record_collection_pause_end(pause_time_ms, total_cards_scanned, heap_used_bytes_before_gc);
evacuation_info.set_collectionset_used_before(collection_set()->bytes_used_before());
evacuation_info.set_bytes_copied(g1_policy()->bytes_copied_during_gc());
- MemoryService::track_memory_usage();
-
// In prepare_for_verify() below we'll need to scan the deferred
// update buffers to bring the RSets up-to-date if
// G1HRRSFlushLogBuffersOnVerify has been set. While scanning
// the update buffers we'll probably need to scan cards on the
// regions we just allocated to (i.e., the GC alloc
--- 3162,3171 ----