  Universe::update_heap_info_at_gc();

  bool young_gen_empty = eden_empty && from_space->is_empty() &&
    to_space->is_empty();

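  // The mod-ref barrier (card table) only tracks old-to-young pointers.  If the
  // young gen is completely empty after this collection there can be none left,
  // so the old gen's portion of the table can simply be cleared; otherwise it is
  // dirtied wholesale so the next young collection rescans the old gen.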
  ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
  MemRegion old_mr = heap->old_gen()->reserved();
  if (young_gen_empty) {
    modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
  } else {
    modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
  }

  // Delete metaspaces for unloaded class loaders and clean up loader_data graph
  ClassLoaderDataGraph::purge();
  MetaspaceAux::verify_metrics();

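  // Post-GC fixups: CodeCache::gc_epilogue() gives compiled code a chance to fix
  // up its embedded oops now that objects may have moved, and
  // JvmtiExport::gc_epilogue() lets JVMTI do its own post-GC bookkeeping.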
  CodeCache::gc_epilogue();
  JvmtiExport::gc_epilogue();

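  // C2- and JVMCI-compiled frames may hold "derived" pointers (base oop plus an
  // offset into the object).  The pairs recorded during the pause are updated
  // here so they point into the objects' new locations.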
#if defined(COMPILER2) || INCLUDE_JVMCI
  DerivedPointerTable::update_pointers();
#endif

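  // Append the references discovered during marking to their pending lists.
  // A NULL task executor means the enqueueing is done serially on the calling
  // (VM) thread rather than being handed out to GC workers.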
  ref_processor()->enqueue_discovered_references(NULL);

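  // In debug builds, -XX:+ZapUnusedHeapArea fills unused heap with a known bit
  // pattern so stray references into free space are easier to spot.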
  if (ZapUnusedHeapArea) {
    heap->gen_mangle_unused_area();
  }

  // Update time of last GC
  reset_millis_since_last_gc();
}

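// The "dense prefix" is the low end of a space that is already so densely packed
// with live objects that copying it would cost more than it saves; compaction
// leaves it in place and only fills the holes inside it.  The helper below picks
// that boundary from the density of live data summarized per region.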
HeapWord*
PSParallelCompact::compute_dense_prefix_via_density(const SpaceId id,
                                                    bool maximum_compaction)
{
  const size_t region_size = ParallelCompactData::RegionSize;
  const ParallelCompactData& sd = summary_data();

  const MutableSpace* const space = _space_info[id].space();
  HeapWord* const top_aligned_up = sd.region_align_up(space->top());

// ... (intervening source lines elided in this excerpt) ...

  {
    ResourceMark rm;
    HandleMark hm;

    // Set the number of GC threads to be used in this collection
    gc_task_manager()->set_active_gang();
    gc_task_manager()->task_idle_workers();

    TraceCPUTime tcpu(PrintGCDetails, true, gclog_or_tty);
    GCTraceTime t1(GCCauseString("Full GC", gc_cause), PrintGC, !PrintGCDetails, NULL, _gc_tracer.gc_id());
    TraceCollectorStats tcs(counters());
    TraceMemoryManagerStats tms(true /* Full GC */, gc_cause);

    if (TraceOldGenTime) accumulated_time()->start();

    // Let the size policy know we're starting
    size_policy->major_collection_begin();

    CodeCache::gc_prologue();

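    // Reset the derived pointer table so base/derived oop pairs found in
    // compiled frames can be recorded during this pause.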
#if defined(COMPILER2) || INCLUDE_JVMCI
    DerivedPointerTable::clear();
#endif

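    // (Re)enable discovery of soft/weak/final/phantom references for this cycle
    // and pick the soft reference clearing policy; a maximal compaction clears
    // soft references more aggressively.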
    ref_processor()->enable_discovery();
    ref_processor()->setup_policy(maximum_heap_compaction);

    bool marked_for_unloading = false;

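    // Marking: trace the object graph from the strong roots, process the
    // discovered references, and unload classes and clean up metadata that are
    // no longer reachable.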
    marking_start.update();
    marking_phase(vmthread_cm, maximum_heap_compaction, &_gc_tracer);

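    // Summary: decide, per region, where its live data will move and choose the
    // dense prefix.  A user-requested (System.gc()) collection may force maximal
    // compaction if -XX:+UseMaximumCompactionOnSystemGC is set.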
    bool max_on_system_gc = UseMaximumCompactionOnSystemGC
      && GCCause::is_user_requested_gc(gc_cause);
    summary_phase(vmthread_cm, maximum_heap_compaction || max_on_system_gc);

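    // No new derived pointers are recorded from here on; the entries already
    // captured are updated later, in post_compact() (see
    // DerivedPointerTable::update_pointers() above).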
#if defined(COMPILER2) || INCLUDE_JVMCI
    assert(DerivedPointerTable::is_active(), "Sanity");
    DerivedPointerTable::set_active(false);
#endif

    // adjust_roots() updates Universe::_intArrayKlassObj which is
    // needed by the compaction for filling holes in the dense prefix.
    adjust_roots();

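    // Compaction: move live objects outside the dense prefix to their new
    // locations, region by region, updating interior pointers as the regions
    // are filled.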
    compaction_start.update();
    compact();

    // Reset the mark bitmap, summary data, and do other bookkeeping. Must be
    // done before resizing.
    post_compact();

    // Let the size policy know we're done
    size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);

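    // With -XX:+UseAdaptiveSizePolicy the generation sizes are revisited after a
    // full GC; the PrintAdaptiveSizePolicy output below brackets that resizing
    // work (the remainder of which falls outside this excerpt).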
    if (UseAdaptiveSizePolicy) {
      if (PrintAdaptiveSizePolicy) {
        gclog_or_tty->print("AdaptiveSizeStart: ");
        gclog_or_tty->stamp();
        gclog_or_tty->print_cr(" collection: %d ",