if (TraceOldGenTime) accumulated_time()->start();

// Let the size policy know we're starting
size_policy->major_collection_begin();

CodeCache::gc_prologue();
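// Mark words of biased-locked objects are saved off to the side: the
// phases below reuse object headers for mark and forwarding data. They
// are put back by BiasedLocking::restore_marks() in the epilogue.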
BiasedLocking::preserve_marks();

// Capture heap size before collection for printing.
size_t prev_used = heap->used();

// Capture metadata size before collection for sizing.
size_t metadata_prev_used = MetaspaceAux::used_bytes();

// For PrintGCDetails
size_t old_gen_prev_used = old_gen->used_in_bytes();
size_t young_gen_prev_used = young_gen->used_in_bytes();

allocate_stacks();

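// Compiled code (C2 or JVMCI) may hold "derived" pointers: interior
// pointers computed from an object's base address. The table records
// base/derived pairs so the derived values can be recomputed after
// objects move.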
#if defined(COMPILER2) || INCLUDE_JVMCI
DerivedPointerTable::clear();
#endif

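// Arm discovery of soft/weak/final/phantom references for the marking
// phase; the policy controls how aggressively soft references are cleared.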
ref_processor()->enable_discovery();
ref_processor()->setup_policy(clear_all_softrefs);

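// Phase 1: recursively trace from the GC roots and mark all live objects.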
mark_sweep_phase1(clear_all_softrefs);

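// Phase 2: compute each live object's new (compacted) address and install
// it as a forwarding pointer.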
mark_sweep_phase2();

#if defined(COMPILER2) || INCLUDE_JVMCI
// Don't add any more derived pointers during phase3
assert(DerivedPointerTable::is_active(), "Sanity");
DerivedPointerTable::set_active(false);
#endif

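// Phase 3: adjust every reference to point at its target's new location.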
mark_sweep_phase3();

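// Phase 4: slide the live objects to their forwarded addresses (the
// actual compaction).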
mark_sweep_phase4();

restore_marks();

deallocate_stacks();

if (ZapUnusedHeapArea) {
  // Do a complete mangle (top to end) because the usage for
  // scratch does not maintain a top pointer.
  young_gen->to_space()->mangle_unused_area_complete();
}

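// With adaptive sizing, a non-empty eden may be absorbed into the old
// generation by moving the boundary between the two generations, leaving
// eden empty without copying any objects.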
eden_empty = young_gen->eden_space()->is_empty();
if (!eden_empty) {
  eden_empty = absorb_live_data_from_eden(size_policy, young_gen, old_gen);
}

survivors_empty = young_gen->from_space()->is_empty() &&
                  young_gen->to_space()->is_empty();
young_gen_empty = eden_empty && survivors_empty;

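// After compaction the card table no longer reflects where old-to-young
// pointers live. If the young gen is empty, no such pointers can exist
// and the old gen's cards are simply cleared; otherwise they are all
// dirtied (invalidated) so the next young GC rescans them conservatively.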
ModRefBarrierSet* modBS = barrier_set_cast<ModRefBarrierSet>(heap->barrier_set());
MemRegion old_mr = heap->old_gen()->reserved();
if (young_gen_empty) {
  modBS->clear(MemRegion(old_mr.start(), old_mr.end()));
} else {
  modBS->invalidate(MemRegion(old_mr.start(), old_mr.end()));
}

// Delete metaspaces for unloaded class loaders and clean up loader_data graph
ClassLoaderDataGraph::purge();
MetaspaceAux::verify_metrics();

BiasedLocking::restore_marks();
CodeCache::gc_epilogue();
JvmtiExport::gc_epilogue();

#if defined(COMPILER2) || INCLUDE_JVMCI
DerivedPointerTable::update_pointers();
#endif

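// Hand the references discovered during marking over to the pending list;
// the NULL executor selects the single-threaded enqueue path.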
ref_processor()->enqueue_discovered_references(NULL);

// Update time of last GC
reset_millis_since_last_gc();

// Let the size policy know we're done
size_policy->major_collection_end(old_gen->used_in_bytes(), gc_cause);

if (UseAdaptiveSizePolicy) {

  if (PrintAdaptiveSizePolicy) {
    gclog_or_tty->print("AdaptiveSizeStart: ");
    gclog_or_tty->stamp();
    gclog_or_tty->print_cr(" collection: %d ",
                           heap->total_collections());
    if (Verbose) {
      gclog_or_tty->print("old_gen_capacity: " SIZE_FORMAT
                          " young_gen_capacity: " SIZE_FORMAT,
                          old_gen->capacity_in_bytes(), young_gen->capacity_in_bytes());