// ----- Old version -----

G1CLDClosure<G1MarkFromRoot> scan_mark_cld_cl(&scan_mark_root_cl,
                                              false, // Process all klasses.
                                              true); // Need to claim CLDs.
// Weak roots closures.
G1ParCopyClosure<G1BarrierNone, G1MarkPromotedFromRoot> scan_mark_weak_root_cl(_g1h, &pss, rp);
G1CLDClosure<G1MarkPromotedFromRoot> scan_mark_weak_cld_cl(&scan_mark_weak_root_cl,
                                                           false, // Process all klasses.
                                                           true); // Need to claim CLDs.

OopClosure* strong_root_cl;
OopClosure* weak_root_cl;
CLDClosure* strong_cld_cl;
CLDClosure* weak_cld_cl;

bool trace_metadata = false;
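// Choose between plain copy closures and copy-and-mark closures.
// During an initial-mark pause, objects evacuated from roots must also
// be marked so that concurrent marking starts from a complete root set.
// Weak roots only need the separate "promoted" marking closures when
// class unloading can happen; otherwise they reuse the strong closures.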
if (_g1h->g1_policy()->during_initial_mark_pause()) {
  // We also need to mark copied objects.
  strong_root_cl = &scan_mark_root_cl;
  strong_cld_cl  = &scan_mark_cld_cl;
  if (ClassUnloadingWithConcurrentMark) {
    weak_root_cl = &scan_mark_weak_root_cl;
    weak_cld_cl  = &scan_mark_weak_cld_cl;
    trace_metadata = true;
  } else {
    weak_root_cl = &scan_mark_root_cl;
    weak_cld_cl  = &scan_mark_cld_cl;
  }
} else {
  strong_root_cl = &scan_only_root_cl;
  weak_root_cl   = &scan_only_root_cl;
  strong_cld_cl  = &scan_only_cld_cl;
  weak_cld_cl    = &scan_only_cld_cl;
}

pss.start_strong_roots();

_root_processor->evacuate_roots(strong_root_cl,
                                weak_root_cl,
                                strong_cld_cl,
                                weak_cld_cl,
                                // ... (remaining arguments and the rest
                                // of the method not shown)
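// The hunk below comes from further down in the same file: the middle of
// a parallel klass cleaning task. The one-off subtask is claimed with the
// usual HotSpot idiom: the first worker to flip the flag from 0 to 1 via
// Atomic::cmpxchg wins, and the plain read before it is just a fast path
// that skips the atomic once the task has already been taken.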
  bool claim_clean_klass_tree_task() {
    if (_clean_klass_tree_claimed) {
      return false;
    }

    return Atomic::cmpxchg(1, (jint*)&_clean_klass_tree_claimed, 0) == 0;
  }

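  // Workers pull klasses from a shared iterator; only InstanceKlasses
  // carry the data cleaned below, so all other klasses are skipped.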
  InstanceKlass* claim_next_klass() {
    Klass* klass;
    do {
      klass = _klass_iterator.next_klass();
    } while (klass != NULL && !klass->oop_is_instance());

    return (InstanceKlass*)klass;
  }

public:

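  // Cleaning severs this klass's references to metadata that the marking
  // cycle found dead: stale implementor links, stale method data entries,
  // and (G1-specific) dependent nmethods.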
  void clean_klass(InstanceKlass* ik) {
    ik->clean_implementors_list(_is_alive);
    ik->clean_method_data(_is_alive);

    // G1 specific cleanup work that has
    // been moved here to be done in parallel.
    ik->clean_dependent_nmethods();
  }

  void work() {
    ResourceMark rm;

    // One worker will clean the subklass/sibling klass tree.
    if (claim_clean_klass_tree_task()) {
      Klass::clean_subklass_tree(_is_alive);
    }

    // All workers will help clean the classes.
    InstanceKlass* klass;
    while ((klass = claim_next_klass()) != NULL) {
      clean_klass(klass);
    }
  }
};

// To minimize the remark pause times, the tasks below are done in parallel.
class G1ParallelCleaningTask : public AbstractGangTask {
private:
  // ... (rest of the class not shown)
// ----- New version (metadata tracing and klass cleaning are now guarded
// by the ClassUnloading flag) -----

G1CLDClosure<G1MarkFromRoot> scan_mark_cld_cl(&scan_mark_root_cl,
                                              false, // Process all klasses.
                                              true); // Need to claim CLDs.
// Weak roots closures.
G1ParCopyClosure<G1BarrierNone, G1MarkPromotedFromRoot> scan_mark_weak_root_cl(_g1h, &pss, rp);
G1CLDClosure<G1MarkPromotedFromRoot> scan_mark_weak_cld_cl(&scan_mark_weak_root_cl,
                                                           false, // Process all klasses.
                                                           true); // Need to claim CLDs.

OopClosure* strong_root_cl;
OopClosure* weak_root_cl;
CLDClosure* strong_cld_cl;
CLDClosure* weak_cld_cl;

bool trace_metadata = false;
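// Changed here: metadata is traced only when both ClassUnloading and
// ClassUnloadingWithConcurrentMark are set, so -XX:-ClassUnloading now
// also disables the weak-root marking closures.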
if (_g1h->g1_policy()->during_initial_mark_pause()) {
  // We also need to mark copied objects.
  strong_root_cl = &scan_mark_root_cl;
  strong_cld_cl  = &scan_mark_cld_cl;
  if (ClassUnloading && ClassUnloadingWithConcurrentMark) {
    weak_root_cl = &scan_mark_weak_root_cl;
    weak_cld_cl  = &scan_mark_weak_cld_cl;
    trace_metadata = true;
  } else {
    weak_root_cl = &scan_mark_root_cl;
    weak_cld_cl  = &scan_mark_cld_cl;
  }
} else {
  strong_root_cl = &scan_only_root_cl;
  weak_root_cl   = &scan_only_root_cl;
  strong_cld_cl  = &scan_only_cld_cl;
  weak_cld_cl    = &scan_only_cld_cl;
}

pss.start_strong_roots();

_root_processor->evacuate_roots(strong_root_cl,
                                weak_root_cl,
                                strong_cld_cl,
                                weak_cld_cl,
                                // ... (remaining arguments and the rest
                                // of the method not shown)
  bool claim_clean_klass_tree_task() {
    if (_clean_klass_tree_claimed) {
      return false;
    }

    return Atomic::cmpxchg(1, (jint*)&_clean_klass_tree_claimed, 0) == 0;
  }

  InstanceKlass* claim_next_klass() {
    Klass* klass;
    do {
      klass = _klass_iterator.next_klass();
    } while (klass != NULL && !klass->oop_is_instance());

    return (InstanceKlass*)klass;
  }

public:

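  // Changed here: per-klass cleaning is skipped entirely when
  // ClassUnloading is off; with unloading disabled the cycle keeps class
  // metadata alive, so there is nothing for this task to clean.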
  void clean_klass(InstanceKlass* ik) {
    if (ClassUnloading) {
      ik->clean_implementors_list(_is_alive);
      ik->clean_method_data(_is_alive);

      // G1 specific cleanup work that has
      // been moved here to be done in parallel.
      ik->clean_dependent_nmethods();
    }
  }

  void work() {
    ResourceMark rm;

    // One worker will clean the subklass/sibling klass tree.
    if (claim_clean_klass_tree_task()) {
      Klass::clean_subklass_tree(_is_alive);
    }

    // All workers will help clean the classes.
    InstanceKlass* klass;
    while ((klass = claim_next_klass()) != NULL) {
      clean_klass(klass);
    }
  }
};

// To minimize the remark pause times, the tasks below are done in parallel.
class G1ParallelCleaningTask : public AbstractGangTask {
private:
  // ... (rest of the class not shown)