68 #include "oops/oop.inline.hpp"
69 #include "runtime/atomic.hpp"
70 #include "runtime/javaCalls.hpp"
71 #include "runtime/jniHandles.hpp"
72 #include "runtime/mutex.hpp"
73 #include "runtime/orderAccess.hpp"
74 #include "runtime/safepoint.hpp"
75 #include "runtime/synchronizer.hpp"
76 #include "utilities/growableArray.hpp"
77 #include "utilities/macros.hpp"
78 #include "utilities/ostream.hpp"
79 #if INCLUDE_TRACE
80 #include "trace/tracing.hpp"
81 #endif
82
83 volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
84 volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;
85
86 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
87
88 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous, Dependencies dependencies) :
89 _class_loader(h_class_loader()),
90 _is_anonymous(is_anonymous),
91 // An anonymous class loader data doesn't have anything to keep
92 // it from being unloaded during parsing of the anonymous class.
93 // The null-class-loader should always be kept alive.
94 _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
95 _metaspace(NULL), _unloading(false), _klasses(NULL),
96 _modules(NULL), _packages(NULL),
97 _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
98 _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
99 _next(NULL), _dependencies(dependencies),
100 _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
101 Monitor::_safepoint_check_never)) {
102
103 // A ClassLoaderData created solely for an anonymous class should never have a
104 // ModuleEntryTable or PackageEntryTable created for it. The defining package
105 // and module for an anonymous class will be found in its host class.
106 if (!is_anonymous) {
107 _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
108 if (h_class_loader.is_null()) {
109 // Create unnamed module for boot loader
110 _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
111 } else {
112 // Create unnamed module for all other loaders
113 _unnamed_module = ModuleEntry::create_unnamed_module(this);
114 }
115 } else {
116 _unnamed_module = NULL;
117 }
118
119 if (!is_anonymous) {
120 _dictionary = create_dictionary();
121 } else {
122 _dictionary = NULL;
123 }
124 TRACE_INIT_ID(this);
125 }
126
127 void ClassLoaderData::init_dependencies(TRAPS) {
128 assert(!Universe::is_fully_initialized(), "should only be called when initializing");
129 assert(is_the_null_class_loader_data(), "should only call this for the null class loader");
130 _dependencies.init(CHECK);
131 }
132
133 void ClassLoaderData::Dependencies::init(TRAPS) {
134 // Create empty dependencies array to add to. CMS requires this to be
135 // an oop so that it can track additions via card marks. We think.
136 _list_head = oopFactory::new_objectArray(2, CHECK);
137 }
138
139 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
140 Chunk* c = _head;
141 while (c != NULL) {
142 Chunk* next = c->_next;
143 delete c;
144 c = next;
145 }
146 }
147
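// add() publishes new chunks and the incremented element count with release_store,
// so lock-free readers in oops_do (which pair with load_acquire) never observe an
// uninitialized slot.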
148 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
149 if (_head == NULL || _head->_size == Chunk::CAPACITY) {
150 Chunk* next = new Chunk(_head);
151 OrderAccess::release_store(&_head, next);
152 }
153 oop* handle = &_head->_data[_head->_size];
154 *handle = o;
155 OrderAccess::release_store(&_head->_size, _head->_size + 1);
156 return handle;
157 }
158
159 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
160 for (juint i = 0; i < size; i++) {
161 if (c->_data[i] != NULL) {
162 f->do_oop(&c->_data[i]);
163 }
164 }
165 }
166
167 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
168 Chunk* head = OrderAccess::load_acquire(&_head);
169 if (head != NULL) {
170 // Must be careful when reading size of head
171 oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
172 for (Chunk* c = head->_next; c != NULL; c = c->_next) {
173 oops_do_chunk(f, c, c->_size);
174 }
175 }
176 }
177
178 #ifdef ASSERT
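// Debug-only helper for ChunkedHandleList::contains(oop*): checks whether the given
// slot address is one of the handle locations stored in the list.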
179 class VerifyContainsOopClosure : public OopClosure {
180 oop* _target;
181 bool _found;
182
183 public:
184 VerifyContainsOopClosure(oop* target) : _target(target), _found(false) {}
185
186 void do_oop(oop* p) {
187 if (p == _target) {
188 _found = true;
189 }
190 }
191
192 void do_oop(narrowOop* p) {
193 // The ChunkedHandleList should not contain any narrowOop
194 ShouldNotReachHere();
195 }
196
197 bool found() const {
198 return _found;
199 }
200 };
201
202 bool ClassLoaderData::ChunkedHandleList::contains(oop* p) {
203 VerifyContainsOopClosure cl(p);
204 oops_do(&cl);
205 return cl.found();
206 }
207 #endif // ASSERT
208
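// claim() lets multiple oop closures walk the ClassLoaderData graph without visiting
// the same ClassLoaderData twice: only the thread that flips _claimed from 0 to 1
// gets to process it.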
209 bool ClassLoaderData::claim() {
210 if (_claimed == 1) {
211 return false;
212 }
213
214 return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
215 }
216
217 // Anonymous classes have their own ClassLoaderData that is marked to keep alive
218 // while the class is being parsed, and if the class appears on the module fixup list.
219 // Because no other class shares the anonymous class' name or ClassLoaderData,
220 // no non-GC thread other than the defining thread has knowledge of the anonymous
221 // class while it is being defined; therefore _keep_alive is not volatile or atomic.
222 void ClassLoaderData::inc_keep_alive() {
223 if (is_anonymous()) {
224 assert(_keep_alive >= 0, "Invalid keep alive increment count");
225 _keep_alive++;
226 }
227 }
228
229 void ClassLoaderData::dec_keep_alive() {
230 if (is_anonymous()) {
231 assert(_keep_alive > 0, "Invalid keep alive decrement count");
232 _keep_alive--;
233 }
234 }
235
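// Visit the oops owned by this ClassLoaderData: the class loader itself, the
// dependency list, and the chunked handle list.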
236 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
237 if (must_claim && !claim()) {
238 return;
239 }
240
241 // Only clear modified_oops after the ClassLoaderData is claimed.
242 if (clear_mod_oops) {
243 clear_modified_oops();
244 }
245
246 f->do_oop(&_class_loader);
247 _dependencies.oops_do(f);
248 _handles.oops_do(f);
249 }
250
251 void ClassLoaderData::Dependencies::oops_do(OopClosure* f) {
252 f->do_oop((oop*)&_list_head);
253 }
254
255 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
256 // Lock-free access requires load_acquire
257 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
258 klass_closure->do_klass(k);
259 assert(k != k->next_link(), "no loops!");
260 }
261 }
262
263 void ClassLoaderData::classes_do(void f(Klass * const)) {
264 // Lock-free access requires load_acquire
265 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
266 f(k);
267 assert(k != k->next_link(), "no loops!");
268 }
269 }
270
271 void ClassLoaderData::methods_do(void f(Method*)) {
272 // Lock-free access requires load_acquire
273 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
274 if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
309 entry = entry->next()) {
310 f(entry);
311 }
312 }
313 }
314 }
315
316 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
317 assert_locked_or_safepoint(Module_lock);
318 if (_packages != NULL) {
319 for (int i = 0; i < _packages->table_size(); i++) {
320 for (PackageEntry* entry = _packages->bucket(i);
321 entry != NULL;
322 entry = entry->next()) {
323 f(entry);
324 }
325 }
326 }
327 }
328
329 void ClassLoaderData::record_dependency(const Klass* k, TRAPS) {
330 assert(k != NULL, "invariant");
331
332 ClassLoaderData * const from_cld = this;
333 ClassLoaderData * const to_cld = k->class_loader_data();
334
335 // Do not need to record dependency if the dependency is to a class whose
336 // class loader data is never freed. (i.e. the dependency's class loader
337 // is one of the three builtin class loaders and the dependency is not
338 // anonymous.)
339 if (to_cld->is_permanent_class_loader_data()) {
340 return;
341 }
342
343 oop to;
344 if (to_cld->is_anonymous()) {
345 // Just return if an anonymous class is attempting to record a dependency
346 // to itself. (Note that every anonymous class has its own unique class
347 // loader data.)
348 if (to_cld == from_cld) {
349 return;
350 }
351 // Anonymous class dependencies are through the mirror.
352 to = k->java_mirror();
353 } else {
354 to = to_cld->class_loader();
355 oop from = from_cld->class_loader();
356
357 // Just return if this dependency is to a class with the same or a parent
358 // class_loader.
359 if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
360 return; // this class loader is in the parent list, no need to add it.
361 }
362 }
363
364 // It's a dependency we won't find through GC, so add it. This is relatively rare.
365 // Wrap the oop in a Handle because adding it below may allocate and cross a GC point.
366 Handle dependency(THREAD, to);
367 from_cld->_dependencies.add(dependency, CHECK);
368
369 // Added a potentially young gen oop to the ClassLoaderData
370 record_modified_oops();
371 }
372
373
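// The dependency list is a chain of two-element object arrays: slot 0 holds the
// class loader oop (or mirror, for anonymous classes), slot 1 links to the next node.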
374 void ClassLoaderData::Dependencies::add(Handle dependency, TRAPS) {
375 // Check first if this dependency is already in the list.
376 // Save a pointer to the last to add to under the lock.
377 objArrayOop ok = _list_head;
378 objArrayOop last = NULL;
379 while (ok != NULL) {
380 last = ok;
381 if (ok->obj_at(0) == dependency()) {
382 // Don't need to add it
383 return;
384 }
385 ok = (objArrayOop)ok->obj_at(1);
386 }
387
388 // Must handle over GC points
389 assert (last != NULL, "dependencies should be initialized");
390 objArrayHandle last_handle(THREAD, last);
391
392 // Create a new dependency node with fields for (class_loader or mirror, next)
393 objArrayOop deps = oopFactory::new_objectArray(2, CHECK);
394 deps->obj_at_put(0, dependency());
395
396 // Must handle over GC points
397 objArrayHandle new_dependency(THREAD, deps);
398
399 // Add the dependency under lock
400 locked_add(last_handle, new_dependency, THREAD);
401 }
402
403 void ClassLoaderData::Dependencies::locked_add(objArrayHandle last_handle,
404 objArrayHandle new_dependency,
405 Thread* THREAD) {
406
407 // Have to lock and put the new dependency on the end of the dependency
408 // array so the card mark for CMS sees that this dependency is new.
409 // Can probably do this lock free with some effort.
410 ObjectLocker ol(Handle(THREAD, _list_head), THREAD);
411
412 oop loader_or_mirror = new_dependency->obj_at(0);
413
414 // Since the dependencies are only added, add to the end.
415 objArrayOop end = last_handle();
416 objArrayOop last = NULL;
417 while (end != NULL) {
418 last = end;
419 // check again if another thread added it to the end.
420 if (end->obj_at(0) == loader_or_mirror) {
421 // Don't need to add it
422 return;
423 }
424 end = (objArrayOop)end->obj_at(1);
425 }
426 assert (last != NULL, "dependencies should be initialized");
427 // fill in the first element with the oop in new_dependency.
428 if (last->obj_at(0) == NULL) {
429 last->obj_at_put(0, new_dependency->obj_at(0));
430 } else {
431 last->obj_at_put(1, new_dependency());
432 }
433 }
434
435 void ClassLoaderDataGraph::clear_claimed_marks() {
436 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
437 cld->clear_claimed();
438 }
439 }
440
441 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
442 {
443 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
444 Klass* old_value = _klasses;
445 k->set_next_link(old_value);
446 // Link the new item into the list, making sure the linked class is stable
447 // since the list can be walked without a lock
448 OrderAccess::release_store(&_klasses, k);
449 if (k->is_array_klass()) {
450 ClassLoaderDataGraph::inc_array_classes(1);
451 } else {
452 ClassLoaderDataGraph::inc_instance_classes(1);
453 }
454 }
455
456 if (publicize && k->class_loader_data() != NULL) {
457 ResourceMark rm;
458 log_trace(class, loader, data)("Adding k: " PTR_FORMAT " %s to CLD: "
459 PTR_FORMAT " loader: " PTR_FORMAT " %s",
460 p2i(k),
461 k->external_name(),
462 p2i(k->class_loader_data()),
463 p2i((void *)k->class_loader()),
464 loader_name());
465 }
466 }
467
468 // Class iterator used by the compiler. It gets some number of classes at
469 // a safepoint to decay invocation counters on the methods.
470 class ClassLoaderDataGraphKlassIteratorStatic {
471 ClassLoaderData* _current_loader_data;
472 Klass* _current_class_entry;
473 public:
474
475 ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
476
477 InstanceKlass* try_get_next_class() {
478 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
479 size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
480 assert(max_classes > 0, "should not be called with no instance classes");
481 for (size_t i = 0; i < max_classes; ) {
482
483 if (_current_class_entry != NULL) {
484 Klass* k = _current_class_entry;
561 }
562
563 return;
564 }
565 prev = k;
566 assert(k != k->next_link(), "no loops!");
567 }
568 ShouldNotReachHere(); // should have found this class!!
569 }
570
571 void ClassLoaderData::unload() {
572 _unloading = true;
573
574 // Tell serviceability tools these classes are unloading
575 classes_do(InstanceKlass::notify_unload_class);
576
577 LogTarget(Debug, class, loader, data) lt;
578 if (lt.is_enabled()) {
579 ResourceMark rm;
580 LogStream ls(lt);
581 ls.print(": unload loader data " INTPTR_FORMAT, p2i(this));
582 ls.print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)class_loader()),
583 loader_name());
584 if (is_anonymous()) {
585 ls.print(" for anonymous class " INTPTR_FORMAT " ", p2i(_klasses));
586 }
587 ls.cr();
588 }
589
590 // Some items on the _deallocate_list need to free their C heap structures
591 // if they are not already on the _klasses list.
592 unload_deallocate_list();
593
594 // Clean up global class iterator for compiler
595 static_klass_iterator.adjust_saved_class(this);
596 }
597
598 ModuleEntryTable* ClassLoaderData::modules() {
599 // Lazily create the module entry table at first request.
600 // Lock-free access requires load_acquire.
601 ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
602 if (modules == NULL) {
603 MutexLocker m1(Module_lock);
604 // Check if _modules got allocated while we were waiting for this lock.
605 if ((modules = _modules) == NULL) {
606 modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
762 // class loaders and not anonymous.
763 bool ClassLoaderData::is_permanent_class_loader_data() const {
764 return is_builtin_class_loader_data() && !is_anonymous();
765 }
766
767 Metaspace* ClassLoaderData::metaspace_non_null() {
768 // If the metaspace has not been allocated, create a new one. Might want
769 // to create a smaller arena for Reflection class loaders also.
770 // The allocation is delayed because some class loaders exist simply to
771 // delegate and have no metadata of their own.
772 // Lock-free access requires load_acquire.
773 Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
774 if (metaspace == NULL) {
775 MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
776 // Check if _metaspace got allocated while we were waiting for this lock.
777 if ((metaspace = _metaspace) == NULL) {
778 if (this == the_null_class_loader_data()) {
779 assert (class_loader() == NULL, "Must be");
780 metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
781 } else if (is_anonymous()) {
782 if (class_loader() != NULL) {
783 log_trace(class, loader, data)("is_anonymous: %s", class_loader()->klass()->internal_name());
784 }
785 metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
786 } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
787 if (class_loader() != NULL) {
788 log_trace(class, loader, data)("is_reflection: %s", class_loader()->klass()->internal_name());
789 }
790 metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
791 } else {
792 metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
793 }
794 // Ensure _metaspace is stable, since it is examined without a lock
795 OrderAccess::release_store(&_metaspace, metaspace);
796 }
797 }
798 return metaspace;
799 }
800
801 OopHandle ClassLoaderData::add_handle(Handle h) {
802 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
803 record_modified_oops();
804 return OopHandle(_handles.add(h()));
805 }
806
807 void ClassLoaderData::remove_handle(OopHandle h) {
808 assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
809 oop* ptr = h.ptr_raw();
810 if (ptr != NULL) {
811 assert(_handles.contains(ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
812 // This root is not walked in safepoints, and hence requires an appropriate
813 // decorator that e.g. maintains the SATB invariant in SATB collectors.
814 RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
815 }
816 }
817
818 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
819 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
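  // Another thread may have initialized the handle while we waited for the lock.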
820 if (dest.resolve() != NULL) {
821 return;
822 } else {
823 dest = _handles.add(h());
824 }
825 }
826
827 // Add this metadata pointer to be freed when it's safe. This is only during
828 // class unloading because Handles might point to this metadata field.
829 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
830 // Metadata in shared region isn't deleted.
831 if (!m->is_shared()) {
885 }
886 // Go backwards because this removes entries that are freed.
887 for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
888 Metadata* m = _deallocate_list->at(i);
889 assert (!m->on_stack(), "wouldn't be unloading if this were so");
890 _deallocate_list->remove_at(i);
891 if (m->is_constantPool()) {
892 ((ConstantPool*)m)->release_C_heap_structures();
893 } else if (m->is_klass()) {
894 InstanceKlass* ik = (InstanceKlass*)m;
895 // also releases ik->constants() C heap memory
896 InstanceKlass::release_C_heap_structures(ik);
897 // Remove the class so unloading events aren't triggered for
898 // this class (scratch or error class) in do_unloading().
899 remove_class(ik);
900 }
901 }
902 }
903
904 // These anonymous class loader data are created to contain classes used for JSR 292
905 ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(oop loader, TRAPS) {
906 // Add a new class loader data to the graph.
907 Handle lh(THREAD, loader);
908 return ClassLoaderDataGraph::add(lh, true, THREAD);
909 }
910
911 const char* ClassLoaderData::loader_name() {
912 // Handles null class loader
913 return SystemDictionary::loader_name(class_loader());
914 }
915
916 #ifndef PRODUCT
917 // Define to dump klasses
918 #undef CLD_DUMP_KLASSES
919
920 void ClassLoaderData::dump(outputStream * const out) {
921 out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: " PTR_FORMAT " %s {",
922 p2i(this), p2i((void *)class_loader()),
923 p2i(class_loader() != NULL ? class_loader()->klass() : NULL), loader_name());
924 if (claimed()) out->print(" claimed ");
925 if (is_unloading()) out->print(" unloading ");
926 out->cr();
927 if (metaspace_or_null() != NULL) {
928 out->print_cr("metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
929 metaspace_or_null()->dump(out);
930 } else {
931 out->print_cr("metaspace: NULL");
932 }
933
934 #ifdef CLD_DUMP_KLASSES
935 if (Verbose) {
936 Klass* k = _klasses;
937 while (k != NULL) {
938 out->print_cr("klass " PTR_FORMAT ", %s", p2i(k), k->name()->as_C_string());
939 assert(k != k->next_link(), "no loops!");
940 k = k->next_link();
941 }
942 }
943 #endif // CLD_DUMP_KLASSES
944 #undef CLD_DUMP_KLASSES
945 if (_jmethod_ids != NULL) {
946 Method::print_jmethod_ids(this, out);
947 }
948 out->print_cr("}");
949 }
950 #endif // PRODUCT
951
952 void ClassLoaderData::verify() {
953 assert_locked_or_safepoint(_metaspace_lock);
954 oop cl = class_loader();
955
956 guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
957 guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");
958
959 // Verify the integrity of the allocated space.
960 if (metaspace_or_null() != NULL) {
961 metaspace_or_null()->verify();
962 }
963
964 for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
965 guarantee(k->class_loader_data() == this, "Must be the same");
966 k->verify();
967 assert(k != k->next_link(), "no loops!");
971 bool ClassLoaderData::contains_klass(Klass* klass) {
972 // Lock-free access requires load_acquire
973 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
974 if (k == klass) return true;
975 }
976 return false;
977 }
978
979
980 // GC root of class loader data created.
981 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
982 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
983 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
984 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
985
986 bool ClassLoaderDataGraph::_should_purge = false;
987 bool ClassLoaderDataGraph::_metaspace_oom = false;
988
989 // Add a new class loader data node to the list. Install the newly created
990 // ClassLoaderData in the java/lang/ClassLoader object as a hidden field.
991 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous, TRAPS) {
992 // We need to allocate all the oops for the ClassLoaderData before allocating the
993 // actual ClassLoaderData object.
994 ClassLoaderData::Dependencies dependencies(CHECK_NULL);
995
996 NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
997 // ClassLoaderData in the graph since the CLD
998 // contains unhandled oops
999
1000 ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous, dependencies);
1001
1002
1003 if (!is_anonymous) {
1004 // First, atomically install this ClassLoaderData in the java.lang.ClassLoader object
1005 ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
1006 if (old != NULL) {
1007 delete cld;
1008 // Another thread installed its ClassLoaderData first; return that one.
1009 return old;
1010 }
1011 }
1012
1013 // We won the race, so it is our task to add the new ClassLoaderData to the list
1014 // of class loader data.
1015 ClassLoaderData** list_head = &_head;
1016 ClassLoaderData* next = _head;
1017
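  // Lock-free prepend: point the new CLD at the observed head and retry the CAS
  // with the updated head until the new CLD is linked in.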
1018 do {
1019 cld->set_next(next);
1020 ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
1021 if (exchanged == next) {
1022 LogTarget(Debug, class, loader, data) lt;
1023 if (lt.is_enabled()) {
1024 PauseNoSafepointVerifier pnsv(&no_safepoints); // Need safe points for JavaCalls::call_virtual
1025 LogStream ls(lt);
1026 print_creation(&ls, loader, cld, CHECK_NULL);
1027 }
1028 return cld;
1029 }
1030 next = exchanged;
1031 } while (true);
1032 }
1033
1034 void ClassLoaderDataGraph::print_creation(outputStream* out, Handle loader, ClassLoaderData* cld, TRAPS) {
1035 Handle string;
1036 if (loader.not_null()) {
1037 // Include the result of loader.toString() in the output. This allows
1038 // the user of the log to identify the class loader instance.
1039 JavaValue result(T_OBJECT);
1040 Klass* spec_klass = SystemDictionary::ClassLoader_klass();
1041 JavaCalls::call_virtual(&result,
1042 loader,
1043 spec_klass,
1044 vmSymbols::toString_name(),
1045 vmSymbols::void_string_signature(),
1046 CHECK);
1047 assert(result.get_type() == T_OBJECT, "just checking");
1048 string = Handle(THREAD, (oop)result.get_jobject());
1049 }
1050
1051 ResourceMark rm;
1052 out->print("create class loader data " INTPTR_FORMAT, p2i(cld));
1053 out->print(" for instance " INTPTR_FORMAT " of %s", p2i((void *)cld->class_loader()),
1054 cld->loader_name());
1055
1056 if (string.not_null()) {
1057 out->print(": ");
1058 java_lang_String::print(string(), out);
1059 }
1060 out->cr();
1061 }
1062
1063
1064 void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
1065 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1066 cld->oops_do(f, must_claim);
1067 }
1068 }
1069
1070 void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
1071 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
1072 if (cld->keep_alive()) {
1073 cld->oops_do(f, must_claim);
1074 }
1075 }
1076 }
1077
1078 void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
1079 if (ClassUnloading) {
1080 keep_alive_oops_do(f, must_claim);
1081 } else {
1082 oops_do(f, must_claim);
1083 }
1460 return head; // Won the CAS.
1461 }
1462
1463 head = old_head;
1464 }
1465
1466 // Nothing more for the iterator to hand out.
1467 assert(head == NULL, "head is " PTR_FORMAT ", expected NULL", p2i(head));
1468 return NULL;
1469 }
1470
1471 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
1472 _data = ClassLoaderDataGraph::_head;
1473 }
1474
1475 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1476
1477 #ifndef PRODUCT
1478 // callable from debugger
1479 extern "C" int print_loader_data_graph() {
1480 ClassLoaderDataGraph::dump_on(tty);
1481 return 0;
1482 }
1483
1484 void ClassLoaderDataGraph::verify() {
1485 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1486 data->verify();
1487 }
1488 }
1489
1490 void ClassLoaderDataGraph::dump_on(outputStream * const out) {
1491 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1492 data->dump(out);
1493 }
1494 MetaspaceAux::dump(out);
1495 }
1496 #endif // PRODUCT
1497
1498 void ClassLoaderData::print_value_on(outputStream* out) const {
1499 if (class_loader() == NULL) {
1500 out->print("NULL class loader");
1501 } else {
1502 out->print("class loader " INTPTR_FORMAT " ", p2i(this));
1503 class_loader()->print_value_on(out);
1504 }
1505 }
1506
1507 void ClassLoaderData::print_on(outputStream* out) const {
1508 if (class_loader() == NULL) {
1509 out->print("NULL class loader");
1510 } else {
1511 out->print("class loader " INTPTR_FORMAT " ", p2i(this));
1512 class_loader()->print_on(out);
1513 }
1514 }
1515
1516 #if INCLUDE_TRACE
1517
1518 Ticks ClassLoaderDataGraph::_class_unload_time;
1519
1520 void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
1521 assert(k != NULL, "invariant");
1522
1523 // post class unload event
1524 EventClassUnload event(UNTIMED);
1525 event.set_endtime(_class_unload_time);
1526 event.set_unloadedClass(k);
1527 event.set_definingClassLoader(k->class_loader_data());
1528 event.commit();
1529 }
1530
1531 #endif // INCLUDE_TRACE
68 #include "oops/oop.inline.hpp"
69 #include "runtime/atomic.hpp"
70 #include "runtime/javaCalls.hpp"
71 #include "runtime/jniHandles.hpp"
72 #include "runtime/mutex.hpp"
73 #include "runtime/orderAccess.hpp"
74 #include "runtime/safepoint.hpp"
75 #include "runtime/synchronizer.hpp"
76 #include "utilities/growableArray.hpp"
77 #include "utilities/macros.hpp"
78 #include "utilities/ostream.hpp"
79 #if INCLUDE_TRACE
80 #include "trace/tracing.hpp"
81 #endif
82
83 volatile size_t ClassLoaderDataGraph::_num_array_classes = 0;
84 volatile size_t ClassLoaderDataGraph::_num_instance_classes = 0;
85
86 ClassLoaderData * ClassLoaderData::_the_null_class_loader_data = NULL;
87
88 void ClassLoaderData::init_null_class_loader_data() {
89 assert(_the_null_class_loader_data == NULL, "cannot initialize twice");
90 assert(ClassLoaderDataGraph::_head == NULL, "cannot initialize twice");
91
92 _the_null_class_loader_data = new ClassLoaderData(Handle(), false);
93 ClassLoaderDataGraph::_head = _the_null_class_loader_data;
94 assert(_the_null_class_loader_data->is_the_null_class_loader_data(), "Must be");
95
96 LogTarget(Debug, class, loader, data) lt;
97 if (lt.is_enabled()) {
98 ResourceMark rm;
99 LogStream ls(lt);
100 ls.print("create ");
101 _the_null_class_loader_data->print_value_on(&ls);
102 ls.cr();
103 }
104 }
105
106 ClassLoaderData::ClassLoaderData(Handle h_class_loader, bool is_anonymous) :
107 _class_loader(h_class_loader()),
108 _is_anonymous(is_anonymous),
109 // An anonymous class loader data doesn't have anything to keep
110 // it from being unloaded during parsing of the anonymous class.
111 // The null-class-loader should always be kept alive.
112 _keep_alive((is_anonymous || h_class_loader.is_null()) ? 1 : 0),
113 _metaspace(NULL), _unloading(false), _klasses(NULL),
114 _modules(NULL), _packages(NULL),
115 _claimed(0), _modified_oops(true), _accumulated_modified_oops(false),
116 _jmethod_ids(NULL), _handles(), _deallocate_list(NULL),
117 _next(NULL),
118 _metaspace_lock(new Mutex(Monitor::leaf+1, "Metaspace allocation lock", true,
119 Monitor::_safepoint_check_never)) {
120
121 // A ClassLoaderData created solely for an anonymous class should never have a
122 // ModuleEntryTable or PackageEntryTable created for it. The defining package
123 // and module for an anonymous class will be found in its host class.
124 if (!is_anonymous) {
125 _packages = new PackageEntryTable(PackageEntryTable::_packagetable_entry_size);
126 if (h_class_loader.is_null()) {
127 // Create unnamed module for boot loader
128 _unnamed_module = ModuleEntry::create_boot_unnamed_module(this);
129 } else {
130 // Create unnamed module for all other loaders
131 _unnamed_module = ModuleEntry::create_unnamed_module(this);
132 }
133 _dictionary = create_dictionary();
134 } else {
135 _packages = NULL;
136 _unnamed_module = NULL;
137 _dictionary = NULL;
138 }
139
140 NOT_PRODUCT(_dependency_count = 0); // number of class loader dependencies
141
142 TRACE_INIT_ID(this);
143 }
144
145 ClassLoaderData::ChunkedHandleList::~ChunkedHandleList() {
146 Chunk* c = _head;
147 while (c != NULL) {
148 Chunk* next = c->_next;
149 delete c;
150 c = next;
151 }
152 }
153
154 oop* ClassLoaderData::ChunkedHandleList::add(oop o) {
155 if (_head == NULL || _head->_size == Chunk::CAPACITY) {
156 Chunk* next = new Chunk(_head);
157 OrderAccess::release_store(&_head, next);
158 }
159 oop* handle = &_head->_data[_head->_size];
160 *handle = o;
161 OrderAccess::release_store(&_head->_size, _head->_size + 1);
162 return handle;
163 }
164
165 int ClassLoaderData::ChunkedHandleList::count() const {
166 int count = 0;
167 Chunk* chunk = _head;
168 while (chunk != NULL) {
169 count += chunk->_size;
170 chunk = chunk->_next;
171 }
172 return count;
173 }
174
175 inline void ClassLoaderData::ChunkedHandleList::oops_do_chunk(OopClosure* f, Chunk* c, const juint size) {
176 for (juint i = 0; i < size; i++) {
177 if (c->_data[i] != NULL) {
178 f->do_oop(&c->_data[i]);
179 }
180 }
181 }
182
183 void ClassLoaderData::ChunkedHandleList::oops_do(OopClosure* f) {
184 Chunk* head = OrderAccess::load_acquire(&_head);
185 if (head != NULL) {
186 // Must be careful when reading size of head
187 oops_do_chunk(f, head, OrderAccess::load_acquire(&head->_size));
188 for (Chunk* c = head->_next; c != NULL; c = c->_next) {
189 oops_do_chunk(f, c, c->_size);
190 }
191 }
192 }
193
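// Helper closure for ChunkedHandleList::contains(oop): records whether any slot in
// the list currently holds the given oop value.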
194 class VerifyContainsOopClosure : public OopClosure {
195 oop _target;
196 bool _found;
197
198 public:
199 VerifyContainsOopClosure(oop target) : _target(target), _found(false) {}
200
201 void do_oop(oop* p) {
202 if (p != NULL && *p == _target) {
203 _found = true;
204 }
205 }
206
207 void do_oop(narrowOop* p) {
208 // The ChunkedHandleList should not contain any narrowOop
209 ShouldNotReachHere();
210 }
211
212 bool found() const {
213 return _found;
214 }
215 };
216
217 bool ClassLoaderData::ChunkedHandleList::contains(oop p) {
218 VerifyContainsOopClosure cl(p);
219 oops_do(&cl);
220 return cl.found();
221 }
222
223 bool ClassLoaderData::claim() {
224 if (_claimed == 1) {
225 return false;
226 }
227
228 return (int) Atomic::cmpxchg(1, &_claimed, 0) == 0;
229 }
230
231 // Anonymous classes have their own ClassLoaderData that is marked to keep alive
232 // while the class is being parsed, and if the class appears on the module fixup list.
233 // Because no other class shares the anonymous class' name or ClassLoaderData,
234 // no non-GC thread other than the defining thread has knowledge of the anonymous
235 // class while it is being defined; therefore _keep_alive is not volatile or atomic.
236 void ClassLoaderData::inc_keep_alive() {
237 if (is_anonymous()) {
238 assert(_keep_alive >= 0, "Invalid keep alive increment count");
239 _keep_alive++;
240 }
241 }
242
243 void ClassLoaderData::dec_keep_alive() {
244 if (is_anonymous()) {
245 assert(_keep_alive > 0, "Invalid keep alive decrement count");
246 _keep_alive--;
247 }
248 }
249
250 void ClassLoaderData::oops_do(OopClosure* f, bool must_claim, bool clear_mod_oops) {
251 if (must_claim && !claim()) {
252 return;
253 }
254
255 // Only clear modified_oops after the ClassLoaderData is claimed.
256 if (clear_mod_oops) {
257 clear_modified_oops();
258 }
259
260 f->do_oop(&_class_loader);
261 _handles.oops_do(f);
262 }
263
264 void ClassLoaderData::classes_do(KlassClosure* klass_closure) {
265 // Lock-free access requires load_acquire
266 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
267 klass_closure->do_klass(k);
268 assert(k != k->next_link(), "no loops!");
269 }
270 }
271
272 void ClassLoaderData::classes_do(void f(Klass * const)) {
273 // Lock-free access requires load_acquire
274 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
275 f(k);
276 assert(k != k->next_link(), "no loops!");
277 }
278 }
279
280 void ClassLoaderData::methods_do(void f(Method*)) {
281 // Lock-free access requires load_acquire
282 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
283 if (k->is_instance_klass() && InstanceKlass::cast(k)->is_loaded()) {
318 entry = entry->next()) {
319 f(entry);
320 }
321 }
322 }
323 }
324
325 void ClassLoaderData::packages_do(void f(PackageEntry*)) {
326 assert_locked_or_safepoint(Module_lock);
327 if (_packages != NULL) {
328 for (int i = 0; i < _packages->table_size(); i++) {
329 for (PackageEntry* entry = _packages->bucket(i);
330 entry != NULL;
331 entry = entry->next()) {
332 f(entry);
333 }
334 }
335 }
336 }
337
338 void ClassLoaderData::record_dependency(const Klass* k) {
339 assert(k != NULL, "invariant");
340
341 ClassLoaderData * const from_cld = this;
342 ClassLoaderData * const to_cld = k->class_loader_data();
343
344 // Do not need to record dependency if the dependency is to a class whose
345 // class loader data is never freed. (i.e. the dependency's class loader
346 // is one of the three builtin class loaders and the dependency is not
347 // anonymous.)
348 if (to_cld->is_permanent_class_loader_data()) {
349 return;
350 }
351
352 oop to;
353 if (to_cld->is_anonymous()) {
354 // Just return if an anonymous class is attempting to record a dependency
355 // to itself. (Note that every anonymous class has its own unique class
356 // loader data.)
357 if (to_cld == from_cld) {
358 return;
359 }
360 // Anonymous class dependencies are through the mirror.
361 to = k->java_mirror();
362 } else {
363 to = to_cld->class_loader();
364 oop from = from_cld->class_loader();
365
366 // Just return if this dependency is to a class with the same or a parent
367 // class_loader.
368 if (from == to || java_lang_ClassLoader::isAncestor(from, to)) {
369 return; // this class loader is in the parent list, no need to add it.
370 }
371 }
372
373 // It's a dependency we won't find through GC, add it.
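  // contains() walks the chunked handle list, so each dependency oop is recorded
  // at most once per ClassLoaderData.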
374 if (!_handles.contains(to)) {
375 NOT_PRODUCT(Atomic::inc(&_dependency_count));
376 LogTarget(Trace, class, loader, data) lt;
377 if (lt.is_enabled()) {
378 ResourceMark rm;
379 LogStream ls(lt);
380 ls.print("adding dependency from ");
381 print_value_on(&ls);
382 ls.print(" to ");
383 to_cld->print_value_on(&ls);
384 ls.cr();
385 }
386 Handle dependency(Thread::current(), to);
387 add_handle(dependency);
388 // Added a potentially young gen oop to the ClassLoaderData
389 record_modified_oops();
390 }
391 }
392
393
394 void ClassLoaderDataGraph::clear_claimed_marks() {
395 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
396 cld->clear_claimed();
397 }
398 }
399
400 void ClassLoaderData::add_class(Klass* k, bool publicize /* true */) {
401 {
402 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
403 Klass* old_value = _klasses;
404 k->set_next_link(old_value);
405 // Link the new item into the list, making sure the linked class is stable
406 // since the list can be walked without a lock
407 OrderAccess::release_store(&_klasses, k);
408 if (k->is_array_klass()) {
409 ClassLoaderDataGraph::inc_array_classes(1);
410 } else {
411 ClassLoaderDataGraph::inc_instance_classes(1);
412 }
413 }
414
415 if (publicize) {
416 LogTarget(Trace, class, loader, data) lt;
417 if (lt.is_enabled()) {
418 ResourceMark rm;
419 LogStream ls(lt);
420 ls.print("Adding k: " PTR_FORMAT " %s to ", p2i(k), k->external_name());
421 print_value_on(&ls);
422 ls.cr();
423 }
424 }
425 }
426
427 // Class iterator used by the compiler. It gets some number of classes at
428 // a safepoint to decay invocation counters on the methods.
429 class ClassLoaderDataGraphKlassIteratorStatic {
430 ClassLoaderData* _current_loader_data;
431 Klass* _current_class_entry;
432 public:
433
434 ClassLoaderDataGraphKlassIteratorStatic() : _current_loader_data(NULL), _current_class_entry(NULL) {}
435
436 InstanceKlass* try_get_next_class() {
437 assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
438 size_t max_classes = ClassLoaderDataGraph::num_instance_classes();
439 assert(max_classes > 0, "should not be called with no instance classes");
440 for (size_t i = 0; i < max_classes; ) {
441
442 if (_current_class_entry != NULL) {
443 Klass* k = _current_class_entry;
520 }
521
522 return;
523 }
524 prev = k;
525 assert(k != k->next_link(), "no loops!");
526 }
527 ShouldNotReachHere(); // should have found this class!!
528 }
529
530 void ClassLoaderData::unload() {
531 _unloading = true;
532
533 // Tell serviceability tools these classes are unloading
534 classes_do(InstanceKlass::notify_unload_class);
535
536 LogTarget(Debug, class, loader, data) lt;
537 if (lt.is_enabled()) {
538 ResourceMark rm;
539 LogStream ls(lt);
540 ls.print("unload ");
541 print_value_on(&ls);
542 ls.cr();
543 }
544
545 // Some items on the _deallocate_list need to free their C heap structures
546 // if they are not already on the _klasses list.
547 unload_deallocate_list();
548
549 // Clean up global class iterator for compiler
550 static_klass_iterator.adjust_saved_class(this);
551 }
552
553 ModuleEntryTable* ClassLoaderData::modules() {
554 // Lazily create the module entry table at first request.
555 // Lock-free access requires load_acquire.
556 ModuleEntryTable* modules = OrderAccess::load_acquire(&_modules);
557 if (modules == NULL) {
558 MutexLocker m1(Module_lock);
559 // Check if _modules got allocated while we were waiting for this lock.
560 if ((modules = _modules) == NULL) {
561 modules = new ModuleEntryTable(ModuleEntryTable::_moduletable_entry_size);
717 // class loaders and not anonymous.
718 bool ClassLoaderData::is_permanent_class_loader_data() const {
719 return is_builtin_class_loader_data() && !is_anonymous();
720 }
721
722 Metaspace* ClassLoaderData::metaspace_non_null() {
723 // If the metaspace has not been allocated, create a new one. Might want
724 // to create a smaller arena for Reflection class loaders also.
725 // The allocation is delayed because some class loaders exist simply to
726 // delegate and have no metadata of their own.
727 // Lock-free access requires load_acquire.
728 Metaspace* metaspace = OrderAccess::load_acquire(&_metaspace);
729 if (metaspace == NULL) {
730 MutexLockerEx ml(_metaspace_lock, Mutex::_no_safepoint_check_flag);
731 // Check if _metaspace got allocated while we were waiting for this lock.
732 if ((metaspace = _metaspace) == NULL) {
733 if (this == the_null_class_loader_data()) {
734 assert (class_loader() == NULL, "Must be");
735 metaspace = new Metaspace(_metaspace_lock, Metaspace::BootMetaspaceType);
736 } else if (is_anonymous()) {
737 metaspace = new Metaspace(_metaspace_lock, Metaspace::AnonymousMetaspaceType);
738 } else if (class_loader()->is_a(SystemDictionary::reflect_DelegatingClassLoader_klass())) {
739 metaspace = new Metaspace(_metaspace_lock, Metaspace::ReflectionMetaspaceType);
740 } else {
741 metaspace = new Metaspace(_metaspace_lock, Metaspace::StandardMetaspaceType);
742 }
743 // Ensure _metaspace is stable, since it is examined without a lock
744 OrderAccess::release_store(&_metaspace, metaspace);
745 }
746 }
747 return metaspace;
748 }
749
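// add_handle() stores the oop in the chunked handle list and marks this CLD's oops
// as modified so a concurrent or generational GC knows it has new oops to scan.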
750 OopHandle ClassLoaderData::add_handle(Handle h) {
751 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
752 record_modified_oops();
753 return OopHandle(_handles.add(h()));
754 }
755
756 void ClassLoaderData::remove_handle(OopHandle h) {
757 assert(!is_unloading(), "Do not remove a handle for a CLD that is unloading");
758 oop* ptr = h.ptr_raw();
759 if (ptr != NULL) {
760 assert(_handles.contains(*ptr), "Got unexpected handle " PTR_FORMAT, p2i(ptr));
761 // This root is not walked in safepoints, and hence requires an appropriate
762 // decorator that e.g. maintains the SATB invariant in SATB collectors.
763 RootAccess<IN_CONCURRENT_ROOT>::oop_store(ptr, oop(NULL));
764 }
765 }
766
767 void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
768 MutexLockerEx ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
769 if (dest.resolve() != NULL) {
770 return;
771 } else {
772 dest = _handles.add(h());
773 }
774 }
775
776 // Add this metadata pointer to be freed when it's safe. This is only during
777 // class unloading because Handles might point to this metadata field.
778 void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
779 // Metadata in shared region isn't deleted.
780 if (!m->is_shared()) {
834 }
835 // Go backwards because this removes entries that are freed.
836 for (int i = _deallocate_list->length() - 1; i >= 0; i--) {
837 Metadata* m = _deallocate_list->at(i);
838 assert (!m->on_stack(), "wouldn't be unloading if this were so");
839 _deallocate_list->remove_at(i);
840 if (m->is_constantPool()) {
841 ((ConstantPool*)m)->release_C_heap_structures();
842 } else if (m->is_klass()) {
843 InstanceKlass* ik = (InstanceKlass*)m;
844 // also releases ik->constants() C heap memory
845 InstanceKlass::release_C_heap_structures(ik);
846 // Remove the class so unloading events aren't triggered for
847 // this class (scratch or error class) in do_unloading().
848 remove_class(ik);
849 }
850 }
851 }
852
853 // These anonymous class loader data are created to contain classes used for JSR 292
854 ClassLoaderData* ClassLoaderData::anonymous_class_loader_data(Handle loader) {
855 // Add a new class loader data to the graph.
856 return ClassLoaderDataGraph::add(loader, true);
857 }
858
859 const char* ClassLoaderData::loader_name() const {
860 // Handles null class loader
861 return SystemDictionary::loader_name(class_loader());
862 }
863
864
865 void ClassLoaderData::print_value_on(outputStream* out) const {
866 if (class_loader() != NULL) {
867 out->print("loader data: " INTPTR_FORMAT " for instance ", p2i(this));
868 class_loader()->print_value_on(out); // includes loader_name() and address of class loader instance
869 } else {
870 // loader data: 0xsomeaddr of <bootloader>
871 out->print("loader data: " INTPTR_FORMAT " of %s", p2i(this), loader_name());
872 }
873 if (is_anonymous()) {
874 out->print(" anonymous");
875 }
876 }
877
878 #ifndef PRODUCT
879 void ClassLoaderData::print_on(outputStream* out) const {
880 out->print("ClassLoaderData CLD: " PTR_FORMAT ", loader: " PTR_FORMAT ", loader_klass: %s {",
881 p2i(this), p2i((void *)class_loader()), loader_name());
882 if (is_anonymous()) out->print(" anonymous");
883 if (claimed()) out->print(" claimed");
884 if (is_unloading()) out->print(" unloading");
885 out->print(" metaspace: " INTPTR_FORMAT, p2i(metaspace_or_null()));
886
887 if (_jmethod_ids != NULL) {
888 Method::print_jmethod_ids(this, out);
889 }
890 out->print(" handles count %d", _handles.count());
891 out->print(" dependencies %d", _dependency_count);
892 out->print_cr("}");
893 }
894 #endif // PRODUCT
895
896 void ClassLoaderData::verify() {
897 assert_locked_or_safepoint(_metaspace_lock);
898 oop cl = class_loader();
899
900 guarantee(this == class_loader_data(cl) || is_anonymous(), "Must be the same");
901 guarantee(cl != NULL || this == ClassLoaderData::the_null_class_loader_data() || is_anonymous(), "must be");
902
903 // Verify the integrity of the allocated space.
904 if (metaspace_or_null() != NULL) {
905 metaspace_or_null()->verify();
906 }
907
908 for (Klass* k = _klasses; k != NULL; k = k->next_link()) {
909 guarantee(k->class_loader_data() == this, "Must be the same");
910 k->verify();
911 assert(k != k->next_link(), "no loops!");
915 bool ClassLoaderData::contains_klass(Klass* klass) {
916 // Lock-free access requires load_acquire
917 for (Klass* k = OrderAccess::load_acquire(&_klasses); k != NULL; k = k->next_link()) {
918 if (k == klass) return true;
919 }
920 return false;
921 }
922
923
924 // GC root of class loader data created.
925 ClassLoaderData* ClassLoaderDataGraph::_head = NULL;
926 ClassLoaderData* ClassLoaderDataGraph::_unloading = NULL;
927 ClassLoaderData* ClassLoaderDataGraph::_saved_unloading = NULL;
928 ClassLoaderData* ClassLoaderDataGraph::_saved_head = NULL;
929
930 bool ClassLoaderDataGraph::_should_purge = false;
931 bool ClassLoaderDataGraph::_metaspace_oom = false;
932
933 // Add a new class loader data node to the list. Install the newly created
934 // ClassLoaderData in the java/lang/ClassLoader object as a hidden field.
935 ClassLoaderData* ClassLoaderDataGraph::add(Handle loader, bool is_anonymous) {
936 NoSafepointVerifier no_safepoints; // we mustn't GC until we've installed the
937 // ClassLoaderData in the graph since the CLD
938 // contains unhandled oops
939
940 ClassLoaderData* cld = new ClassLoaderData(loader, is_anonymous);
941
942
943 if (!is_anonymous) {
944 // First, atomically install this ClassLoaderData in the java.lang.ClassLoader object
945 ClassLoaderData* old = java_lang_ClassLoader::cmpxchg_loader_data(cld, loader(), NULL);
946 if (old != NULL) {
947 delete cld;
948 // Another thread installed its ClassLoaderData first; return that one.
949 return old;
950 }
951 }
952
953 // We won the race, so it is our task to add the new ClassLoaderData to the list
954 // of class loader data.
955 ClassLoaderData** list_head = &_head;
956 ClassLoaderData* next = _head;
957
958 do {
959 cld->set_next(next);
960 ClassLoaderData* exchanged = Atomic::cmpxchg(cld, list_head, next);
961 if (exchanged == next) {
962 LogTarget(Debug, class, loader, data) lt;
963 if (lt.is_enabled()) {
964 ResourceMark rm;
965 LogStream ls(lt);
966 ls.print("create ");
967 cld->print_value_on(&ls);
968 ls.cr();
969 }
970 return cld;
971 }
972 next = exchanged;
973 } while (true);
974 }
975
976 void ClassLoaderDataGraph::oops_do(OopClosure* f, bool must_claim) {
977 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
978 cld->oops_do(f, must_claim);
979 }
980 }
981
982 void ClassLoaderDataGraph::keep_alive_oops_do(OopClosure* f, bool must_claim) {
983 for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
984 if (cld->keep_alive()) {
985 cld->oops_do(f, must_claim);
986 }
987 }
988 }
989
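// With class unloading disabled every ClassLoaderData is a strong root, so visit
// them all; otherwise only the keep-alive CLDs are treated as always strong.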
990 void ClassLoaderDataGraph::always_strong_oops_do(OopClosure* f, bool must_claim) {
991 if (ClassUnloading) {
992 keep_alive_oops_do(f, must_claim);
993 } else {
994 oops_do(f, must_claim);
995 }
1372 return head; // Won the CAS.
1373 }
1374
1375 head = old_head;
1376 }
1377
1378 // Nothing more for the iterator to hand out.
1379 assert(head == NULL, "head is " PTR_FORMAT ", expected NULL", p2i(head));
1380 return NULL;
1381 }
1382
1383 ClassLoaderDataGraphMetaspaceIterator::ClassLoaderDataGraphMetaspaceIterator() {
1384 _data = ClassLoaderDataGraph::_head;
1385 }
1386
1387 ClassLoaderDataGraphMetaspaceIterator::~ClassLoaderDataGraphMetaspaceIterator() {}
1388
1389 #ifndef PRODUCT
1390 // callable from debugger
1391 extern "C" int print_loader_data_graph() {
1392 ResourceMark rm;
1393 ClassLoaderDataGraph::print_on(tty);
1394 return 0;
1395 }
1396
1397 void ClassLoaderDataGraph::verify() {
1398 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1399 data->verify();
1400 }
1401 }
1402
1403 void ClassLoaderDataGraph::print_on(outputStream * const out) {
1404 for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
1405 data->print_on(out);
1406 }
1407 }
1408 #endif // PRODUCT
1409
1410 #if INCLUDE_TRACE
1411
1412 Ticks ClassLoaderDataGraph::_class_unload_time;
1413
1414 void ClassLoaderDataGraph::class_unload_event(Klass* const k) {
1415 assert(k != NULL, "invariant");
1416
1417 // post class unload event
1418 EventClassUnload event(UNTIMED);
1419 event.set_endtime(_class_unload_time);
1420 event.set_unloadedClass(k);
1421 event.set_definingClassLoader(k->class_loader_data());
1422 event.commit();
1423 }
1424
1425 #endif // INCLUDE_TRACE