1 /* 2 * Copyright (c) 2003, 2020, Oracle and/or its affiliates. All rights reserved. 3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 4 * 5 * This code is free software; you can redistribute it and/or modify it 6 * under the terms of the GNU General Public License version 2 only, as 7 * published by the Free Software Foundation. 8 * 9 * This code is distributed in the hope that it will be useful, but WITHOUT 10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 11 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 12 * version 2 for more details (a copy is included in the LICENSE file that 13 * accompanied this code). 14 * 15 * You should have received a copy of the GNU General Public License version 16 * 2 along with this work; if not, write to the Free Software Foundation, 17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 18 * 19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 20 * or visit www.oracle.com if you need additional information or have any 21 * questions. 
22 * 23 */ 24 25 #include "precompiled.hpp" 26 #include "classfile/classLoaderDataGraph.hpp" 27 #include "classfile/javaClasses.inline.hpp" 28 #include "classfile/symbolTable.hpp" 29 #include "classfile/systemDictionary.hpp" 30 #include "classfile/vmSymbols.hpp" 31 #include "gc/shared/collectedHeap.hpp" 32 #include "jvmtifiles/jvmtiEnv.hpp" 33 #include "logging/log.hpp" 34 #include "memory/allocation.inline.hpp" 35 #include "memory/resourceArea.hpp" 36 #include "memory/universe.hpp" 37 #include "oops/access.inline.hpp" 38 #include "oops/arrayOop.inline.hpp" 39 #include "oops/constantPool.inline.hpp" 40 #include "oops/instanceMirrorKlass.hpp" 41 #include "oops/objArrayKlass.hpp" 42 #include "oops/objArrayOop.inline.hpp" 43 #include "oops/oop.inline.hpp" 44 #include "oops/typeArrayOop.inline.hpp" 45 #include "prims/jvmtiEventController.hpp" 46 #include "prims/jvmtiEventController.inline.hpp" 47 #include "prims/jvmtiExport.hpp" 48 #include "prims/jvmtiImpl.hpp" 49 #include "prims/jvmtiTagMap.hpp" 50 #include "runtime/frame.inline.hpp" 51 #include "runtime/handles.inline.hpp" 52 #include "runtime/javaCalls.hpp" 53 #include "runtime/jniHandles.inline.hpp" 54 #include "runtime/mutex.hpp" 55 #include "runtime/mutexLocker.hpp" 56 #include "runtime/reflectionUtils.hpp" 57 #include "runtime/thread.inline.hpp" 58 #include "runtime/threadSMR.hpp" 59 #include "runtime/vframe.hpp" 60 #include "runtime/vmThread.hpp" 61 #include "runtime/vmOperations.hpp" 62 #include "utilities/macros.hpp" 63 #if INCLUDE_ZGC 64 #include "gc/z/zGlobals.hpp" 65 #endif 66 67 // JvmtiTagHashmapEntry 68 // 69 // Each entry encapsulates a reference to the tagged object 70 // and the tag value. In addition an entry includes a next pointer which 71 // is used to chain entries together. 
class JvmtiTagHashmapEntry : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  oop _object;                          // tagged object
  jlong _tag;                           // the tag
  JvmtiTagHashmapEntry* _next;          // next on the list

  // (Re)initialize this entry. Used by the constructor and when an entry is
  // recycled from the JvmtiTagMap free list (see JvmtiTagMap::create_entry).
  inline void init(oop object, jlong tag) {
    _object = object;
    _tag = tag;
    _next = NULL;
  }

  // constructor
  JvmtiTagHashmapEntry(oop object, jlong tag) { init(object, tag); }

 public:

  // accessor methods
  inline oop* object_addr() { return &_object; }
  // Load the referent with phantom reference strength, keeping it alive.
  inline oop object() { return NativeAccess<ON_PHANTOM_OOP_REF>::oop_load(object_addr()); }
  // Peek at the object without keeping it alive. The returned object must be
  // kept alive using a normal access if it leaks out of a thread transition from VM.
  inline oop object_peek() {
    return NativeAccess<ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE>::oop_load(object_addr());
  }

  // Raw load with no access barriers; callers are responsible for ensuring
  // this is safe with respect to the GC.
  inline oop object_raw() {
    return RawAccess<>::oop_load(object_addr());
  }

  inline jlong tag() const { return _tag; }

  inline void set_tag(jlong tag) {
    // a tag of zero means "untagged"; such entries are removed, never stored
    assert(tag != 0, "can't be zero");
    _tag = tag;
  }

  // returns true if this entry refers to the given object
  inline bool equals(oop object) {
    return object == object_peek();
  }

  inline JvmtiTagHashmapEntry* next() const { return _next; }
  inline void set_next(JvmtiTagHashmapEntry* next) { _next = next; }
};


// JvmtiTagHashmap
//
// A hashmap is essentially a table of pointers to entries. Entries
// are hashed to a location, or position in the table, and then
// chained from that location. The "key" for hashing is address of
// the object, or oop. The "value" is the tag value.
//
// A hashmap maintains a count of the number entries in the hashmap
// and resizes if the number of entries exceeds a given threshold.
// The threshold is specified as a percentage of the size - for
// example a threshold of 0.75 will trigger the hashmap to resize
// if the number of entries is >75% of table size.
//
// A hashmap provides functions for adding, removing, and finding
// entries. It also provides a function to iterate over all entries
// in the hashmap.

class JvmtiTagHashmap : public CHeapObj<mtInternal> {
 private:
  friend class JvmtiTagMap;

  enum {
    small_trace_threshold  = 10000,    // threshold for tracing
    medium_trace_threshold = 100000,
    large_trace_threshold  = 1000000,
    initial_trace_threshold = small_trace_threshold
  };

  static int _sizes[];                  // array of possible hashmap sizes
  int _size;                            // actual size of the table
  int _size_index;                      // index into size table

  int _entry_count;                     // number of entries in the hashmap

  float _load_factor;                   // load factor as a % of the size
  int _resize_threshold;                // computed threshold to trigger resizing.
  bool _resizing_enabled;               // indicates if hashmap can resize

  int _trace_threshold;                 // threshold for trace messages

  JvmtiTagHashmapEntry** _table;        // the table of entries.

  // private accessors
  int resize_threshold() const  { return _resize_threshold; }
  int trace_threshold() const   { return _trace_threshold; }

  // initialize the hashmap; the table is C-heap allocated and the VM exits
  // if even the initial allocation fails.
  void init(int size_index=0, float load_factor=4.0f) {
    int initial_size = _sizes[size_index];
    _size_index = size_index;
    _size = initial_size;
    _entry_count = 0;
    _trace_threshold = initial_trace_threshold;
    _load_factor = load_factor;
    _resize_threshold = (int)(_load_factor * _size);
    _resizing_enabled = true;
    size_t s = initial_size * sizeof(JvmtiTagHashmapEntry*);
    _table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (_table == NULL) {
      vm_exit_out_of_memory(s, OOM_MALLOC_ERROR,
        "unable to allocate initial hashtable for jvmti object tags");
    }
    for (int i=0; i<initial_size; i++) {
      _table[i] = NULL;
    }
  }

  // hash a given key (oop) with the specified size
  static unsigned int hash(oop key, int size) {
    const oop obj = Access<>::resolve(key);
    const unsigned int hash = Universe::heap()->hash_oop(obj);
    return hash % size;
  }

  // hash a given key (oop)
  unsigned int hash(oop key) {
    return hash(key, _size);
  }

  // resize the hashmap - allocates a large table and re-hashes
  // all entries into the new table.
  void resize() {
    int new_size_index = _size_index+1;
    int new_size = _sizes[new_size_index];
    if (new_size < 0) {
      // hashmap already at maximum capacity (-1 terminates the size table)
      return;
    }

    // allocate new table
    size_t s = new_size * sizeof(JvmtiTagHashmapEntry*);
    JvmtiTagHashmapEntry** new_table = (JvmtiTagHashmapEntry**)os::malloc(s, mtInternal);
    if (new_table == NULL) {
      // allocation failure is not fatal here - just stop growing
      warning("unable to allocate larger hashtable for jvmti object tags");
      set_resizing_enabled(false);
      return;
    }

    // initialize new table
    int i;
    for (i=0; i<new_size; i++) {
      new_table[i] = NULL;
    }

    // rehash all entries into the new table
    for (i=0; i<_size; i++) {
      JvmtiTagHashmapEntry* entry = _table[i];
      while (entry != NULL) {
        JvmtiTagHashmapEntry* next = entry->next();
        oop key = entry->object_peek();
        assert(key != NULL, "jni weak reference cleared!!");
        unsigned int h = hash(key, new_size);
        JvmtiTagHashmapEntry* anchor = new_table[h];
        if (anchor == NULL) {
          new_table[h] = entry;
          entry->set_next(NULL);
        } else {
          // insert at the head of the chain
          entry->set_next(anchor);
          new_table[h] = entry;
        }
        entry = next;
      }
    }

    // free old table and update settings.
    os::free((void*)_table);
    _table = new_table;
    _size_index = new_size_index;
    _size = new_size;

    // compute new resize threshold
    _resize_threshold = (int)(_load_factor * _size);
  }


  // internal remove function - remove an entry at a given position in the
  // table.
  inline void remove(JvmtiTagHashmapEntry* prev, int pos, JvmtiTagHashmapEntry* entry) {
    assert(pos >= 0 && pos < _size, "out of range");
    if (prev == NULL) {
      _table[pos] = entry->next();
    } else {
      prev->set_next(entry->next());
    }
    assert(_entry_count > 0, "checking");
    _entry_count--;
  }

  // resizing switch
  bool is_resizing_enabled() const       { return _resizing_enabled; }
  void set_resizing_enabled(bool enable) { _resizing_enabled = enable; }

  // debugging
  void print_memory_usage();
  void compute_next_trace_threshold();

 public:

  // create a JvmtiTagHashmap of a preferred size and optionally a load factor.
  // The preferred size is rounded down to an actual size.
  JvmtiTagHashmap(int size, float load_factor=0.0f) {
    int i=0;
    while (_sizes[i] < size) {
      if (_sizes[i] < 0) {
        assert(i > 0, "sanity check");
        i--;
        break;
      }
      i++;
    }

    // if a load factor is specified then use it, otherwise use default
    if (load_factor > 0.01f) {
      init(i, load_factor);
    } else {
      init(i);
    }
  }

  // create a JvmtiTagHashmap with default settings
  JvmtiTagHashmap() {
    init();
  }

  // release table when JvmtiTagHashmap destroyed
  ~JvmtiTagHashmap() {
    if (_table != NULL) {
      os::free((void*)_table);
      _table = NULL;
    }
  }

  // accessors
  int size() const                     { return _size; }
  JvmtiTagHashmapEntry** table() const { return _table; }
  int entry_count() const              { return _entry_count; }

  // find an entry in the hashmap, returns NULL if not found.
  inline JvmtiTagHashmapEntry* find(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    while (entry != NULL) {
      if (entry->equals(key)) {
        return entry;
      }
      entry = entry->next();
    }
    return NULL;
  }


  // add a new entry to hashmap
  inline void add(oop key, JvmtiTagHashmapEntry* entry) {
    assert(key != NULL, "checking");
    assert(find(key) == NULL, "duplicate detected");
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* anchor = _table[h];
    if (anchor == NULL) {
      _table[h] = entry;
      entry->set_next(NULL);
    } else {
      // insert at the head of the chain
      entry->set_next(anchor);
      _table[h] = entry;
    }

    _entry_count++;
    if (log_is_enabled(Debug, jvmti, objecttagging) && entry_count() >= trace_threshold()) {
      print_memory_usage();
      compute_next_trace_threshold();
    }

    // if the number of entries exceed the threshold then resize
    if (entry_count() > resize_threshold() && is_resizing_enabled()) {
      resize();
    }
  }

  // remove an entry with the given key.
  inline JvmtiTagHashmapEntry* remove(oop key) {
    unsigned int h = hash(key);
    JvmtiTagHashmapEntry* entry = _table[h];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      if (entry->equals(key)) {
        break;
      }
      prev = entry;
      entry = entry->next();
    }
    if (entry != NULL) {
      remove(prev, h, entry);
    }
    return entry;
  }

  // iterate over all entries in the hashmap
  void entry_iterate(JvmtiTagHashmapEntryClosure* closure);
};

// possible hashmap sizes - odd primes that roughly double in size.
// To avoid excessive resizing the odd primes from 4801-76831 and
// 76831-307261 have been removed. The list must be terminated by -1.
int JvmtiTagHashmap::_sizes[] =  { 4801, 76831, 307261, 614563, 1228891,
    2457733, 4915219, 9830479, 19660831, 39321619, 78643219, -1 };


// A supporting class for iterating over all entries in Hashmap
class JvmtiTagHashmapEntryClosure {
 public:
  virtual void do_entry(JvmtiTagHashmapEntry* entry) = 0;
};


// iterate over all entries in the hashmap
void JvmtiTagHashmap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) {
  for (int i=0; i<_size; i++) {
    JvmtiTagHashmapEntry* entry = _table[i];
    JvmtiTagHashmapEntry* prev = NULL;
    while (entry != NULL) {
      // obtain the next entry before invoking do_entry - this is
      // necessary because do_entry may remove the entry from the
      // hashmap.
      JvmtiTagHashmapEntry* next = entry->next();
      closure->do_entry(entry);
      entry = next;
    }
  }
}

// debugging - print table and entry usage plus JNI weak global usage
void JvmtiTagHashmap::print_memory_usage() {
  intptr_t p = (intptr_t)this;
  tty->print("[JvmtiTagHashmap @ " INTPTR_FORMAT, p);

  // table + entries in KB
  int hashmap_usage = (size()*sizeof(JvmtiTagHashmapEntry*) +
    entry_count()*sizeof(JvmtiTagHashmapEntry))/K;

  int weak_globals_usage = (int)(JNIHandles::weak_global_handle_memory_usage()/K);
  tty->print_cr(", %d entries (%d KB) <JNI weak globals: %d KB>]",
    entry_count(), hashmap_usage, weak_globals_usage);
}

// compute threshold for the next trace message - the step size grows
// with the entry count (small/medium/large).
void JvmtiTagHashmap::compute_next_trace_threshold() {
  _trace_threshold = entry_count();
  if (trace_threshold() < medium_trace_threshold) {
    _trace_threshold += small_trace_threshold;
  } else {
    if (trace_threshold() < large_trace_threshold) {
      _trace_threshold += medium_trace_threshold;
    } else {
      _trace_threshold += large_trace_threshold;
    }
  }
}

// create a JvmtiTagMap
JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
  _env(env),
  _lock(Mutex::nonleaf+2, "JvmtiTagMap._lock", false),
  _free_entries(NULL),
  _free_entries_count(0)
{
  assert(JvmtiThreadState_lock->is_locked(), "sanity check");
  assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");

  _hashmap = new JvmtiTagHashmap();

  // finally add us to the environment (release store pairs with
  // the acquire load in tag_map_for)
  ((JvmtiEnvBase *)env)->release_set_tag_map(this);
}


// destroy a JvmtiTagMap
JvmtiTagMap::~JvmtiTagMap() {

  // no lock acquired as we assume the enclosing environment is
  // also being destroyed.
  ((JvmtiEnvBase *)_env)->set_tag_map(NULL);

  // delete all entries still chained in the table
  JvmtiTagHashmapEntry** table = _hashmap->table();
  for (int j = 0; j < _hashmap->size(); j++) {
    JvmtiTagHashmapEntry* entry = table[j];
    while (entry != NULL) {
      JvmtiTagHashmapEntry* next = entry->next();
      delete entry;
      entry = next;
    }
  }

  // finally destroy the hashmap
  delete _hashmap;
  _hashmap = NULL;

  // remove any entries on the free list
  JvmtiTagHashmapEntry* entry = _free_entries;
  while (entry != NULL) {
    JvmtiTagHashmapEntry* next = entry->next();
    delete entry;
    entry = next;
  }
  _free_entries = NULL;
}

// create a hashmap entry
// - if there's an entry on the (per-environment) free list then this
// is returned. Otherwise a new entry is allocated.
JvmtiTagHashmapEntry* JvmtiTagMap::create_entry(oop ref, jlong tag) {
  assert(Thread::current()->is_VM_thread() || is_locked(), "checking");

  // ref was read with AS_NO_KEEPALIVE, or equivalent.
  // The object needs to be kept alive when it is published.
  Universe::heap()->keep_alive(ref);

  JvmtiTagHashmapEntry* entry;
  if (_free_entries == NULL) {
    entry = new JvmtiTagHashmapEntry(ref, tag);
  } else {
    assert(_free_entries_count > 0, "mismatched _free_entries_count");
    _free_entries_count--;
    entry = _free_entries;
    _free_entries = entry->next();
    entry->init(ref, tag);
  }
  return entry;
}

// destroy an entry by returning it to the free list
void JvmtiTagMap::destroy_entry(JvmtiTagHashmapEntry* entry) {
  assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking");
  // limit the size of the free list
  if (_free_entries_count >= max_free_entries) {
    delete entry;
  } else {
    entry->set_next(_free_entries);
    _free_entries = entry;
    _free_entries_count++;
  }
}

// returns the tag map for the given environments. If the tag map
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
  // double-checked: fast acquire-load first, then re-check under the lock
  JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
  if (tag_map == NULL) {
    MutexLocker mu(JvmtiThreadState_lock);
    tag_map = ((JvmtiEnvBase*)env)->tag_map();
    if (tag_map == NULL) {
      tag_map = new JvmtiTagMap(env);
    }
  } else {
    DEBUG_ONLY(Thread::current()->check_possible_safepoint());
  }
  return tag_map;
}

// iterate over all entries in the tag map.
539 void JvmtiTagMap::entry_iterate(JvmtiTagHashmapEntryClosure* closure) { 540 hashmap()->entry_iterate(closure); 541 } 542 543 // returns true if the hashmaps are empty 544 bool JvmtiTagMap::is_empty() { 545 assert(SafepointSynchronize::is_at_safepoint() || is_locked(), "checking"); 546 return hashmap()->entry_count() == 0; 547 } 548 549 550 // Return the tag value for an object, or 0 if the object is 551 // not tagged 552 // 553 static inline jlong tag_for(JvmtiTagMap* tag_map, oop o) { 554 JvmtiTagHashmapEntry* entry = tag_map->hashmap()->find(o); 555 if (entry == NULL) { 556 return 0; 557 } else { 558 return entry->tag(); 559 } 560 } 561 562 563 // A CallbackWrapper is a support class for querying and tagging an object 564 // around a callback to a profiler. The constructor does pre-callback 565 // work to get the tag value, klass tag value, ... and the destructor 566 // does the post-callback work of tagging or untagging the object. 567 // 568 // { 569 // CallbackWrapper wrapper(tag_map, o); 570 // 571 // (*callback)(wrapper.klass_tag(), wrapper.obj_size(), wrapper.obj_tag_p(), ...) 572 // 573 // } // wrapper goes out of scope here which results in the destructor 574 // checking to see if the object has been tagged, untagged, or the 575 // tag value has changed. 
//
class CallbackWrapper : public StackObj {
 private:
  JvmtiTagMap* _tag_map;
  JvmtiTagHashmap* _hashmap;
  JvmtiTagHashmapEntry* _entry;      // existing entry for _o, or NULL if untagged
  oop _o;
  jlong _obj_size;
  jlong _obj_tag;
  jlong _klass_tag;

 protected:
  JvmtiTagMap* tag_map() const { return _tag_map; }

  // invoked post-callback to tag, untag, or update the tag of an object
  void inline post_callback_tag_update(oop o, JvmtiTagHashmap* hashmap,
                                       JvmtiTagHashmapEntry* entry, jlong obj_tag);
 public:
  CallbackWrapper(JvmtiTagMap* tag_map, oop o) {
    assert(Thread::current()->is_VM_thread() || tag_map->is_locked(),
           "MT unsafe or must be VM thread");

    // object to tag
    _o = o;

    // object size
    _obj_size = (jlong)_o->size() * wordSize;

    // record the context
    _tag_map = tag_map;
    _hashmap = tag_map->hashmap();
    _entry = _hashmap->find(_o);

    // get object tag
    _obj_tag = (_entry == NULL) ? 0 : _entry->tag();

    // get the class and the class's tag value
    assert(SystemDictionary::Class_klass()->is_mirror_instance_klass(), "Is not?");

    _klass_tag = tag_for(tag_map, _o->klass()->java_mirror());
  }

  ~CallbackWrapper() {
    // apply any tag change the agent made through obj_tag_p()
    post_callback_tag_update(_o, _hashmap, _entry, _obj_tag);
  }

  inline jlong* obj_tag_p()      { return &_obj_tag; }
  inline jlong obj_size() const  { return _obj_size; }
  inline jlong obj_tag() const   { return _obj_tag; }
  inline jlong klass_tag() const { return _klass_tag; }
};



// callback post-callback to tag, untag, or update the tag of an object
void inline CallbackWrapper::post_callback_tag_update(oop o,
                                                      JvmtiTagHashmap* hashmap,
                                                      JvmtiTagHashmapEntry* entry,
                                                      jlong obj_tag) {
  if (entry == NULL) {
    if (obj_tag != 0) {
      // callback has tagged the object
      assert(Thread::current()->is_VM_thread(), "must be VMThread");
      entry = tag_map()->create_entry(o, obj_tag);
      hashmap->add(o, entry);
    }
  } else {
    // object was previously tagged - the callback may have untagged
    // the object or changed the tag value
    if (obj_tag == 0) {

      JvmtiTagHashmapEntry* entry_removed = hashmap->remove(o);
      assert(entry_removed == entry, "checking");
      tag_map()->destroy_entry(entry);

    } else {
      if (obj_tag != entry->tag()) {
        entry->set_tag(obj_tag);
      }
    }
  }
}

// An extended CallbackWrapper used when reporting an object reference
// to the agent.
//
// {
//   TwoOopCallbackWrapper wrapper(tag_map, referrer, o);
//
//   (*callback)(wrapper.klass_tag(),
//               wrapper.obj_size(),
//               wrapper.obj_tag_p()
//               wrapper.referrer_tag_p(), ...)
//
// } // wrapper goes out of scope here which results in the destructor
//      checking to see if the referrer object has been tagged, untagged,
//      or the tag value has changed.
//
class TwoOopCallbackWrapper : public CallbackWrapper {
 private:
  bool _is_reference_to_self;          // true when referrer == o
  JvmtiTagHashmap* _referrer_hashmap;
  JvmtiTagHashmapEntry* _referrer_entry;
  oop _referrer;
  jlong _referrer_obj_tag;
  jlong _referrer_klass_tag;
  jlong* _referrer_tag_p;

  bool is_reference_to_self() const { return _is_reference_to_self; }

 public:
  TwoOopCallbackWrapper(JvmtiTagMap* tag_map, oop referrer, oop o) :
    CallbackWrapper(tag_map, o)
  {
    // self reference needs to be handled in a special way
    _is_reference_to_self = (referrer == o);

    if (_is_reference_to_self) {
      // share the tag state already captured by the base class
      _referrer_klass_tag = klass_tag();
      _referrer_tag_p = obj_tag_p();
    } else {
      _referrer = referrer;
      // record the context
      _referrer_hashmap = tag_map->hashmap();
      _referrer_entry = _referrer_hashmap->find(_referrer);

      // get object tag
      _referrer_obj_tag = (_referrer_entry == NULL) ? 0 : _referrer_entry->tag();
      _referrer_tag_p = &_referrer_obj_tag;

      // get referrer class tag.
      _referrer_klass_tag = tag_for(tag_map, _referrer->klass()->java_mirror());
    }
  }

  ~TwoOopCallbackWrapper() {
    if (!is_reference_to_self()){
      // for a self reference the base class destructor does the update
      post_callback_tag_update(_referrer,
                               _referrer_hashmap,
                               _referrer_entry,
                               _referrer_obj_tag);
    }
  }

  // address of referrer tag
  // (for a self reference this will return the same thing as obj_tag_p())
  inline jlong* referrer_tag_p() { return _referrer_tag_p; }

  // referrer's class tag
  inline jlong referrer_klass_tag() { return _referrer_klass_tag; }
};

// tag an object
//
// This function is performance critical. If many threads attempt to tag objects
// around the same time then it's possible that the Mutex associated with the
// tag map will be a hot lock.
void JvmtiTagMap::set_tag(jobject object, jlong tag) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  // see if the object is already tagged
  JvmtiTagHashmap* hashmap = _hashmap;
  JvmtiTagHashmapEntry* entry = hashmap->find(o);

  // if the object is not already tagged then we tag it
  if (entry == NULL) {
    if (tag != 0) {
      entry = create_entry(o, tag);
      hashmap->add(o, entry);
    } else {
      // no-op - setting a zero tag on an untagged object
    }
  } else {
    // if the object is already tagged then we either update
    // the tag (if a new tag value has been provided)
    // or remove the object if the new tag value is 0.
    if (tag == 0) {
      hashmap->remove(o);
      destroy_entry(entry);
    } else {
      entry->set_tag(tag);
    }
  }
}

// get the tag for an object
jlong JvmtiTagMap::get_tag(jobject object) {
  MutexLocker ml(lock());

  // resolve the object
  oop o = JNIHandles::resolve_non_null(object);

  return tag_for(this, o);
}


// Helper class used to describe the static or instance fields of a class.
// For each field it holds the field index (as defined by the JVMTI specification),
// the field type, and the offset.

class ClassFieldDescriptor: public CHeapObj<mtInternal> {
 private:
  int _field_index;        // JVMTI field index
  int _field_offset;       // offset of the field within the object/mirror
  char _field_type;        // first char of the field signature
 public:
  ClassFieldDescriptor(int index, char type, int offset) :
    _field_index(index), _field_offset(offset), _field_type(type) {
  }
  int field_index()  const { return _field_index; }
  char field_type()  const { return _field_type; }
  int field_offset() const { return _field_offset; }
};

class ClassFieldMap: public CHeapObj<mtInternal> {
 private:
  enum {
    initial_field_count = 5
  };

  // list of field descriptors
  GrowableArray<ClassFieldDescriptor*>* _fields;

  // constructor
  ClassFieldMap();

  // add a field
  void add(int index, char type, int offset);

  // returns the field count for the given class
  static int compute_field_count(InstanceKlass* ik);

 public:
  ~ClassFieldMap();

  // access
  int field_count()                     { return _fields->length(); }
  ClassFieldDescriptor* field_at(int i) { return _fields->at(i); }

  // functions to create maps of static or instance fields
  static ClassFieldMap* create_map_of_static_fields(Klass* k);
  static ClassFieldMap* create_map_of_instance_fields(oop obj);
};

ClassFieldMap::ClassFieldMap() {
  _fields = new (ResourceObj::C_HEAP, mtInternal)
    GrowableArray<ClassFieldDescriptor*>(initial_field_count, true);
}

// deletes the owned descriptors before the backing array
ClassFieldMap::~ClassFieldMap() {
  for (int i=0; i<_fields->length(); i++) {
    delete _fields->at(i);
  }
  delete _fields;
}

void ClassFieldMap::add(int index, char type, int offset) {
  ClassFieldDescriptor* field = new ClassFieldDescriptor(index, type, offset);
  _fields->append(field);
}
// Returns a heap allocated ClassFieldMap to describe the static fields
// of the given class.
//
ClassFieldMap* ClassFieldMap::create_map_of_static_fields(Klass* k) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(k);

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  // field_count is computed over a differently-filtered stream than the one
  // iterated below; the JVMTI field index is max_field_index - index.
  FilteredFieldStream f(ik, false, false);
  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, true, true); !fld.eos(); fld.next(), index++) {
    // ignore instance fields
    if (!fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }
  return field_map;
}

// Returns a heap allocated ClassFieldMap to describe the instance fields
// of the given class. All instance fields are included (this means public
// and private fields declared in superclasses and superinterfaces too).
//
ClassFieldMap* ClassFieldMap::create_map_of_instance_fields(oop obj) {
  HandleMark hm;
  InstanceKlass* ik = InstanceKlass::cast(obj->klass());

  // create the field map
  ClassFieldMap* field_map = new ClassFieldMap();

  FilteredFieldStream f(ik, false, false);

  int max_field_index = f.field_count()-1;

  int index = 0;
  for (FilteredFieldStream fld(ik, false, false); !fld.eos(); fld.next(), index++) {
    // ignore static fields
    if (fld.access_flags().is_static()) {
      continue;
    }
    field_map->add(max_field_index - index, fld.signature()->char_at(0), fld.offset());
  }

  return field_map;
}

// Helper class used to cache a ClassFieldMap for the instance fields of
// a class. A JvmtiCachedClassFieldMap can be cached by an InstanceKlass during
// heap iteration and avoid creating a field map for each object in the heap
// (only need to create the map when the first instance of a class is encountered).
//
class JvmtiCachedClassFieldMap : public CHeapObj<mtInternal> {
 private:
  enum {
    initial_class_count = 200
  };
  ClassFieldMap* _field_map;     // owned; deleted in the destructor

  ClassFieldMap* field_map() const { return _field_map; }

  JvmtiCachedClassFieldMap(ClassFieldMap* field_map);
  ~JvmtiCachedClassFieldMap();

  // list of InstanceKlasses that currently cache a field map,
  // used by clear_cache() to find and release them
  static GrowableArray<InstanceKlass*>* _class_list;
  static void add_to_class_list(InstanceKlass* ik);

 public:
  // returns the field map for a given object (returning map cached
  // by InstanceKlass if possible)
  static ClassFieldMap* get_map_of_instance_fields(oop obj);

  // removes the field map from all instanceKlasses - should be
  // called before VM operation completes
  static void clear_cache();

  // returns the number of ClassFieldMap cached by instanceKlasses
  static int cached_field_map_count();
};

GrowableArray<InstanceKlass*>* JvmtiCachedClassFieldMap::_class_list;

JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
  _field_map = field_map;
}

JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
  if (_field_map != NULL) {
    delete _field_map;
  }
}

// Marker class to ensure that the class file map cache is only used in a defined
// scope.
// ClassFieldMapCacheMark
//
// Stack object marking the scope in which InstanceKlasses are allowed to
// cache their ClassFieldMap (see JvmtiCachedClassFieldMap below). Used by
// the VM thread only during a heap iteration; the destructor flushes every
// cached map. Cannot be nested.
class ClassFieldMapCacheMark : public StackObj {
 private:
  static bool _is_active;
 public:
  ClassFieldMapCacheMark() {
    assert(Thread::current()->is_VM_thread(), "must be VMThread");
    assert(JvmtiCachedClassFieldMap::cached_field_map_count() == 0, "cache not empty");
    assert(!_is_active, "ClassFieldMapCacheMark cannot be nested");
    _is_active = true;
  }
  ~ClassFieldMapCacheMark() {
    // flush the per-InstanceKlass field map cache built up during the walk
    JvmtiCachedClassFieldMap::clear_cache();
    _is_active = false;
  }
  static bool is_active() { return _is_active; }
};

bool ClassFieldMapCacheMark::_is_active;


// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
  if (_class_list == NULL) {
    // C-heap allocated so the list outlives any resource mark in effect
    _class_list = new (ResourceObj::C_HEAP, mtInternal)
      GrowableArray<InstanceKlass*>(initial_class_count, true);
  }
  _class_list->push(ik);
}

// returns the instance field map for the given object
// (returns field map cached by the InstanceKlass if possible)
ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  assert(ClassFieldMapCacheMark::is_active(), "ClassFieldMapCacheMark not active");

  Klass* k = obj->klass();
  InstanceKlass* ik = InstanceKlass::cast(k);

  // return cached map if possible
  JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
  if (cached_map != NULL) {
    assert(cached_map->field_map() != NULL, "missing field list");
    return cached_map->field_map();
  } else {
    // create the map, cache it on the InstanceKlass, and remember the class
    // so that clear_cache() can undo the caching when the walk completes
    ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
    cached_map = new JvmtiCachedClassFieldMap(field_map);
    ik->set_jvmti_cached_class_field_map(cached_map);
    add_to_class_list(ik);
    return field_map;
  }
}

// remove the fields maps cached from all instanceKlasses
void JvmtiCachedClassFieldMap::clear_cache() {
  assert(Thread::current()->is_VM_thread(), "must be VMThread");
  if (_class_list != NULL) {
    for (int i = 0; i < _class_list->length(); i++) {
      InstanceKlass* ik = _class_list->at(i);
      JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
      assert(cached_map != NULL, "should not be NULL");
      ik->set_jvmti_cached_class_field_map(NULL);
      delete cached_map;    // deletes the encapsulated field map
    }
    delete _class_list;
    _class_list = NULL;
  }
}

// returns the number of ClassFieldMap cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
  return (_class_list == NULL) ? 0 : _class_list->length();
}

// helper function to indicate if an object is filtered by its tag or class tag
static inline bool is_filtered_by_heap_filter(jlong obj_tag,
                                              jlong klass_tag,
                                              int heap_filter) {
  // apply the heap filter
  if (obj_tag != 0) {
    // filter out tagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_TAGGED) return true;
  } else {
    // filter out untagged objects
    if (heap_filter & JVMTI_HEAP_FILTER_UNTAGGED) return true;
  }
  if (klass_tag != 0) {
    // filter out objects with tagged classes
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_TAGGED) return true;
  } else {
    // filter out objects with untagged classes.
    if (heap_filter & JVMTI_HEAP_FILTER_CLASS_UNTAGGED) return true;
  }
  return false;
}

// helper function to indicate if an object is filtered by a klass filter
// (NULL filter means no filtering; note this is an exact-class comparison,
// not an instanceof check)
static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
  if (klass_filter != NULL) {
    if (obj->klass() != klass_filter) {
      return true;
    }
  }
  return false;
}

// helper function to tell if a field is a primitive field or not
static inline bool is_primitive_field_type(char type) {
  return (type != JVM_SIGNATURE_CLASS && type != JVM_SIGNATURE_ARRAY);
}

// helper function to copy the value from location addr to jvalue.
static inline void copy_to_jvalue(jvalue *v, address addr, jvmtiPrimitiveType value_type) {
  switch (value_type) {
    case JVMTI_PRIMITIVE_TYPE_BOOLEAN : { v->z = *(jboolean*)addr; break; }
    case JVMTI_PRIMITIVE_TYPE_BYTE    : { v->b = *(jbyte*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_CHAR    : { v->c = *(jchar*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_SHORT   : { v->s = *(jshort*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_INT     : { v->i = *(jint*)addr;     break; }
    case JVMTI_PRIMITIVE_TYPE_LONG    : { v->j = *(jlong*)addr;    break; }
    case JVMTI_PRIMITIVE_TYPE_FLOAT   : { v->f = *(jfloat*)addr;   break; }
    case JVMTI_PRIMITIVE_TYPE_DOUBLE  : { v->d = *(jdouble*)addr;  break; }
    default: ShouldNotReachHere();
  }
}

// helper function to invoke string primitive value callback
// returns visit control flags
static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
                                         CallbackWrapper* wrapper,
                                         oop str,
                                         void* user_data)
{
  assert(str->klass() == SystemDictionary::String_klass(), "not a string");

  typeArrayOop s_value = java_lang_String::value(str);

  // JDK-6584008: the value field may be null if a String instance is
  // partially constructed.
  if (s_value == NULL) {
    return 0;
  }
  // get the string value and length
  // (string value may be offset from the base)
  int s_len = java_lang_String::length(str);
  bool is_latin1 = java_lang_String::is_latin1(str);
  jchar* value;
  if (s_len > 0) {
    if (!is_latin1) {
      value = s_value->char_at_addr(0);
    } else {
      // Inflate latin1 encoded string to UTF16
      jchar* buf = NEW_C_HEAP_ARRAY(jchar, s_len, mtInternal);
      for (int i = 0; i < s_len; i++) {
        buf[i] = ((jchar) s_value->byte_at(i)) & 0xff;
      }
      value = &buf[0];
    }
  } else {
    // Don't use char_at_addr(0) if length is 0
    value = (jchar*) s_value->base(T_CHAR);
  }

  // invoke the callback
  jint res = (*cb)(wrapper->klass_tag(),
                   wrapper->obj_size(),
                   wrapper->obj_tag_p(),
                   value,
                   (jint)s_len,
                   user_data);

  // free the temporary inflation buffer, if one was allocated above
  if (is_latin1 && s_len > 0) {
    FREE_C_HEAP_ARRAY(jchar, value);
  }
  return res;
}

// helper function to invoke array primitive value callback
// returns visit control flags
static jint invoke_array_primitive_value_callback(jvmtiArrayPrimitiveValueCallback cb,
                                                  CallbackWrapper* wrapper,
                                                  oop obj,
                                                  void* user_data)
{
  assert(obj->is_typeArray(), "not a primitive array");

  // get base address of first element
  typeArrayOop array = typeArrayOop(obj);
  BasicType type = TypeArrayKlass::cast(array->klass())->element_type();
  void* elements = array->base(type);

  // jvmtiPrimitiveType is defined so this mapping is always correct
  jvmtiPrimitiveType elem_type = (jvmtiPrimitiveType)type2char(type);

  return (*cb)(wrapper->klass_tag(),
               wrapper->obj_size(),
               wrapper->obj_tag_p(),
               (jint)array->length(),
               elem_type,
               elements,
               user_data);
}

// helper function to invoke the primitive field callback for all static fields
// of a given class
static jint invoke_primitive_field_callback_for_static_fields
  (CallbackWrapper* wrapper,
   oop obj,
   jvmtiPrimitiveFieldCallback cb,
   void* user_data)
{
  // for static fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  assert(obj->klass() == SystemDictionary::Class_klass(), "not a class");
  if (java_lang_Class::is_primitive(obj)) {
    return 0;
  }
  Klass* klass = java_lang_Class::as_Klass(obj);

  // ignore classes for object and type arrays
  if (!klass->is_instance_klass()) {
    return 0;
  }

  // ignore classes which aren't linked yet
  InstanceKlass* ik = InstanceKlass::cast(klass);
  if (!ik->is_linked()) {
    return 0;
  }

  // get the field map
  ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass);

  // invoke the callback for each static primitive field
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    // (static fields live in the class's java mirror)
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(klass->java_mirror()) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_STATIC_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      delete field_map;
      return res;
    }
  }

  delete field_map;
  return 0;
}

// helper function to invoke the primitive field callback for all instance fields
// of a given object
static jint invoke_primitive_field_callback_for_instance_fields(
  CallbackWrapper* wrapper,
  oop obj,
  jvmtiPrimitiveFieldCallback cb,
  void* user_data)
{
  // for instance fields only the index will be set
  static jvmtiHeapReferenceInfo reference_info = { 0 };

  // get the map of the instance fields
  ClassFieldMap* fields = JvmtiCachedClassFieldMap::get_map_of_instance_fields(obj);

  // invoke the callback for each instance primitive field
  for (int i=0; i<fields->field_count(); i++) {
    ClassFieldDescriptor* field = fields->field_at(i);

    // ignore non-primitive fields
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      continue;
    }
    // one-to-one mapping
    jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type;

    // get offset and field value
    int offset = field->field_offset();
    address addr = cast_from_oop<address>(obj) + offset;
    jvalue value;
    copy_to_jvalue(&value, addr, value_type);

    // field index
    reference_info.field.index = field->field_index();

    // invoke the callback
    jint res = (*cb)(JVMTI_HEAP_REFERENCE_FIELD,
                     &reference_info,
                     wrapper->klass_tag(),
                     wrapper->obj_tag_p(),
                     value,
                     value_type,
                     user_data);
    if (res & JVMTI_VISIT_ABORT) {
      return res;
    }
  }
  return 0;
}


// VM operation to iterate over all objects in the heap (both reachable
// and unreachable)
class VM_HeapIterateOperation: public VM_Operation {
 private:
  ObjectClosure* _blk;
 public:
  VM_HeapIterateOperation(ObjectClosure* blk) { _blk = blk; }

  VMOp_Type type() const { return VMOp_HeapIterateOperation; }
  void doit() {
    // allows class files maps to be cached during iteration
    ClassFieldMapCacheMark cm;

    // make sure that heap is parsable
    // (fills TLABs with filler objects)
    Universe::heap()->ensure_parsability(false);  // no need to retire TLABs

    // Verify heap before iteration - if the heap gets corrupted then
    // JVMTI's IterateOverHeap will crash.
    if (VerifyBeforeIteration) {
      Universe::verify();
    }

    // do the iteration
    Universe::heap()->object_iterate(_blk);
  }

};


// An ObjectClosure used to support the deprecated IterateOverHeap and
// IterateOverInstancesOfClass functions
class IterateOverHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;
  jvmtiHeapObjectFilter _object_filter;
  jvmtiHeapObjectCallback _heap_object_callback;
  const void* _user_data;

  // accessors
  JvmtiTagMap* tag_map() const { return _tag_map; }
  jvmtiHeapObjectFilter object_filter() const { return _object_filter; }
  jvmtiHeapObjectCallback object_callback() const { return _heap_object_callback; }
  Klass* klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }
  void set_iteration_aborted(bool aborted) { _iteration_aborted = aborted; }

 public:
  IterateOverHeapObjectClosure(JvmtiTagMap* tag_map,
                               Klass* klass,
                               jvmtiHeapObjectFilter object_filter,
                               jvmtiHeapObjectCallback heap_object_callback,
                               const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _object_filter(object_filter),
    _heap_object_callback(heap_object_callback),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateOverHeapObjectClosure::do_object(oop o) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // instanceof check when filtering by klass
  if (klass() != NULL && !o->is_a(klass())) {
    return;
  }
  // prepare for the callback
  CallbackWrapper wrapper(tag_map(), o);

  // if the object is tagged and we're only interested in untagged objects
  // then don't invoke the callback. Similarly, if the object is untagged
  // and we're only interested in tagged objects we skip the callback.
  if (wrapper.obj_tag() != 0) {
    if (object_filter() == JVMTI_HEAP_OBJECT_UNTAGGED) return;
  } else {
    if (object_filter() == JVMTI_HEAP_OBJECT_TAGGED) return;
  }

  // invoke the agent's callback
  jvmtiIterationControl control = (*object_callback())(wrapper.klass_tag(),
                                                       wrapper.obj_size(),
                                                       wrapper.obj_tag_p(),
                                                       (void*)user_data());
  if (control == JVMTI_ITERATION_ABORT) {
    set_iteration_aborted(true);
  }
}

// An ObjectClosure used to support the IterateThroughHeap function
class IterateThroughHeapObjectClosure: public ObjectClosure {
 private:
  JvmtiTagMap* _tag_map;
  Klass* _klass;
  int _heap_filter;
  const jvmtiHeapCallbacks* _callbacks;
  const void* _user_data;

  // accessor functions
  JvmtiTagMap* tag_map() const { return _tag_map; }
  int heap_filter() const { return _heap_filter; }
  const jvmtiHeapCallbacks* callbacks() const { return _callbacks; }
  Klass* klass() const { return _klass; }
  const void* user_data() const { return _user_data; }

  // indicates if the iteration has been aborted
  bool _iteration_aborted;
  bool is_iteration_aborted() const { return _iteration_aborted; }

  // used to check the visit control flags. If the abort flag is set
  // then we set the iteration aborted flag so that the iteration completes
  // without processing any further objects
  bool check_flags_for_abort(jint flags) {
    bool is_abort = (flags & JVMTI_VISIT_ABORT) != 0;
    if (is_abort) {
      _iteration_aborted = true;
    }
    return is_abort;
  }

 public:
  IterateThroughHeapObjectClosure(JvmtiTagMap* tag_map,
                                  Klass* klass,
                                  int heap_filter,
                                  const jvmtiHeapCallbacks* heap_callbacks,
                                  const void* user_data) :
    _tag_map(tag_map),
    _klass(klass),
    _heap_filter(heap_filter),
    _callbacks(heap_callbacks),
    _user_data(user_data),
    _iteration_aborted(false)
  {
  }

  void do_object(oop o);
};

// invoked for each object in the heap
void IterateThroughHeapObjectClosure::do_object(oop obj) {
  // check if iteration has been halted
  if (is_iteration_aborted()) return;

  // apply class filter
  if (is_filtered_by_klass_filter(obj, klass())) return;

  // prepare for callback
  CallbackWrapper wrapper(tag_map(), obj);

  // check if filtered by the heap filter
  if (is_filtered_by_heap_filter(wrapper.obj_tag(), wrapper.klass_tag(), heap_filter())) {
    return;
  }

  // for arrays we need the length, otherwise -1
  bool is_array = obj->is_array();
  int len = is_array ? arrayOop(obj)->length() : -1;

  // invoke the object callback (if callback is provided)
  if (callbacks()->heap_iteration_callback != NULL) {
    jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
    jint res = (*cb)(wrapper.klass_tag(),
                     wrapper.obj_size(),
                     wrapper.obj_tag_p(),
                     (jint)len,
                     (void*)user_data());
    if (check_flags_for_abort(res)) return;
  }

  // for objects and classes we report primitive fields if callback provided
  if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
    jint res;
    jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
    if (obj->klass() == SystemDictionary::Class_klass()) {
      // java.lang.Class instance: report the mirrored class's static fields
      res = invoke_primitive_field_callback_for_static_fields(&wrapper,
                                                              obj,
                                                              cb,
                                                              (void*)user_data());
    } else {
      res = invoke_primitive_field_callback_for_instance_fields(&wrapper,
                                                                obj,
                                                                cb,
                                                                (void*)user_data());
    }
    if (check_flags_for_abort(res)) return;
  }

  // string callback
  if (!is_array &&
      callbacks()->string_primitive_value_callback != NULL &&
      obj->klass() == SystemDictionary::String_klass()) {
    jint res = invoke_string_value_callback(
                callbacks()->string_primitive_value_callback,
                &wrapper,
                obj,
                (void*)user_data() );
    if (check_flags_for_abort(res)) return;
  }

  // array callback
  if (is_array &&
      callbacks()->array_primitive_value_callback != NULL &&
      obj->is_typeArray()) {
    jint res = invoke_array_primitive_value_callback(
               callbacks()->array_primitive_value_callback,
               &wrapper,
               obj,
               (void*)user_data() );
    if (check_flags_for_abort(res)) return;
  }
};


// Deprecated function to iterate over all objects in the heap
void JvmtiTagMap::iterate_over_heap(jvmtiHeapObjectFilter object_filter,
                                    Klass* klass,
                                    jvmtiHeapObjectCallback heap_object_callback,
const void* user_data) 1490 { 1491 MutexLocker ml(Heap_lock); 1492 IterateOverHeapObjectClosure blk(this, 1493 klass, 1494 object_filter, 1495 heap_object_callback, 1496 user_data); 1497 VM_HeapIterateOperation op(&blk); 1498 VMThread::execute(&op); 1499 } 1500 1501 1502 // Iterates over all objects in the heap 1503 void JvmtiTagMap::iterate_through_heap(jint heap_filter, 1504 Klass* klass, 1505 const jvmtiHeapCallbacks* callbacks, 1506 const void* user_data) 1507 { 1508 MutexLocker ml(Heap_lock); 1509 IterateThroughHeapObjectClosure blk(this, 1510 klass, 1511 heap_filter, 1512 callbacks, 1513 user_data); 1514 VM_HeapIterateOperation op(&blk); 1515 VMThread::execute(&op); 1516 } 1517 1518 // support class for get_objects_with_tags 1519 1520 class TagObjectCollector : public JvmtiTagHashmapEntryClosure { 1521 private: 1522 JvmtiEnv* _env; 1523 jlong* _tags; 1524 jint _tag_count; 1525 1526 GrowableArray<jobject>* _object_results; // collected objects (JNI weak refs) 1527 GrowableArray<uint64_t>* _tag_results; // collected tags 1528 1529 public: 1530 TagObjectCollector(JvmtiEnv* env, const jlong* tags, jint tag_count) { 1531 _env = env; 1532 _tags = (jlong*)tags; 1533 _tag_count = tag_count; 1534 _object_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<jobject>(1,true); 1535 _tag_results = new (ResourceObj::C_HEAP, mtInternal) GrowableArray<uint64_t>(1,true); 1536 } 1537 1538 ~TagObjectCollector() { 1539 delete _object_results; 1540 delete _tag_results; 1541 } 1542 1543 // for each tagged object check if the tag value matches 1544 // - if it matches then we create a JNI local reference to the object 1545 // and record the reference and tag value. 1546 // 1547 void do_entry(JvmtiTagHashmapEntry* entry) { 1548 for (int i=0; i<_tag_count; i++) { 1549 if (_tags[i] == entry->tag()) { 1550 // The reference in this tag map could be the only (implicitly weak) 1551 // reference to that object. 
If we hand it out, we need to keep it live wrt 1552 // SATB marking similar to other j.l.ref.Reference referents. This is 1553 // achieved by using a phantom load in the object() accessor. 1554 oop o = entry->object(); 1555 assert(o != NULL && Universe::heap()->is_in(o), "sanity check"); 1556 jobject ref = JNIHandles::make_local(JavaThread::current(), o); 1557 _object_results->append(ref); 1558 _tag_results->append((uint64_t)entry->tag()); 1559 } 1560 } 1561 } 1562 1563 // return the results from the collection 1564 // 1565 jvmtiError result(jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1566 jvmtiError error; 1567 int count = _object_results->length(); 1568 assert(count >= 0, "sanity check"); 1569 1570 // if object_result_ptr is not NULL then allocate the result and copy 1571 // in the object references. 1572 if (object_result_ptr != NULL) { 1573 error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr); 1574 if (error != JVMTI_ERROR_NONE) { 1575 return error; 1576 } 1577 for (int i=0; i<count; i++) { 1578 (*object_result_ptr)[i] = _object_results->at(i); 1579 } 1580 } 1581 1582 // if tag_result_ptr is not NULL then allocate the result and copy 1583 // in the tag values. 
1584 if (tag_result_ptr != NULL) { 1585 error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr); 1586 if (error != JVMTI_ERROR_NONE) { 1587 if (object_result_ptr != NULL) { 1588 _env->Deallocate((unsigned char*)object_result_ptr); 1589 } 1590 return error; 1591 } 1592 for (int i=0; i<count; i++) { 1593 (*tag_result_ptr)[i] = (jlong)_tag_results->at(i); 1594 } 1595 } 1596 1597 *count_ptr = count; 1598 return JVMTI_ERROR_NONE; 1599 } 1600 }; 1601 1602 // return the list of objects with the specified tags 1603 jvmtiError JvmtiTagMap::get_objects_with_tags(const jlong* tags, 1604 jint count, jint* count_ptr, jobject** object_result_ptr, jlong** tag_result_ptr) { 1605 1606 TagObjectCollector collector(env(), tags, count); 1607 { 1608 // iterate over all tagged objects 1609 MutexLocker ml(lock()); 1610 entry_iterate(&collector); 1611 } 1612 return collector.result(count_ptr, object_result_ptr, tag_result_ptr); 1613 } 1614 1615 // Stack allocated class to help ensure that ObjectMarker is used 1616 // correctly. Constructor initializes ObjectMarker, destructor calls 1617 // ObjectMarker's done() function to restore object headers. 
class ObjectMarkerController : public StackObj {
 private:
  ObjectMarker* _marker;

 public:
  ObjectMarkerController();
  ~ObjectMarkerController();

  // returns false if marking could not be set up
  bool init();

  inline ObjectMarker* object_marker() const {
    return _marker;
  }
};


// helper to map a jvmtiHeapReferenceKind to an old style jvmtiHeapRootKind
// (not performance critical as only used for roots)
static jvmtiHeapRootKind toJvmtiHeapRootKind(jvmtiHeapReferenceKind kind) {
  switch (kind) {
    case JVMTI_HEAP_REFERENCE_JNI_GLOBAL:   return JVMTI_HEAP_ROOT_JNI_GLOBAL;
    case JVMTI_HEAP_REFERENCE_SYSTEM_CLASS: return JVMTI_HEAP_ROOT_SYSTEM_CLASS;
    case JVMTI_HEAP_REFERENCE_MONITOR:      return JVMTI_HEAP_ROOT_MONITOR;
    case JVMTI_HEAP_REFERENCE_STACK_LOCAL:  return JVMTI_HEAP_ROOT_STACK_LOCAL;
    case JVMTI_HEAP_REFERENCE_JNI_LOCAL:    return JVMTI_HEAP_ROOT_JNI_LOCAL;
    case JVMTI_HEAP_REFERENCE_THREAD:       return JVMTI_HEAP_ROOT_THREAD;
    case JVMTI_HEAP_REFERENCE_OTHER:        return JVMTI_HEAP_ROOT_OTHER;
    default: ShouldNotReachHere();          return JVMTI_HEAP_ROOT_OTHER;
  }
}

// Base class for all heap walk contexts. The base class maintains a flag
// to indicate if the context is valid or not.
class HeapWalkContext {
 private:
  bool _valid;
 public:
  HeapWalkContext(bool valid) { _valid = valid; }
  void invalidate() { _valid = false; }
  bool is_valid() const { return _valid; }
};

// A basic heap walk context for the deprecated heap walking functions.
// The context for a basic heap walk are the callbacks and fields used by
// the referrer caching scheme.
class BasicHeapWalkContext: public HeapWalkContext {
 private:
  jvmtiHeapRootCallback _heap_root_callback;
  jvmtiStackReferenceCallback _stack_ref_callback;
  jvmtiObjectReferenceCallback _object_ref_callback;

  // used for caching
  oop _last_referrer;
  jlong _last_referrer_tag;

 public:
  // default constructor creates an invalid context (callbacks left
  // uninitialized; asserted against in CallbackInvoker)
  BasicHeapWalkContext() : HeapWalkContext(false) { }

  BasicHeapWalkContext(jvmtiHeapRootCallback heap_root_callback,
                       jvmtiStackReferenceCallback stack_ref_callback,
                       jvmtiObjectReferenceCallback object_ref_callback) :
    HeapWalkContext(true),
    _heap_root_callback(heap_root_callback),
    _stack_ref_callback(stack_ref_callback),
    _object_ref_callback(object_ref_callback),
    _last_referrer(NULL),
    _last_referrer_tag(0) {
  }

  // accessors
  jvmtiHeapRootCallback heap_root_callback() const { return _heap_root_callback; }
  jvmtiStackReferenceCallback stack_ref_callback() const { return _stack_ref_callback; }
  jvmtiObjectReferenceCallback object_ref_callback() const { return _object_ref_callback; }

  oop last_referrer() const { return _last_referrer; }
  void set_last_referrer(oop referrer) { _last_referrer = referrer; }
  jlong last_referrer_tag() const { return _last_referrer_tag; }
  void set_last_referrer_tag(jlong value) { _last_referrer_tag = value; }
};

// The advanced heap walk context for the FollowReferences functions.
// The context is the callbacks, and the fields used for filtering.
class AdvancedHeapWalkContext: public HeapWalkContext {
 private:
  jint _heap_filter;
  Klass* _klass_filter;
  const jvmtiHeapCallbacks* _heap_callbacks;

 public:
  // default constructor creates an invalid context (fields left
  // uninitialized; asserted against in CallbackInvoker)
  AdvancedHeapWalkContext() : HeapWalkContext(false) { }

  AdvancedHeapWalkContext(jint heap_filter,
                          Klass* klass_filter,
                          const jvmtiHeapCallbacks* heap_callbacks) :
    HeapWalkContext(true),
    _heap_filter(heap_filter),
    _klass_filter(klass_filter),
    _heap_callbacks(heap_callbacks) {
  }

  // accessors
  jint heap_filter() const { return _heap_filter; }
  Klass* klass_filter() const { return _klass_filter; }

  const jvmtiHeapReferenceCallback heap_reference_callback() const {
    return _heap_callbacks->heap_reference_callback;
  };
  const jvmtiPrimitiveFieldCallback primitive_field_callback() const {
    return _heap_callbacks->primitive_field_callback;
  }
  const jvmtiArrayPrimitiveValueCallback array_primitive_value_callback() const {
    return _heap_callbacks->array_primitive_value_callback;
  }
  const jvmtiStringPrimitiveValueCallback string_primitive_value_callback() const {
    return _heap_callbacks->string_primitive_value_callback;
  }
};

// The CallbackInvoker is a class with static functions that the heap walk can call
// into to invoke callbacks. It works in one of two modes. The "basic" mode is
// used for the deprecated IterateOverReachableObjects functions. The "advanced"
// mode is for the newer FollowReferences function which supports a lot of
// additional callbacks.
class CallbackInvoker : AllStatic {
  friend class ObjectMarkerController;

 private:
  // heap walk styles
  enum { basic, advanced };
  static int _heap_walk_type;
  static bool is_basic_heap_walk()    { return _heap_walk_type == basic; }
  static bool is_advanced_heap_walk() { return _heap_walk_type == advanced; }

  // context for basic style heap walk
  static BasicHeapWalkContext _basic_context;
  static BasicHeapWalkContext* basic_context() {
    assert(_basic_context.is_valid(), "invalid");
    return &_basic_context;
  }

  // context for advanced style heap walk
  static AdvancedHeapWalkContext _advanced_context;
  static AdvancedHeapWalkContext* advanced_context() {
    assert(_advanced_context.is_valid(), "invalid");
    return &_advanced_context;
  }

  // context needed for all heap walks
  static JvmtiTagMap* _tag_map;
  static const void* _user_data;
  static GrowableArray<oop>* _visit_stack;
  static ObjectMarker* _object_marker;

  // accessors
  static JvmtiTagMap* tag_map()            { return _tag_map; }
  static const void* user_data()           { return _user_data; }
  static GrowableArray<oop>* visit_stack() { return _visit_stack; }

  // if the object hasn't been visited then push it onto the visit stack
  // so that it will be visited later
  static inline bool check_for_visit(oop obj) {
    if (!_object_marker->marked(obj)) visit_stack()->push(obj);
    return true;
  }

  // invoke basic style callbacks
  static inline bool invoke_basic_heap_root_callback
    (jvmtiHeapRootKind root_kind, oop obj);
  static inline bool invoke_basic_stack_ref_callback
    (jvmtiHeapRootKind root_kind, jlong thread_tag, jint depth, jmethodID method,
     int slot, oop obj);
  static inline bool invoke_basic_object_reference_callback
    (jvmtiObjectReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // invoke advanced style callbacks
  static inline bool invoke_advanced_heap_root_callback
    (jvmtiHeapReferenceKind ref_kind, oop obj);
  static inline bool invoke_advanced_stack_ref_callback
    (jvmtiHeapReferenceKind ref_kind, jlong thread_tag, jlong tid, int depth,
     jmethodID method, jlocation bci, jint slot, oop obj);
  static inline bool invoke_advanced_object_reference_callback
    (jvmtiHeapReferenceKind ref_kind, oop referrer, oop referree, jint index);

  // used to report the value of primitive fields
  static inline bool report_primitive_field
    (jvmtiHeapReferenceKind ref_kind, oop obj, jint index, address addr, char type);

 public:
  // initialize for basic mode
  static void initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                             GrowableArray<oop>* visit_stack,
                                             const void* user_data,
                                             BasicHeapWalkContext context);

  // initialize for advanced mode
  static void initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                GrowableArray<oop>* visit_stack,
                                                const void* user_data,
                                                AdvancedHeapWalkContext context);

  // functions to report roots
  static inline bool report_simple_root(jvmtiHeapReferenceKind kind, oop o);
  static inline bool report_jni_local_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID m, oop o);
  static inline bool report_stack_ref_root(jlong thread_tag, jlong tid, jint depth,
    jmethodID method, jlocation bci, jint slot, oop o);

  // functions to report references
  static inline bool report_array_element_reference(oop referrer, oop referree, jint index);
  static inline bool report_class_reference(oop referrer, oop referree);
  static inline bool report_class_loader_reference(oop referrer, oop referree);
  static inline bool report_signers_reference(oop referrer, oop referree);
  static inline bool report_protection_domain_reference(oop referrer, oop referree);
  static inline bool report_superclass_reference(oop referrer, oop referree);
  static inline bool report_interface_reference(oop referrer, oop referree);
  static inline bool report_static_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_field_reference(oop referrer, oop referree, jint slot);
  static inline bool report_constant_pool_reference(oop referrer, oop referree, jint index);
  static inline bool report_primitive_array_values(oop array);
  static inline bool report_string_value(oop str);
  static inline bool report_primitive_instance_field(oop o, jint index, address value, char type);
  static inline bool report_primitive_static_field(oop o, jint index, address value, char type);
};

// statics
int CallbackInvoker::_heap_walk_type;
BasicHeapWalkContext CallbackInvoker::_basic_context;
AdvancedHeapWalkContext CallbackInvoker::_advanced_context;
JvmtiTagMap* CallbackInvoker::_tag_map;
const void* CallbackInvoker::_user_data;
GrowableArray<oop>* CallbackInvoker::_visit_stack;
ObjectMarker* CallbackInvoker::_object_marker;

// initialize for basic heap walk (IterateOverReachableObjects et al)
void CallbackInvoker::initialize_for_basic_heap_walk(JvmtiTagMap* tag_map,
                                                     GrowableArray<oop>* visit_stack,
                                                     const void* user_data,
                                                     BasicHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _basic_context = context;
  _advanced_context.invalidate();       // will trigger assertion if used
  _heap_walk_type = basic;
}

// initialize for advanced heap walk (FollowReferences)
void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
                                                        GrowableArray<oop>* visit_stack,
                                                        const void* user_data,
                                                        AdvancedHeapWalkContext context) {
  _tag_map = tag_map;
  _visit_stack = visit_stack;
  _user_data = user_data;
  _advanced_context = context;
  _basic_context.invalidate();          // will trigger assertion if used
  _heap_walk_type = advanced;
}


// invoke basic style heap root callback
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
  // if no heap root callback is registered then just visit the object
  jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style stack ref callback
inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind root_kind,
                                                             jlong thread_tag,
                                                             jint depth,
                                                             jmethodID method,
                                                             int slot,
                                                             oop obj) {
  // if no stack reference callback is registered then just visit the object
  jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
  if (cb == NULL) {
    return check_for_visit(obj);
  }

  CallbackWrapper wrapper(tag_map(), obj);
  jvmtiIterationControl control = (*cb)(root_kind,
                                        wrapper.klass_tag(),
                                        wrapper.obj_size(),
                                        wrapper.obj_tag_p(),
                                        thread_tag,
                                        depth,
                                        method,
                                        slot,
                                        (void*)user_data());
  // push root to visit stack when following references
  if (control == JVMTI_ITERATION_CONTINUE &&
      basic_context()->object_ref_callback() != NULL) {
    visit_stack()->push(obj);
  }
  return control != JVMTI_ITERATION_ABORT;
}

// invoke basic style object reference callback
inline bool CallbackInvoker::invoke_basic_object_reference_callback(jvmtiObjectReferenceKind ref_kind,
                                                                    oop referrer,
1934 oop referree, 1935 jint index) { 1936 1937 BasicHeapWalkContext* context = basic_context(); 1938 1939 // callback requires the referrer's tag. If it's the same referrer 1940 // as the last call then we use the cached value. 1941 jlong referrer_tag; 1942 if (referrer == context->last_referrer()) { 1943 referrer_tag = context->last_referrer_tag(); 1944 } else { 1945 referrer_tag = tag_for(tag_map(), referrer); 1946 } 1947 1948 // do the callback 1949 CallbackWrapper wrapper(tag_map(), referree); 1950 jvmtiObjectReferenceCallback cb = context->object_ref_callback(); 1951 jvmtiIterationControl control = (*cb)(ref_kind, 1952 wrapper.klass_tag(), 1953 wrapper.obj_size(), 1954 wrapper.obj_tag_p(), 1955 referrer_tag, 1956 index, 1957 (void*)user_data()); 1958 1959 // record referrer and referrer tag. For self-references record the 1960 // tag value from the callback as this might differ from referrer_tag. 1961 context->set_last_referrer(referrer); 1962 if (referrer == referree) { 1963 context->set_last_referrer_tag(*wrapper.obj_tag_p()); 1964 } else { 1965 context->set_last_referrer_tag(referrer_tag); 1966 } 1967 1968 if (control == JVMTI_ITERATION_CONTINUE) { 1969 return check_for_visit(referree); 1970 } else { 1971 return control != JVMTI_ITERATION_ABORT; 1972 } 1973 } 1974 1975 // invoke advanced style heap root callback 1976 inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferenceKind ref_kind, 1977 oop obj) { 1978 AdvancedHeapWalkContext* context = advanced_context(); 1979 1980 // check that callback is provided 1981 jvmtiHeapReferenceCallback cb = context->heap_reference_callback(); 1982 if (cb == NULL) { 1983 return check_for_visit(obj); 1984 } 1985 1986 // apply class filter 1987 if (is_filtered_by_klass_filter(obj, context->klass_filter())) { 1988 return check_for_visit(obj); 1989 } 1990 1991 // setup the callback wrapper 1992 CallbackWrapper wrapper(tag_map(), obj); 1993 1994 // apply tag filter 1995 if 
(is_filtered_by_heap_filter(wrapper.obj_tag(), 1996 wrapper.klass_tag(), 1997 context->heap_filter())) { 1998 return check_for_visit(obj); 1999 } 2000 2001 // for arrays we need the length, otherwise -1 2002 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1); 2003 2004 // invoke the callback 2005 jint res = (*cb)(ref_kind, 2006 NULL, // referrer info 2007 wrapper.klass_tag(), 2008 0, // referrer_class_tag is 0 for heap root 2009 wrapper.obj_size(), 2010 wrapper.obj_tag_p(), 2011 NULL, // referrer_tag_p 2012 len, 2013 (void*)user_data()); 2014 if (res & JVMTI_VISIT_ABORT) { 2015 return false;// referrer class tag 2016 } 2017 if (res & JVMTI_VISIT_OBJECTS) { 2018 check_for_visit(obj); 2019 } 2020 return true; 2021 } 2022 2023 // report a reference from a thread stack to an object 2024 inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferenceKind ref_kind, 2025 jlong thread_tag, 2026 jlong tid, 2027 int depth, 2028 jmethodID method, 2029 jlocation bci, 2030 jint slot, 2031 oop obj) { 2032 AdvancedHeapWalkContext* context = advanced_context(); 2033 2034 // check that callback is provider 2035 jvmtiHeapReferenceCallback cb = context->heap_reference_callback(); 2036 if (cb == NULL) { 2037 return check_for_visit(obj); 2038 } 2039 2040 // apply class filter 2041 if (is_filtered_by_klass_filter(obj, context->klass_filter())) { 2042 return check_for_visit(obj); 2043 } 2044 2045 // setup the callback wrapper 2046 CallbackWrapper wrapper(tag_map(), obj); 2047 2048 // apply tag filter 2049 if (is_filtered_by_heap_filter(wrapper.obj_tag(), 2050 wrapper.klass_tag(), 2051 context->heap_filter())) { 2052 return check_for_visit(obj); 2053 } 2054 2055 // setup the referrer info 2056 jvmtiHeapReferenceInfo reference_info; 2057 reference_info.stack_local.thread_tag = thread_tag; 2058 reference_info.stack_local.thread_id = tid; 2059 reference_info.stack_local.depth = depth; 2060 reference_info.stack_local.method = method; 2061 
reference_info.stack_local.location = bci; 2062 reference_info.stack_local.slot = slot; 2063 2064 // for arrays we need the length, otherwise -1 2065 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1); 2066 2067 // call into the agent 2068 int res = (*cb)(ref_kind, 2069 &reference_info, 2070 wrapper.klass_tag(), 2071 0, // referrer_class_tag is 0 for heap root (stack) 2072 wrapper.obj_size(), 2073 wrapper.obj_tag_p(), 2074 NULL, // referrer_tag is 0 for root 2075 len, 2076 (void*)user_data()); 2077 2078 if (res & JVMTI_VISIT_ABORT) { 2079 return false; 2080 } 2081 if (res & JVMTI_VISIT_OBJECTS) { 2082 check_for_visit(obj); 2083 } 2084 return true; 2085 } 2086 2087 // This mask is used to pass reference_info to a jvmtiHeapReferenceCallback 2088 // only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed. 2089 #define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD) \ 2090 | (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \ 2091 | (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \ 2092 | (1 << JVMTI_HEAP_REFERENCE_CONSTANT_POOL) \ 2093 | (1 << JVMTI_HEAP_REFERENCE_STACK_LOCAL) \ 2094 | (1 << JVMTI_HEAP_REFERENCE_JNI_LOCAL)) 2095 2096 // invoke the object reference callback to report a reference 2097 inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeapReferenceKind ref_kind, 2098 oop referrer, 2099 oop obj, 2100 jint index) 2101 { 2102 // field index is only valid field in reference_info 2103 static jvmtiHeapReferenceInfo reference_info = { 0 }; 2104 2105 AdvancedHeapWalkContext* context = advanced_context(); 2106 2107 // check that callback is provider 2108 jvmtiHeapReferenceCallback cb = context->heap_reference_callback(); 2109 if (cb == NULL) { 2110 return check_for_visit(obj); 2111 } 2112 2113 // apply class filter 2114 if (is_filtered_by_klass_filter(obj, context->klass_filter())) { 2115 return check_for_visit(obj); 2116 } 2117 2118 // setup the callback wrapper 2119 TwoOopCallbackWrapper wrapper(tag_map(), referrer, 
obj); 2120 2121 // apply tag filter 2122 if (is_filtered_by_heap_filter(wrapper.obj_tag(), 2123 wrapper.klass_tag(), 2124 context->heap_filter())) { 2125 return check_for_visit(obj); 2126 } 2127 2128 // field index is only valid field in reference_info 2129 reference_info.field.index = index; 2130 2131 // for arrays we need the length, otherwise -1 2132 jint len = (jint)(obj->is_array() ? arrayOop(obj)->length() : -1); 2133 2134 // invoke the callback 2135 int res = (*cb)(ref_kind, 2136 (REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL, 2137 wrapper.klass_tag(), 2138 wrapper.referrer_klass_tag(), 2139 wrapper.obj_size(), 2140 wrapper.obj_tag_p(), 2141 wrapper.referrer_tag_p(), 2142 len, 2143 (void*)user_data()); 2144 2145 if (res & JVMTI_VISIT_ABORT) { 2146 return false; 2147 } 2148 if (res & JVMTI_VISIT_OBJECTS) { 2149 check_for_visit(obj); 2150 } 2151 return true; 2152 } 2153 2154 // report a "simple root" 2155 inline bool CallbackInvoker::report_simple_root(jvmtiHeapReferenceKind kind, oop obj) { 2156 assert(kind != JVMTI_HEAP_REFERENCE_STACK_LOCAL && 2157 kind != JVMTI_HEAP_REFERENCE_JNI_LOCAL, "not a simple root"); 2158 2159 if (is_basic_heap_walk()) { 2160 // map to old style root kind 2161 jvmtiHeapRootKind root_kind = toJvmtiHeapRootKind(kind); 2162 return invoke_basic_heap_root_callback(root_kind, obj); 2163 } else { 2164 assert(is_advanced_heap_walk(), "wrong heap walk type"); 2165 return invoke_advanced_heap_root_callback(kind, obj); 2166 } 2167 } 2168 2169 2170 // invoke the primitive array values 2171 inline bool CallbackInvoker::report_primitive_array_values(oop obj) { 2172 assert(obj->is_typeArray(), "not a primitive array"); 2173 2174 AdvancedHeapWalkContext* context = advanced_context(); 2175 assert(context->array_primitive_value_callback() != NULL, "no callback"); 2176 2177 // apply class filter 2178 if (is_filtered_by_klass_filter(obj, context->klass_filter())) { 2179 return true; 2180 } 2181 2182 CallbackWrapper wrapper(tag_map(), obj); 
2183 2184 // apply tag filter 2185 if (is_filtered_by_heap_filter(wrapper.obj_tag(), 2186 wrapper.klass_tag(), 2187 context->heap_filter())) { 2188 return true; 2189 } 2190 2191 // invoke the callback 2192 int res = invoke_array_primitive_value_callback(context->array_primitive_value_callback(), 2193 &wrapper, 2194 obj, 2195 (void*)user_data()); 2196 return (!(res & JVMTI_VISIT_ABORT)); 2197 } 2198 2199 // invoke the string value callback 2200 inline bool CallbackInvoker::report_string_value(oop str) { 2201 assert(str->klass() == SystemDictionary::String_klass(), "not a string"); 2202 2203 AdvancedHeapWalkContext* context = advanced_context(); 2204 assert(context->string_primitive_value_callback() != NULL, "no callback"); 2205 2206 // apply class filter 2207 if (is_filtered_by_klass_filter(str, context->klass_filter())) { 2208 return true; 2209 } 2210 2211 CallbackWrapper wrapper(tag_map(), str); 2212 2213 // apply tag filter 2214 if (is_filtered_by_heap_filter(wrapper.obj_tag(), 2215 wrapper.klass_tag(), 2216 context->heap_filter())) { 2217 return true; 2218 } 2219 2220 // invoke the callback 2221 int res = invoke_string_value_callback(context->string_primitive_value_callback(), 2222 &wrapper, 2223 str, 2224 (void*)user_data()); 2225 return (!(res & JVMTI_VISIT_ABORT)); 2226 } 2227 2228 // invoke the primitive field callback 2229 inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_kind, 2230 oop obj, 2231 jint index, 2232 address addr, 2233 char type) 2234 { 2235 // for primitive fields only the index will be set 2236 static jvmtiHeapReferenceInfo reference_info = { 0 }; 2237 2238 AdvancedHeapWalkContext* context = advanced_context(); 2239 assert(context->primitive_field_callback() != NULL, "no callback"); 2240 2241 // apply class filter 2242 if (is_filtered_by_klass_filter(obj, context->klass_filter())) { 2243 return true; 2244 } 2245 2246 CallbackWrapper wrapper(tag_map(), obj); 2247 2248 // apply tag filter 2249 if 
(is_filtered_by_heap_filter(wrapper.obj_tag(), 2250 wrapper.klass_tag(), 2251 context->heap_filter())) { 2252 return true; 2253 } 2254 2255 // the field index in the referrer 2256 reference_info.field.index = index; 2257 2258 // map the type 2259 jvmtiPrimitiveType value_type = (jvmtiPrimitiveType)type; 2260 2261 // setup the jvalue 2262 jvalue value; 2263 copy_to_jvalue(&value, addr, value_type); 2264 2265 jvmtiPrimitiveFieldCallback cb = context->primitive_field_callback(); 2266 int res = (*cb)(ref_kind, 2267 &reference_info, 2268 wrapper.klass_tag(), 2269 wrapper.obj_tag_p(), 2270 value, 2271 value_type, 2272 (void*)user_data()); 2273 return (!(res & JVMTI_VISIT_ABORT)); 2274 } 2275 2276 2277 // instance field 2278 inline bool CallbackInvoker::report_primitive_instance_field(oop obj, 2279 jint index, 2280 address value, 2281 char type) { 2282 return report_primitive_field(JVMTI_HEAP_REFERENCE_FIELD, 2283 obj, 2284 index, 2285 value, 2286 type); 2287 } 2288 2289 // static field 2290 inline bool CallbackInvoker::report_primitive_static_field(oop obj, 2291 jint index, 2292 address value, 2293 char type) { 2294 return report_primitive_field(JVMTI_HEAP_REFERENCE_STATIC_FIELD, 2295 obj, 2296 index, 2297 value, 2298 type); 2299 } 2300 2301 // report a JNI local (root object) to the profiler 2302 inline bool CallbackInvoker::report_jni_local_root(jlong thread_tag, jlong tid, jint depth, jmethodID m, oop obj) { 2303 if (is_basic_heap_walk()) { 2304 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_JNI_LOCAL, 2305 thread_tag, 2306 depth, 2307 m, 2308 -1, 2309 obj); 2310 } else { 2311 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_JNI_LOCAL, 2312 thread_tag, tid, 2313 depth, 2314 m, 2315 (jlocation)-1, 2316 -1, 2317 obj); 2318 } 2319 } 2320 2321 2322 // report a local (stack reference, root object) 2323 inline bool CallbackInvoker::report_stack_ref_root(jlong thread_tag, 2324 jlong tid, 2325 jint depth, 2326 jmethodID method, 2327 jlocation bci, 2328 
jint slot, 2329 oop obj) { 2330 if (is_basic_heap_walk()) { 2331 return invoke_basic_stack_ref_callback(JVMTI_HEAP_ROOT_STACK_LOCAL, 2332 thread_tag, 2333 depth, 2334 method, 2335 slot, 2336 obj); 2337 } else { 2338 return invoke_advanced_stack_ref_callback(JVMTI_HEAP_REFERENCE_STACK_LOCAL, 2339 thread_tag, 2340 tid, 2341 depth, 2342 method, 2343 bci, 2344 slot, 2345 obj); 2346 } 2347 } 2348 2349 // report an object referencing a class. 2350 inline bool CallbackInvoker::report_class_reference(oop referrer, oop referree) { 2351 if (is_basic_heap_walk()) { 2352 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1); 2353 } else { 2354 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS, referrer, referree, -1); 2355 } 2356 } 2357 2358 // report a class referencing its class loader. 2359 inline bool CallbackInvoker::report_class_loader_reference(oop referrer, oop referree) { 2360 if (is_basic_heap_walk()) { 2361 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS_LOADER, referrer, referree, -1); 2362 } else { 2363 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CLASS_LOADER, referrer, referree, -1); 2364 } 2365 } 2366 2367 // report a class referencing its signers. 2368 inline bool CallbackInvoker::report_signers_reference(oop referrer, oop referree) { 2369 if (is_basic_heap_walk()) { 2370 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_SIGNERS, referrer, referree, -1); 2371 } else { 2372 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SIGNERS, referrer, referree, -1); 2373 } 2374 } 2375 2376 // report a class referencing its protection domain.. 
2377 inline bool CallbackInvoker::report_protection_domain_reference(oop referrer, oop referree) { 2378 if (is_basic_heap_walk()) { 2379 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1); 2380 } else { 2381 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_PROTECTION_DOMAIN, referrer, referree, -1); 2382 } 2383 } 2384 2385 // report a class referencing its superclass. 2386 inline bool CallbackInvoker::report_superclass_reference(oop referrer, oop referree) { 2387 if (is_basic_heap_walk()) { 2388 // Send this to be consistent with past implementation 2389 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CLASS, referrer, referree, -1); 2390 } else { 2391 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_SUPERCLASS, referrer, referree, -1); 2392 } 2393 } 2394 2395 // report a class referencing one of its interfaces. 2396 inline bool CallbackInvoker::report_interface_reference(oop referrer, oop referree) { 2397 if (is_basic_heap_walk()) { 2398 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_INTERFACE, referrer, referree, -1); 2399 } else { 2400 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_INTERFACE, referrer, referree, -1); 2401 } 2402 } 2403 2404 // report a class referencing one of its static fields. 
2405 inline bool CallbackInvoker::report_static_field_reference(oop referrer, oop referree, jint slot) { 2406 if (is_basic_heap_walk()) { 2407 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_STATIC_FIELD, referrer, referree, slot); 2408 } else { 2409 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_STATIC_FIELD, referrer, referree, slot); 2410 } 2411 } 2412 2413 // report an array referencing an element object 2414 inline bool CallbackInvoker::report_array_element_reference(oop referrer, oop referree, jint index) { 2415 if (is_basic_heap_walk()) { 2416 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_ARRAY_ELEMENT, referrer, referree, index); 2417 } else { 2418 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT, referrer, referree, index); 2419 } 2420 } 2421 2422 // report an object referencing an instance field object 2423 inline bool CallbackInvoker::report_field_reference(oop referrer, oop referree, jint slot) { 2424 if (is_basic_heap_walk()) { 2425 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_FIELD, referrer, referree, slot); 2426 } else { 2427 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_FIELD, referrer, referree, slot); 2428 } 2429 } 2430 2431 // report an array referencing an element object 2432 inline bool CallbackInvoker::report_constant_pool_reference(oop referrer, oop referree, jint index) { 2433 if (is_basic_heap_walk()) { 2434 return invoke_basic_object_reference_callback(JVMTI_REFERENCE_CONSTANT_POOL, referrer, referree, index); 2435 } else { 2436 return invoke_advanced_object_reference_callback(JVMTI_HEAP_REFERENCE_CONSTANT_POOL, referrer, referree, index); 2437 } 2438 } 2439 2440 2441 ObjectMarkerController::ObjectMarkerController() { 2442 _marker = Universe::heap()->object_marker(); 2443 } 2444 2445 bool ObjectMarkerController::init() { 2446 if (_marker->init()) { 2447 CallbackInvoker::_object_marker = _marker; 2448 return 
true; 2449 } else { 2450 return false; 2451 } 2452 } 2453 2454 ObjectMarkerController::~ObjectMarkerController() { 2455 CallbackInvoker::_object_marker = NULL; 2456 _marker->done(); 2457 } 2458 2459 // A supporting closure used to process simple roots 2460 class SimpleRootsClosure : public OopClosure { 2461 private: 2462 jvmtiHeapReferenceKind _kind; 2463 bool _continue; 2464 2465 jvmtiHeapReferenceKind root_kind() { return _kind; } 2466 2467 public: 2468 void set_kind(jvmtiHeapReferenceKind kind) { 2469 _kind = kind; 2470 _continue = true; 2471 } 2472 2473 inline bool stopped() { 2474 return !_continue; 2475 } 2476 2477 void do_oop(oop* obj_p) { 2478 // iteration has terminated 2479 if (stopped()) { 2480 return; 2481 } 2482 2483 oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p); 2484 // ignore null 2485 if (o == NULL) { 2486 return; 2487 } 2488 2489 assert(Universe::heap()->is_in(o), "should be impossible"); 2490 2491 jvmtiHeapReferenceKind kind = root_kind(); 2492 if (kind == JVMTI_HEAP_REFERENCE_SYSTEM_CLASS) { 2493 // SystemDictionary::oops_do reports the application 2494 // class loader as a root. We want this root to be reported as 2495 // a root kind of "OTHER" rather than "SYSTEM_CLASS". 
2496 if (!o->is_instance() || !InstanceKlass::cast(o->klass())->is_mirror_instance_klass()) { 2497 kind = JVMTI_HEAP_REFERENCE_OTHER; 2498 } 2499 } 2500 2501 // invoke the callback 2502 _continue = CallbackInvoker::report_simple_root(kind, o); 2503 2504 } 2505 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); } 2506 }; 2507 2508 // A supporting closure used to process JNI locals 2509 class JNILocalRootsClosure : public OopClosure { 2510 private: 2511 jlong _thread_tag; 2512 jlong _tid; 2513 jint _depth; 2514 jmethodID _method; 2515 bool _continue; 2516 public: 2517 void set_context(jlong thread_tag, jlong tid, jint depth, jmethodID method) { 2518 _thread_tag = thread_tag; 2519 _tid = tid; 2520 _depth = depth; 2521 _method = method; 2522 _continue = true; 2523 } 2524 2525 inline bool stopped() { 2526 return !_continue; 2527 } 2528 2529 void do_oop(oop* obj_p) { 2530 // iteration has terminated 2531 if (stopped()) { 2532 return; 2533 } 2534 2535 oop o = *obj_p; 2536 // ignore null 2537 if (o == NULL) { 2538 return; 2539 } 2540 2541 // invoke the callback 2542 _continue = CallbackInvoker::report_jni_local_root(_thread_tag, _tid, _depth, _method, o); 2543 } 2544 virtual void do_oop(narrowOop* obj_p) { ShouldNotReachHere(); } 2545 }; 2546 2547 2548 // A VM operation to iterate over objects that are reachable from 2549 // a set of roots or an initial object. 
2550 // 2551 // For VM_HeapWalkOperation the set of roots used is :- 2552 // 2553 // - All JNI global references 2554 // - All inflated monitors 2555 // - All classes loaded by the boot class loader (or all classes 2556 // in the event that class unloading is disabled) 2557 // - All java threads 2558 // - For each java thread then all locals and JNI local references 2559 // on the thread's execution stack 2560 // - All visible/explainable objects from Universes::oops_do 2561 // 2562 class VM_HeapWalkOperation: public VM_Operation { 2563 private: 2564 enum { 2565 initial_visit_stack_size = 4000 2566 }; 2567 2568 bool _is_advanced_heap_walk; // indicates FollowReferences 2569 JvmtiTagMap* _tag_map; 2570 Handle _initial_object; 2571 GrowableArray<oop>* _visit_stack; // the visit stack 2572 2573 bool _collecting_heap_roots; // are we collecting roots 2574 bool _following_object_refs; // are we following object references 2575 2576 bool _reporting_primitive_fields; // optional reporting 2577 bool _reporting_primitive_array_values; 2578 bool _reporting_string_values; 2579 2580 GrowableArray<oop>* create_visit_stack() { 2581 return new (ResourceObj::C_HEAP, mtInternal) GrowableArray<oop>(initial_visit_stack_size, true); 2582 } 2583 2584 // accessors 2585 bool is_advanced_heap_walk() const { return _is_advanced_heap_walk; } 2586 JvmtiTagMap* tag_map() const { return _tag_map; } 2587 Handle initial_object() const { return _initial_object; } 2588 2589 bool is_following_references() const { return _following_object_refs; } 2590 2591 bool is_reporting_primitive_fields() const { return _reporting_primitive_fields; } 2592 bool is_reporting_primitive_array_values() const { return _reporting_primitive_array_values; } 2593 bool is_reporting_string_values() const { return _reporting_string_values; } 2594 2595 GrowableArray<oop>* visit_stack() const { return _visit_stack; } 2596 2597 // iterate over the various object types 2598 inline bool iterate_over_array(oop o); 2599 inline bool 
iterate_over_type_array(oop o); 2600 inline bool iterate_over_class(oop o); 2601 inline bool iterate_over_object(oop o); 2602 2603 // root collection 2604 inline bool collect_simple_roots(); 2605 inline bool collect_stack_roots(); 2606 inline bool collect_stack_roots(JavaThread* java_thread, JNILocalRootsClosure* blk); 2607 2608 // visit an object 2609 inline bool visit(oop o); 2610 2611 public: 2612 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2613 Handle initial_object, 2614 BasicHeapWalkContext callbacks, 2615 const void* user_data); 2616 2617 VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2618 Handle initial_object, 2619 AdvancedHeapWalkContext callbacks, 2620 const void* user_data); 2621 2622 ~VM_HeapWalkOperation(); 2623 2624 VMOp_Type type() const { return VMOp_HeapWalkOperation; } 2625 void doit(); 2626 }; 2627 2628 2629 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2630 Handle initial_object, 2631 BasicHeapWalkContext callbacks, 2632 const void* user_data) { 2633 _is_advanced_heap_walk = false; 2634 _tag_map = tag_map; 2635 _initial_object = initial_object; 2636 _following_object_refs = (callbacks.object_ref_callback() != NULL); 2637 _reporting_primitive_fields = false; 2638 _reporting_primitive_array_values = false; 2639 _reporting_string_values = false; 2640 _visit_stack = create_visit_stack(); 2641 2642 2643 CallbackInvoker::initialize_for_basic_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2644 } 2645 2646 VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map, 2647 Handle initial_object, 2648 AdvancedHeapWalkContext callbacks, 2649 const void* user_data) { 2650 _is_advanced_heap_walk = true; 2651 _tag_map = tag_map; 2652 _initial_object = initial_object; 2653 _following_object_refs = true; 2654 _reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);; 2655 _reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);; 2656 _reporting_string_values = 
(callbacks.string_primitive_value_callback() != NULL);; 2657 _visit_stack = create_visit_stack(); 2658 2659 CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks); 2660 } 2661 2662 VM_HeapWalkOperation::~VM_HeapWalkOperation() { 2663 if (_following_object_refs) { 2664 assert(_visit_stack != NULL, "checking"); 2665 delete _visit_stack; 2666 _visit_stack = NULL; 2667 } 2668 } 2669 2670 // an array references its class and has a reference to 2671 // each element in the array 2672 inline bool VM_HeapWalkOperation::iterate_over_array(oop o) { 2673 objArrayOop array = objArrayOop(o); 2674 2675 // array reference to its class 2676 oop mirror = ObjArrayKlass::cast(array->klass())->java_mirror(); 2677 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2678 return false; 2679 } 2680 2681 // iterate over the array and report each reference to a 2682 // non-null element 2683 for (int index=0; index<array->length(); index++) { 2684 oop elem = array->obj_at(index); 2685 if (elem == NULL) { 2686 continue; 2687 } 2688 2689 // report the array reference o[index] = elem 2690 if (!CallbackInvoker::report_array_element_reference(o, elem, index)) { 2691 return false; 2692 } 2693 } 2694 return true; 2695 } 2696 2697 // a type array references its class 2698 inline bool VM_HeapWalkOperation::iterate_over_type_array(oop o) { 2699 Klass* k = o->klass(); 2700 oop mirror = k->java_mirror(); 2701 if (!CallbackInvoker::report_class_reference(o, mirror)) { 2702 return false; 2703 } 2704 2705 // report the array contents if required 2706 if (is_reporting_primitive_array_values()) { 2707 if (!CallbackInvoker::report_primitive_array_values(o)) { 2708 return false; 2709 } 2710 } 2711 return true; 2712 } 2713 2714 #ifdef ASSERT 2715 // verify that a static oop field is in range 2716 static inline bool verify_static_oop(InstanceKlass* ik, 2717 oop mirror, int offset) { 2718 address obj_p = cast_from_oop<address>(mirror) + offset; 2719 address start = 
(address)InstanceMirrorKlass::start_of_static_fields(mirror); 2720 address end = start + (java_lang_Class::static_oop_field_count(mirror) * heapOopSize); 2721 assert(end >= start, "sanity check"); 2722 2723 if (obj_p >= start && obj_p < end) { 2724 return true; 2725 } else { 2726 return false; 2727 } 2728 } 2729 #endif // #ifdef ASSERT 2730 2731 // a class references its super class, interfaces, class loader, ... 2732 // and finally its static fields 2733 inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) { 2734 int i; 2735 Klass* klass = java_lang_Class::as_Klass(java_class); 2736 2737 if (klass->is_instance_klass()) { 2738 InstanceKlass* ik = InstanceKlass::cast(klass); 2739 2740 // Ignore the class if it hasn't been initialized yet 2741 if (!ik->is_linked()) { 2742 return true; 2743 } 2744 2745 // get the java mirror 2746 oop mirror = klass->java_mirror(); 2747 2748 // super (only if something more interesting than java.lang.Object) 2749 InstanceKlass* java_super = ik->java_super(); 2750 if (java_super != NULL && java_super != SystemDictionary::Object_klass()) { 2751 oop super = java_super->java_mirror(); 2752 if (!CallbackInvoker::report_superclass_reference(mirror, super)) { 2753 return false; 2754 } 2755 } 2756 2757 // class loader 2758 oop cl = ik->class_loader(); 2759 if (cl != NULL) { 2760 if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) { 2761 return false; 2762 } 2763 } 2764 2765 // protection domain 2766 oop pd = ik->protection_domain(); 2767 if (pd != NULL) { 2768 if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) { 2769 return false; 2770 } 2771 } 2772 2773 // signers 2774 oop signers = ik->signers(); 2775 if (signers != NULL) { 2776 if (!CallbackInvoker::report_signers_reference(mirror, signers)) { 2777 return false; 2778 } 2779 } 2780 2781 // references from the constant pool 2782 { 2783 ConstantPool* pool = ik->constants(); 2784 for (int i = 1; i < pool->length(); i++) { 2785 constantTag tag = 
pool->tag_at(i).value(); 2786 if (tag.is_string() || tag.is_klass() || tag.is_unresolved_klass()) { 2787 oop entry; 2788 if (tag.is_string()) { 2789 entry = pool->resolved_string_at(i); 2790 // If the entry is non-null it is resolved. 2791 if (entry == NULL) { 2792 continue; 2793 } 2794 } else if (tag.is_klass()) { 2795 entry = pool->resolved_klass_at(i)->java_mirror(); 2796 } else { 2797 // Code generated by JIT and AOT compilers might not resolve constant 2798 // pool entries. Treat them as resolved if they are loaded. 2799 assert(tag.is_unresolved_klass(), "must be"); 2800 constantPoolHandle cp(Thread::current(), pool); 2801 Klass* klass = ConstantPool::klass_at_if_loaded(cp, i); 2802 if (klass == NULL) { 2803 continue; 2804 } 2805 entry = klass->java_mirror(); 2806 } 2807 if (!CallbackInvoker::report_constant_pool_reference(mirror, entry, (jint)i)) { 2808 return false; 2809 } 2810 } 2811 } 2812 } 2813 2814 // interfaces 2815 // (These will already have been reported as references from the constant pool 2816 // but are specified by IterateOverReachableObjects and must be reported). 
2817 Array<InstanceKlass*>* interfaces = ik->local_interfaces(); 2818 for (i = 0; i < interfaces->length(); i++) { 2819 oop interf = interfaces->at(i)->java_mirror(); 2820 if (interf == NULL) { 2821 continue; 2822 } 2823 if (!CallbackInvoker::report_interface_reference(mirror, interf)) { 2824 return false; 2825 } 2826 } 2827 2828 // iterate over the static fields 2829 2830 ClassFieldMap* field_map = ClassFieldMap::create_map_of_static_fields(klass); 2831 for (i=0; i<field_map->field_count(); i++) { 2832 ClassFieldDescriptor* field = field_map->field_at(i); 2833 char type = field->field_type(); 2834 if (!is_primitive_field_type(type)) { 2835 oop fld_o = mirror->obj_field(field->field_offset()); 2836 assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check"); 2837 if (fld_o != NULL) { 2838 int slot = field->field_index(); 2839 if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) { 2840 delete field_map; 2841 return false; 2842 } 2843 } 2844 } else { 2845 if (is_reporting_primitive_fields()) { 2846 address addr = cast_from_oop<address>(mirror) + field->field_offset(); 2847 int slot = field->field_index(); 2848 if (!CallbackInvoker::report_primitive_static_field(mirror, slot, addr, type)) { 2849 delete field_map; 2850 return false; 2851 } 2852 } 2853 } 2854 } 2855 delete field_map; 2856 2857 return true; 2858 } 2859 2860 return true; 2861 } 2862 2863 // an object references a class and its instance fields 2864 // (static fields are ignored here as we report these as 2865 // references from the class). 
inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
  // reference to the class: every object reports its class mirror first
  if (!CallbackInvoker::report_class_reference(o, o->klass()->java_mirror())) {
    return false;
  }

  // iterate over instance fields (the field map is cached per-class, see
  // JvmtiCachedClassFieldMap)
  ClassFieldMap* field_map = JvmtiCachedClassFieldMap::get_map_of_instance_fields(o);
  for (int i=0; i<field_map->field_count(); i++) {
    ClassFieldDescriptor* field = field_map->field_at(i);
    char type = field->field_type();
    if (!is_primitive_field_type(type)) {
      // reference field: load the oop without a keep-alive barrier
      // (AS_NO_KEEPALIVE) since we are only observing the heap here
      oop fld_o = o->obj_field_access<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>(field->field_offset());
      // ignore any objects that aren't visible to profiler
      if (fld_o != NULL) {
        assert(Universe::heap()->is_in(fld_o), "unsafe code should not "
               "have references to Klass* anymore");
        int slot = field->field_index();
        if (!CallbackInvoker::report_field_reference(o, fld_o, slot)) {
          return false;          // callback requested termination
        }
      }
    } else {
      if (is_reporting_primitive_fields()) {
        // primitive instance field: report the raw address of the field value
        address addr = cast_from_oop<address>(o) + field->field_offset();
        int slot = field->field_index();
        if (!CallbackInvoker::report_primitive_instance_field(o, slot, addr, type)) {
          return false;          // callback requested termination
        }
      }
    }
  }

  // if the object is a java.lang.String then optionally report its value
  if (is_reporting_string_values() &&
      o->klass() == SystemDictionary::String_klass()) {
    if (!CallbackInvoker::report_string_value(o)) {
      return false;
    }
  }
  return true;
}


// Collects all simple (non-stack) roots except for threads;
// threads are handled in collect_stack_roots() as an optimization.
// if there's a heap root callback provided then the callback is
// invoked for each simple root.
// if an object reference callback is provided then all simple
// roots are pushed onto the marking stack so that they can be
// processed later
//
inline bool VM_HeapWalkOperation::collect_simple_roots() {
  SimpleRootsClosure blk;

  // JNI globals
  blk.set_kind(JVMTI_HEAP_REFERENCE_JNI_GLOBAL);
  JNIHandles::oops_do(&blk);
  if (blk.stopped()) {
    return false;      // a callback terminated the iteration
  }

  // Preloaded classes and loader from the system dictionary
  blk.set_kind(JVMTI_HEAP_REFERENCE_SYSTEM_CLASS);
  SystemDictionary::oops_do(&blk);
  // also walk the strongly reachable class loader data oops
  CLDToOopClosure cld_closure(&blk, false);
  ClassLoaderDataGraph::always_strong_cld_do(&cld_closure);
  if (blk.stopped()) {
    return false;
  }

  // Inflated monitors
  blk.set_kind(JVMTI_HEAP_REFERENCE_MONITOR);
  ObjectSynchronizer::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  // threads are now handled in collect_stack_roots()

  // Other kinds of roots maintained by HotSpot
  // Many of these won't be visible but others (such as instances of important
  // exceptions) will be visible.
  blk.set_kind(JVMTI_HEAP_REFERENCE_OTHER);
  Universe::oops_do(&blk);
  if (blk.stopped()) {
    return false;
  }

  return true;
}

// Walk the stack of a given thread and find all references (locals
// and JNI calls) and report these as stack references.
// Returns false if a callback asked to terminate the iteration.
inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
                                                      JNILocalRootsClosure* blk)
{
  oop threadObj = java_thread->threadObj();
  assert(threadObj != NULL, "sanity check");

  // only need to get the thread's tag once per thread
  jlong thread_tag = tag_for(_tag_map, threadObj);

  // also need the thread id
  jlong tid = java_lang_Thread::thread_id(threadObj);


  if (java_thread->has_last_Java_frame()) {

    // vframes are resource allocated
    Thread* current_thread = Thread::current();
    ResourceMark rm(current_thread);
    HandleMark hm(current_thread);

    RegisterMap reg_map(java_thread);
    frame f = java_thread->last_frame();
    vframe* vf = vframe::new_vframe(&f, &reg_map, java_thread);

    bool is_top_frame = true;
    int depth = 0;
    // remembers the most recent entry frame so its JNI locals can be
    // attributed to the native javaVFrame found below it
    frame* last_entry_frame = NULL;

    while (vf != NULL) {
      if (vf->is_java_frame()) {

        // java frame (interpreted, compiled, ...)
        javaVFrame *jvf = javaVFrame::cast(vf);

        // the jmethodID
        jmethodID method = jvf->method()->jmethod_id();

        if (!(jvf->method()->is_native())) {
          jlocation bci = (jlocation)jvf->bci();
          // report each object-typed local variable slot
          StackValueCollection* locals = jvf->locals();
          for (int slot=0; slot<locals->size(); slot++) {
            if (locals->at(slot)->type() == T_OBJECT) {
              oop o = locals->obj_at(slot)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                     bci, slot, o)) {
                return false;
              }
            }
          }

          // report each object on the expression stack; slot numbers for
          // expression-stack entries continue after the local slots
          StackValueCollection* exprs = jvf->expressions();
          for (int index=0; index < exprs->size(); index++) {
            if (exprs->at(index)->type() == T_OBJECT) {
              oop o = exprs->obj_at(index)();
              if (o == NULL) {
                continue;
              }

              // stack reference
              if (!CallbackInvoker::report_stack_ref_root(thread_tag, tid, depth, method,
                                                     bci, locals->size() + index, o)) {
                return false;
              }
            }
          }

          // Follow oops from compiled nmethod
          if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
            blk->set_context(thread_tag, tid, depth, method);
            jvf->cb()->as_nmethod()->oops_do(blk);
          }
        } else {
          // native frame: the only references are JNI local handles
          blk->set_context(thread_tag, tid, depth, method);
          if (is_top_frame) {
            // JNI locals for the top frame.
            java_thread->active_handles()->oops_do(blk);
          } else {
            if (last_entry_frame != NULL) {
              // JNI locals for the entry frame
              assert(last_entry_frame->is_entry_frame(), "checking");
              last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
            }
          }
        }
        last_entry_frame = NULL;
        depth++;
      } else {
        // externalVFrame - for an entry frame then we report the JNI locals
        // when we find the corresponding javaVFrame
        frame* fr = vf->frame_pointer();
        assert(fr != NULL, "sanity check");
        if (fr->is_entry_frame()) {
          last_entry_frame = fr;
        }
      }

      vf = vf->sender();
      is_top_frame = false;
    }
  } else {
    // no last java frame but there may be JNI locals
    blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
    java_thread->active_handles()->oops_do(blk);
  }
  return true;
}


// Collects the simple roots for all threads and collects all
// stack roots - for each thread it walks the execution
// stack to find all references and local JNI refs.
inline bool VM_HeapWalkOperation::collect_stack_roots() {
  JNILocalRootsClosure blk;
  for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
    oop threadObj = thread->threadObj();
    // skip threads with no thread object, exiting threads, and threads
    // hidden from the external view
    if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
      // Collect the simple root for this thread before we
      // collect its stack roots
      if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
                                               threadObj)) {
        return false;
      }
      if (!collect_stack_roots(thread, &blk)) {
        return false;
      }
    }
  }
  return true;
}

// visit an object
// first mark the object as visited
// second get all the outbound references from this object (in other words, all
// the objects referenced by this object).
//
bool VM_HeapWalkOperation::visit(oop o) {
  // instance
  if (o->is_instance()) {
    if (o->klass() == SystemDictionary::Class_klass()) {
      if (!java_lang_Class::is_primitive(o)) {
        // a java.lang.Class
        return iterate_over_class(o);
      }
      // mirrors of primitive types have no interesting references
    } else {
      return iterate_over_object(o);
    }
  }

  // object array
  if (o->is_objArray()) {
    return iterate_over_array(o);
  }

  // type array
  if (o->is_typeArray()) {
    return iterate_over_type_array(o);
  }

  return true;
}

// Executed by the VM thread at a safepoint: performs the actual heap walk.
void VM_HeapWalkOperation::doit() {
  ResourceMark rm;
  ObjectMarkerController marker;
  if (!marker.init()) {
    return;       // marking support could not be initialized
  }

  ClassFieldMapCacheMark cm;
  assert(visit_stack()->is_empty(), "visit stack must be empty");
  ObjectMarker* const object_marker = marker.object_marker();

  // the heap walk starts with an initial object or the heap roots
  if (initial_object().is_null()) {
    // If either collect_stack_roots() or collect_simple_roots()
    // returns false at this point, then there are no mark bits
    // to reset.
    object_marker->set_needs_reset(false);

    // Calling collect_stack_roots() before collect_simple_roots()
    // can result in a big performance boost for an agent that is
    // focused on analyzing references in the thread stacks.
    if (!collect_stack_roots()) return;

    if (!collect_simple_roots()) return;

    // no early return so enable heap traversal to reset the mark bits
    object_marker->set_needs_reset(true);
  } else {
    visit_stack()->push(initial_object()());
  }

  // object references required
  if (is_following_references()) {

    // visit each object until all reachable objects have been
    // visited or the callback asked to terminate the iteration.
    while (!visit_stack()->is_empty()) {
      oop o = visit_stack()->pop();
      // mark() returns false for objects already visited
      if (object_marker->mark(o)) {
        if (!visit(o)) {
          break;
        }
      }
    }
  }
}

// iterate over all objects that are reachable from a set of roots
void JvmtiTagMap::iterate_over_reachable_objects(jvmtiHeapRootCallback heap_root_callback,
                                                 jvmtiStackReferenceCallback stack_ref_callback,
                                                 jvmtiObjectReferenceCallback object_ref_callback,
                                                 const void* user_data) {
  MutexLocker ml(Heap_lock);
  BasicHeapWalkContext context(heap_root_callback, stack_ref_callback, object_ref_callback);
  // null initial object => walk starts from the heap roots
  VM_HeapWalkOperation op(this, Handle(), context, user_data);
  VMThread::execute(&op);
}

// iterate over all objects that are reachable from a given object
void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
                                                             jvmtiObjectReferenceCallback object_ref_callback,
                                                             const void* user_data) {
  oop obj = JNIHandles::resolve(object);
  Handle initial_object(Thread::current(), obj);

  MutexLocker ml(Heap_lock);
  // only the object reference callback is used for this iteration
  BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
  VM_HeapWalkOperation op(this, initial_object, context, user_data);
  VMThread::execute(&op);
}

// follow references from an initial object or the GC roots
void JvmtiTagMap::follow_references(jint heap_filter,
                                    Klass* klass,
                                    jobject object,
                                    const jvmtiHeapCallbacks* callbacks,
                                    const void* user_data)
{
  oop obj = JNIHandles::resolve(object);
  Handle initial_object(Thread::current(), obj);

  MutexLocker ml(Heap_lock);
  AdvancedHeapWalkContext context(heap_filter, klass, callbacks);
  VM_HeapWalkOperation op(this, initial_object, context, user_data);
  VMThread::execute(&op);
}


void JvmtiTagMap::weak_oops_do(BoolObjectClosure* is_alive, OopClosure* f) {
  // No locks during VM bring-up (0 threads) and no safepoints after main
  // thread creation and before VMThread creation (1 thread); initial GC
  // verification can happen in that window which gets to here.
  assert(Threads::number_of_threads() <= 1 ||
         SafepointSynchronize::is_at_safepoint(),
         "must be executed at a safepoint");
  if (JvmtiEnv::environments_might_exist()) {
    // sweep the tag map of every environment that has one
    JvmtiEnvIterator it;
    for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
      JvmtiTagMap* tag_map = env->tag_map_acquire();
      if (tag_map != NULL && !tag_map->is_empty()) {
        tag_map->do_weak_oops(is_alive, f);
      }
    }
  }
}

// Weak-reference processing for this environment's tag hashmap:
// removes entries whose object has been GC'ed (optionally posting
// OBJECT_FREE events) and re-hashes entries whose object has moved.
void JvmtiTagMap::do_weak_oops(BoolObjectClosure* is_alive, OopClosure* f) {

  // does this environment have the OBJECT_FREE event enabled
  bool post_object_free = env()->is_enabled(JVMTI_EVENT_OBJECT_FREE);

  // counters used for trace message
  int freed = 0;
  int moved = 0;

  JvmtiTagHashmap* hashmap = this->hashmap();

  // reenable sizing (if disabled)
  hashmap->set_resizing_enabled(true);

  // if the hashmap is empty then we can skip it
  if (hashmap->_entry_count == 0) {
    return;
  }

  // now iterate through each entry in the table

  JvmtiTagHashmapEntry** table = hashmap->table();
  int size = hashmap->size();

  // list of entries that must be re-inserted at a later (higher) bucket,
  // deferred so the main loop does not encounter them twice
  JvmtiTagHashmapEntry* delayed_add = NULL;

  for (int pos = 0; pos < size; ++pos) {
    JvmtiTagHashmapEntry* entry = table[pos];
    JvmtiTagHashmapEntry* prev = NULL;

    while (entry != NULL) {
      // save the successor now; the entry may be unlinked below
      JvmtiTagHashmapEntry* next = entry->next();

      // has object been GC'ed
      if (!is_alive->do_object_b(entry->object_raw())) {
        // grab the tag
        jlong tag = entry->tag();
        guarantee(tag != 0, "checking");

        // remove GC'ed entry from hashmap and return the
        // entry to the free list
        hashmap->remove(prev, pos, entry);
        destroy_entry(entry);

        // post the event to the profiler
        if (post_object_free) {
          JvmtiExport::post_object_free(env(), tag);
        }

        ++freed;
      } else {
        // object is live: let the closure update the stored oop (it may
        // have been moved by the GC), then check whether it re-hashes
        f->do_oop(entry->object_addr());
        oop new_oop = entry->object_raw();

        // if the object has moved then re-hash it and move its
        // entry to its new location.
        unsigned int new_pos = JvmtiTagHashmap::hash(new_oop, size);
        if (new_pos != (unsigned int)pos) {
          // unlink from the current bucket
          if (prev == NULL) {
            table[pos] = next;
          } else {
            prev->set_next(next);
          }
          if (new_pos < (unsigned int)pos) {
            // new bucket already processed: safe to insert immediately
            entry->set_next(table[new_pos]);
            table[new_pos] = entry;
          } else {
            // Delay adding this entry to its new position as we'd end up
            // hitting it again during this iteration.
            entry->set_next(delayed_add);
            delayed_add = entry;
          }
          moved++;
        } else {
          // object didn't move
          prev = entry;
        }
      }

      entry = next;
    }
  }

  // Re-add all the entries which were kept aside
  while (delayed_add != NULL) {
    JvmtiTagHashmapEntry* next = delayed_add->next();
    unsigned int pos = JvmtiTagHashmap::hash(delayed_add->object_raw(), size);
    delayed_add->set_next(table[pos]);
    table[pos] = delayed_add;
    delayed_add = next;
  }

  log_debug(jvmti, objecttagging)("(%d->%d, %d freed, %d total moves)",
                                  hashmap->_entry_count + freed, hashmap->_entry_count, freed, moved);
}