1 /*
   2  * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
   3  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
   4  *
   5  * This code is free software; you can redistribute it and/or modify it
   6  * under the terms of the GNU General Public License version 2 only, as
   7  * published by the Free Software Foundation.
   8  *
   9  * This code is distributed in the hope that it will be useful, but WITHOUT
  10  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
  12  * version 2 for more details (a copy is included in the LICENSE file that
  13  * accompanied this code).
  14  *
  15  * You should have received a copy of the GNU General Public License version
  16  * 2 along with this work; if not, write to the Free Software Foundation,
  17  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
  18  *
  19  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
  20  * or visit www.oracle.com if you need additional information or have any
  21  * questions.
  22  *
  23  */
  24 
  25 #include "precompiled.hpp"
  26 #include "jvm.h"
  27 #include "aot/aotLoader.hpp"
  28 #include "classfile/classFileParser.hpp"
  29 #include "classfile/classFileStream.hpp"
  30 #include "classfile/classLoader.hpp"
  31 #include "classfile/classLoaderData.inline.hpp"
  32 #include "classfile/javaClasses.hpp"
  33 #include "classfile/moduleEntry.hpp"
  34 #include "classfile/symbolTable.hpp"
  35 #include "classfile/systemDictionary.hpp"
  36 #include "classfile/systemDictionaryShared.hpp"
  37 #include "classfile/verifier.hpp"
  38 #include "classfile/vmSymbols.hpp"
  39 #include "code/dependencyContext.hpp"
  40 #include "compiler/compileBroker.hpp"
  41 #include "gc/shared/collectedHeap.inline.hpp"
  42 #include "interpreter/oopMapCache.hpp"
  43 #include "interpreter/rewriter.hpp"
  44 #include "jvmtifiles/jvmti.h"
  45 #include "logging/log.hpp"
  46 #include "logging/logMessage.hpp"
  47 #include "logging/logStream.hpp"
  48 #include "memory/allocation.inline.hpp"
  49 #include "memory/heapInspection.hpp"
  50 #include "memory/iterator.inline.hpp"
  51 #include "memory/metadataFactory.hpp"
  52 #include "memory/metaspaceClosure.hpp"
  53 #include "memory/metaspaceShared.hpp"
  54 #include "memory/oopFactory.hpp"
  55 #include "memory/resourceArea.hpp"
  56 #include "memory/universe.hpp"
  57 #include "oops/fieldStreams.hpp"
  58 #include "oops/constantPool.hpp"
  59 #include "oops/instanceClassLoaderKlass.hpp"
  60 #include "oops/instanceKlass.inline.hpp"
  61 #include "oops/instanceMirrorKlass.hpp"
  62 #include "oops/instanceOop.hpp"
  63 #include "oops/klass.inline.hpp"
  64 #include "oops/method.hpp"
  65 #include "oops/oop.inline.hpp"
  66 #include "oops/symbol.hpp"
  67 #include "prims/jvmtiExport.hpp"
  68 #include "prims/jvmtiRedefineClasses.hpp"
  69 #include "prims/jvmtiThreadState.hpp"
  70 #include "prims/methodComparator.hpp"
  71 #include "runtime/atomic.hpp"
  72 #include "runtime/fieldDescriptor.inline.hpp"
  73 #include "runtime/handles.inline.hpp"
  74 #include "runtime/javaCalls.hpp"
  75 #include "runtime/mutexLocker.hpp"
  76 #include "runtime/orderAccess.hpp"
  77 #include "runtime/thread.inline.hpp"
  78 #include "services/classLoadingService.hpp"
  79 #include "services/threadService.hpp"
  80 #include "utilities/dtrace.hpp"
  81 #include "utilities/events.hpp"
  82 #include "utilities/macros.hpp"
  83 #include "utilities/stringUtils.hpp"
  84 #ifdef COMPILER1
  85 #include "c1/c1_Compiler.hpp"
  86 #endif
  87 #if INCLUDE_JFR
  88 #include "jfr/jfrEvents.hpp"
  89 #endif
  90 
  91 
  92 #ifdef DTRACE_ENABLED
  93 
  94 
  95 #define HOTSPOT_CLASS_INITIALIZATION_required HOTSPOT_CLASS_INITIALIZATION_REQUIRED
  96 #define HOTSPOT_CLASS_INITIALIZATION_recursive HOTSPOT_CLASS_INITIALIZATION_RECURSIVE
  97 #define HOTSPOT_CLASS_INITIALIZATION_concurrent HOTSPOT_CLASS_INITIALIZATION_CONCURRENT
  98 #define HOTSPOT_CLASS_INITIALIZATION_erroneous HOTSPOT_CLASS_INITIALIZATION_ERRONEOUS
  99 #define HOTSPOT_CLASS_INITIALIZATION_super__failed HOTSPOT_CLASS_INITIALIZATION_SUPER_FAILED
 100 #define HOTSPOT_CLASS_INITIALIZATION_clinit HOTSPOT_CLASS_INITIALIZATION_CLINIT
 101 #define HOTSPOT_CLASS_INITIALIZATION_error HOTSPOT_CLASS_INITIALIZATION_ERROR
 102 #define HOTSPOT_CLASS_INITIALIZATION_end HOTSPOT_CLASS_INITIALIZATION_END
 103 #define DTRACE_CLASSINIT_PROBE(type, thread_type)                \
 104   {                                                              \
 105     char* data = NULL;                                           \
 106     int len = 0;                                                 \
 107     Symbol* clss_name = name();                                  \
 108     if (clss_name != NULL) {                                     \
 109       data = (char*)clss_name->bytes();                          \
 110       len = clss_name->utf8_length();                            \
 111     }                                                            \
 112     HOTSPOT_CLASS_INITIALIZATION_##type(                         \
 113       data, len, (void*)class_loader(), thread_type);            \
 114   }
 115 
 116 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)     \
 117   {                                                              \
 118     char* data = NULL;                                           \
 119     int len = 0;                                                 \
 120     Symbol* clss_name = name();                                  \
 121     if (clss_name != NULL) {                                     \
 122       data = (char*)clss_name->bytes();                          \
 123       len = clss_name->utf8_length();                            \
 124     }                                                            \
 125     HOTSPOT_CLASS_INITIALIZATION_##type(                         \
 126       data, len, (void*)class_loader(), thread_type, wait);      \
 127   }
 128 
 129 #else //  ndef DTRACE_ENABLED
 130 
 131 #define DTRACE_CLASSINIT_PROBE(type, thread_type)
 132 #define DTRACE_CLASSINIT_PROBE_WAIT(type, thread_type, wait)
 133 
 134 #endif //  ndef DTRACE_ENABLED
 135 
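     // Helper for allocate_instance_klass() below: returns true if the class being
     // parsed is java.lang.ClassLoader itself or, once ClassLoader is loaded, a
     // subtype of it, so that an InstanceClassLoaderKlass is allocated for it.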
 136 static inline bool is_class_loader(const Symbol* class_name,
 137                                    const ClassFileParser& parser) {
 138   assert(class_name != NULL, "invariant");
 139 
 140   if (class_name == vmSymbols::java_lang_ClassLoader()) {
 141     return true;
 142   }
 143 
 144   if (SystemDictionary::ClassLoader_klass_loaded()) {
 145     const Klass* const super_klass = parser.super_klass();
 146     if (super_klass != NULL) {
 147       if (super_klass->is_subtype_of(SystemDictionary::ClassLoader_klass())) {
 148         return true;
 149       }
 150     }
 151   }
 152   return false;
 153 }
 154 
 155 // called to verify that k is a member of this nest
 156 bool InstanceKlass::has_nest_member(InstanceKlass* k, TRAPS) const {
 157   if (_nest_members == NULL || _nest_members == Universe::the_empty_short_array()) {
 158     if (log_is_enabled(Trace, class, nestmates)) {
 159       ResourceMark rm(THREAD);
 160       log_trace(class, nestmates)("Checked nest membership of %s in non-nest-host class %s",
 161                                   k->external_name(), this->external_name());
 162     }
 163     return false;
 164   }
 165 
 166   if (log_is_enabled(Trace, class, nestmates)) {
 167     ResourceMark rm(THREAD);
 168     log_trace(class, nestmates)("Checking nest membership of %s in %s",
 169                                 k->external_name(), this->external_name());
 170   }
 171 
 172   // Check for a resolved cp entry, else fall back to a name check.
 173   // We don't want to resolve any class other than the one being checked.
 174   for (int i = 0; i < _nest_members->length(); i++) {
 175     int cp_index = _nest_members->at(i);
 176     if (_constants->tag_at(cp_index).is_klass()) {
 177       Klass* k2 = _constants->klass_at(cp_index, CHECK_false);
 178       if (k2 == k) {
 179         log_trace(class, nestmates)("- class is listed at nest_members[%d] => cp[%d]", i, cp_index);
 180         return true;
 181       }
 182     }
 183     else {
 184       Symbol* name = _constants->klass_name_at(cp_index);
 185       if (name == k->name()) {
 186         log_trace(class, nestmates)("- Found it at nest_members[%d] => cp[%d]", i, cp_index);
 187 
 188         // Names match so check actual klass - this may trigger class loading if
 189         // it doesn't match (though that should be impossible). But to be safe we
 190         // have to check for a compiler thread executing here.
 191         if (!THREAD->can_call_java() && !_constants->tag_at(cp_index).is_klass()) {
 192           log_trace(class, nestmates)("- validation required resolution in an unsuitable thread");
 193           return false;
 194         }
 195 
 196         Klass* k2 = _constants->klass_at(cp_index, CHECK_false);
 197         if (k2 == k) {
 198           log_trace(class, nestmates)("- class is listed as a nest member");
 199           return true;
 200         }
 201         else {
 202           // same name but different klass!
 203           log_trace(class, nestmates)(" - klass comparison failed!");
 204           // can't have two names the same, so we're done
 205           return false;
 206         }
 207       }
 208     }
 209   }
 210   log_trace(class, nestmates)("- class is NOT a nest member!");
 211   return false;
 212 }
 213 
 214 // Return nest-host class, resolving, validating and saving it if needed.
 215 // In cases where this is called from a thread that cannot do class loading
 216 // (such as a native JIT thread) we simply return NULL, which in turn
 217 // causes the access check to return false. Such code will retry the access
 218 // from a more suitable environment later.
 219 InstanceKlass* InstanceKlass::nest_host(Symbol* validationException, TRAPS) {
 220   InstanceKlass* nest_host_k = _nest_host;
 221   if (nest_host_k == NULL) {
 222     // need to resolve and save our nest-host class. This could be attempted
 223     // concurrently but, as the result is idempotent and we don't use the class,
 224     // we do not need any synchronization beyond what is implicitly used
 225     // during class loading.
 226     if (_nest_host_index != 0) { // we have a real nest_host
 227       // Before trying to resolve check if we're in a suitable context
 228       if (!THREAD->can_call_java() && !_constants->tag_at(_nest_host_index).is_klass()) {
 229         if (log_is_enabled(Trace, class, nestmates)) {
 230           ResourceMark rm(THREAD);
 231           log_trace(class, nestmates)("Rejected resolution of nest-host of %s in unsuitable thread",
 232                                       this->external_name());
 233         }
 234         return NULL;
 235       }
 236 
 237       if (log_is_enabled(Trace, class, nestmates)) {
 238         ResourceMark rm(THREAD);
 239         log_trace(class, nestmates)("Resolving nest-host of %s using cp entry for %s",
 240                                     this->external_name(),
 241                                     _constants->klass_name_at(_nest_host_index)->as_C_string());
 242       }
 243 
 244       Klass* k = _constants->klass_at(_nest_host_index, THREAD);
 245       if (HAS_PENDING_EXCEPTION) {
 246         Handle exc_h = Handle(THREAD, PENDING_EXCEPTION);
 247         if (exc_h->is_a(SystemDictionary::NoClassDefFoundError_klass())) {
 248           // throw a new NoClassDefFoundError with the original as its cause, and a clear msg
 249           ResourceMark rm(THREAD);
 250           char buf[200];
 251           CLEAR_PENDING_EXCEPTION;
 252           jio_snprintf(buf, sizeof(buf),
 253                        "Unable to load nest-host class (%s) of %s",
 254                        _constants->klass_name_at(_nest_host_index)->as_C_string(),
 255                        this->external_name());
 256           log_trace(class, nestmates)("%s - NoClassDefFoundError", buf);
 257           THROW_MSG_CAUSE_NULL(vmSymbols::java_lang_NoClassDefFoundError(), buf, exc_h);
 258         }
 259         // All other exceptions pass through (OOME, StackOverflowError, LinkageErrors etc).
 260         return NULL;
 261       }
 262 
 263       // A valid nest-host is an instance class in the current package that lists this
 264       // class as a nest member. If any of these conditions are not met, we post the
 265       // requested exception type (if any) and return NULL.
 266 
 267       const char* error = NULL;
 268 
 269       // JVMS 5.4.4 indicates package check comes first
 270       if (is_same_class_package(k)) {
 271 
 272         // Now check actual membership. We can't be a member if our "host" is
 273         // not an instance class.
 274         if (k->is_instance_klass()) {
 275           nest_host_k = InstanceKlass::cast(k);
 276 
 277           bool is_member = nest_host_k->has_nest_member(this, CHECK_NULL);
 278           if (is_member) {
 279             // save resolved nest-host value
 280             _nest_host = nest_host_k;
 281 
 282             if (log_is_enabled(Trace, class, nestmates)) {
 283               ResourceMark rm(THREAD);
 284               log_trace(class, nestmates)("Resolved nest-host of %s to %s",
 285                                           this->external_name(), k->external_name());
 286             }
 287             return nest_host_k;
 288           }
 289         }
 290         error = "current type is not listed as a nest member";
 291       } else {
 292         error = "types are in different packages";
 293       }
 294 
 295       if (log_is_enabled(Trace, class, nestmates)) {
 296         ResourceMark rm(THREAD);
 297         log_trace(class, nestmates)
 298           ("Type %s (loader: %s) is not a nest member of "
 299            "resolved type %s (loader: %s): %s",
 300            this->external_name(),
 301            this->class_loader_data()->loader_name_and_id(),
 302            k->external_name(),
 303            k->class_loader_data()->loader_name_and_id(),
 304            error);
 305       }
 306 
 307       if (validationException != NULL && THREAD->can_call_java()) {
 308         ResourceMark rm(THREAD);
 309         Exceptions::fthrow(THREAD_AND_LOCATION,
 310                            validationException,
 311                            "Type %s (loader: %s) is not a nest member of %s (loader: %s): %s",
 312                            this->external_name(),
 313                            this->class_loader_data()->loader_name_and_id(),
 314                            k->external_name(),
 315                            k->class_loader_data()->loader_name_and_id(),
 316                            error
 317                            );
 318       }
 319       return NULL;
 320     } else {
 321       if (log_is_enabled(Trace, class, nestmates)) {
 322         ResourceMark rm(THREAD);
 323         log_trace(class, nestmates)("Type %s is not part of a nest: setting nest-host to self",
 324                                     this->external_name());
 325       }
 326       // save resolved nest-host value
 327       return (_nest_host = this);
 328     }
 329   }
 330   return nest_host_k;
 331 }
 332 
 333 // check if 'this' and k are nestmates (same nest_host), or k is our nest_host,
 334 // or we are k's nest_host - all of which is covered by comparing the two
 335 // resolved_nest_hosts
 336 bool InstanceKlass::has_nestmate_access_to(InstanceKlass* k, TRAPS) {
 337 
 338   assert(this != k, "this should be handled by higher-level code");
 339 
 340   // Per JVMS 5.4.4 we first resolve and validate the current class, then
 341   // the target class k. Resolution exceptions will be passed on by upper
 342   // layers. IncompatibleClassChangeErrors from membership validation failures
 343   // will also be passed through.
 344 
 345   Symbol* icce = vmSymbols::java_lang_IncompatibleClassChangeError();
 346   InstanceKlass* cur_host = nest_host(icce, CHECK_false);
 347   if (cur_host == NULL) {
 348     return false;
 349   }
 350 
 351   Klass* k_nest_host = k->nest_host(icce, CHECK_false);
 352   if (k_nest_host == NULL) {
 353     return false;
 354   }
 355 
 356   bool access = (cur_host == k_nest_host);
 357 
 358   if (log_is_enabled(Trace, class, nestmates)) {
 359     ResourceMark rm(THREAD);
 360     log_trace(class, nestmates)("Class %s does %shave nestmate access to %s",
 361                                 this->external_name(),
 362                                 access ? "" : "NOT ",
 363                                 k->external_name());
 364   }
 365 
 366   return access;
 367 }
 368 
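     // Allocate the appropriate InstanceKlass variant based on what the parser saw:
     // InstanceMirrorKlass for java.lang.Class, InstanceClassLoaderKlass for class
     // loaders, InstanceRefKlass for reference types (reference_type() != REF_NONE),
     // and a plain InstanceKlass otherwise.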
 369 InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& parser, TRAPS) {
 370   const int size = InstanceKlass::size(parser.vtable_size(),
 371                                        parser.itable_size(),
 372                                        nonstatic_oop_map_size(parser.total_oop_map_count()),
 373                                        parser.is_interface(),
 374                                        parser.is_unsafe_anonymous(),
 375                                        should_store_fingerprint(parser.is_unsafe_anonymous()));
 376 
 377   const Symbol* const class_name = parser.class_name();
 378   assert(class_name != NULL, "invariant");
 379   ClassLoaderData* loader_data = parser.loader_data();
 380   assert(loader_data != NULL, "invariant");
 381 
 382   InstanceKlass* ik;
 383 
 384   // Allocation
 385   if (REF_NONE == parser.reference_type()) {
 386     if (class_name == vmSymbols::java_lang_Class()) {
 387       // mirror
 388       ik = new (loader_data, size, THREAD) InstanceMirrorKlass(parser);
 389     }
 390     else if (is_class_loader(class_name, parser)) {
 391       // class loader
 392       ik = new (loader_data, size, THREAD) InstanceClassLoaderKlass(parser);
 393     } else {
 394       // normal
 395       ik = new (loader_data, size, THREAD) InstanceKlass(parser, InstanceKlass::_misc_kind_other);
 396     }
 397   } else {
 398     // reference
 399     ik = new (loader_data, size, THREAD) InstanceRefKlass(parser);
 400   }
 401 
 402   // Check for pending exception before adding to the loader data and incrementing
 403   // class count.  Can get OOM here.
 404   if (HAS_PENDING_EXCEPTION) {
 405     return NULL;
 406   }
 407 
 408   return ik;
 409 }
 410 
 411 
 412 // copy method ordering from resource area to Metaspace
 413 void InstanceKlass::copy_method_ordering(const intArray* m, TRAPS) {
 414   if (m != NULL) {
 415     // allocate a new array and copy contents (memcpy?)
 416     _method_ordering = MetadataFactory::new_array<int>(class_loader_data(), m->length(), CHECK);
 417     for (int i = 0; i < m->length(); i++) {
 418       _method_ordering->at_put(i, m->at(i));
 419     }
 420   } else {
 421     _method_ordering = Universe::the_empty_int_array();
 422   }
 423 }
 424 
 425 // create a new array of vtable_indices for default methods
 426 Array<int>* InstanceKlass::create_new_default_vtable_indices(int len, TRAPS) {
 427   Array<int>* vtable_indices = MetadataFactory::new_array<int>(class_loader_data(), len, CHECK_NULL);
 428   assert(default_vtable_indices() == NULL, "only create once");
 429   set_default_vtable_indices(vtable_indices);
 430   return vtable_indices;
 431 }
 432 
 433 InstanceKlass::InstanceKlass(const ClassFileParser& parser, unsigned kind, KlassID id) :
 434   Klass(id),
 435   _nest_members(NULL),
 436   _nest_host_index(0),
 437   _nest_host(NULL),
 438   _static_field_size(parser.static_field_size()),
 439   _nonstatic_oop_map_size(nonstatic_oop_map_size(parser.total_oop_map_count())),
 440   _itable_len(parser.itable_size()),
 441   _init_thread(NULL),
 442   _init_state(allocated),
 443   _reference_type(parser.reference_type())
 444 {
 445   set_vtable_length(parser.vtable_size());
 446   set_kind(kind);
 447   set_access_flags(parser.access_flags());
 448   set_is_unsafe_anonymous(parser.is_unsafe_anonymous());
 449   set_layout_helper(Klass::instance_layout_helper(parser.layout_size(),
 450                                                     false));
 451 
 452   assert(NULL == _methods, "underlying memory not zeroed?");
 453   assert(is_instance_klass(), "is layout incorrect?");
 454   assert(size_helper() == parser.layout_size(), "incorrect size_helper?");
 455 
 456   if (Arguments::is_dumping_archive()) {
 457     SystemDictionaryShared::init_dumptime_info(this);
 458   }
 459 }
 460 
 461 void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
 462                                        Array<Method*>* methods) {
 463   if (methods != NULL && methods != Universe::the_empty_method_array() &&
 464       !methods->is_shared()) {
 465     for (int i = 0; i < methods->length(); i++) {
 466       Method* method = methods->at(i);
 467       if (method == NULL) continue;  // maybe null if error processing
 468       // Only want to delete methods that are not executing for RedefineClasses.
 469       // The previous version will point to them so they're not totally dangling
 470       assert (!method->on_stack(), "shouldn't be called with methods on stack");
 471       MetadataFactory::free_metadata(loader_data, method);
 472     }
 473     MetadataFactory::free_array<Method*>(loader_data, methods);
 474   }
 475 }
 476 
 477 void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
 478                                           const Klass* super_klass,
 479                                           Array<InstanceKlass*>* local_interfaces,
 480                                           Array<InstanceKlass*>* transitive_interfaces) {
 481   // Only deallocate transitive interfaces if they are not empty, not the same as
 482   // the super class's, and not the same as the local interfaces.  See code in parseClassFile.
 483   Array<InstanceKlass*>* ti = transitive_interfaces;
 484   if (ti != Universe::the_empty_instance_klass_array() && ti != local_interfaces) {
 485     // check that the interfaces don't come from super class
 486     Array<InstanceKlass*>* sti = (super_klass == NULL) ? NULL :
 487                     InstanceKlass::cast(super_klass)->transitive_interfaces();
 488     if (ti != sti && ti != NULL && !ti->is_shared()) {
 489       MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
 490     }
 491   }
 492 
 493   // local interfaces can be empty
 494   if (local_interfaces != Universe::the_empty_instance_klass_array() &&
 495       local_interfaces != NULL && !local_interfaces->is_shared()) {
 496     MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
 497   }
 498 }
 499 
 500 // This function deallocates the metadata and C heap pointers that the
 501 // InstanceKlass points to.
 502 void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
 503 
 504   // Orphan the mirror first, CMS thinks it's still live.
 505   if (java_mirror() != NULL) {
 506     java_lang_Class::set_klass(java_mirror(), NULL);
 507   }
 508 
 509   // Also remove mirror from handles
 510   loader_data->remove_handle(_java_mirror);
 511 
 512   // Need to take this class off the class loader data list.
 513   loader_data->remove_class(this);
 514 
 515   // The array_klass for this class is created later, after error handling.
 516   // For class redefinition, we keep the original class so this scratch class
 517   // doesn't have an array class.  Either way, assert that there is nothing
 518   // to deallocate.
 519   assert(array_klasses() == NULL, "array classes shouldn't be created for this class yet");
 520 
 521   // Release C heap allocated data that this might point to, which includes
 522   // reference counting symbol names.
 523   release_C_heap_structures();
 524 
 525   deallocate_methods(loader_data, methods());
 526   set_methods(NULL);
 527 
 528   if (method_ordering() != NULL &&
 529       method_ordering() != Universe::the_empty_int_array() &&
 530       !method_ordering()->is_shared()) {
 531     MetadataFactory::free_array<int>(loader_data, method_ordering());
 532   }
 533   set_method_ordering(NULL);
 534 
 535   // default methods can be empty
 536   if (default_methods() != NULL &&
 537       default_methods() != Universe::the_empty_method_array() &&
 538       !default_methods()->is_shared()) {
 539     MetadataFactory::free_array<Method*>(loader_data, default_methods());
 540   }
 541   // Do NOT deallocate the default methods themselves, they are owned by superinterfaces.
 542   set_default_methods(NULL);
 543 
 544   // default methods vtable indices can be empty
 545   if (default_vtable_indices() != NULL &&
 546       !default_vtable_indices()->is_shared()) {
 547     MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
 548   }
 549   set_default_vtable_indices(NULL);
 550 
 551 
 552   // This array is in Klass, but remove it with the InstanceKlass since
 553   // this place would be the only caller and it can share memory with transitive
 554   // interfaces.
 555   if (secondary_supers() != NULL &&
 556       secondary_supers() != Universe::the_empty_klass_array() &&
 557       // see comments in compute_secondary_supers about the following cast
 558       (address)(secondary_supers()) != (address)(transitive_interfaces()) &&
 559       !secondary_supers()->is_shared()) {
 560     MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
 561   }
 562   set_secondary_supers(NULL);
 563 
 564   deallocate_interfaces(loader_data, super(), local_interfaces(), transitive_interfaces());
 565   set_transitive_interfaces(NULL);
 566   set_local_interfaces(NULL);
 567 
 568   if (fields() != NULL && !fields()->is_shared()) {
 569     MetadataFactory::free_array<jushort>(loader_data, fields());
 570   }
 571   set_fields(NULL, 0);
 572 
 573   // If a method from a redefined class is using this constant pool, don't
 574   // delete it, yet.  The new class's previous version will point to this.
 575   if (constants() != NULL) {
 576     assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
 577     if (!constants()->is_shared()) {
 578       MetadataFactory::free_metadata(loader_data, constants());
 579     }
 580     // Delete any cached resolution errors for the constant pool
 581     SystemDictionary::delete_resolution_error(constants());
 582 
 583     set_constants(NULL);
 584   }
 585 
 586   if (inner_classes() != NULL &&
 587       inner_classes() != Universe::the_empty_short_array() &&
 588       !inner_classes()->is_shared()) {
 589     MetadataFactory::free_array<jushort>(loader_data, inner_classes());
 590   }
 591   set_inner_classes(NULL);
 592 
 593   if (nest_members() != NULL &&
 594       nest_members() != Universe::the_empty_short_array() &&
 595       !nest_members()->is_shared()) {
 596     MetadataFactory::free_array<jushort>(loader_data, nest_members());
 597   }
 598   set_nest_members(NULL);
 599 
 600   // We should deallocate the Annotations instance if it's not in shared spaces.
 601   if (annotations() != NULL && !annotations()->is_shared()) {
 602     MetadataFactory::free_metadata(loader_data, annotations());
 603   }
 604   set_annotations(NULL);
 605 
 606   if (Arguments::is_dumping_archive()) {
 607     SystemDictionaryShared::remove_dumptime_info(this);
 608   }
 609 }
 610 
 611 bool InstanceKlass::should_be_initialized() const {
 612   return !is_initialized();
 613 }
 614 
 615 klassItable InstanceKlass::itable() const {
 616   return klassItable(const_cast<InstanceKlass*>(this));
 617 }
 618 
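     // With -XX:+EagerInitialization, mark a class as fully initialized at load time
     // when that is trivially safe: it has no <clinit>, it is not java.lang.Object,
     // and its super class is already initialized.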
 619 void InstanceKlass::eager_initialize(Thread *thread) {
 620   if (!EagerInitialization) return;
 621 
 622   if (this->is_not_initialized()) {
 623     // abort if the class has a class initializer
 624     if (this->class_initializer() != NULL) return;
 625 
 626     // abort if it is java.lang.Object (initialization is handled in genesis)
 627     Klass* super_klass = super();
 628     if (super_klass == NULL) return;
 629 
 630     // abort if the super class should be initialized
 631     if (!InstanceKlass::cast(super_klass)->is_initialized()) return;
 632 
 633     // call body to expose the this pointer
 634     eager_initialize_impl();
 635   }
 636 }
 637 
 638 // JVMTI spec thinks there are signers and protection domain in the
 639 // InstanceKlass.  These accessors pretend these fields are there.
 640 // The hprof specification also thinks these fields are in InstanceKlass.
 641 oop InstanceKlass::protection_domain() const {
 642   // return the protection_domain from the mirror
 643   return java_lang_Class::protection_domain(java_mirror());
 644 }
 645 
 646 // Removing these would require an incompatible change and a CCC request.
 647 objArrayOop InstanceKlass::signers() const {
 648   // return the signers from the mirror
 649   return java_lang_Class::signers(java_mirror());
 650 }
 651 
 652 oop InstanceKlass::init_lock() const {
 653   // return the init lock from the mirror
 654   oop lock = java_lang_Class::init_lock(java_mirror());
 655   // Prevent reordering with any access of initialization state
 656   OrderAccess::loadload();
 657   assert((oop)lock != NULL || !is_not_initialized(), // initialized or in_error state
 658          "only fully initialized state can have a null lock");
 659   return lock;
 660 }
 661 
 662 // Set the initialization lock to null so the object can be GC'ed.  Any threads
 663 // racing to get this lock will see a null lock and will not lock.
 664 // That's okay because they all check for initialized state after getting
 665 // the lock and return.
 666 void InstanceKlass::fence_and_clear_init_lock() {
 667   // make sure previous stores are all done, notably the init_state.
 668   OrderAccess::storestore();
 669   java_lang_Class::set_init_lock(java_mirror(), NULL);
 670   assert(!is_not_initialized(), "class must be initialized now");
 671 }
 672 
 673 void InstanceKlass::eager_initialize_impl() {
 674   EXCEPTION_MARK;
 675   HandleMark hm(THREAD);
 676   Handle h_init_lock(THREAD, init_lock());
 677   ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);
 678 
 679   // abort if someone beat us to the initialization
 680   if (!is_not_initialized()) return;  // note: not equivalent to is_initialized()
 681 
 682   ClassState old_state = init_state();
 683   link_class_impl(THREAD);
 684   if (HAS_PENDING_EXCEPTION) {
 685     CLEAR_PENDING_EXCEPTION;
 686     // Abort if linking the class throws an exception.
 687 
 688     // Use a test to avoid redundantly resetting the state if there's
 689     // no change.  set_init_state() asserts that state changes make
 690     // progress, whereas here we might just be spinning in place.
 691     if (old_state != _init_state)
 692       set_init_state(old_state);
 693   } else {
 694     // linking successful, mark class as initialized
 695     set_init_state(fully_initialized);
 696     fence_and_clear_init_lock();
 697     // trace
 698     if (log_is_enabled(Info, class, init)) {
 699       ResourceMark rm(THREAD);
 700       log_info(class, init)("[Initialized %s without side effects]", external_name());
 701     }
 702   }
 703 }
 704 
 705 
 706 // See "The Virtual Machine Specification" section 2.16.5 for a detailed explanation of the class initialization
 707 // process. The step comments refer to the procedure described in that section.
 708 // Note: implementation moved to static method to expose the this pointer.
 709 void InstanceKlass::initialize(TRAPS) {
 710   if (this->should_be_initialized()) {
 711     initialize_impl(CHECK);
 712     // Note: at this point the class may be initialized
 713     //       OR it may be in the state of being initialized
 714     //       in case of recursive initialization!
 715   } else {
 716     assert(is_initialized(), "sanity check");
 717   }
 718 }
 719 
 720 
 721 bool InstanceKlass::verify_code(TRAPS) {
 722   // 1) Verify the bytecodes
 723   return Verifier::verify(this, should_verify_class(), THREAD);
 724 }
 725 
 726 void InstanceKlass::link_class(TRAPS) {
 727   assert(is_loaded(), "must be loaded");
 728   if (!is_linked()) {
 729     link_class_impl(CHECK);
 730   }
 731 }
 732 
 733 // Called to verify that a class can link during initialization, without
 734 // throwing a VerifyError.
 735 bool InstanceKlass::link_class_or_fail(TRAPS) {
 736   assert(is_loaded(), "must be loaded");
 737   if (!is_linked()) {
 738     link_class_impl(CHECK_false);
 739   }
 740   return is_linked();
 741 }
 742 
 743 bool InstanceKlass::link_class_impl(TRAPS) {
 744   if (DumpSharedSpaces && is_in_error_state()) {
 745     // This is for CDS dumping phase only -- we use the in_error_state to indicate that
 746     // the class has failed verification. Throwing the NoClassDefFoundError here is just
 747     // a convenient way to stop repeat attempts to verify the same (bad) class.
 748     //
 749     // Note that the NoClassDefFoundError is not part of the JLS, and should not be thrown
 750     // if we are executing Java code. This is not a problem for CDS dumping phase since
 751     // it doesn't execute any Java code.
 752     ResourceMark rm(THREAD);
 753     Exceptions::fthrow(THREAD_AND_LOCATION,
 754                        vmSymbols::java_lang_NoClassDefFoundError(),
 755                        "Class %s, or one of its supertypes, failed class initialization",
 756                        external_name());
 757     return false;
 758   }
 759   // return if already verified
 760   if (is_linked()) {
 761     return true;
 762   }
 763 
 764   // Timing
 765   // timer handles recursion
 766   assert(THREAD->is_Java_thread(), "non-JavaThread in link_class_impl");
 767   JavaThread* jt = (JavaThread*)THREAD;
 768 
 769   // link super class before linking this class
 770   Klass* super_klass = super();
 771   if (super_klass != NULL) {
 772     if (super_klass->is_interface()) {  // check if super class is an interface
 773       ResourceMark rm(THREAD);
 774       Exceptions::fthrow(
 775         THREAD_AND_LOCATION,
 776         vmSymbols::java_lang_IncompatibleClassChangeError(),
 777         "class %s has interface %s as super class",
 778         external_name(),
 779         super_klass->external_name()
 780       );
 781       return false;
 782     }
 783 
 784     InstanceKlass* ik_super = InstanceKlass::cast(super_klass);
 785     ik_super->link_class_impl(CHECK_false);
 786   }
 787 
 788   // link all interfaces implemented by this class before linking this class
 789   Array<InstanceKlass*>* interfaces = local_interfaces();
 790   int num_interfaces = interfaces->length();
 791   for (int index = 0; index < num_interfaces; index++) {
 792     InstanceKlass* interk = interfaces->at(index);
 793     interk->link_class_impl(CHECK_false);
 794   }
 795 
 796   // in case the class is linked in the process of linking its superclasses
 797   if (is_linked()) {
 798     return true;
 799   }
 800 
 801   // only trace the link time for this klass, which includes
 802   // the verification time
 803   PerfClassTraceTime vmtimer(ClassLoader::perf_class_link_time(),
 804                              ClassLoader::perf_class_link_selftime(),
 805                              ClassLoader::perf_classes_linked(),
 806                              jt->get_thread_stat()->perf_recursion_counts_addr(),
 807                              jt->get_thread_stat()->perf_timers_addr(),
 808                              PerfClassTraceTime::CLASS_LINK);
 809 
 810   // verification & rewriting
 811   {
 812     HandleMark hm(THREAD);
 813     Handle h_init_lock(THREAD, init_lock());
 814     ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);
 815     // rewritten will have been set if loader constraint error found
 816     // on an earlier link attempt
 817     // don't verify or rewrite if already rewritten
 818     //
 819 
 820     if (!is_linked()) {
 821       if (!is_rewritten()) {
 822         {
 823           bool verify_ok = verify_code(THREAD);
 824           if (!verify_ok) {
 825             return false;
 826           }
 827         }
 828 
 829         // Just in case a side-effect of verify linked this class already
 830         // (which can sometimes happen since the verifier loads classes
 831         // using custom class loaders, which are free to initialize things)
 832         if (is_linked()) {
 833           return true;
 834         }
 835 
 836         // also sets rewritten
 837         rewrite_class(CHECK_false);
 838       } else if (is_shared()) {
 839         SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
 840       }
 841 
 842       // relocate jsrs and link methods after they are all rewritten
 843       link_methods(CHECK_false);
 844 
 845       // Initialize the vtable and interface table after
 846       // methods have been rewritten since rewrite may
 847       // fabricate new Method*s.
 848       // also does loader constraint checking
 849       //
 850       // initialize_vtable and initialize_itable need to be rerun for
 851       // a shared class if the class is not loaded by the NULL classloader.
 852       ClassLoaderData * loader_data = class_loader_data();
 853       if (!(is_shared() &&
 854             loader_data->is_the_null_class_loader_data())) {
 855         vtable().initialize_vtable(true, CHECK_false);
 856         itable().initialize_itable(true, CHECK_false);
 857       }
 858 #ifdef ASSERT
 859       else {
 860         vtable().verify(tty, true);
 861         // In case itable verification is ever added.
 862         // itable().verify(tty, true);
 863       }
 864 #endif
 865       set_init_state(linked);
 866       if (JvmtiExport::should_post_class_prepare()) {
 867         Thread *thread = THREAD;
 868         assert(thread->is_Java_thread(), "thread->is_Java_thread()");
 869         JvmtiExport::post_class_prepare((JavaThread *) thread, this);
 870       }
 871     }
 872   }
 873   return true;
 874 }
 875 
 876 // Rewrite the bytecodes of all of the methods of a class.
 877 // The rewriter must be called exactly once. Rewriting must happen after
 878 // verification but before the first method of the class is executed.
 879 void InstanceKlass::rewrite_class(TRAPS) {
 880   assert(is_loaded(), "must be loaded");
 881   if (is_rewritten()) {
 882     assert(is_shared(), "rewriting an unshared class?");
 883     return;
 884   }
 885   Rewriter::rewrite(this, CHECK);
 886   set_rewritten();
 887 }
 888 
 889 // Now relocate and link method entry points after class is rewritten.
 890 // This is not guarded by the is_rewritten flag. In case of an exception, it can be
 891 // executed more than once.
 892 void InstanceKlass::link_methods(TRAPS) {
 893   int len = methods()->length();
 894   for (int i = len-1; i >= 0; i--) {
 895     methodHandle m(THREAD, methods()->at(i));
 896 
 897     // Set up method entry points for compiler and interpreter.
 898     m->link_method(m, CHECK);
 899   }
 900 }
 901 
 902 // Eagerly initialize superinterfaces that declare default methods (concrete instance: any access)
 903 void InstanceKlass::initialize_super_interfaces(TRAPS) {
 904   assert (has_nonstatic_concrete_methods(), "caller should have checked this");
 905   for (int i = 0; i < local_interfaces()->length(); ++i) {
 906     InstanceKlass* ik = local_interfaces()->at(i);
 907 
 908     // Initialization is depth-first search, i.e. we start with the top of the inheritance tree.
 909     // has_nonstatic_concrete_methods drives searching superinterfaces since it
 910     // means has_nonstatic_concrete_methods in its superinterface hierarchy.
 911     if (ik->has_nonstatic_concrete_methods()) {
 912       ik->initialize_super_interfaces(CHECK);
 913     }
 914 
 915     // Only initialize() interfaces that "declare" concrete methods.
 916     if (ik->should_be_initialized() && ik->declares_nonstatic_concrete_methods()) {
 917       ik->initialize(CHECK);
 918     }
 919   }
 920 }
 921 
 922 void InstanceKlass::initialize_impl(TRAPS) {
 923   HandleMark hm(THREAD);
 924 
 925   // Make sure klass is linked (verified) before initialization
 926   // A class could already be verified, since it has been reflected upon.
 927   link_class(CHECK);
 928 
 929   DTRACE_CLASSINIT_PROBE(required, -1);
 930 
 931   bool wait = false;
 932 
 933   assert(THREAD->is_Java_thread(), "non-JavaThread in initialize_impl");
 934   JavaThread* jt = (JavaThread*)THREAD;
 935 
 936   // refer to the JVM book page 47 for description of steps
 937   // Step 1
 938   {
 939     Handle h_init_lock(THREAD, init_lock());
 940     ObjectLocker ol(h_init_lock, THREAD, h_init_lock() != NULL);
 941 
 942     // Step 2
 943     // If we were to use wait() instead of wait_uninterruptibly() then
 944     // we might end up throwing IE from link/symbol resolution sites
 945     // that aren't expected to throw.  This would wreak havoc.  See 6320309.
 946     while (is_being_initialized() && !is_reentrant_initialization(jt)) {
 947       wait = true;
 948       jt->set_class_to_be_initialized(this);
 949       ol.wait_uninterruptibly(jt);
 950       jt->set_class_to_be_initialized(NULL);
 951     }
 952 
 953     // Step 3
 954     if (is_being_initialized() && is_reentrant_initialization(jt)) {
 955       DTRACE_CLASSINIT_PROBE_WAIT(recursive, -1, wait);
 956       return;
 957     }
 958 
 959     // Step 4
 960     if (is_initialized()) {
 961       DTRACE_CLASSINIT_PROBE_WAIT(concurrent, -1, wait);
 962       return;
 963     }
 964 
 965     // Step 5
 966     if (is_in_error_state()) {
 967       DTRACE_CLASSINIT_PROBE_WAIT(erroneous, -1, wait);
 968       ResourceMark rm(THREAD);
 969       const char* desc = "Could not initialize class ";
 970       const char* className = external_name();
 971       size_t msglen = strlen(desc) + strlen(className) + 1;
 972       char* message = NEW_RESOURCE_ARRAY(char, msglen);
 973       if (NULL == message) {
 974         // Out of memory: can't create detailed error message
 975         THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), className);
 976       } else {
 977         jio_snprintf(message, msglen, "%s%s", desc, className);
 978         THROW_MSG(vmSymbols::java_lang_NoClassDefFoundError(), message);
 979       }
 980     }
 981 
 982     // Step 6
 983     set_init_state(being_initialized);
 984     set_init_thread(jt);
 985   }
 986 
 987   // Step 7
 988   // Next, if C is a class rather than an interface, initialize its super class and super
 989   // interfaces.
 990   if (!is_interface()) {
 991     Klass* super_klass = super();
 992     if (super_klass != NULL && super_klass->should_be_initialized()) {
 993       super_klass->initialize(THREAD);
 994     }
 995     // If C implements any interface that declares a non-static, concrete method,
 996     // the initialization of C triggers initialization of its super interfaces.
 997     // Only need to recurse if has_nonstatic_concrete_methods which includes declaring and
 998     // having a superinterface that declares non-static, concrete methods
 999     if (!HAS_PENDING_EXCEPTION && has_nonstatic_concrete_methods()) {
1000       initialize_super_interfaces(THREAD);
1001     }
1002 
1003     // If any exceptions, complete abruptly, throwing the same exception as above.
1004     if (HAS_PENDING_EXCEPTION) {
1005       Handle e(THREAD, PENDING_EXCEPTION);
1006       CLEAR_PENDING_EXCEPTION;
1007       {
1008         EXCEPTION_MARK;
1009         // Locks object, set state, and notify all waiting threads
1010         set_initialization_state_and_notify(initialization_error, THREAD);
1011         CLEAR_PENDING_EXCEPTION;
1012       }
1013       DTRACE_CLASSINIT_PROBE_WAIT(super__failed, -1, wait);
1014       THROW_OOP(e());
1015     }
1016   }
1017 
1018 
1019   // Look for AOT-compiled methods for this klass, including the class initializer.
1020   AOTLoader::load_for_klass(this, THREAD);
1021 
1022   // Step 8
1023   {
1024     DTRACE_CLASSINIT_PROBE_WAIT(clinit, -1, wait);
1025     // Timer includes any side effects of class initialization (resolution,
1026     // etc), but not recursive entry into call_class_initializer().
1027     PerfClassTraceTime timer(ClassLoader::perf_class_init_time(),
1028                              ClassLoader::perf_class_init_selftime(),
1029                              ClassLoader::perf_classes_inited(),
1030                              jt->get_thread_stat()->perf_recursion_counts_addr(),
1031                              jt->get_thread_stat()->perf_timers_addr(),
1032                              PerfClassTraceTime::CLASS_CLINIT);
1033     call_class_initializer(THREAD);
1034   }
1035 
1036   // Step 9
1037   if (!HAS_PENDING_EXCEPTION) {
1038     set_initialization_state_and_notify(fully_initialized, CHECK);
1039     {
1040       debug_only(vtable().verify(tty, true);)
1041     }
1042   }
1043   else {
1044     // Steps 10 and 11
1045     Handle e(THREAD, PENDING_EXCEPTION);
1046     CLEAR_PENDING_EXCEPTION;
1047     // JVMTI has already reported the pending exception
1048     // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1049     JvmtiExport::clear_detected_exception(jt);
1050     {
1051       EXCEPTION_MARK;
1052       set_initialization_state_and_notify(initialization_error, THREAD);
1053       CLEAR_PENDING_EXCEPTION;   // ignore any exception thrown, class initialization error is thrown below
1054       // JVMTI has already reported the pending exception
1055       // JVMTI internal flag reset is needed in order to report ExceptionInInitializerError
1056       JvmtiExport::clear_detected_exception(jt);
1057     }
1058     DTRACE_CLASSINIT_PROBE_WAIT(error, -1, wait);
1059     if (e->is_a(SystemDictionary::Error_klass())) {
1060       THROW_OOP(e());
1061     } else {
1062       JavaCallArguments args(e);
1063       THROW_ARG(vmSymbols::java_lang_ExceptionInInitializerError(),
1064                 vmSymbols::throwable_void_signature(),
1065                 &args);
1066     }
1067   }
1068   DTRACE_CLASSINIT_PROBE_WAIT(end, -1, wait);
1069 }
1070 
1071 
1072 void InstanceKlass::set_initialization_state_and_notify(ClassState state, TRAPS) {
1073   Handle h_init_lock(THREAD, init_lock());
1074   if (h_init_lock() != NULL) {
1075     ObjectLocker ol(h_init_lock, THREAD);
1076     set_init_thread(NULL); // reset _init_thread before changing _init_state
1077     set_init_state(state);
1078     fence_and_clear_init_lock();
1079     ol.notify_all(CHECK);
1080   } else {
1081     assert(h_init_lock() != NULL, "The initialization state should never be set twice");
1082     set_init_thread(NULL); // reset _init_thread before changing _init_state
1083     set_init_state(state);
1084   }
1085 }
1086 
1087 Klass* InstanceKlass::implementor() const {
1088   Klass* volatile* k = adr_implementor();
1089   if (k == NULL) {
1090     return NULL;
1091   } else {
1092     // This load races with inserts, and therefore needs acquire.
1093     Klass* kls = OrderAccess::load_acquire(k);
1094     if (kls != NULL && !kls->is_loader_alive()) {
1095       return NULL;  // don't return unloaded class
1096     } else {
1097       return kls;
1098     }
1099   }
1100 }
1101 
1102 
1103 void InstanceKlass::set_implementor(Klass* k) {
1104   assert_locked_or_safepoint(Compile_lock);
1105   assert(is_interface(), "not interface");
1106   Klass* volatile* addr = adr_implementor();
1107   assert(addr != NULL, "null addr");
1108   if (addr != NULL) {
1109     OrderAccess::release_store(addr, k);
1110   }
1111 }
1112 
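     // Returns 0, 1, or 2, where 2 means "two or more": the embedded _implementor
     // field (see the comment before add_implementor() below) records at most one
     // implementor and falls back to the interface itself once there are several.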
1113 int  InstanceKlass::nof_implementors() const {
1114   Klass* k = implementor();
1115   if (k == NULL) {
1116     return 0;
1117   } else if (k != this) {
1118     return 1;
1119   } else {
1120     return 2;
1121   }
1122 }
1123 
1124 // The embedded _implementor field can only record one implementor.
1125 // When there is more than one implementor, the _implementor field
1126 // is set to the interface Klass* itself. Following are the possible
1127 // values for the _implementor field:
1128 //   NULL                  - no implementor
1129 //   implementor Klass*    - one implementor
1130 //   self                  - more than one implementor
1131 //
1132 // The _implementor field only exists for interfaces.
1133 void InstanceKlass::add_implementor(Klass* k) {
1134   assert_lock_strong(Compile_lock);
1135   assert(is_interface(), "not interface");
1136   // Filter out my subinterfaces.
1137   // (Note: Interfaces are never on the subklass list.)
1138   if (InstanceKlass::cast(k)->is_interface()) return;
1139 
1140   // Filter out subclasses whose supers already implement me.
1141   // (Note: CHA must walk subclasses of direct implementors
1142   // in order to locate indirect implementors.)
1143   Klass* sk = k->super();
1144   if (sk != NULL && InstanceKlass::cast(sk)->implements_interface(this))
1145     // We only need to check one immediate superclass, since the
1146     // implements_interface query looks at transitive_interfaces.
1147     // Any supers of the super have the same (or fewer) transitive_interfaces.
1148     return;
1149 
1150   Klass* ik = implementor();
1151   if (ik == NULL) {
1152     set_implementor(k);
1153   } else if (ik != this) {
1154     // There is already an implementor. Use the interface itself as an indicator of
1155     // more than one implementor.
1156     set_implementor(this);
1157   }
1158 
1159   // The implementor also implements the transitive_interfaces
1160   for (int index = 0; index < local_interfaces()->length(); index++) {
1161     InstanceKlass::cast(local_interfaces()->at(index))->add_implementor(k);
1162   }
1163 }
1164 
1165 void InstanceKlass::init_implementor() {
1166   if (is_interface()) {
1167     set_implementor(NULL);
1168   }
1169 }
1170 
1171 
1172 void InstanceKlass::process_interfaces(Thread *thread) {
1173   // link this class into the implementors list of every interface it implements
1174   for (int i = local_interfaces()->length() - 1; i >= 0; i--) {
1175     assert(local_interfaces()->at(i)->is_klass(), "must be a klass");
1176     InstanceKlass* interf = InstanceKlass::cast(local_interfaces()->at(i));
1177     assert(interf->is_interface(), "expected interface");
1178     interf->add_implementor(this);
1179   }
1180 }
1181 
1182 bool InstanceKlass::can_be_primary_super_slow() const {
1183   if (is_interface())
1184     return false;
1185   else
1186     return Klass::can_be_primary_super_slow();
1187 }
1188 
1189 GrowableArray<Klass*>* InstanceKlass::compute_secondary_supers(int num_extra_slots,
1190                                                                Array<InstanceKlass*>* transitive_interfaces) {
1191   // The secondaries are the implemented interfaces.
1192   Array<InstanceKlass*>* interfaces = transitive_interfaces;
1193   int num_secondaries = num_extra_slots + interfaces->length();
1194   if (num_secondaries == 0) {
1195     // Must share this for correct bootstrapping!
1196     set_secondary_supers(Universe::the_empty_klass_array());
1197     return NULL;
1198   } else if (num_extra_slots == 0) {
1199     // The secondary super list is exactly the same as the transitive interfaces, so
1200     // let's use it instead of making a copy.
1201     // Redefine classes has to be careful not to delete this!
1202     // We need the cast because Array<Klass*> is NOT a supertype of Array<InstanceKlass*>,
1203     // (but it's safe to do here because we won't write into _secondary_supers from this point on).
1204     set_secondary_supers((Array<Klass*>*)(address)interfaces);
1205     return NULL;
1206   } else {
1207     // Copy transitive interfaces to a temporary growable array to be constructed
1208     // into the secondary super list with extra slots.
1209     GrowableArray<Klass*>* secondaries = new GrowableArray<Klass*>(interfaces->length());
1210     for (int i = 0; i < interfaces->length(); i++) {
1211       secondaries->push(interfaces->at(i));
1212     }
1213     return secondaries;
1214   }
1215 }
1216 
1217 bool InstanceKlass::implements_interface(Klass* k) const {
1218   if (this == k) return true;
1219   assert(k->is_interface(), "should be an interface class");
1220   for (int i = 0; i < transitive_interfaces()->length(); i++) {
1221     if (transitive_interfaces()->at(i) == k) {
1222       return true;
1223     }
1224   }
1225   return false;
1226 }
1227 
1228 bool InstanceKlass::is_same_or_direct_interface(Klass *k) const {
1229   // Verify direct super interface
1230   if (this == k) return true;
1231   assert(k->is_interface(), "should be an interface class");
1232   for (int i = 0; i < local_interfaces()->length(); i++) {
1233     if (local_interfaces()->at(i) == k) {
1234       return true;
1235     }
1236   }
1237   return false;
1238 }
1239 
1240 objArrayOop InstanceKlass::allocate_objArray(int n, int length, TRAPS) {
1241   check_array_allocation_length(length, arrayOopDesc::max_array_length(T_OBJECT), CHECK_NULL);
1242   int size = objArrayOopDesc::object_size(length);
1243   Klass* ak = array_klass(n, CHECK_NULL);
1244   objArrayOop o = (objArrayOop)Universe::heap()->array_allocate(ak, size, length,
1245                                                                 /* do_zero */ true, CHECK_NULL);
1246   return o;
1247 }
1248 
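     // Register a newly allocated instance of a finalizable class with the VM's
     // finalizer-registration method (Universe::finalizer_register_method()) via a
     // Java call. Used by allocate_instance() when !RegisterFinalizersAtInit.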
1249 instanceOop InstanceKlass::register_finalizer(instanceOop i, TRAPS) {
1250   if (TraceFinalizerRegistration) {
1251     tty->print("Registered ");
1252     i->print_value_on(tty);
1253     tty->print_cr(" (" INTPTR_FORMAT ") as finalizable", p2i(i));
1254   }
1255   instanceHandle h_i(THREAD, i);
1256   // Pass the handle as argument; JavaCalls::call expects oops as jobjects
1257   JavaValue result(T_VOID);
1258   JavaCallArguments args(h_i);
1259   methodHandle mh (THREAD, Universe::finalizer_register_method());
1260   JavaCalls::call(&result, mh, &args, CHECK_NULL);
1261   return h_i();
1262 }
1263 
1264 instanceOop InstanceKlass::allocate_instance(TRAPS) {
1265   bool has_finalizer_flag = has_finalizer(); // Query before possible GC
1266   int size = size_helper();  // Query before forming handle.
1267 
1268   instanceOop i;
1269 
1270   i = (instanceOop)Universe::heap()->obj_allocate(this, size, CHECK_NULL);
1271   if (has_finalizer_flag && !RegisterFinalizersAtInit) {
1272     i = register_finalizer(i, CHECK_NULL);
1273   }
1274   return i;
1275 }
1276 
1277 instanceHandle InstanceKlass::allocate_instance_handle(TRAPS) {
1278   return instanceHandle(THREAD, allocate_instance(THREAD));
1279 }
1280 
1281 void InstanceKlass::check_valid_for_instantiation(bool throwError, TRAPS) {
1282   if (is_interface() || is_abstract()) {
1283     ResourceMark rm(THREAD);
1284     THROW_MSG(throwError ? vmSymbols::java_lang_InstantiationError()
1285               : vmSymbols::java_lang_InstantiationException(), external_name());
1286   }
1287   if (this == SystemDictionary::Class_klass()) {
1288     ResourceMark rm(THREAD);
1289     THROW_MSG(throwError ? vmSymbols::java_lang_IllegalAccessError()
1290               : vmSymbols::java_lang_IllegalAccessException(), external_name());
1291   }
1292 }
1293 
1294 Klass* InstanceKlass::array_klass_impl(bool or_null, int n, TRAPS) {
1295   // Need load-acquire for lock-free read
1296   if (array_klasses_acquire() == NULL) {
1297     if (or_null) return NULL;
1298 
1299     ResourceMark rm;
1300     JavaThread *jt = (JavaThread *)THREAD;
1301     {
1302       // Atomic creation of array_klasses
1303       MutexLocker ma(MultiArray_lock, THREAD);
1304 
1305       // Check if update has already taken place
1306       if (array_klasses() == NULL) {
1307         Klass*    k = ObjArrayKlass::allocate_objArray_klass(class_loader_data(), 1, this, CHECK_NULL);
1308         // use 'release' to pair with lock-free load
1309         release_set_array_klasses(k);
1310       }
1311     }
1312   }
1313   // array_klasses() will always be set at this point
1314   ObjArrayKlass* oak = (ObjArrayKlass*)array_klasses();
1315   if (or_null) {
1316     return oak->array_klass_or_null(n);
1317   }
1318   return oak->array_klass(n, THREAD);
1319 }
1320 
1321 Klass* InstanceKlass::array_klass_impl(bool or_null, TRAPS) {
1322   return array_klass_impl(or_null, 1, THREAD);
1323 }
1324 
1325 static int call_class_initializer_counter = 0;   // for debugging
1326 
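     // Returns the <clinit> method if the class has one with valid initializer
     // flags (see Method::has_valid_initializer_flags()), otherwise NULL.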
1327 Method* InstanceKlass::class_initializer() const {
1328   Method* clinit = find_method(
1329       vmSymbols::class_initializer_name(), vmSymbols::void_method_signature());
1330   if (clinit != NULL && clinit->has_valid_initializer_flags()) {
1331     return clinit;
1332   }
1333   return NULL;
1334 }
1335 
1336 void InstanceKlass::call_class_initializer(TRAPS) {
1337   if (ReplayCompiles &&
1338       (ReplaySuppressInitializers == 1 ||
1339        (ReplaySuppressInitializers >= 2 && class_loader() != NULL))) {
1340     // Hide the existence of the initializer for the purpose of replaying the compile
1341     return;
1342   }
1343 
1344   methodHandle h_method(THREAD, class_initializer());
1345   assert(!is_initialized(), "we cannot initialize twice");
1346   LogTarget(Info, class, init) lt;
1347   if (lt.is_enabled()) {
1348     ResourceMark rm;
1349     LogStream ls(lt);
1350     ls.print("%d Initializing ", call_class_initializer_counter++);
1351     name()->print_value_on(&ls);
1352     ls.print_cr("%s (" INTPTR_FORMAT ")", h_method() == NULL ? "(no method)" : "", p2i(this));
1353   }
1354   if (h_method() != NULL) {
1355     JavaCallArguments args; // No arguments
1356     JavaValue result(T_VOID);
1357     JavaCalls::call(&result, h_method, &args, CHECK); // Static call (no args)
1358   }
1359 }
1360 
1361 
1362 void InstanceKlass::mask_for(const methodHandle& method, int bci,
1363   InterpreterOopMap* entry_for) {
1364   // Lazily create the _oop_map_cache at first request
1365   // Lock-free access requires load_acquire.
1366   OopMapCache* oop_map_cache = OrderAccess::load_acquire(&_oop_map_cache);
1367   if (oop_map_cache == NULL) {
1368     MutexLocker x(OopMapCacheAlloc_lock);
1369     // Check if _oop_map_cache was allocated while we were waiting for this lock
1370     if ((oop_map_cache = _oop_map_cache) == NULL) {
1371       oop_map_cache = new OopMapCache();
1372       // Ensure _oop_map_cache is stable, since it is examined without a lock
1373       OrderAccess::release_store(&_oop_map_cache, oop_map_cache);
1374     }
1375   }
1376   // _oop_map_cache is constant after init; lookup below does its own locking.
1377   oop_map_cache->lookup(method, bci, entry_for);
1378 }
1379 
1380 
1381 bool InstanceKlass::find_local_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1382   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1383     Symbol* f_name = fs.name();
1384     Symbol* f_sig  = fs.signature();
1385     if (f_name == name && f_sig == sig) {
1386       fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1387       return true;
1388     }
1389   }
1390   return false;
1391 }
1392 
1393 
1394 Klass* InstanceKlass::find_interface_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1395   const int n = local_interfaces()->length();
1396   for (int i = 0; i < n; i++) {
1397     Klass* intf1 = local_interfaces()->at(i);
1398     assert(intf1->is_interface(), "just checking type");
1399     // search for field in current interface
1400     if (InstanceKlass::cast(intf1)->find_local_field(name, sig, fd)) {
1401       assert(fd->is_static(), "interface field must be static");
1402       return intf1;
1403     }
1404     // search for field in direct superinterfaces
1405     Klass* intf2 = InstanceKlass::cast(intf1)->find_interface_field(name, sig, fd);
1406     if (intf2 != NULL) return intf2;
1407   }
1408   // otherwise field lookup fails
1409   return NULL;
1410 }
1411 
1412 
1413 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, fieldDescriptor* fd) const {
1414   // search order according to newest JVM spec (5.4.3.2, p.167).
1415   // 1) search for field in current klass
1416   if (find_local_field(name, sig, fd)) {
1417     return const_cast<InstanceKlass*>(this);
1418   }
1419   // 2) search for field recursively in direct superinterfaces
1420   { Klass* intf = find_interface_field(name, sig, fd);
1421     if (intf != NULL) return intf;
1422   }
1423   // 3) apply field lookup recursively if superclass exists
1424   { Klass* supr = super();
1425     if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, fd);
1426   }
1427   // 4) otherwise field lookup fails
1428   return NULL;
1429 }
1430 
1431 
1432 Klass* InstanceKlass::find_field(Symbol* name, Symbol* sig, bool is_static, fieldDescriptor* fd) const {
1433   // search order according to newest JVM spec (5.4.3.2, p.167).
1434   // 1) search for field in current klass
1435   if (find_local_field(name, sig, fd)) {
1436     if (fd->is_static() == is_static) return const_cast<InstanceKlass*>(this);
1437   }
1438   // 2) search for field recursively in direct superinterfaces
1439   if (is_static) {
1440     Klass* intf = find_interface_field(name, sig, fd);
1441     if (intf != NULL) return intf;
1442   }
1443   // 3) apply field lookup recursively if superclass exists
1444   { Klass* supr = super();
1445     if (supr != NULL) return InstanceKlass::cast(supr)->find_field(name, sig, is_static, fd);
1446   }
1447   // 4) otherwise field lookup fails
1448   return NULL;
1449 }
1450 
1451 
1452 bool InstanceKlass::find_local_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1453   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1454     if (fs.offset() == offset) {
1455       fd->reinitialize(const_cast<InstanceKlass*>(this), fs.index());
1456       if (fd->is_static() == is_static) return true;
1457     }
1458   }
1459   return false;
1460 }
1461 
1462 
1463 bool InstanceKlass::find_field_from_offset(int offset, bool is_static, fieldDescriptor* fd) const {
1464   Klass* klass = const_cast<InstanceKlass*>(this);
1465   while (klass != NULL) {
1466     if (InstanceKlass::cast(klass)->find_local_field_from_offset(offset, is_static, fd)) {
1467       return true;
1468     }
1469     klass = klass->super();
1470   }
1471   return false;
1472 }
1473 
1474 
1475 void InstanceKlass::methods_do(void f(Method* method)) {
1476   // Methods aren't stable until they are loaded.  This can be read outside
1477   // a lock through the ClassLoaderData for profiling
1478   if (!is_loaded()) {
1479     return;
1480   }
1481 
1482   int len = methods()->length();
1483   for (int index = 0; index < len; index++) {
1484     Method* m = methods()->at(index);
1485     assert(m->is_method(), "must be method");
1486     f(m);
1487   }
1488 }
1489 
1490 
1491 void InstanceKlass::do_local_static_fields(FieldClosure* cl) {
1492   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1493     if (fs.access_flags().is_static()) {
1494       fieldDescriptor& fd = fs.field_descriptor();
1495       cl->do_field(&fd);
1496     }
1497   }
1498 }
1499 
1500 
1501 void InstanceKlass::do_local_static_fields(void f(fieldDescriptor*, Handle, TRAPS), Handle mirror, TRAPS) {
1502   for (JavaFieldStream fs(this); !fs.done(); fs.next()) {
1503     if (fs.access_flags().is_static()) {
1504       fieldDescriptor& fd = fs.field_descriptor();
1505       f(&fd, mirror, CHECK);
1506     }
1507   }
1508 }
1509 
1510 
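     // qsort comparator for the (offset, field index) pairs built in
     // do_nonstatic_fields(): order by field offset.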
1511 static int compare_fields_by_offset(int* a, int* b) {
1512   return a[0] - b[0];
1513 }
1514 
1515 void InstanceKlass::do_nonstatic_fields(FieldClosure* cl) {
1516   InstanceKlass* super = superklass();
1517   if (super != NULL) {
1518     super->do_nonstatic_fields(cl);
1519   }
1520   fieldDescriptor fd;
1521   int length = java_fields_count();
1522   // In DebugInfo nonstatic fields are sorted by offset.
1523   int* fields_sorted = NEW_C_HEAP_ARRAY(int, 2*(length+1), mtClass);
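       // Two ints per nonstatic field: the field offset followed by its java field index.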
1524   int j = 0;
1525   for (int i = 0; i < length; i += 1) {
1526     fd.reinitialize(this, i);
1527     if (!fd.is_static()) {
1528       fields_sorted[j + 0] = fd.offset();
1529       fields_sorted[j + 1] = i;
1530       j += 2;
1531     }
1532   }
1533   if (j > 0) {
1534     length = j;
1535     // _sort_Fn is defined in growableArray.hpp.
1536     qsort(fields_sorted, length/2, 2*sizeof(int), (_sort_Fn)compare_fields_by_offset);
1537     for (int i = 0; i < length; i += 2) {
1538       fd.reinitialize(this, fields_sorted[i + 1]);
1539       assert(!fd.is_static() && fd.offset() == fields_sorted[i], "only nonstatic fields");
1540       cl->do_field(&fd);
1541     }
1542   }
1543   FREE_C_HEAP_ARRAY(int, fields_sorted);
1544 }
1545 
1546 
1547 void InstanceKlass::array_klasses_do(void f(Klass* k, TRAPS), TRAPS) {
1548   if (array_klasses() != NULL)
1549     ArrayKlass::cast(array_klasses())->array_klasses_do(f, THREAD);
1550 }
1551 
1552 void InstanceKlass::array_klasses_do(void f(Klass* k)) {
1553   if (array_klasses() != NULL)
1554     ArrayKlass::cast(array_klasses())->array_klasses_do(f);
1555 }
1556 
1557 #ifdef ASSERT
1558 static int linear_search(const Array<Method*>* methods,
1559                          const Symbol* name,
1560                          const Symbol* signature) {
1561   const int len = methods->length();
1562   for (int index = 0; index < len; index++) {
1563     const Method* const m = methods->at(index);
1564     assert(m->is_method(), "must be method");
1565     if (m->signature() == signature && m->name() == name) {
1566        return index;
1567     }
1568   }
1569   return -1;
1570 }
1571 #endif
1572 
1573 bool InstanceKlass::_disable_method_binary_search = false;
1574 
1575 int InstanceKlass::quick_search(const Array<Method*>* methods, const Symbol* name) {
1576   int len = methods->length();
1577   int l = 0;
1578   int h = len - 1;
1579 
1580   if (_disable_method_binary_search) {
1581     // During portions of dynamic dumping, the methods array may not be sorted by ascending
1582     // addresses of their names. However, methods with the same name are still laid out
1583     // consecutively inside the methods array.
1584     assert(DynamicDumpSharedSpaces, "must be");
1585     while (l <= h) {
1586       Method* m = methods->at(l);
1587       if (m->name() == name) {
1588         return l;
1589       }
1590       l ++;
1591     }
1592     return -1;
1593   }
1594 
1595   // methods are sorted, so do binary search
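       // When several methods share the same name, any one of them may be returned;
       // callers that need an exact match scan the neighboring entries
       // (see find_method_index and find_method_by_name).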
1596   while (l <= h) {
1597     int mid = (l + h) >> 1;
1598     Method* m = methods->at(mid);
1599     assert(m->is_method(), "must be method");
1600     int res = m->name()->fast_compare(name);
1601     if (res == 0) {
1602       return mid;
1603     } else if (res < 0) {
1604       l = mid + 1;
1605     } else {
1606       h = mid - 1;
1607     }
1608   }
1609   return -1;
1610 }
1611 
1612 // find_method looks up the name/signature in the local methods array
1613 Method* InstanceKlass::find_method(const Symbol* name,
1614                                    const Symbol* signature) const {
1615   return find_method_impl(name, signature, find_overpass, find_static, find_private);
1616 }
1617 
1618 Method* InstanceKlass::find_method_impl(const Symbol* name,
1619                                         const Symbol* signature,
1620                                         OverpassLookupMode overpass_mode,
1621                                         StaticLookupMode static_mode,
1622                                         PrivateLookupMode private_mode) const {
1623   return InstanceKlass::find_method_impl(methods(),
1624                                          name,
1625                                          signature,
1626                                          overpass_mode,
1627                                          static_mode,
1628                                          private_mode);
1629 }
1630 
1631 // find_instance_method looks up the name/signature in the local methods array
1632 // and skips over static methods
1633 Method* InstanceKlass::find_instance_method(const Array<Method*>* methods,
1634                                             const Symbol* name,
1635                                             const Symbol* signature,
1636                                             PrivateLookupMode private_mode) {
1637   Method* const meth = InstanceKlass::find_method_impl(methods,
1638                                                  name,
1639                                                  signature,
1640                                                  find_overpass,
1641                                                  skip_static,
1642                                                  private_mode);
1643   assert(((meth == NULL) || !meth->is_static()),
1644     "find_instance_method should have skipped statics");
1645   return meth;
1646 }
1647 
1648 // find_instance_method looks up the name/signature in the local methods array
1649 // and skips over static methods
1650 Method* InstanceKlass::find_instance_method(const Symbol* name,
1651                                             const Symbol* signature,
1652                                             PrivateLookupMode private_mode) const {
1653   return InstanceKlass::find_instance_method(methods(), name, signature, private_mode);
1654 }
1655 
1656 // Find looks up the name/signature in the local methods array
1657 // and filters on the overpass, static and private flags
1658 // This returns the first one found
1659 // note that the local methods array can have up to one overpass, one static
1660 // and one instance (private or not) with the same name/signature
1661 Method* InstanceKlass::find_local_method(const Symbol* name,
1662                                          const Symbol* signature,
1663                                          OverpassLookupMode overpass_mode,
1664                                          StaticLookupMode static_mode,
1665                                          PrivateLookupMode private_mode) const {
1666   return InstanceKlass::find_method_impl(methods(),
1667                                          name,
1668                                          signature,
1669                                          overpass_mode,
1670                                          static_mode,
1671                                          private_mode);
1672 }
1673 
1674 // Find looks up the name/signature in the local methods array
1675 // and filters on the overpass, static and private flags
1676 // This returns the first one found
1677 // note that the local methods array can have up to one overpass, one static
1678 // and one instance (private or not) with the same name/signature
1679 Method* InstanceKlass::find_local_method(const Array<Method*>* methods,
1680                                          const Symbol* name,
1681                                          const Symbol* signature,
1682                                          OverpassLookupMode overpass_mode,
1683                                          StaticLookupMode static_mode,
1684                                          PrivateLookupMode private_mode) {
1685   return InstanceKlass::find_method_impl(methods,
1686                                          name,
1687                                          signature,
1688                                          overpass_mode,
1689                                          static_mode,
1690                                          private_mode);
1691 }
1692 
1693 Method* InstanceKlass::find_method(const Array<Method*>* methods,
1694                                    const Symbol* name,
1695                                    const Symbol* signature) {
1696   return InstanceKlass::find_method_impl(methods,
1697                                          name,
1698                                          signature,
1699                                          find_overpass,
1700                                          find_static,
1701                                          find_private);
1702 }
1703 
1704 Method* InstanceKlass::find_method_impl(const Array<Method*>* methods,
1705                                         const Symbol* name,
1706                                         const Symbol* signature,
1707                                         OverpassLookupMode overpass_mode,
1708                                         StaticLookupMode static_mode,
1709                                         PrivateLookupMode private_mode) {
1710   int hit = find_method_index(methods, name, signature, overpass_mode, static_mode, private_mode);
1711   return hit >= 0 ? methods->at(hit): NULL;
1712 }
1713 
1714 // true if method matches signature and conforms to skipping_X conditions.
1715 static bool method_matches(const Method* m,
1716                            const Symbol* signature,
1717                            bool skipping_overpass,
1718                            bool skipping_static,
1719                            bool skipping_private) {
1720   return ((m->signature() == signature) &&
1721     (!skipping_overpass || !m->is_overpass()) &&
1722     (!skipping_static || !m->is_static()) &&
1723     (!skipping_private || !m->is_private()));
1724 }
1725 
1726 // Used directly for default_methods to find the index into the
1727 // default_vtable_indices, and indirectly by find_method
1728 // find_method_index looks in the local methods array to return the index
1729 // of the matching name/signature. If overpass methods are being ignored,
1730 // the search continues to find a potential non-overpass match.  This capability
1731 // is important during method resolution to prefer a static method, for example,
1732 // over an overpass method.
1733 // Any _methods array may contain the same name/signature for a static method,
1734 // an overpass method and a local instance method.
1735 // To correctly find a given method, the search criteria may need
1736 // to explicitly skip the other two. For local instance methods, it
1737 // is often necessary to skip private methods.
1738 int InstanceKlass::find_method_index(const Array<Method*>* methods,
1739                                      const Symbol* name,
1740                                      const Symbol* signature,
1741                                      OverpassLookupMode overpass_mode,
1742                                      StaticLookupMode static_mode,
1743                                      PrivateLookupMode private_mode) {
1744   const bool skipping_overpass = (overpass_mode == skip_overpass);
1745   const bool skipping_static = (static_mode == skip_static);
1746   const bool skipping_private = (private_mode == skip_private);
1747   const int hit = quick_search(methods, name);
1748   if (hit != -1) {
1749     const Method* const m = methods->at(hit);
1750 
1751     // Do linear search to find matching signature.  First, quick check
1752     // for common case, ignoring overpasses if requested.
1753     if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1754       return hit;
1755     }
1756 
1757     // search downwards through overloaded methods
1758     int i;
1759     for (i = hit - 1; i >= 0; --i) {
1760         const Method* const m = methods->at(i);
1761         assert(m->is_method(), "must be method");
1762         if (m->name() != name) {
1763           break;
1764         }
1765         if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1766           return i;
1767         }
1768     }
1769     // search upwards
1770     for (i = hit + 1; i < methods->length(); ++i) {
1771         const Method* const m = methods->at(i);
1772         assert(m->is_method(), "must be method");
1773         if (m->name() != name) {
1774           break;
1775         }
1776         if (method_matches(m, signature, skipping_overpass, skipping_static, skipping_private)) {
1777           return i;
1778         }
1779     }
1780     // not found
1781 #ifdef ASSERT
1782     const int index = (skipping_overpass || skipping_static || skipping_private) ? -1 :
1783       linear_search(methods, name, signature);
1784     assert(-1 == index, "binary search should have found entry %d", index);
1785 #endif
1786   }
1787   return -1;
1788 }
1789 
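     // find_method_by_name returns the index of the first method with the given name
     // and stores one past the last matching index in *end_ptr; returns -1 if no
     // method has that name.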
1790 int InstanceKlass::find_method_by_name(const Symbol* name, int* end) const {
1791   return find_method_by_name(methods(), name, end);
1792 }
1793 
1794 int InstanceKlass::find_method_by_name(const Array<Method*>* methods,
1795                                        const Symbol* name,
1796                                        int* end_ptr) {
1797   assert(end_ptr != NULL, "just checking");
1798   int start = quick_search(methods, name);
1799   int end = start + 1;
1800   if (start != -1) {
1801     while (start - 1 >= 0 && (methods->at(start - 1))->name() == name) --start;
1802     while (end < methods->length() && (methods->at(end))->name() == name) ++end;
1803     *end_ptr = end;
1804     return start;
1805   }
1806   return -1;
1807 }
1808 
1809 // uncached_lookup_method searches both the local class methods array and all
1810 // superclasses' methods arrays, skipping any overpass methods in superclasses,
1811 // and possibly skipping private methods.
1812 Method* InstanceKlass::uncached_lookup_method(const Symbol* name,
1813                                               const Symbol* signature,
1814                                               OverpassLookupMode overpass_mode,
1815                                               PrivateLookupMode private_mode) const {
1816   OverpassLookupMode overpass_local_mode = overpass_mode;
1817   const Klass* klass = this;
1818   while (klass != NULL) {
1819     Method* const method = InstanceKlass::cast(klass)->find_method_impl(name,
1820                                                                         signature,
1821                                                                         overpass_local_mode,
1822                                                                         find_static,
1823                                                                         private_mode);
1824     if (method != NULL) {
1825       return method;
1826     }
1827     klass = klass->super();
1828     overpass_local_mode = skip_overpass;   // Always ignore overpass methods in superclasses
1829   }
1830   return NULL;
1831 }
1832 
1833 #ifdef ASSERT
1834 // search through class hierarchy and return true if this class or
1835 // one of the superclasses was redefined
1836 bool InstanceKlass::has_redefined_this_or_super() const {
1837   const Klass* klass = this;
1838   while (klass != NULL) {
1839     if (InstanceKlass::cast(klass)->has_been_redefined()) {
1840       return true;
1841     }
1842     klass = klass->super();
1843   }
1844   return false;
1845 }
1846 #endif
1847 
1848 // lookup a method in the default methods list then in all transitive interfaces
1849 // Do NOT return private or static methods
1850 Method* InstanceKlass::lookup_method_in_ordered_interfaces(Symbol* name,
1851                                                          Symbol* signature) const {
1852   Method* m = NULL;
1853   if (default_methods() != NULL) {
1854     m = find_method(default_methods(), name, signature);
1855   }
1856   // Look up interfaces
1857   if (m == NULL) {
1858     m = lookup_method_in_all_interfaces(name, signature, find_defaults);
1859   }
1860   return m;
1861 }
1862 
1863 // lookup a method in all the interfaces that this class implements
1864 // Do NOT return private or static methods (new in JDK 8), which are not externally visible.
1865 // They should only be found through the initial InterfaceMethodRef.
1866 Method* InstanceKlass::lookup_method_in_all_interfaces(Symbol* name,
1867                                                        Symbol* signature,
1868                                                        DefaultsLookupMode defaults_mode) const {
1869   Array<InstanceKlass*>* all_ifs = transitive_interfaces();
1870   int num_ifs = all_ifs->length();
1871   InstanceKlass *ik = NULL;
1872   for (int i = 0; i < num_ifs; i++) {
1873     ik = all_ifs->at(i);
1874     Method* m = ik->lookup_method(name, signature);
1875     if (m != NULL && m->is_public() && !m->is_static() &&
1876         ((defaults_mode != skip_defaults) || !m->is_default_method())) {
1877       return m;
1878     }
1879   }
1880   return NULL;
1881 }
1882 
1883 /* jni_id_for_impl for jfieldIds only */
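     // Slow path of jni_id_for(): re-checks under the JfieldIdCreation_lock and
     // creates a new JNIid if the offset is still not present.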
1884 JNIid* InstanceKlass::jni_id_for_impl(int offset) {
1885   MutexLocker ml(JfieldIdCreation_lock);
1886   // Retry lookup after we got the lock
1887   JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
1888   if (probe == NULL) {
1889     // Slow case, allocate new static field identifier
1890     probe = new JNIid(this, offset, jni_ids());
1891     set_jni_ids(probe);
1892   }
1893   return probe;
1894 }
1895 
1896 
1897 /* jni_id_for for jfieldIds only */
1898 JNIid* InstanceKlass::jni_id_for(int offset) {
1899   JNIid* probe = jni_ids() == NULL ? NULL : jni_ids()->find(offset);
1900   if (probe == NULL) {
1901     probe = jni_id_for_impl(offset);
1902   }
1903   return probe;
1904 }
1905 
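     // The EnclosingMethod attribute, when present, is stored as two extra u2 slots
     // appended to the end of the _inner_classes array, so a length that is an exact
     // multiple of inner_class_next_offset means the attribute is absent.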
1906 u2 InstanceKlass::enclosing_method_data(int offset) const {
1907   const Array<jushort>* const inner_class_list = inner_classes();
1908   if (inner_class_list == NULL) {
1909     return 0;
1910   }
1911   const int length = inner_class_list->length();
1912   if (length % inner_class_next_offset == 0) {
1913     return 0;
1914   }
1915   const int index = length - enclosing_method_attribute_size;
1916   assert(offset < enclosing_method_attribute_size, "invalid offset");
1917   return inner_class_list->at(index + offset);
1918 }
1919 
1920 void InstanceKlass::set_enclosing_method_indices(u2 class_index,
1921                                                  u2 method_index) {
1922   Array<jushort>* inner_class_list = inner_classes();
1923   assert (inner_class_list != NULL, "_inner_classes list is not set up");
1924   int length = inner_class_list->length();
1925   if (length % inner_class_next_offset == enclosing_method_attribute_size) {
1926     int index = length - enclosing_method_attribute_size;
1927     inner_class_list->at_put(
1928       index + enclosing_method_class_index_offset, class_index);
1929     inner_class_list->at_put(
1930       index + enclosing_method_method_index_offset, method_index);
1931   }
1932 }
1933 
1934 // Lookup or create a jmethodID.
1935 // This code is called by the VMThread and JavaThreads so the
1936 // locking has to be done very carefully to avoid deadlocks
1937 // and/or other cache consistency problems.
1938 //
1939 jmethodID InstanceKlass::get_jmethod_id(const methodHandle& method_h) {
1940   size_t idnum = (size_t)method_h->method_idnum();
1941   jmethodID* jmeths = methods_jmethod_ids_acquire();
1942   size_t length = 0;
1943   jmethodID id = NULL;
1944 
1945   // We use a double-check locking idiom here because this cache is
1946   // performance sensitive. In the normal system, this cache only
1947   // transitions from NULL to non-NULL which is safe because we use
1948   // release_set_methods_jmethod_ids() to advertise the new cache.
1949   // A partially constructed cache should never be seen by a racing
1950   // thread. We also use release_store() to save a new jmethodID
1951   // in the cache so a partially constructed jmethodID should never be
1952   // seen either. Cache reads of existing jmethodIDs proceed without a
1953   // lock, but cache writes of a new jmethodID require uniqueness, and
1954   // creation of the cache itself must not leak, so a lock is
1955   // generally acquired in those two cases.
1956   //
1957   // If the RedefineClasses() API has been used, then this cache can
1958   // grow and we'll have transitions from non-NULL to bigger non-NULL.
1959   // Cache creation requires no leaks and we require safety between all
1960   // cache accesses and freeing of the old cache so a lock is generally
1961   // acquired when the RedefineClasses() API has been used.
1962 
1963   if (jmeths != NULL) {
1964     // the cache already exists
1965     if (!idnum_can_increment()) {
1966       // the cache can't grow so we can just get the current values
1967       get_jmethod_id_length_value(jmeths, idnum, &length, &id);
1968     } else {
1969       // cache can grow so we have to be more careful
1970       if (Threads::number_of_threads() == 0 ||
1971           SafepointSynchronize::is_at_safepoint()) {
1972         // we're single threaded or at a safepoint - no locking needed
1973         get_jmethod_id_length_value(jmeths, idnum, &length, &id);
1974       } else {
1975         MutexLocker ml(JmethodIdCreation_lock);
1976         get_jmethod_id_length_value(jmeths, idnum, &length, &id);
1977       }
1978     }
1979   }
1980   // implied else:
1981   // we need to allocate a cache so default length and id values are good
1982 
1983   if (jmeths == NULL ||   // no cache yet
1984       length <= idnum ||  // cache is too short
1985       id == NULL) {       // cache doesn't contain entry
1986 
1987     // This function can be called by the VMThread so we have to do all
1988     // things that might block on a safepoint before grabbing the lock.
1989     // Otherwise, we can deadlock with the VMThread or have a cache
1990     // consistency issue. These vars keep track of what we might have
1991     // to free after the lock is dropped.
1992     jmethodID  to_dealloc_id     = NULL;
1993     jmethodID* to_dealloc_jmeths = NULL;
1994 
1995     // may not allocate new_jmeths or use it if we allocate it
1996     jmethodID* new_jmeths = NULL;
1997     if (length <= idnum) {
1998       // allocate a new cache that might be used
1999       size_t size = MAX2(idnum+1, (size_t)idnum_allocated_count());
2000       new_jmeths = NEW_C_HEAP_ARRAY(jmethodID, size+1, mtClass);
2001       memset(new_jmeths, 0, (size+1)*sizeof(jmethodID));
2002       // cache size is stored in element[0], other elements offset by one
2003       new_jmeths[0] = (jmethodID)size;
2004     }
2005 
2006     // allocate a new jmethodID that might be used
2007     jmethodID new_id = NULL;
2008     if (method_h->is_old() && !method_h->is_obsolete()) {
2009       // The method passed in is old (but not obsolete), so we need to use the current version
2010       Method* current_method = method_with_idnum((int)idnum);
2011       assert(current_method != NULL, "old but not obsolete, so should exist");
2012       new_id = Method::make_jmethod_id(class_loader_data(), current_method);
2013     } else {
2014       // It is the current version of the method or an obsolete method,
2015       // use the version passed in
2016       new_id = Method::make_jmethod_id(class_loader_data(), method_h());
2017     }
2018 
2019     if (Threads::number_of_threads() == 0 ||
2020         SafepointSynchronize::is_at_safepoint()) {
2021       // we're single threaded or at a safepoint - no locking needed
2022       id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2023                                           &to_dealloc_id, &to_dealloc_jmeths);
2024     } else {
2025       MutexLocker ml(JmethodIdCreation_lock);
2026       id = get_jmethod_id_fetch_or_update(idnum, new_id, new_jmeths,
2027                                           &to_dealloc_id, &to_dealloc_jmeths);
2028     }
2029 
2030     // The lock has been dropped so we can free resources.
2031     // Free up either the old cache or the new cache if we allocated one.
2032     if (to_dealloc_jmeths != NULL) {
2033       FreeHeap(to_dealloc_jmeths);
2034     }
2035     // free up the new ID since it wasn't needed
2036     if (to_dealloc_id != NULL) {
2037       Method::destroy_jmethod_id(class_loader_data(), to_dealloc_id);
2038     }
2039   }
2040   return id;
2041 }
2042 
2043 // Figure out how many jmethodIDs haven't been allocated, and make
2044 // sure space for them is pre-allocated.  This makes getting all
2045 // method ids much, much faster with classes with more than 8
2046 // methods, and has a *substantial* effect on performance with jvmti
2047 // code that loads all jmethodIDs for all classes.
2048 void InstanceKlass::ensure_space_for_methodids(int start_offset) {
2049   int new_jmeths = 0;
2050   int length = methods()->length();
2051   for (int index = start_offset; index < length; index++) {
2052     Method* m = methods()->at(index);
2053     jmethodID id = m->find_jmethod_id_or_null();
2054     if (id == NULL) {
2055       new_jmeths++;
2056     }
2057   }
2058   if (new_jmeths != 0) {
2059     Method::ensure_jmethod_ids(class_loader_data(), new_jmeths);
2060   }
2061 }
2062 
2063 // Common code to fetch the jmethodID from the cache or update the
2064 // cache with the new jmethodID. This function should never do anything
2065 // that causes the caller to go to a safepoint or we can deadlock with
2066 // the VMThread or have cache consistency issues.
2067 //
2068 jmethodID InstanceKlass::get_jmethod_id_fetch_or_update(
2069             size_t idnum, jmethodID new_id,
2070             jmethodID* new_jmeths, jmethodID* to_dealloc_id_p,
2071             jmethodID** to_dealloc_jmeths_p) {
2072   assert(new_id != NULL, "sanity check");
2073   assert(to_dealloc_id_p != NULL, "sanity check");
2074   assert(to_dealloc_jmeths_p != NULL, "sanity check");
2075   assert(Threads::number_of_threads() == 0 ||
2076          SafepointSynchronize::is_at_safepoint() ||
2077          JmethodIdCreation_lock->owned_by_self(), "sanity check");
2078 
2079   // reacquire the cache - we are locked, single threaded or at a safepoint
2080   jmethodID* jmeths = methods_jmethod_ids_acquire();
2081   jmethodID  id     = NULL;
2082   size_t     length = 0;
2083 
2084   if (jmeths == NULL ||                         // no cache yet
2085       (length = (size_t)jmeths[0]) <= idnum) {  // cache is too short
2086     if (jmeths != NULL) {
2087       // copy any existing entries from the old cache
2088       for (size_t index = 0; index < length; index++) {
2089         new_jmeths[index+1] = jmeths[index+1];
2090       }
2091       *to_dealloc_jmeths_p = jmeths;  // save old cache for later delete
2092     }
2093     release_set_methods_jmethod_ids(jmeths = new_jmeths);
2094   } else {
2095     // fetch jmethodID (if any) from the existing cache
2096     id = jmeths[idnum+1];
2097     *to_dealloc_jmeths_p = new_jmeths;  // save new cache for later delete
2098   }
2099   if (id == NULL) {
2100     // No matching jmethodID in the existing cache or we have a new
2101     // cache or we just grew the cache. This cache write is done here
2102     // by the first thread to win the foot race because a jmethodID
2103     // needs to be unique once it is generally available.
2104     id = new_id;
2105 
2106     // The jmethodID cache can be read while unlocked so we have to
2107     // make sure the new jmethodID is complete before installing it
2108     // in the cache.
2109     OrderAccess::release_store(&jmeths[idnum+1], id);
2110   } else {
2111     *to_dealloc_id_p = new_id; // save new id for later delete
2112   }
2113   return id;
2114 }
2115 
2116 
2117 // Common code to get the jmethodID cache length and the jmethodID
2118 // value at index idnum if there is one.
2119 //
2120 void InstanceKlass::get_jmethod_id_length_value(jmethodID* cache,
2121        size_t idnum, size_t *length_p, jmethodID* id_p) {
2122   assert(cache != NULL, "sanity check");
2123   assert(length_p != NULL, "sanity check");
2124   assert(id_p != NULL, "sanity check");
2125 
2126   // cache size is stored in element[0], other elements offset by one
2127   *length_p = (size_t)cache[0];
2128   if (*length_p <= idnum) {  // cache is too short
2129     *id_p = NULL;
2130   } else {
2131     *id_p = cache[idnum+1];  // fetch jmethodID (if any)
2132   }
2133 }
2134 
2135 
2136 // Lookup a jmethodID, NULL if not found.  Do no blocking, no allocations, no handles
2137 jmethodID InstanceKlass::jmethod_id_or_null(Method* method) {
2138   size_t idnum = (size_t)method->method_idnum();
2139   jmethodID* jmeths = methods_jmethod_ids_acquire();
2140   size_t length;                                // length assigned as debugging crumb
2141   jmethodID id = NULL;
2142   if (jmeths != NULL &&                         // If there is a cache
2143       (length = (size_t)jmeths[0]) > idnum) {   // and if it is long enough,
2144     id = jmeths[idnum+1];                       // Look up the id (may be NULL)
2145   }
2146   return id;
2147 }
2148 
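     // Wraps the raw _dep_context field so the dependent-nmethod operations below
     // (mark, add, remove, clean) all work on one DependencyContext view.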
2149 inline DependencyContext InstanceKlass::dependencies() {
2150   DependencyContext dep_context(&_dep_context, &_dep_context_last_cleaned);
2151   return dep_context;
2152 }
2153 
2154 int InstanceKlass::mark_dependent_nmethods(KlassDepChange& changes) {
2155   return dependencies().mark_dependent_nmethods(changes);
2156 }
2157 
2158 void InstanceKlass::add_dependent_nmethod(nmethod* nm) {
2159   dependencies().add_dependent_nmethod(nm);
2160 }
2161 
2162 void InstanceKlass::remove_dependent_nmethod(nmethod* nm) {
2163   dependencies().remove_dependent_nmethod(nm);
2164 }
2165 
2166 void InstanceKlass::clean_dependency_context() {
2167   dependencies().clean_unloading_dependents();
2168 }
2169 
2170 #ifndef PRODUCT
2171 void InstanceKlass::print_dependent_nmethods(bool verbose) {
2172   dependencies().print_dependent_nmethods(verbose);
2173 }
2174 
2175 bool InstanceKlass::is_dependent_nmethod(nmethod* nm) {
2176   return dependencies().is_dependent_nmethod(nm);
2177 }
2178 #endif //PRODUCT
2179 
2180 void InstanceKlass::clean_weak_instanceklass_links() {
2181   clean_implementors_list();
2182   clean_method_data();
2183 }
2184 
2185 void InstanceKlass::clean_implementors_list() {
2186   assert(is_loader_alive(), "this klass should be live");
2187   if (is_interface()) {
2188     assert (ClassUnloading, "only called for ClassUnloading");
2189     for (;;) {
2190       // Use load_acquire due to competing with inserts
2191       Klass* impl = OrderAccess::load_acquire(adr_implementor());
2192       if (impl != NULL && !impl->is_loader_alive()) {
2193         // The recorded implementor is no longer loader-alive; clear this field.
2194         Klass* volatile* klass = adr_implementor();
2195         if (Atomic::cmpxchg((Klass*)NULL, klass, impl) == impl) {
2196           // Successfully unlinked the implementor.
2197           if (log_is_enabled(Trace, class, unload)) {
2198             ResourceMark rm;
2199             log_trace(class, unload)("unlinking class (implementor): %s", impl->external_name());
2200           }
2201           return;
2202         }
2203       } else {
2204         return;
2205       }
2206     }
2207   }
2208 }
2209 
2210 void InstanceKlass::clean_method_data() {
2211   for (int m = 0; m < methods()->length(); m++) {
2212     MethodData* mdo = methods()->at(m)->method_data();
2213     if (mdo != NULL) {
2214       MutexLocker ml(SafepointSynchronize::is_at_safepoint() ? NULL : mdo->extra_data_lock());
2215       mdo->clean_method_data(/*always_clean*/false);
2216     }
2217   }
2218 }
2219 
2220 bool InstanceKlass::supers_have_passed_fingerprint_checks() {
2221   if (java_super() != NULL && !java_super()->has_passed_fingerprint_check()) {
2222     ResourceMark rm;
2223     log_trace(class, fingerprint)("%s : super %s not fingerprinted", external_name(), java_super()->external_name());
2224     return false;
2225   }
2226 
2227   Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
2228   if (local_interfaces != NULL) {
2229     int length = local_interfaces->length();
2230     for (int i = 0; i < length; i++) {
2231       InstanceKlass* intf = local_interfaces->at(i);
2232       if (!intf->has_passed_fingerprint_check()) {
2233         ResourceMark rm;
2234         log_trace(class, fingerprint)("%s : interface %s not fingerprinted", external_name(), intf->external_name());
2235         return false;
2236       }
2237     }
2238   }
2239 
2240   return true;
2241 }
2242 
2243 bool InstanceKlass::should_store_fingerprint(bool is_unsafe_anonymous) {
2244 #if INCLUDE_AOT
2245   // We store the fingerprint into the InstanceKlass only in the following cases:
2246   if (CalculateClassFingerprint) {
2247     // (1) We are running AOT to generate a shared library.
2248     return true;
2249   }
2250   if (Arguments::is_dumping_archive()) {
2251     // (2) We are running -Xshare:dump or -XX:ArchiveClassesAtExit to create a shared archive
2252     return true;
2253   }
2254   if (UseAOT && is_unsafe_anonymous) {
2255     // (3) We are using AOT code from a shared library and see an unsafe anonymous class
2256     return true;
2257   }
2258 #endif
2259 
2260   // In all other cases we might set the _misc_has_passed_fingerprint_check bit,
2261   // but do not store the 64-bit fingerprint to save space.
2262   return false;
2263 }
2264 
2265 bool InstanceKlass::has_stored_fingerprint() const {
2266 #if INCLUDE_AOT
2267   return should_store_fingerprint() || is_shared();
2268 #else
2269   return false;
2270 #endif
2271 }
2272 
2273 uint64_t InstanceKlass::get_stored_fingerprint() const {
2274   address adr = adr_fingerprint();
2275   if (adr != NULL) {
2276     return (uint64_t)Bytes::get_native_u8(adr); // adr may not be 64-bit aligned
2277   }
2278   return 0;
2279 }
2280 
2281 void InstanceKlass::store_fingerprint(uint64_t fingerprint) {
2282   address adr = adr_fingerprint();
2283   if (adr != NULL) {
2284     Bytes::put_native_u8(adr, (u8)fingerprint); // adr may not be 64-bit aligned
2285 
2286     ResourceMark rm;
2287     log_trace(class, fingerprint)("stored as " PTR64_FORMAT " for class %s", fingerprint, external_name());
2288   }
2289 }
2290 
2291 void InstanceKlass::metaspace_pointers_do(MetaspaceClosure* it) {
2292   Klass::metaspace_pointers_do(it);
2293 
2294   if (log_is_enabled(Trace, cds)) {
2295     ResourceMark rm;
2296     log_trace(cds)("Iter(InstanceKlass): %p (%s)", this, external_name());
2297   }
2298 
2299   it->push(&_annotations);
2300   it->push((Klass**)&_array_klasses);
2301   it->push(&_constants);
2302   it->push(&_inner_classes);
2303   it->push(&_array_name);
2304 #if INCLUDE_JVMTI
2305   it->push(&_previous_versions);
2306 #endif
2307   it->push(&_methods);
2308   it->push(&_default_methods);
2309   it->push(&_local_interfaces);
2310   it->push(&_transitive_interfaces);
2311   it->push(&_method_ordering);
2312   it->push(&_default_vtable_indices);
2313   it->push(&_fields);
2314 
2315   if (itable_length() > 0) {
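         // The number of interfaces is the distance (in words) from the start of the
         // itable to the first method table, divided by the size of an itableOffsetEntry.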
2316     itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2317     int method_table_offset_in_words = ioe->offset()/wordSize;
2318     int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
2319                          / itableOffsetEntry::size();
2320 
2321     for (int i = 0; i < nof_interfaces; i ++, ioe ++) {
2322       if (ioe->interface_klass() != NULL) {
2323         it->push(ioe->interface_klass_addr());
2324         itableMethodEntry* ime = ioe->first_method_entry(this);
2325         int n = klassItable::method_count_for_interface(ioe->interface_klass());
2326         for (int index = 0; index < n; index ++) {
2327           it->push(ime[index].method_addr());
2328         }
2329       }
2330     }
2331   }
2332 
2333   it->push(&_nest_members);
2334 }
2335 
2336 void InstanceKlass::remove_unshareable_info() {
2337   Klass::remove_unshareable_info();
2338 
2339   if (is_in_error_state()) {
2340     // Linking is attempted for classes during dumping and may fail, but
2341     // such classes are still in the dictionary and in the CLD's class list.
2342     // Check the in_error state first because in_error is > the linked state,
2343     // so is_linked() would still return true.
2344     // If there's a linking error, there is nothing else to remove.
2345     return;
2346   }
2347 
2348   // Reset to the 'allocated' state to prevent any premature accessing to
2349   // a shared class at runtime while the class is still being loaded and
2350   // restored. A class' init_state is set to 'loaded' at runtime when it's
2351   // being added to the class hierarchy (see SystemDictionary::add_to_hierarchy()).
2352   _init_state = allocated;
2353 
2354   { // Otherwise this needs to take out the Compile_lock.
2355     assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
2356     init_implementor();
2357   }
2358 
2359   constants()->remove_unshareable_info();
2360 
2361   for (int i = 0; i < methods()->length(); i++) {
2362     Method* m = methods()->at(i);
2363     m->remove_unshareable_info();
2364   }
2365 
2366   // do array classes also.
2367   if (array_klasses() != NULL) {
2368     array_klasses()->remove_unshareable_info();
2369   }
2370 
2371   // These are not allocated from metaspace. They are safe to set to NULL.
2372   _source_debug_extension = NULL;
2373   _dep_context = NULL;
2374   _osr_nmethods_head = NULL;
2375 #if INCLUDE_JVMTI
2376   _breakpoints = NULL;
2377   _previous_versions = NULL;
2378   _cached_class_file = NULL;
2379   _jvmti_cached_class_field_map = NULL;
2380 #endif
2381 
2382   _init_thread = NULL;
2383   _methods_jmethod_ids = NULL;
2384   _jni_ids = NULL;
2385   _oop_map_cache = NULL;
2386   // clear _nest_host to ensure re-load at runtime
2387   _nest_host = NULL;
2388   _package_entry = NULL;
2389   _dep_context_last_cleaned = 0;
2390 }
2391 
2392 void InstanceKlass::remove_java_mirror() {
2393   Klass::remove_java_mirror();
2394 
2395   // do array classes also.
2396   if (array_klasses() != NULL) {
2397     array_klasses()->remove_java_mirror();
2398   }
2399 }
2400 
2401 void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
2402   // SystemDictionary::add_to_hierarchy() sets the init_state to loaded
2403   // before the InstanceKlass is added to the SystemDictionary. Make
2404   // sure the current state is <loaded.
2405   assert(!is_loaded(), "invalid init state");
2406   set_package(loader_data, CHECK);
2407   Klass::restore_unshareable_info(loader_data, protection_domain, CHECK);
2408 
2409   Array<Method*>* methods = this->methods();
2410   int num_methods = methods->length();
2411   for (int index = 0; index < num_methods; ++index) {
2412     methods->at(index)->restore_unshareable_info(CHECK);
2413   }
2414   if (JvmtiExport::has_redefined_a_class()) {
2415     // Reinitialize the vtable because RedefineClasses may have changed some
2416     // entries in this vtable for super classes, so the CDS vtable might
2417     // point to old or obsolete entries.  RedefineClasses doesn't fix up
2418     // vtables in the shared system dictionary, only the main one.
2419     // The itable is affected in the same way, so reinitialize it as well.
2420     vtable().initialize_vtable(false, CHECK);
2421     itable().initialize_itable(false, CHECK);
2422   }
2423 
2424   // restore constant pool resolved references
2425   constants()->restore_unshareable_info(CHECK);
2426 
2427   if (array_klasses() != NULL) {
2428     // Array classes have null protection domain.
2429     // --> see ArrayKlass::complete_create_array_klass()
2430     array_klasses()->restore_unshareable_info(ClassLoaderData::the_null_class_loader_data(), Handle(), CHECK);
2431   }
2432 }
2433 
2434 // returns true IFF is_in_error_state() has been changed as a result of this call.
2435 bool InstanceKlass::check_sharing_error_state() {
2436   assert(DumpSharedSpaces, "should only be called during dumping");
2437   bool old_state = is_in_error_state();
2438 
2439   if (!is_in_error_state()) {
2440     bool bad = false;
2441     for (InstanceKlass* sup = java_super(); sup; sup = sup->java_super()) {
2442       if (sup->is_in_error_state()) {
2443         bad = true;
2444         break;
2445       }
2446     }
2447     if (!bad) {
2448       Array<InstanceKlass*>* interfaces = transitive_interfaces();
2449       for (int i = 0; i < interfaces->length(); i++) {
2450         InstanceKlass* iface = interfaces->at(i);
2451         if (iface->is_in_error_state()) {
2452           bad = true;
2453           break;
2454         }
2455       }
2456     }
2457 
2458     if (bad) {
2459       set_in_error_state();
2460     }
2461   }
2462 
2463   return (old_state != is_in_error_state());
2464 }
2465 
2466 void InstanceKlass::set_class_loader_type(s2 loader_type) {
2467   switch (loader_type) {
2468   case ClassLoader::BOOT_LOADER:
2469     _misc_flags |= _misc_is_shared_boot_class;
2470     break;
2471   case ClassLoader::PLATFORM_LOADER:
2472     _misc_flags |= _misc_is_shared_platform_class;
2473     break;
2474   case ClassLoader::APP_LOADER:
2475     _misc_flags |= _misc_is_shared_app_class;
2476     break;
2477   default:
2478     ShouldNotReachHere();
2479     break;
2480   }
2481 }
2482 
2483 #if INCLUDE_JVMTI
2484 static void clear_all_breakpoints(Method* m) {
2485   m->clear_all_breakpoints();
2486 }
2487 #endif
2488 
2489 void InstanceKlass::unload_class(InstanceKlass* ik) {
2490   // Release dependencies.
2491   ik->dependencies().remove_all_dependents();
2492 
2493   // notify the debugger
2494   if (JvmtiExport::should_post_class_unload()) {
2495     JvmtiExport::post_class_unload(ik);
2496   }
2497 
2498   // notify ClassLoadingService of class unload
2499   ClassLoadingService::notify_class_unloaded(ik);
2500 
2501   if (Arguments::is_dumping_archive()) {
2502     SystemDictionaryShared::remove_dumptime_info(ik);
2503   }
2504 
2505   if (log_is_enabled(Info, class, unload)) {
2506     ResourceMark rm;
2507     log_info(class, unload)("unloading class %s " INTPTR_FORMAT, ik->external_name(), p2i(ik));
2508   }
2509 
2510   Events::log_class_unloading(Thread::current(), ik);
2511 
2512 #if INCLUDE_JFR
2513   assert(ik != NULL, "invariant");
2514   EventClassUnload event;
2515   event.set_unloadedClass(ik);
2516   event.set_definingClassLoader(ik->class_loader_data());
2517   event.commit();
2518 #endif
2519 }
2520 
2521 void InstanceKlass::release_C_heap_structures(InstanceKlass* ik) {
2522   // Clean up C heap
2523   ik->release_C_heap_structures();
2524   ik->constants()->release_C_heap_structures();
2525 }
2526 
2527 void InstanceKlass::release_C_heap_structures() {
2528   // Can't release the constant pool here because the constant pool can be
2529   // deallocated separately from the InstanceKlass for default methods and
2530   // redefine classes.
2531 
2532   // Deallocate oop map cache
2533   if (_oop_map_cache != NULL) {
2534     delete _oop_map_cache;
2535     _oop_map_cache = NULL;
2536   }
2537 
2538   // Deallocate JNI identifiers for jfieldIDs
2539   JNIid::deallocate(jni_ids());
2540   set_jni_ids(NULL);
2541 
2542   jmethodID* jmeths = methods_jmethod_ids_acquire();
2543   if (jmeths != (jmethodID*)NULL) {
2544     release_set_methods_jmethod_ids(NULL);
2545     FreeHeap(jmeths);
2546   }
2547 
2548   assert(_dep_context == NULL,
2549          "dependencies should already be cleaned");
2550 
2551 #if INCLUDE_JVMTI
2552   // Deallocate breakpoint records
2553   if (breakpoints() != 0x0) {
2554     methods_do(clear_all_breakpoints);
2555     assert(breakpoints() == 0x0, "should have cleared breakpoints");
2556   }
2557 
2558   // deallocate the cached class file
2559   if (_cached_class_file != NULL) {
2560     os::free(_cached_class_file);
2561     _cached_class_file = NULL;
2562   }
2563 #endif
2564 
2565   // Decrement symbol reference counts associated with the unloaded class.
2566   if (_name != NULL) _name->decrement_refcount();
2567   // unreference array name derived from this class name (arrays of an unloaded
2568   // class can't be referenced anymore).
2569   if (_array_name != NULL)  _array_name->decrement_refcount();
2570   FREE_C_HEAP_ARRAY(char, _source_debug_extension);
2571 }
2572 
2573 void InstanceKlass::set_source_debug_extension(const char* array, int length) {
2574   if (array == NULL) {
2575     _source_debug_extension = NULL;
2576   } else {
2577     // Adding one to the attribute length in order to store a null terminator
2578     // character could cause an overflow because the attribute length is
2579     // already coded with a u4 in the classfile, but in practice, it's
2580     // unlikely to happen.
2581     assert((length+1) > length, "Overflow checking");
2582     char* sde = NEW_C_HEAP_ARRAY(char, (length + 1), mtClass);
2583     for (int i = 0; i < length; i++) {
2584       sde[i] = array[i];
2585     }
2586     sde[length] = '\0';
2587     _source_debug_extension = sde;
2588   }
2589 }
2590 
2591 const char* InstanceKlass::signature_name() const {
2592   int hash_len = 0;
2593   char hash_buf[40];
2594 
2595   // If this is an unsafe anonymous class, append a hash to make the name unique
2596   if (is_unsafe_anonymous()) {
2597     intptr_t hash = (java_mirror() != NULL) ? java_mirror()->identity_hash() : 0;
2598     jio_snprintf(hash_buf, sizeof(hash_buf), "/" UINTX_FORMAT, (uintx)hash);
2599     hash_len = (int)strlen(hash_buf);
2600   }
2601 
2602   // Get the internal name as a c string
2603   const char* src = (const char*) (name()->as_C_string());
2604   const int src_length = (int)strlen(src);
2605 
2606   char* dest = NEW_RESOURCE_ARRAY(char, src_length + hash_len + 3);
2607 
2608   // Add L as type indicator
2609   int dest_index = 0;
2610   dest[dest_index++] = 'L';
2611 
2612   // Add the actual class name
2613   for (int src_index = 0; src_index < src_length; ) {
2614     dest[dest_index++] = src[src_index++];
2615   }
2616 
2617   // If we have a hash, append it
2618   for (int hash_index = 0; hash_index < hash_len; ) {
2619     dest[dest_index++] = hash_buf[hash_index++];
2620   }
2621 
2622   // Add the semicolon and the NULL
2623   dest[dest_index++] = ';';
2624   dest[dest_index] = '\0';
2625   return dest;
2626 }
2627 
2628 // Used to obtain the package name from a fully qualified class name.
2629 Symbol* InstanceKlass::package_from_name(const Symbol* name, TRAPS) {
2630   if (name == NULL) {
2631     return NULL;
2632   } else {
2633     if (name->utf8_length() <= 0) {
2634       return NULL;
2635     }
2636     ResourceMark rm;
2637     const char* package_name = ClassLoader::package_from_name((const char*) name->as_C_string());
2638     if (package_name == NULL) {
2639       return NULL;
2640     }
2641     Symbol* pkg_name = SymbolTable::new_symbol(package_name);
2642     return pkg_name;
2643   }
2644 }
2645 
2646 ModuleEntry* InstanceKlass::module() const {
2647   // For an unsafe anonymous class return the host class' module
2648   if (is_unsafe_anonymous()) {
2649     assert(unsafe_anonymous_host() != NULL, "unsafe anonymous class must have a host class");
2650     return unsafe_anonymous_host()->module();
2651   }
2652 
2653   // Class is in a named package
2654   if (!in_unnamed_package()) {
2655     return _package_entry->module();
2656   }
2657 
2658   // Class is in an unnamed package, return its loader's unnamed module
2659   return class_loader_data()->unnamed_module();
2660 }
2661 
2662 void InstanceKlass::set_package(ClassLoaderData* loader_data, TRAPS) {
2663 
2664   // ensure java/ packages are only loaded by the boot or platform builtin loaders
2665   check_prohibited_package(name(), loader_data, CHECK);
2666 
2667   TempNewSymbol pkg_name = package_from_name(name(), CHECK);
2668 
2669   if (pkg_name != NULL && loader_data != NULL) {
2670 
2671     // Find in class loader's package entry table.
2672     _package_entry = loader_data->packages()->lookup_only(pkg_name);
2673 
2674     // If the package name is not found in the loader's package
2675     // entry table, it is an indication that the package has not
2676     // been defined. Consider it defined within the unnamed module.
2677     if (_package_entry == NULL) {
2678       ResourceMark rm;
2679 
2680       if (!ModuleEntryTable::javabase_defined()) {
2681         // Before java.base is defined during bootstrapping, define all packages in
2682         // the java.base module.  If a non-java.base package is erroneously placed
2683         // in the java.base module it will be caught later when java.base
2684         // is defined by ModuleEntryTable::verify_javabase_packages check.
2685         assert(ModuleEntryTable::javabase_moduleEntry() != NULL, JAVA_BASE_NAME " module is NULL");
2686         _package_entry = loader_data->packages()->lookup(pkg_name, ModuleEntryTable::javabase_moduleEntry());
2687       } else {
2688         assert(loader_data->unnamed_module() != NULL, "unnamed module is NULL");
2689         _package_entry = loader_data->packages()->lookup(pkg_name,
2690                                                          loader_data->unnamed_module());
2691       }
2692 
2693       // A package should have been successfully created
2694       assert(_package_entry != NULL, "Package entry for class %s not found, loader %s",
2695              name()->as_C_string(), loader_data->loader_name_and_id());
2696     }
2697 
2698     if (log_is_enabled(Debug, module)) {
2699       ResourceMark rm;
2700       ModuleEntry* m = _package_entry->module();
2701       log_trace(module)("Setting package: class: %s, package: %s, loader: %s, module: %s",
2702                         external_name(),
2703                         pkg_name->as_C_string(),
2704                         loader_data->loader_name_and_id(),
2705                         (m->is_named() ? m->name()->as_C_string() : UNNAMED_MODULE));
2706     }
2707   } else {
2708     ResourceMark rm;
2709     log_trace(module)("Setting package: class: %s, package: unnamed, loader: %s, module: %s",
2710                       external_name(),
2711                       (loader_data != NULL) ? loader_data->loader_name_and_id() : "NULL",
2712                       UNNAMED_MODULE);
2713   }
2714 }
2715 
2716 
2717 // different versions of is_same_class_package
2718 
2719 bool InstanceKlass::is_same_class_package(const Klass* class2) const {
2720   oop classloader1 = this->class_loader();
2721   PackageEntry* classpkg1 = this->package();
2722   if (class2->is_objArray_klass()) {
2723     class2 = ObjArrayKlass::cast(class2)->bottom_klass();
2724   }
2725 
2726   oop classloader2;
2727   PackageEntry* classpkg2;
2728   if (class2->is_instance_klass()) {
2729     classloader2 = class2->class_loader();
2730     classpkg2 = class2->package();
2731   } else {
2732     assert(class2->is_typeArray_klass(), "should be type array");
2733     classloader2 = NULL;
2734     classpkg2 = NULL;
2735   }
2736 
2737   // Same package is determined by comparing class loader
2738   // and package entries. Both must be the same. This rule
2739   // applies even to classes that are defined in the unnamed
2740   // package, they still must have the same class loader.
2741   if ((classloader1 == classloader2) && (classpkg1 == classpkg2)) {
2742     return true;
2743   }
2744 
2745   return false;
2746 }
2747 
2748 // Return true if this class and other_class are in the same package. Class loader
2749 // and class name information is enough to determine a class's package.
2750 bool InstanceKlass::is_same_class_package(oop other_class_loader,
2751                                           const Symbol* other_class_name) const {
2752   if (class_loader() != other_class_loader) {
2753     return false;
2754   }
2755   if (name()->fast_compare(other_class_name) == 0) {
2756      return true;
2757   }
2758 
2759   {
2760     ResourceMark rm;
2761 
2762     bool bad_class_name = false;
2763     const char* other_pkg =
2764       ClassLoader::package_from_name((const char*) other_class_name->as_C_string(), &bad_class_name);
2765     if (bad_class_name) {
2766       return false;
2767     }
2768     // Check that package_from_name() returns NULL, not "", if there is no package.
2769     assert(other_pkg == NULL || strlen(other_pkg) > 0, "package name is empty string");
2770 
2771     const Symbol* const this_package_name =
2772       this->package() != NULL ? this->package()->name() : NULL;
2773 
2774     if (this_package_name == NULL || other_pkg == NULL) {
2775       // One of the two doesn't have a package.  Only return true if the other
2776       // one also doesn't have a package.
2777       return (const char*)this_package_name == other_pkg;
2778     }
2779 
2780     // Check if package is identical
2781     return this_package_name->equals(other_pkg);
2782   }
2783 }
2784 
2785 // Returns true iff super_method can be overridden by a method in targetclassname
2786 // See JLS 3rd edition 8.4.6.1
2787 // Assumes name-signature match
2788 // "this" is InstanceKlass of super_method which must exist
2789 // Note that the InstanceKlass of the method in the targetclassname may not have been created yet.
2790 bool InstanceKlass::is_override(const methodHandle& super_method, Handle targetclassloader, Symbol* targetclassname, TRAPS) {
2791   // Private methods cannot be overridden
2792   if (super_method->is_private()) {
2793     return false;
2794   }
2795   // If the super method is accessible, then it can be overridden
2796   if ((super_method->is_protected()) ||
2797       (super_method->is_public())) {
2798     return true;
2799   }
2800   // Package-private methods are not inherited outside of the package
2801   assert(super_method->is_package_private(), "must be package private");
2802   return is_same_class_package(targetclassloader(), targetclassname);
2803 }
2804 
2805 // Only boot and platform class loaders can define classes in "java/" packages.
2806 void InstanceKlass::check_prohibited_package(Symbol* class_name,
2807                                              ClassLoaderData* loader_data,
2808                                              TRAPS) {
2809   if (!loader_data->is_boot_class_loader_data() &&
2810       !loader_data->is_platform_class_loader_data() &&
2811       class_name != NULL) {
2812     ResourceMark rm(THREAD);
2813     char* name = class_name->as_C_string();
2814     if (strncmp(name, JAVAPKG, JAVAPKG_LEN) == 0 && name[JAVAPKG_LEN] == '/') {
2815       TempNewSymbol pkg_name = InstanceKlass::package_from_name(class_name, CHECK);
2816       assert(pkg_name != NULL, "Error in parsing package name starting with 'java/'");
2817       name = pkg_name->as_C_string();
2818       const char* class_loader_name = loader_data->loader_name_and_id();
2819       StringUtils::replace_no_expand(name, "/", ".");
2820       const char* msg_text1 = "Class loader (instance of): ";
2821       const char* msg_text2 = " tried to load prohibited package name: ";
2822       size_t len = strlen(msg_text1) + strlen(class_loader_name) + strlen(msg_text2) + strlen(name) + 1;
2823       char* message = NEW_RESOURCE_ARRAY_IN_THREAD(THREAD, char, len);
2824       jio_snprintf(message, len, "%s%s%s%s", msg_text1, class_loader_name, msg_text2, name);
2825       THROW_MSG(vmSymbols::java_lang_SecurityException(), message);
2826     }
2827   }
2828   return;
2829 }
2830 
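// Scan this class's InnerClasses attribute for the entry that describes this
// class itself.  If one is found, return true and pass back the constant pool
// indices of the outer class info (*ooff) and of the inner name (*noff).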
2831 bool InstanceKlass::find_inner_classes_attr(int* ooff, int* noff, TRAPS) const {
2832   constantPoolHandle i_cp(THREAD, constants());
2833   for (InnerClassesIterator iter(this); !iter.done(); iter.next()) {
2834     int ioff = iter.inner_class_info_index();
2835     if (ioff != 0) {
2836       // Check to see if the name matches the class we're looking for
2837       // before attempting to find the class.
2838       if (i_cp->klass_name_at_matches(this, ioff)) {
2839         Klass* inner_klass = i_cp->klass_at(ioff, CHECK_false);
2840         if (this == inner_klass) {
2841           *ooff = iter.outer_class_info_index();
2842           *noff = iter.inner_name_index();
2843           return true;
2844         }
2845       }
2846     }
2847   }
2848   return false;
2849 }
2850 
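// Compute the enclosing class from the InnerClasses attribute or, failing that,
// from the EnclosingMethod attribute.  *inner_is_member is set only when the
// relationship comes from the InnerClasses attribute, and the result is checked
// so that a spoofed inner class cannot gain access rights.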
2851 InstanceKlass* InstanceKlass::compute_enclosing_class(bool* inner_is_member, TRAPS) const {
2852   InstanceKlass* outer_klass = NULL;
2853   *inner_is_member = false;
2854   int ooff = 0, noff = 0;
2855   bool has_inner_classes_attr = find_inner_classes_attr(&ooff, &noff, THREAD);
2856   if (has_inner_classes_attr) {
2857     constantPoolHandle i_cp(THREAD, constants());
2858     if (ooff != 0) {
2859       Klass* ok = i_cp->klass_at(ooff, CHECK_NULL);
2860       outer_klass = InstanceKlass::cast(ok);
2861       *inner_is_member = true;
2862     }
2863     if (NULL == outer_klass) {
2864       // It may be unsafe anonymous; try for that.
2865       int encl_method_class_idx = enclosing_method_class_index();
2866       if (encl_method_class_idx != 0) {
2867         Klass* ok = i_cp->klass_at(encl_method_class_idx, CHECK_NULL);
2868         outer_klass = InstanceKlass::cast(ok);
2869         *inner_is_member = false;
2870       }
2871     }
2872   }
2873 
2874   // If no enclosing class was found for this class.
2875   if (NULL == outer_klass) return NULL;
2876 
2877   // Throws an exception if outer klass has not declared k as an inner klass
2878   // We need evidence that each klass knows about the other, or else
2879   // the system could allow a spoof of an inner class to gain access rights.
2880   Reflection::check_for_inner_class(outer_klass, this, *inner_is_member, CHECK_NULL);
2881   return outer_klass;
2882 }
2883 
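// Compute the modifier flags for this class.  If this is a member class, the
// access flags recorded in its InnerClasses entry take precedence over the
// class file access flags; ACC_SUPER is always stripped from the result.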
2884 jint InstanceKlass::compute_modifier_flags(TRAPS) const {
2885   jint access = access_flags().as_int();
2886 
2887   // But check if it happens to be a member class.
2888   InnerClassesIterator iter(this);
2889   for (; !iter.done(); iter.next()) {
2890     int ioff = iter.inner_class_info_index();
2891     // Inner class attribute can be zero, skip it.
2892     // Strange but true:  JVM spec. allows null inner class refs.
2893     if (ioff == 0) continue;
2894 
2895     // only look at classes that are already loaded
2896     // since we are looking for the flags for ourselves.
2897     Symbol* inner_name = constants()->klass_name_at(ioff);
2898     if (name() == inner_name) {
2899       // This is really a member class.
2900       access = iter.inner_access_flags();
2901       break;
2902     }
2903   }
2904   // Remember to strip ACC_SUPER bit
2905   return (access & (~JVM_ACC_SUPER)) & JVM_ACC_WRITTEN_FLAGS;
2906 }
2907 
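// Map the class's current initialization state onto the JVMTI class status bits.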
2908 jint InstanceKlass::jvmti_class_status() const {
2909   jint result = 0;
2910 
2911   if (is_linked()) {
2912     result |= JVMTI_CLASS_STATUS_VERIFIED | JVMTI_CLASS_STATUS_PREPARED;
2913   }
2914 
2915   if (is_initialized()) {
2916     assert(is_linked(), "Class status is not consistent");
2917     result |= JVMTI_CLASS_STATUS_INITIALIZED;
2918   }
2919   if (is_in_error_state()) {
2920     result |= JVMTI_CLASS_STATUS_ERROR;
2921   }
2922   return result;
2923 }
2924 
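// Resolve an interface method through the itable.  Throws
// IncompatibleClassChangeError if this receiver class does not implement the
// interface that declares the method, and AbstractMethodError if the resolved
// itable slot is empty.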
2925 Method* InstanceKlass::method_at_itable(Klass* holder, int index, TRAPS) {
2926   itableOffsetEntry* ioe = (itableOffsetEntry*)start_of_itable();
2927   int method_table_offset_in_words = ioe->offset()/wordSize;
2928   int nof_interfaces = (method_table_offset_in_words - itable_offset_in_words())
2929                        / itableOffsetEntry::size();
2930 
2931   for (int cnt = 0 ; ; cnt ++, ioe ++) {
2932     // If the interface isn't implemented by the receiver class,
2933     // the VM should throw IncompatibleClassChangeError.
2934     if (cnt >= nof_interfaces) {
2935       ResourceMark rm(THREAD);
2936       stringStream ss;
2937       bool same_module = (module() == holder->module());
2938       ss.print("Receiver class %s does not implement "
2939                "the interface %s defining the method to be called "
2940                "(%s%s%s)",
2941                external_name(), holder->external_name(),
2942                (same_module) ? joint_in_module_of_loader(holder) : class_in_module_of_loader(),
2943                (same_module) ? "" : "; ",
2944                (same_module) ? "" : holder->class_in_module_of_loader());
2945       THROW_MSG_NULL(vmSymbols::java_lang_IncompatibleClassChangeError(), ss.as_string());
2946     }
2947 
2948     Klass* ik = ioe->interface_klass();
2949     if (ik == holder) break;
2950   }
2951 
2952   itableMethodEntry* ime = ioe->first_method_entry(this);
2953   Method* m = ime[index].method();
2954   if (m == NULL) {
2955     THROW_NULL(vmSymbols::java_lang_AbstractMethodError());
2956   }
2957   return m;
2958 }
2959 
2960 
2961 #if INCLUDE_JVMTI
2962 // Update default_methods for RedefineClasses for methods that are
2963 // not yet in the vtable due to concurrent subclass definition and superinterface
2964 // redefinition.
2965 // Note: those in the vtable should have been updated via adjust_method_entries.
2966 void InstanceKlass::adjust_default_methods(bool* trace_name_printed) {
2967   // search the default_methods for uses of either obsolete or EMCP methods
2968   if (default_methods() != NULL) {
2969     for (int index = 0; index < default_methods()->length(); index ++) {
2970       Method* old_method = default_methods()->at(index);
2971       if (old_method == NULL || !old_method->is_old()) {
2972         continue; // skip uninteresting entries
2973       }
2974       assert(!old_method->is_deleted(), "default methods may not be deleted");
2975       Method* new_method = old_method->get_new_method();
2976       default_methods()->at_put(index, new_method);
2977 
2978       if (log_is_enabled(Info, redefine, class, update)) {
2979         ResourceMark rm;
2980         if (!(*trace_name_printed)) {
2981           log_info(redefine, class, update)
2982             ("adjust: klassname=%s default methods from name=%s",
2983              external_name(), old_method->method_holder()->external_name());
2984           *trace_name_printed = true;
2985         }
2986         log_debug(redefine, class, update, vtables)
2987           ("default method update: %s(%s) ",
2988            new_method->name()->as_C_string(), new_method->signature()->as_C_string());
2989       }
2990     }
2991   }
2992 }
2993 #endif // INCLUDE_JVMTI
2994 
2995 // On-stack replacement stuff
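// Add an OSR nmethod at the head of this klass's list of OSR nmethods and, with
// tiered compilation, make lower-level OSR methods for the same method and bci
// not entrant.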
2996 void InstanceKlass::add_osr_nmethod(nmethod* n) {
2997   assert_lock_strong(CompiledMethod_lock);
2998 #ifndef PRODUCT
2999   if (TieredCompilation) {
3000     nmethod* prev = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), n->comp_level(), true);
3001     assert(prev == NULL || !prev->is_in_use(),
3002            "redundant OSR recompilation detected. memory leak in CodeCache!");
3003   }
3004 #endif
3005   // only one compilation can be active
3006   {
3007     assert(n->is_osr_method(), "wrong kind of nmethod");
3008     n->set_osr_link(osr_nmethods_head());
3009     set_osr_nmethods_head(n);
3010     // Raise the highest osr level if necessary
3011     if (TieredCompilation) {
3012       Method* m = n->method();
3013       m->set_highest_osr_comp_level(MAX2(m->highest_osr_comp_level(), n->comp_level()));
3014     }
3015   }
3016 
3017   // Get rid of the osr methods for the same bci that have lower levels.
3018   if (TieredCompilation) {
3019     for (int l = CompLevel_limited_profile; l < n->comp_level(); l++) {
3020       nmethod *inv = lookup_osr_nmethod(n->method(), n->osr_entry_bci(), l, true);
3021       if (inv != NULL && inv->is_in_use()) {
3022         inv->make_not_entrant();
3023       }
3024     }
3025   }
3026 }
3027 
3028 // Remove osr nmethod from the list. Return true if found and removed.
3029 bool InstanceKlass::remove_osr_nmethod(nmethod* n) {
3030   // This is a short non-blocking critical region, so the no safepoint check is ok.
3031   MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock
3032                  , Mutex::_no_safepoint_check_flag);
3033   assert(n->is_osr_method(), "wrong kind of nmethod");
3034   nmethod* last = NULL;
3035   nmethod* cur  = osr_nmethods_head();
3036   int max_level = CompLevel_none;  // Find the max comp level excluding n
3037   Method* m = n->method();
3038   // Search for match
3039   bool found = false;
3040   while(cur != NULL && cur != n) {
3041     if (TieredCompilation && m == cur->method()) {
3042       // Find max level before n
3043       max_level = MAX2(max_level, cur->comp_level());
3044     }
3045     last = cur;
3046     cur = cur->osr_link();
3047   }
3048   nmethod* next = NULL;
3049   if (cur == n) {
3050     found = true;
3051     next = cur->osr_link();
3052     if (last == NULL) {
3053       // Remove first element
3054       set_osr_nmethods_head(next);
3055     } else {
3056       last->set_osr_link(next);
3057     }
3058   }
3059   n->set_osr_link(NULL);
3060   if (TieredCompilation) {
3061     cur = next;
3062     while (cur != NULL) {
3063       // Find max level after n
3064       if (m == cur->method()) {
3065         max_level = MAX2(max_level, cur->comp_level());
3066       }
3067       cur = cur->osr_link();
3068     }
3069     m->set_highest_osr_comp_level(max_level);
3070   }
3071   return found;
3072 }
3073 
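// Mark every OSR nmethod compiled for method m for deoptimization and return
// the number of nmethods found.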
3074 int InstanceKlass::mark_osr_nmethods(const Method* m) {
3075   MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
3076                  Mutex::_no_safepoint_check_flag);
3077   nmethod* osr = osr_nmethods_head();
3078   int found = 0;
3079   while (osr != NULL) {
3080     assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3081     if (osr->method() == m) {
3082       osr->mark_for_deoptimization();
3083       found++;
3084     }
3085     osr = osr->osr_link();
3086   }
3087   return found;
3088 }
3089 
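// Find an OSR nmethod for method m at the given bci (InvocationEntryBci matches
// any entry bci).  With match_level set, only an exact compilation level match is
// returned; otherwise the candidate with the highest compilation level is
// returned, provided that level is at least comp_level.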
3090 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
3091   MutexLocker ml(CompiledMethod_lock->owned_by_self() ? NULL : CompiledMethod_lock,
3092                  Mutex::_no_safepoint_check_flag);
3093   nmethod* osr = osr_nmethods_head();
3094   nmethod* best = NULL;
3095   while (osr != NULL) {
3096     assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
3097     // There can be a time when a c1 osr method exists but we are waiting
3098     // for a c2 version. When c2 completes its osr nmethod we will trash
3099     // the c1 version and only be able to find the c2 version. However,
3100     // while we overflow in the c1 code at back branches we don't want to
3101     // try to switch to the same code that we are already running.
3102 
3103     if (osr->method() == m &&
3104         (bci == InvocationEntryBci || osr->osr_entry_bci() == bci)) {
3105       if (match_level) {
3106         if (osr->comp_level() == comp_level) {
3107           // Found a match - return it.
3108           return osr;
3109         }
3110       } else {
3111         if (best == NULL || (osr->comp_level() > best->comp_level())) {
3112           if (osr->comp_level() == CompLevel_highest_tier) {
3113             // Found the best possible - return it.
3114             return osr;
3115           }
3116           best = osr;
3117         }
3118       }
3119     }
3120     osr = osr->osr_link();
3121   }
3122 
3123   assert(match_level == false || best == NULL, "shouldn't pick up anything if match_level is set");
3124   if (best != NULL && best->comp_level() >= comp_level) {
3125     return best;
3126   }
3127   return NULL;
3128 }
3129 
3130 // -----------------------------------------------------------------------------------------------------
3131 // Printing
3132 
3133 #ifndef PRODUCT
3134 
3135 #define BULLET  " - "
3136 
3137 static const char* state_names[] = {
3138   "allocated", "loaded", "linked", "being_initialized", "fully_initialized", "initialization_error"
3139 };
3140 
3141 static void print_vtable(intptr_t* start, int len, outputStream* st) {
3142   for (int i = 0; i < len; i++) {
3143     intptr_t e = start[i];
3144     st->print("%d : " INTPTR_FORMAT, i, e);
3145     if (MetaspaceObj::is_valid((Metadata*)e)) {
3146       st->print(" ");
3147       ((Metadata*)e)->print_value_on(st);
3148     }
3149     st->cr();
3150   }
3151 }
3152 
3153 static void print_vtable(vtableEntry* start, int len, outputStream* st) {
3154   return print_vtable(reinterpret_cast<intptr_t*>(start), len, st);
3155 }
3156 
3157 void InstanceKlass::print_on(outputStream* st) const {
3158   assert(is_klass(), "must be klass");
3159   Klass::print_on(st);
3160 
3161   st->print(BULLET"instance size:     %d", size_helper());                        st->cr();
3162   st->print(BULLET"klass size:        %d", size());                               st->cr();
3163   st->print(BULLET"access:            "); access_flags().print_on(st);            st->cr();
3164   st->print(BULLET"state:             "); st->print_cr("%s", state_names[_init_state]);
3165   st->print(BULLET"name:              "); name()->print_value_on(st);             st->cr();
3166   st->print(BULLET"super:             "); Metadata::print_value_on_maybe_null(st, super()); st->cr();
3167   st->print(BULLET"sub:               ");
3168   Klass* sub = subklass();
3169   int n;
3170   for (n = 0; sub != NULL; n++, sub = sub->next_sibling()) {
3171     if (n < MaxSubklassPrintSize) {
3172       sub->print_value_on(st);
3173       st->print("   ");
3174     }
3175   }
3176   if (n >= MaxSubklassPrintSize) st->print("(" INTX_FORMAT " more klasses...)", n - MaxSubklassPrintSize);
3177   st->cr();
3178 
3179   if (is_interface()) {
3180     st->print_cr(BULLET"nof implementors:  %d", nof_implementors());
3181     if (nof_implementors() == 1) {
3182       st->print_cr(BULLET"implementor:    ");
3183       st->print("   ");
3184       implementor()->print_value_on(st);
3185       st->cr();
3186     }
3187   }
3188 
3189   st->print(BULLET"arrays:            "); Metadata::print_value_on_maybe_null(st, array_klasses()); st->cr();
3190   st->print(BULLET"methods:           "); methods()->print_value_on(st);                  st->cr();
3191   if (Verbose || WizardMode) {
3192     Array<Method*>* method_array = methods();
3193     for (int i = 0; i < method_array->length(); i++) {
3194       st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3195     }
3196   }
3197   st->print(BULLET"method ordering:   "); method_ordering()->print_value_on(st);      st->cr();
3198   st->print(BULLET"default_methods:   "); default_methods()->print_value_on(st);      st->cr();
3199   if (Verbose && default_methods() != NULL) {
3200     Array<Method*>* method_array = default_methods();
3201     for (int i = 0; i < method_array->length(); i++) {
3202       st->print("%d : ", i); method_array->at(i)->print_value(); st->cr();
3203     }
3204   }
3205   if (default_vtable_indices() != NULL) {
3206     st->print(BULLET"default vtable indices:   "); default_vtable_indices()->print_value_on(st);       st->cr();
3207   }
3208   st->print(BULLET"local interfaces:  "); local_interfaces()->print_value_on(st);      st->cr();
3209   st->print(BULLET"trans. interfaces: "); transitive_interfaces()->print_value_on(st); st->cr();
3210   st->print(BULLET"constants:         "); constants()->print_value_on(st);         st->cr();
3211   if (class_loader_data() != NULL) {
3212     st->print(BULLET"class loader data:  ");
3213     class_loader_data()->print_value_on(st);
3214     st->cr();
3215   }
3216   st->print(BULLET"unsafe anonymous host class:        "); Metadata::print_value_on_maybe_null(st, unsafe_anonymous_host()); st->cr();
3217   if (source_file_name() != NULL) {
3218     st->print(BULLET"source file:       ");
3219     source_file_name()->print_value_on(st);
3220     st->cr();
3221   }
3222   if (source_debug_extension() != NULL) {
3223     st->print(BULLET"source debug extension:       ");
3224     st->print("%s", source_debug_extension());
3225     st->cr();
3226   }
3227   st->print(BULLET"class annotations:       "); class_annotations()->print_value_on(st); st->cr();
3228   st->print(BULLET"class type annotations:  "); class_type_annotations()->print_value_on(st); st->cr();
3229   st->print(BULLET"field annotations:       "); fields_annotations()->print_value_on(st); st->cr();
3230   st->print(BULLET"field type annotations:  "); fields_type_annotations()->print_value_on(st); st->cr();
3231   {
3232     bool have_pv = false;
3233     // previous versions are linked together through the InstanceKlass
3234     for (InstanceKlass* pv_node = previous_versions();
3235          pv_node != NULL;
3236          pv_node = pv_node->previous_versions()) {
3237       if (!have_pv)
3238         st->print(BULLET"previous version:  ");
3239       have_pv = true;
3240       pv_node->constants()->print_value_on(st);
3241     }
3242     if (have_pv) st->cr();
3243   }
3244 
3245   if (generic_signature() != NULL) {
3246     st->print(BULLET"generic signature: ");
3247     generic_signature()->print_value_on(st);
3248     st->cr();
3249   }
3250   st->print(BULLET"inner classes:     "); inner_classes()->print_value_on(st);     st->cr();
3251   st->print(BULLET"nest members:     "); nest_members()->print_value_on(st);     st->cr();
3252   if (java_mirror() != NULL) {
3253     st->print(BULLET"java mirror:       ");
3254     java_mirror()->print_value_on(st);
3255     st->cr();
3256   } else {
3257     st->print_cr(BULLET"java mirror:       NULL");
3258   }
3259   st->print(BULLET"vtable length      %d  (start addr: " INTPTR_FORMAT ")", vtable_length(), p2i(start_of_vtable())); st->cr();
3260   if (vtable_length() > 0 && (Verbose || WizardMode))  print_vtable(start_of_vtable(), vtable_length(), st);
3261   st->print(BULLET"itable length      %d (start addr: " INTPTR_FORMAT ")", itable_length(), p2i(start_of_itable())); st->cr();
3262   if (itable_length() > 0 && (Verbose || WizardMode))  print_vtable(start_of_itable(), itable_length(), st);
3263   st->print_cr(BULLET"---- static fields (%d words):", static_field_size());
3264   FieldPrinter print_static_field(st);
3265   ((InstanceKlass*)this)->do_local_static_fields(&print_static_field);
3266   st->print_cr(BULLET"---- non-static fields (%d words):", nonstatic_field_size());
3267   FieldPrinter print_nonstatic_field(st);
3268   InstanceKlass* ik = const_cast<InstanceKlass*>(this);
3269   ik->do_nonstatic_fields(&print_nonstatic_field);
3270 
3271   st->print(BULLET"non-static oop maps: ");
3272   OopMapBlock* map     = start_of_nonstatic_oop_maps();
3273   OopMapBlock* end_map = map + nonstatic_oop_map_count();
3274   while (map < end_map) {
3275     st->print("%d-%d ", map->offset(), map->offset() + heapOopSize*(map->count() - 1));
3276     map++;
3277   }
3278   st->cr();
3279 }
3280 
3281 #endif //PRODUCT
3282 
3283 void InstanceKlass::print_value_on(outputStream* st) const {
3284   assert(is_klass(), "must be klass");
3285   if (Verbose || WizardMode)  access_flags().print_on(st);
3286   name()->print_value_on(st);
3287 }
3288 
3289 #ifndef PRODUCT
3290 
3291 void FieldPrinter::do_field(fieldDescriptor* fd) {
3292   _st->print(BULLET);
3293   if (_obj == NULL) {
3294     fd->print_on(_st);
3295     _st->cr();
3296   } else {
3297     fd->print_on_for(_st, _obj);
3298     _st->cr();
3299   }
3300 }
3301 
3302 
3303 void InstanceKlass::oop_print_on(oop obj, outputStream* st) {
3304   Klass::oop_print_on(obj, st);
3305 
3306   if (this == SystemDictionary::String_klass()) {
3307     typeArrayOop value  = java_lang_String::value(obj);
3308     juint        length = java_lang_String::length(obj);
3309     if (value != NULL &&
3310         value->is_typeArray() &&
3311         length <= (juint) value->length()) {
3312       st->print(BULLET"string: ");
3313       java_lang_String::print(obj, st);
3314       st->cr();
3315       if (!WizardMode)  return;  // that is enough
3316     }
3317   }
3318 
3319   st->print_cr(BULLET"---- fields (total size %d words):", oop_size(obj));
3320   FieldPrinter print_field(st, obj);
3321   do_nonstatic_fields(&print_field);
3322 
3323   if (this == SystemDictionary::Class_klass()) {
3324     st->print(BULLET"signature: ");
3325     java_lang_Class::print_signature(obj, st);
3326     st->cr();
3327     Klass* mirrored_klass = java_lang_Class::as_Klass(obj);
3328     st->print(BULLET"fake entry for mirror: ");
3329     Metadata::print_value_on_maybe_null(st, mirrored_klass);
3330     st->cr();
3331     Klass* array_klass = java_lang_Class::array_klass_acquire(obj);
3332     st->print(BULLET"fake entry for array: ");
3333     Metadata::print_value_on_maybe_null(st, array_klass);
3334     st->cr();
3335     st->print_cr(BULLET"fake entry for oop_size: %d", java_lang_Class::oop_size(obj));
3336     st->print_cr(BULLET"fake entry for static_oop_field_count: %d", java_lang_Class::static_oop_field_count(obj));
3337     Klass* real_klass = java_lang_Class::as_Klass(obj);
3338     if (real_klass != NULL && real_klass->is_instance_klass()) {
3339       InstanceKlass::cast(real_klass)->do_local_static_fields(&print_field);
3340     }
3341   } else if (this == SystemDictionary::MethodType_klass()) {
3342     st->print(BULLET"signature: ");
3343     java_lang_invoke_MethodType::print_signature(obj, st);
3344     st->cr();
3345   }
3346 }
3347 
3348 bool InstanceKlass::verify_itable_index(int i) {
3349   int method_count = klassItable::method_count_for_interface(this);
3350   assert(i >= 0 && i < method_count, "index out of bounds");
3351   return true;
3352 }
3353 
3354 #endif //PRODUCT
3355 
3356 void InstanceKlass::oop_print_value_on(oop obj, outputStream* st) {
3357   st->print("a ");
3358   name()->print_value_on(st);
3359   obj->print_address_on(st);
3360   if (this == SystemDictionary::String_klass()
3361       && java_lang_String::value(obj) != NULL) {
3362     ResourceMark rm;
3363     int len = java_lang_String::length(obj);
3364     int plen = (len < 24 ? len : 12);
3365     char* str = java_lang_String::as_utf8_string(obj, 0, plen);
3366     st->print(" = \"%s\"", str);
3367     if (len > plen)
3368       st->print("...[%d]", len);
3369   } else if (this == SystemDictionary::Class_klass()) {
3370     Klass* k = java_lang_Class::as_Klass(obj);
3371     st->print(" = ");
3372     if (k != NULL) {
3373       k->print_value_on(st);
3374     } else {
3375       const char* tname = type2name(java_lang_Class::primitive_type(obj));
3376       st->print("%s", tname ? tname : "type?");
3377     }
3378   } else if (this == SystemDictionary::MethodType_klass()) {
3379     st->print(" = ");
3380     java_lang_invoke_MethodType::print_signature(obj, st);
3381   } else if (java_lang_boxing_object::is_instance(obj)) {
3382     st->print(" = ");
3383     java_lang_boxing_object::print(obj, st);
3384   } else if (this == SystemDictionary::LambdaForm_klass()) {
3385     oop vmentry = java_lang_invoke_LambdaForm::vmentry(obj);
3386     if (vmentry != NULL) {
3387       st->print(" => ");
3388       vmentry->print_value_on(st);
3389     }
3390   } else if (this == SystemDictionary::MemberName_klass()) {
3391     Metadata* vmtarget = java_lang_invoke_MemberName::vmtarget(obj);
3392     if (vmtarget != NULL) {
3393       st->print(" = ");
3394       vmtarget->print_value_on(st);
3395     } else {
3396       java_lang_invoke_MemberName::clazz(obj)->print_value_on(st);
3397       st->print(".");
3398       java_lang_invoke_MemberName::name(obj)->print_value_on(st);
3399     }
3400   }
3401 }
3402 
3403 const char* InstanceKlass::internal_name() const {
3404   return external_name();
3405 }
3406 
3407 void InstanceKlass::print_class_load_logging(ClassLoaderData* loader_data,
3408                                              const char* module_name,
3409                                              const ClassFileStream* cfs) const {
3410   if (!log_is_enabled(Info, class, load)) {
3411     return;
3412   }
3413 
3414   ResourceMark rm;
3415   LogMessage(class, load) msg;
3416   stringStream info_stream;
3417 
3418   // Name and class hierarchy info
3419   info_stream.print("%s", external_name());
3420 
3421   // Source
3422   if (cfs != NULL) {
3423     if (cfs->source() != NULL) {
3424       if (module_name != NULL) {
3425         // When the boot loader created the stream, it didn't know the module name
3426         // yet. Let's format it now.
3427         if (cfs->from_boot_loader_modules_image()) {
3428           info_stream.print(" source: jrt:/%s", module_name);
3429         } else {
3430           info_stream.print(" source: %s", cfs->source());
3431         }
3432       } else {
3433         info_stream.print(" source: %s", cfs->source());
3434       }
3435     } else if (loader_data == ClassLoaderData::the_null_class_loader_data()) {
3436       Thread* THREAD = Thread::current();
3437       Klass* caller =
3438             THREAD->is_Java_thread()
3439                 ? ((JavaThread*)THREAD)->security_get_caller_class(1)
3440                 : NULL;
3441       // caller can be NULL, for example, during a JVMTI VM_Init hook
3442       if (caller != NULL) {
3443         info_stream.print(" source: instance of %s", caller->external_name());
3444       } else {
3445         // source is unknown
3446       }
3447     } else {
3448       oop class_loader = loader_data->class_loader();
3449       info_stream.print(" source: %s", class_loader->klass()->external_name());
3450     }
3451   } else {
3452     assert(this->is_shared(), "must be");
3453     if (MetaspaceShared::is_shared_dynamic((void*)this)) {
3454       info_stream.print(" source: shared objects file (top)");
3455     } else {
3456       info_stream.print(" source: shared objects file");
3457     }
3458   }
3459 
3460   msg.info("%s", info_stream.as_string());
3461 
3462   if (log_is_enabled(Debug, class, load)) {
3463     stringStream debug_stream;
3464 
3465     // Class hierarchy info
3466     debug_stream.print(" klass: " INTPTR_FORMAT " super: " INTPTR_FORMAT,
3467                        p2i(this),  p2i(superklass()));
3468 
3469     // Interfaces
3470     if (local_interfaces() != NULL && local_interfaces()->length() > 0) {
3471       debug_stream.print(" interfaces:");
3472       int length = local_interfaces()->length();
3473       for (int i = 0; i < length; i++) {
3474         debug_stream.print(" " INTPTR_FORMAT,
3475                            p2i(InstanceKlass::cast(local_interfaces()->at(i))));
3476       }
3477     }
3478 
3479     // Class loader
3480     debug_stream.print(" loader: [");
3481     loader_data->print_value_on(&debug_stream);
3482     debug_stream.print("]");
3483 
3484     // Classfile checksum
3485     if (cfs) {
3486       debug_stream.print(" bytes: %d checksum: %08x",
3487                          cfs->length(),
3488                          ClassLoader::crc32(0, (const char*)cfs->buffer(),
3489                          cfs->length()));
3490     }
3491 
3492     msg.debug("%s", debug_stream.as_string());
3493   }
3494 }
3495 
3496 #if INCLUDE_SERVICES
3497 // Size Statistics
3498 void InstanceKlass::collect_statistics(KlassSizeStats *sz) const {
3499   Klass::collect_statistics(sz);
3500 
3501   sz->_inst_size  = wordSize * size_helper();
3502   sz->_vtab_bytes = wordSize * vtable_length();
3503   sz->_itab_bytes = wordSize * itable_length();
3504   sz->_nonstatic_oopmap_bytes = wordSize * nonstatic_oop_map_size();
3505 
3506   int n = 0;
3507   n += (sz->_methods_array_bytes         = sz->count_array(methods()));
3508   n += (sz->_method_ordering_bytes       = sz->count_array(method_ordering()));
3509   n += (sz->_local_interfaces_bytes      = sz->count_array(local_interfaces()));
3510   n += (sz->_transitive_interfaces_bytes = sz->count_array(transitive_interfaces()));
3511   n += (sz->_fields_bytes                = sz->count_array(fields()));
3512   n += (sz->_inner_classes_bytes         = sz->count_array(inner_classes()));
3513   n += (sz->_nest_members_bytes          = sz->count_array(nest_members()));
3514   sz->_ro_bytes += n;
3515 
3516   const ConstantPool* cp = constants();
3517   if (cp) {
3518     cp->collect_statistics(sz);
3519   }
3520 
3521   const Annotations* anno = annotations();
3522   if (anno) {
3523     anno->collect_statistics(sz);
3524   }
3525 
3526   const Array<Method*>* methods_array = methods();
3527   if (methods()) {
3528     for (int i = 0; i < methods_array->length(); i++) {
3529       Method* method = methods_array->at(i);
3530       if (method) {
3531         sz->_method_count ++;
3532         method->collect_statistics(sz);
3533       }
3534     }
3535   }
3536 }
3537 #endif // INCLUDE_SERVICES
3538 
3539 // Verification
3540 
3541 class VerifyFieldClosure: public BasicOopIterateClosure {
3542  protected:
3543   template <class T> void do_oop_work(T* p) {
3544     oop obj = RawAccess<>::oop_load(p);
3545     if (!oopDesc::is_oop_or_null(obj)) {
3546       tty->print_cr("Failed: " PTR_FORMAT " -> " PTR_FORMAT, p2i(p), p2i(obj));
3547       Universe::print_on(tty);
3548       guarantee(false, "boom");
3549     }
3550   }
3551  public:
3552   virtual void do_oop(oop* p)       { VerifyFieldClosure::do_oop_work(p); }
3553   virtual void do_oop(narrowOop* p) { VerifyFieldClosure::do_oop_work(p); }
3554 };
3555 
3556 void InstanceKlass::verify_on(outputStream* st) {
3557 #ifndef PRODUCT
3558   // Avoid redundant verifies; this really should be in product.
3559   if (_verify_count == Universe::verify_count()) return;
3560   _verify_count = Universe::verify_count();
3561 #endif
3562 
3563   // Verify Klass
3564   Klass::verify_on(st);
3565 
3566   // Verify that klass is present in ClassLoaderData
3567   guarantee(class_loader_data()->contains_klass(this),
3568             "this class isn't found in class loader data");
3569 
3570   // Verify vtables
3571   if (is_linked()) {
3572     // $$$ This used to be done only for m/s collections.  Doing it
3573     // always seemed a valid generalization.  (DLD -- 6/00)
3574     vtable().verify(st);
3575   }
3576 
3577   // Verify first subklass
3578   if (subklass() != NULL) {
3579     guarantee(subklass()->is_klass(), "should be klass");
3580   }
3581 
3582   // Verify siblings
3583   Klass* super = this->super();
3584   Klass* sib = next_sibling();
3585   if (sib != NULL) {
3586     if (sib == this) {
3587       fatal("subclass points to itself " PTR_FORMAT, p2i(sib));
3588     }
3589 
3590     guarantee(sib->is_klass(), "should be klass");
3591     guarantee(sib->super() == super, "siblings should have same superklass");
3592   }
3593 
3594   // Verify local interfaces
3595   if (local_interfaces()) {
3596     Array<InstanceKlass*>* local_interfaces = this->local_interfaces();
3597     for (int j = 0; j < local_interfaces->length(); j++) {
3598       InstanceKlass* e = local_interfaces->at(j);
3599       guarantee(e->is_klass() && e->is_interface(), "invalid local interface");
3600     }
3601   }
3602 
3603   // Verify transitive interfaces
3604   if (transitive_interfaces() != NULL) {
3605     Array<InstanceKlass*>* transitive_interfaces = this->transitive_interfaces();
3606     for (int j = 0; j < transitive_interfaces->length(); j++) {
3607       InstanceKlass* e = transitive_interfaces->at(j);
3608       guarantee(e->is_klass() && e->is_interface(), "invalid transitive interface");
3609     }
3610   }
3611 
3612   // Verify methods
3613   if (methods() != NULL) {
3614     Array<Method*>* methods = this->methods();
3615     for (int j = 0; j < methods->length(); j++) {
3616       guarantee(methods->at(j)->is_method(), "non-method in methods array");
3617     }
3618     for (int j = 0; j < methods->length() - 1; j++) {
3619       Method* m1 = methods->at(j);
3620       Method* m2 = methods->at(j + 1);
3621       guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
3622     }
3623   }
3624 
3625   // Verify method ordering
3626   if (method_ordering() != NULL) {
3627     Array<int>* method_ordering = this->method_ordering();
3628     int length = method_ordering->length();
3629     if (JvmtiExport::can_maintain_original_method_order() ||
3630         ((UseSharedSpaces || DumpSharedSpaces) && length != 0)) {
3631       guarantee(length == methods()->length(), "invalid method ordering length");
3632       jlong sum = 0;
3633       for (int j = 0; j < length; j++) {
3634         int original_index = method_ordering->at(j);
3635         guarantee(original_index >= 0, "invalid method ordering index");
3636         guarantee(original_index < length, "invalid method ordering index");
3637         sum += original_index;
3638       }
3639       // Verify sum of indices 0,1,...,length-1
3640       guarantee(sum == ((jlong)length*(length-1))/2, "invalid method ordering sum");
3641     } else {
3642       guarantee(length == 0, "invalid method ordering length");
3643     }
3644   }
3645 
3646   // Verify default methods
3647   if (default_methods() != NULL) {
3648     Array<Method*>* methods = this->default_methods();
3649     for (int j = 0; j < methods->length(); j++) {
3650       guarantee(methods->at(j)->is_method(), "non-method in methods array");
3651     }
3652     for (int j = 0; j < methods->length() - 1; j++) {
3653       Method* m1 = methods->at(j);
3654       Method* m2 = methods->at(j + 1);
3655       guarantee(m1->name()->fast_compare(m2->name()) <= 0, "methods not sorted correctly");
3656     }
3657   }
3658 
3659   // Verify JNI static field identifiers
3660   if (jni_ids() != NULL) {
3661     jni_ids()->verify(this);
3662   }
3663 
3664   // Verify other fields
3665   if (array_klasses() != NULL) {
3666     guarantee(array_klasses()->is_klass(), "should be klass");
3667   }
3668   if (constants() != NULL) {
3669     guarantee(constants()->is_constantPool(), "should be constant pool");
3670   }
3671   const Klass* anonymous_host = unsafe_anonymous_host();
3672   if (anonymous_host != NULL) {
3673     guarantee(anonymous_host->is_klass(), "should be klass");
3674   }
3675 }
3676 
3677 void InstanceKlass::oop_verify_on(oop obj, outputStream* st) {
3678   Klass::oop_verify_on(obj, st);
3679   VerifyFieldClosure blk;
3680   obj->oop_iterate(&blk);
3681 }
3682 
3683 
3684 // JNIid class for jfieldIDs only
3685 // Note to reviewers:
3686 // These JNI functions are just moved over to column 1 and not changed
3687 // in the compressed oops workspace.
3688 JNIid::JNIid(Klass* holder, int offset, JNIid* next) {
3689   _holder = holder;
3690   _offset = offset;
3691   _next = next;
3692   debug_only(_is_static_field_id = false;)
3693 }
3694 
3695 
3696 JNIid* JNIid::find(int offset) {
3697   JNIid* current = this;
3698   while (current != NULL) {
3699     if (current->offset() == offset) return current;
3700     current = current->next();
3701   }
3702   return NULL;
3703 }
3704 
3705 void JNIid::deallocate(JNIid* current) {
3706   while (current != NULL) {
3707     JNIid* next = current->next();
3708     delete current;
3709     current = next;
3710   }
3711 }
3712 
3713 
3714 void JNIid::verify(Klass* holder) {
3715   int first_field_offset  = InstanceMirrorKlass::offset_of_static_fields();
3716   int end_field_offset;
3717   end_field_offset = first_field_offset + (InstanceKlass::cast(holder)->static_field_size() * wordSize);
3718 
3719   JNIid* current = this;
3720   while (current != NULL) {
3721     guarantee(current->holder() == holder, "Invalid klass in JNIid");
3722 #ifdef ASSERT
3723     int o = current->offset();
3724     if (current->is_static_field_id()) {
3725       guarantee(o >= first_field_offset  && o < end_field_offset,  "Invalid static field offset in JNIid");
3726     }
3727 #endif
3728     current = current->next();
3729   }
3730 }
3731 
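// Update the class initialization state.  Except for a reset back to the
// 'allocated' state, transitions must move forward; shared classes may also
// re-enter their current state.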
3732 void InstanceKlass::set_init_state(ClassState state) {
3733 #ifdef ASSERT
3734   bool good_state = is_shared() ? (_init_state <= state)
3735                                 : (_init_state < state);
3736   assert(good_state || state == allocated, "illegal state transition");
3737 #endif
3738   assert(_init_thread == NULL, "should be cleared before state change");
3739   _init_state = (u1)state;
3740 }
3741 
3742 #if INCLUDE_JVMTI
3743 
3744 // RedefineClasses() support for previous versions
3745 
3746 // Globally, there is at least one previous version of a class to walk
3747 // during class unloading, which is saved because old methods in the class
3748 // are still running.   Otherwise the previous version list is cleaned up.
3749 bool InstanceKlass::_has_previous_versions = false;
3750 
3751 // Returns true if there are previous versions of a class for class
3752 // unloading only. Also resets the flag to false. purge_previous_version_list
3753 // will set the flag to true if there are any left, i.e., if there's any
3754 // work to do for next time. This is to avoid the expensive code cache
3755 // walk in CLDG::clean_deallocate_lists().
3756 bool InstanceKlass::has_previous_versions_and_reset() {
3757   bool ret = _has_previous_versions;
3758   log_trace(redefine, class, iklass, purge)("Class unloading: has_previous_versions = %s",
3759      ret ? "true" : "false");
3760   _has_previous_versions = false;
3761   return ret;
3762 }
3763 
3764 // Purge previous versions before adding new previous versions of the class and
3765 // during class unloading.
3766 void InstanceKlass::purge_previous_version_list() {
3767   assert(SafepointSynchronize::is_at_safepoint(), "only called at safepoint");
3768   assert(has_been_redefined(), "Should only be called for main class");
3769 
3770   // Quick exit.
3771   if (previous_versions() == NULL) {
3772     return;
3773   }
3774 
3775   // This klass has previous versions so see what we can cleanup
3776   // while it is safe to do so.
3777 
3778   int deleted_count = 0;    // leave debugging breadcrumbs
3779   int live_count = 0;
3780   ClassLoaderData* loader_data = class_loader_data();
3781   assert(loader_data != NULL, "should never be null");
3782 
3783   ResourceMark rm;
3784   log_trace(redefine, class, iklass, purge)("%s: previous versions", external_name());
3785 
3786   // previous versions are linked together through the InstanceKlass
3787   InstanceKlass* pv_node = previous_versions();
3788   InstanceKlass* last = this;
3789   int version = 0;
3790 
3791   // check the previous versions list
3792   for (; pv_node != NULL; ) {
3793 
3794     ConstantPool* pvcp = pv_node->constants();
3795     assert(pvcp != NULL, "cp ref was unexpectedly cleared");
3796 
3797     if (!pvcp->on_stack()) {
3798       // If the constant pool isn't on stack, none of the methods
3799       // are executing.  Unlink this previous_version.
3800       // The previous version InstanceKlass is on the ClassLoaderData deallocate list
3801       // so will be deallocated during the next phase of class unloading.
3802       log_trace(redefine, class, iklass, purge)
3803         ("previous version " INTPTR_FORMAT " is dead.", p2i(pv_node));
3804       // For debugging purposes.
3805       pv_node->set_is_scratch_class();
3806       // Unlink from previous version list.
3807       assert(pv_node->class_loader_data() == loader_data, "wrong loader_data");
3808       InstanceKlass* next = pv_node->previous_versions();
3809       pv_node->link_previous_versions(NULL);   // point next to NULL
3810       last->link_previous_versions(next);
3811       // Add to the deallocate list after unlinking
3812       loader_data->add_to_deallocate_list(pv_node);
3813       pv_node = next;
3814       deleted_count++;
3815       version++;
3816       continue;
3817     } else {
3818       log_trace(redefine, class, iklass, purge)("previous version " INTPTR_FORMAT " is alive", p2i(pv_node));
3819       assert(pvcp->pool_holder() != NULL, "Constant pool with no holder");
3820       guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
3821       live_count++;
3822       // found a previous version for next time we do class unloading
3823       _has_previous_versions = true;
3824     }
3825 
3826     // At least one method is live in this previous version.
3827     // Reset dead EMCP methods not to get breakpoints.
3828     // All methods are deallocated when all of the methods for this class are no
3829     // longer running.
3830     Array<Method*>* method_refs = pv_node->methods();
3831     if (method_refs != NULL) {
3832       log_trace(redefine, class, iklass, purge)("previous methods length=%d", method_refs->length());
3833       for (int j = 0; j < method_refs->length(); j++) {
3834         Method* method = method_refs->at(j);
3835 
3836         if (!method->on_stack()) {
3837           // no breakpoints for non-running methods
3838           if (method->is_running_emcp()) {
3839             method->set_running_emcp(false);
3840           }
3841         } else {
3842           assert (method->is_obsolete() || method->is_running_emcp(),
3843                   "emcp method cannot run after emcp bit is cleared");
3844           log_trace(redefine, class, iklass, purge)
3845             ("purge: %s(%s): prev method @%d in version @%d is alive",
3846              method->name()->as_C_string(), method->signature()->as_C_string(), j, version);
3847         }
3848       }
3849     }
3850     // next previous version
3851     last = pv_node;
3852     pv_node = pv_node->previous_versions();
3853     version++;
3854   }
3855   log_trace(redefine, class, iklass, purge)
3856     ("previous version stats: live=%d, deleted=%d", live_count, deleted_count);
3857 }
3858 
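// When a redefinition produces a mix of EMCP and obsolete methods, walk the
// existing previous versions and mark any still-EMCP copies of the newly
// obsolete methods as obsolete as well.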
3859 void InstanceKlass::mark_newly_obsolete_methods(Array<Method*>* old_methods,
3860                                                 int emcp_method_count) {
3861   int obsolete_method_count = old_methods->length() - emcp_method_count;
3862 
3863   if (emcp_method_count != 0 && obsolete_method_count != 0 &&
3864       _previous_versions != NULL) {
3865     // We have a mix of obsolete and EMCP methods so we have to
3866     // clear out any matching EMCP method entries the hard way.
3867     int local_count = 0;
3868     for (int i = 0; i < old_methods->length(); i++) {
3869       Method* old_method = old_methods->at(i);
3870       if (old_method->is_obsolete()) {
3871         // only obsolete methods are interesting
3872         Symbol* m_name = old_method->name();
3873         Symbol* m_signature = old_method->signature();
3874 
3875         // previous versions are linked together through the InstanceKlass
3876         int j = 0;
3877         for (InstanceKlass* prev_version = _previous_versions;
3878              prev_version != NULL;
3879              prev_version = prev_version->previous_versions(), j++) {
3880 
3881           Array<Method*>* method_refs = prev_version->methods();
3882           for (int k = 0; k < method_refs->length(); k++) {
3883             Method* method = method_refs->at(k);
3884 
3885             if (!method->is_obsolete() &&
3886                 method->name() == m_name &&
3887                 method->signature() == m_signature) {
3888               // The current RedefineClasses() call has made all EMCP
3889               // versions of this method obsolete so mark it as obsolete
3890               log_trace(redefine, class, iklass, add)
3891                 ("%s(%s): flush obsolete method @%d in version @%d",
3892                  m_name->as_C_string(), m_signature->as_C_string(), k, j);
3893 
3894               method->set_is_obsolete();
3895               break;
3896             }
3897           }
3898 
3899           // The previous loop may not find a matching EMCP method, but
3900           // that doesn't mean that we can optimize and not go any
3901           // further back in the PreviousVersion generations. The EMCP
3902           // method for this generation could have already been made obsolete,
3903           // but there still may be an older EMCP method that has not
3904           // been made obsolete.
3905         }
3906 
3907         if (++local_count >= obsolete_method_count) {
3908           // no more obsolete methods so bail out now
3909           break;
3910         }
3911       }
3912     }
3913   }
3914 }
3915 
3916 // Save the scratch_class as the previous version if any of the methods are running.
3917 // The previous_versions are used to set breakpoints in EMCP methods and they are
3918 // also used to clean MethodData links to redefined methods that are no longer running.
3919 void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
3920                                          int emcp_method_count) {
3921   assert(Thread::current()->is_VM_thread(),
3922          "only VMThread can add previous versions");
3923 
3924   ResourceMark rm;
3925   log_trace(redefine, class, iklass, add)
3926     ("adding previous version ref for %s, EMCP_cnt=%d", scratch_class->external_name(), emcp_method_count);
3927 
3928   // Clean out old previous versions for this class
3929   purge_previous_version_list();
3930 
3931   // Mark newly obsolete methods in remaining previous versions.  An EMCP method from
3932   // a previous redefinition may be made obsolete by this redefinition.
3933   Array<Method*>* old_methods = scratch_class->methods();
3934   mark_newly_obsolete_methods(old_methods, emcp_method_count);
3935 
3936   // If the constant pool for this previous version of the class
3937   // is not marked as being on the stack, then none of the methods
3938   // in this previous version of the class are on the stack so
3939   // we don't need to add this as a previous version.
3940   ConstantPool* cp_ref = scratch_class->constants();
3941   if (!cp_ref->on_stack()) {
3942     log_trace(redefine, class, iklass, add)("scratch class not added; no methods are running");
3943     // For debugging purposes.
3944     scratch_class->set_is_scratch_class();
3945     scratch_class->class_loader_data()->add_to_deallocate_list(scratch_class);
3946     return;
3947   }
3948 
3949   if (emcp_method_count != 0) {
3950     // At least one method is still running, check for EMCP methods
3951     for (int i = 0; i < old_methods->length(); i++) {
3952       Method* old_method = old_methods->at(i);
3953       if (!old_method->is_obsolete() && old_method->on_stack()) {
3954         // if EMCP method (not obsolete) is on the stack, mark as EMCP so that
3955         // we can add breakpoints for it.
3956 
3957         // We set the method->on_stack bit during safepoints for class redefinition
3958         // and use this bit to set the is_running_emcp bit.
3959         // After the safepoint, the on_stack bit is cleared and the running emcp
3960         // method may exit.   If so, we would set a breakpoint in a method that
3961         // is never reached, but this won't be noticeable to the programmer.
3962         old_method->set_running_emcp(true);
3963         log_trace(redefine, class, iklass, add)
3964           ("EMCP method %s is on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
3965       } else if (!old_method->is_obsolete()) {
3966         log_trace(redefine, class, iklass, add)
3967           ("EMCP method %s is NOT on_stack " INTPTR_FORMAT, old_method->name_and_sig_as_C_string(), p2i(old_method));
3968       }
3969     }
3970   }
3971 
3972   // Add previous version if any methods are still running.
3973   // Set has_previous_version flag for processing during class unloading.
3974   _has_previous_versions = true;
3975   log_trace(redefine, class, iklass, add) ("scratch class added; one of its methods is on_stack.");
3976   assert(scratch_class->previous_versions() == NULL, "shouldn't have a previous version");
3977   scratch_class->link_previous_versions(previous_versions());
3978   link_previous_versions(scratch_class);
3979 } // end add_previous_version()
3980 
3981 #endif // INCLUDE_JVMTI
3982 
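// Find the method whose method_idnum matches idnum.  The idnum slot is tried
// first, then a linear search is done; returns NULL if no match is found.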
3983 Method* InstanceKlass::method_with_idnum(int idnum) {
3984   Method* m = NULL;
3985   if (idnum < methods()->length()) {
3986     m = methods()->at(idnum);
3987   }
3988   if (m == NULL || m->method_idnum() != idnum) {
3989     for (int index = 0; index < methods()->length(); ++index) {
3990       m = methods()->at(index);
3991       if (m->method_idnum() == idnum) {
3992         return m;
3993       }
3994     }
3995     // None found, return null for the caller to handle.
3996     return NULL;
3997   }
3998   return m;
3999 }
4000 
4001 
4002 Method* InstanceKlass::method_with_orig_idnum(int idnum) {
4003   if (idnum >= methods()->length()) {
4004     return NULL;
4005   }
4006   Method* m = methods()->at(idnum);
4007   if (m != NULL && m->orig_method_idnum() == idnum) {
4008     return m;
4009   }
4010   // Obsolete method idnum does not match the original idnum
4011   for (int index = 0; index < methods()->length(); ++index) {
4012     m = methods()->at(index);
4013     if (m->orig_method_idnum() == idnum) {
4014       return m;
4015     }
4016   }
4017   // None found, return null for the caller to handle.
4018   return NULL;
4019 }
4020 
4021 
4022 Method* InstanceKlass::method_with_orig_idnum(int idnum, int version) {
4023   InstanceKlass* holder = get_klass_version(version);
4024   if (holder == NULL) {
4025     return NULL; // The version of klass is gone, no method is found
4026   }
4027   Method* method = holder->method_with_orig_idnum(idnum);
4028   return method;
4029 }
4030 
4031 #if INCLUDE_JVMTI
4032 JvmtiCachedClassFileData* InstanceKlass::get_cached_class_file() {
4033   return _cached_class_file;
4034 }
4035 
4036 jint InstanceKlass::get_cached_class_file_len() {
4037   return VM_RedefineClasses::get_cached_class_file_len(_cached_class_file);
4038 }
4039 
4040 unsigned char * InstanceKlass::get_cached_class_file_bytes() {
4041   return VM_RedefineClasses::get_cached_class_file_bytes(_cached_class_file);
4042 }
4043 #endif