/*
 * Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "precompiled.hpp"

#include "code/codeCache.hpp"
#include "memory/metadataFactory.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"

#include "classfile/stringTable.hpp"
#include "classfile/classLoaderData.hpp"

#include "prims/whitebox.hpp"
#include "prims/wbtestmethods/parserTests.hpp"

#include "runtime/thread.hpp"
#include "runtime/arguments.hpp"
#include "runtime/interfaceSupport.hpp"
#include "runtime/os.hpp"
#include "runtime/vm_version.hpp"

#include "utilities/array.hpp"
#include "utilities/debug.hpp"
#include "utilities/macros.hpp"
#include "utilities/exceptions.hpp"

#if INCLUDE_ALL_GCS
#include "gc_implementation/parallelScavenge/parallelScavengeHeap.inline.hpp"
#include "gc_implementation/g1/concurrentMark.hpp"
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#include "gc_implementation/g1/heapRegionRemSet.hpp"
#endif // INCLUDE_ALL_GCS

#if INCLUDE_NMT
#include "services/mallocSiteTable.hpp"
#include "services/memTracker.hpp"
#include "utilities/nativeCallStack.hpp"
#endif // INCLUDE_NMT

#include "compiler/compileBroker.hpp"
#include "runtime/compilationPolicy.hpp"

PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC

#define SIZE_T_MAX_VALUE ((size_t) -1)

bool WhiteBox::_used = false;

WB_ENTRY(jlong, WB_GetObjectAddress(JNIEnv* env, jobject o, jobject obj))
  return (jlong)(void*)JNIHandles::resolve(obj);
WB_END

WB_ENTRY(jint, WB_GetHeapOopSize(JNIEnv* env, jobject o))
  return heapOopSize;
WB_END


class WBIsKlassAliveClosure : public KlassClosure {
    Symbol* _name;
    bool _found;
  public:
    WBIsKlassAliveClosure(Symbol* name) : _name(name), _found(false) {}

    void do_klass(Klass* k) {
      if (_found) return;
      Symbol* ksym = k->name();
      if (ksym->fast_compare(_name) == 0) {
        _found = true;
      }
    }

    bool found() const {
      return _found;
    }
};

WB_ENTRY(jboolean, WB_IsClassAlive(JNIEnv* env, jobject target, jstring name))
  Handle h_name = JNIHandles::resolve(name);
  if (h_name.is_null()) return false;
  Symbol* sym = java_lang_String::as_symbol(h_name, CHECK_false);
  TempNewSymbol tsym(sym); // Make sure to decrement reference count on sym on return

  WBIsKlassAliveClosure closure(sym);
  ClassLoaderDataGraph::classes_do(&closure);

  return closure.found();
WB_END

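// Returns the largest heap size that still allows the use of compressed oops,
// as computed by Arguments::max_heap_for_compressed_oops().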
WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) {
  return (jlong)Arguments::max_heap_for_compressed_oops();
}
WB_END

WB_ENTRY(void, WB_PrintHeapSizes(JNIEnv* env, jobject o)) {
  CollectorPolicy * p = Universe::heap()->collector_policy();
  gclog_or_tty->print_cr("Minimum heap "SIZE_FORMAT" Initial heap "
    SIZE_FORMAT" Maximum heap "SIZE_FORMAT" Space alignment "SIZE_FORMAT" Heap alignment "SIZE_FORMAT,
    p->min_heap_byte_size(), p->initial_heap_byte_size(), p->max_heap_byte_size(),
    p->space_alignment(), p->heap_alignment());
}
WB_END

#ifndef PRODUCT
// Forward declaration
void TestReservedSpace_test();
void TestReserveMemorySpecial_test();
void TestVirtualSpace_test();
void TestMetaspaceAux_test();
#endif

WB_ENTRY(void, WB_RunMemoryUnitTests(JNIEnv* env, jobject o))
#ifndef PRODUCT
  TestReservedSpace_test();
  TestReserveMemorySpecial_test();
  TestVirtualSpace_test();
  TestMetaspaceAux_test();
#endif
WB_END

WB_ENTRY(void, WB_ReadFromNoaccessArea(JNIEnv* env, jobject o))
  size_t granularity = os::vm_allocation_granularity();
  ReservedHeapSpace rhs(100 * granularity, granularity, false, NULL);
  VirtualSpace vs;
  vs.initialize(rhs, 50 * granularity);

  // Check that the required constraints are satisfied
  if (!( UseCompressedOops && rhs.base() != NULL &&
         Universe::narrow_oop_base() != NULL &&
         Universe::narrow_oop_use_implicit_null_checks() )) {
    tty->print_cr("WB_ReadFromNoaccessArea method is useless:\n "
                  "\tUseCompressedOops is %d\n"
                  "\trhs.base() is "PTR_FORMAT"\n"
                  "\tUniverse::narrow_oop_base() is "PTR_FORMAT"\n"
                  "\tUniverse::narrow_oop_use_implicit_null_checks() is %d",
                  UseCompressedOops,
                  rhs.base(),
                  Universe::narrow_oop_base(),
                  Universe::narrow_oop_use_implicit_null_checks());
    return;
  }
  tty->print_cr("Reading from no access area... ");
  tty->print_cr("*(vs.low_boundary() - rhs.noaccess_prefix() / 2 ) = %c",
                *(vs.low_boundary() - rhs.noaccess_prefix() / 2 ));
WB_END

static jint wb_stress_virtual_space_resize(size_t reserved_space_size,
                                           size_t magnitude, size_t iterations) {
  size_t granularity = os::vm_allocation_granularity();
  ReservedHeapSpace rhs(reserved_space_size * granularity, granularity, false, NULL);
  VirtualSpace vs;
  if (!vs.initialize(rhs, 0)) {
    tty->print_cr("Failed to initialize VirtualSpace. Can't proceed.");
    return 3;
  }

  long seed = os::random();
  tty->print_cr("Random seed is %ld", seed);
  os::init_random(seed);

  for (size_t i = 0; i < iterations; i++) {

    // Whether we will shrink or grow
    bool shrink = os::random() % 2L == 0;

    // Get random delta to resize virtual space
    size_t delta = (size_t)os::random() % magnitude;

    // If we are about to shrink virtual space below zero, then expand instead
    if (shrink && vs.committed_size() < delta) {
      shrink = false;
    }

    // Resizing by delta
    if (shrink) {
      vs.shrink_by(delta);
    } else {
      // If expanding fails expand_by will silently return false
      vs.expand_by(delta, true);
    }
  }
  return 0;
}

WB_ENTRY(jint, WB_StressVirtualSpaceResize(JNIEnv* env, jobject o,
        jlong reserved_space_size, jlong magnitude, jlong iterations))
  tty->print_cr("reservedSpaceSize="JLONG_FORMAT", magnitude="JLONG_FORMAT", "
                "iterations="JLONG_FORMAT"\n", reserved_space_size, magnitude,
                iterations);
  if (reserved_space_size < 0 || magnitude < 0 || iterations < 0) {
    tty->print_cr("One of variables printed above is negative. Can't proceed.\n");
    return 1;
  }

  // sizeof(size_t) depends on whether the OS is 32-bit or 64-bit, while sizeof(jlong)
  // is always 8 bytes. That's why we must avoid overflow on 32-bit platforms.
  if (sizeof(size_t) < sizeof(jlong)) {
    jlong size_t_max_value = (jlong) SIZE_T_MAX_VALUE;
    if (reserved_space_size > size_t_max_value || magnitude > size_t_max_value
        || iterations > size_t_max_value) {
      tty->print_cr("One of variables printed above overflows size_t. Can't proceed.\n");
      return 2;
    }
  }

  return wb_stress_virtual_space_resize((size_t) reserved_space_size,
                                        (size_t) magnitude, (size_t) iterations);
WB_END

WB_ENTRY(jboolean, WB_isObjectInOldGen(JNIEnv* env, jobject o, jobject obj))
  oop p = JNIHandles::resolve(obj);
#if INCLUDE_ALL_GCS
  if (UseG1GC) {
    G1CollectedHeap* g1 = G1CollectedHeap::heap();
    const HeapRegion* hr = g1->heap_region_containing(p);
    if (hr == NULL) {
      return false;
    }
    return !(hr->is_young());
  } else if (UseParallelGC) {
    ParallelScavengeHeap* psh = ParallelScavengeHeap::heap();
    return !psh->is_in_young(p);
  }
#endif // INCLUDE_ALL_GCS
  GenCollectedHeap* gch = GenCollectedHeap::heap();
  return !gch->is_in_young(p);
WB_END

WB_ENTRY(jlong, WB_GetObjectSize(JNIEnv* env, jobject o, jobject obj))
  oop p = JNIHandles::resolve(obj);
  return p->size() * HeapWordSize;
WB_END

#if INCLUDE_ALL_GCS
WB_ENTRY(jboolean, WB_G1IsHumongous(JNIEnv* env, jobject o, jobject obj))
  G1CollectedHeap* g1 = G1CollectedHeap::heap();
  oop result = JNIHandles::resolve(obj);
  const HeapRegion* hr = g1->heap_region_containing(result);
  return hr->is_humongous();
WB_END

WB_ENTRY(jlong, WB_G1NumFreeRegions(JNIEnv* env, jobject o))
  G1CollectedHeap* g1 = G1CollectedHeap::heap();
  size_t nr = g1->num_free_regions();
  return (jlong)nr;
WB_END

WB_ENTRY(jboolean, WB_G1InConcurrentMark(JNIEnv* env, jobject o))
  G1CollectedHeap* g1 = G1CollectedHeap::heap();
  ConcurrentMark* cm = g1->concurrent_mark();
  return cm->concurrent_marking_in_progress();
WB_END

WB_ENTRY(jint, WB_G1RegionSize(JNIEnv* env, jobject o))
  return (jint)HeapRegion::GrainBytes;
WB_END
#endif // INCLUDE_ALL_GCS

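// Native Memory Tracking (NMT) test hooks: allocate, reserve, commit and release
// memory under the mtTest memory type so tests can check that NMT records it.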
#if INCLUDE_NMT
// Alloc memory using the test memory type so that we can use that to see if
// NMT picks it up correctly
WB_ENTRY(jlong, WB_NMTMalloc(JNIEnv* env, jobject o, jlong size))
  jlong addr = 0;
  addr = (jlong)(uintptr_t)os::malloc(size, mtTest);
  return addr;
WB_END

// Alloc memory with a pseudo call stack. The test can create pseudo malloc
// allocation sites to stress the malloc tracking.
WB_ENTRY(jlong, WB_NMTMallocWithPseudoStack(JNIEnv* env, jobject o, jlong size, jint pseudo_stack))
  address pc = (address)(size_t)pseudo_stack;
  NativeCallStack stack(&pc, 1);
  return (jlong)os::malloc(size, mtTest, stack);
WB_END

// Free the memory allocated by NMTAllocTest
WB_ENTRY(void, WB_NMTFree(JNIEnv* env, jobject o, jlong mem))
  os::free((void*)(uintptr_t)mem, mtTest);
WB_END

WB_ENTRY(jlong, WB_NMTReserveMemory(JNIEnv* env, jobject o, jlong size))
  jlong addr = 0;

  addr = (jlong)(uintptr_t)os::reserve_memory(size);
  MemTracker::record_virtual_memory_type((address)addr, mtTest);

  return addr;
WB_END


WB_ENTRY(void, WB_NMTCommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  os::commit_memory((char *)(uintptr_t)addr, size, !ExecMem);
  MemTracker::record_virtual_memory_type((address)(uintptr_t)addr, mtTest);
WB_END

WB_ENTRY(void, WB_NMTUncommitMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  os::uncommit_memory((char *)(uintptr_t)addr, size);
WB_END

WB_ENTRY(void, WB_NMTReleaseMemory(JNIEnv* env, jobject o, jlong addr, jlong size))
  os::release_memory((char *)(uintptr_t)addr, size);
WB_END

WB_ENTRY(jboolean, WB_NMTIsDetailSupported(JNIEnv* env))
  return MemTracker::tracking_level() == NMT_detail;
WB_END

WB_ENTRY(void, WB_NMTOverflowHashBucket(JNIEnv* env, jobject o, jlong num))
  address pc = (address)1;
  for (jlong index = 0; index < num; index ++) {
    NativeCallStack stack(&pc, 1);
    os::malloc(0, mtTest, stack);
    pc += MallocSiteTable::hash_buckets();
  }
WB_END

WB_ENTRY(jboolean, WB_NMTChangeTrackingLevel(JNIEnv* env))
  // Test that we can downgrade NMT levels but not upgrade them.
  if (MemTracker::tracking_level() == NMT_off) {
    MemTracker::transition_to(NMT_off);
    return MemTracker::tracking_level() == NMT_off;
  } else {
    assert(MemTracker::tracking_level() == NMT_detail, "Should start out as detail tracking");
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_summary, "Should be summary now");

    // Can't go to detail once NMT is set to summary.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_summary, "Should still be summary now");

    // Shutdown sets tracking level to minimal.
    MemTracker::shutdown();
    assert(MemTracker::tracking_level() == NMT_minimal, "Should be minimal now");

    // Once the tracking level is minimal, we cannot increase to summary.
    // The code ignores this request instead of asserting because if the malloc site
    // table overflows in another thread, it tries to change the tracking level to summary.
    MemTracker::transition_to(NMT_summary);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");

    // Really can never go up to detail, verify that the code would never do this.
    MemTracker::transition_to(NMT_detail);
    assert(MemTracker::tracking_level() == NMT_minimal, "Should still be minimal now");
    return MemTracker::tracking_level() == NMT_minimal;
  }
WB_END
#endif // INCLUDE_NMT

static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
  assert(method != NULL, "method should not be null");
  ThreadToNativeFromVM ttn(thread);
  return env->FromReflectedMethod(method);
}

WB_ENTRY(void, WB_DeoptimizeAll(JNIEnv* env, jobject o))
  MutexLockerEx mu(Compile_lock);
  CodeCache::mark_all_nmethods_for_deoptimization();
  VM_Deoptimize op;
  VMThread::execute(&op);
WB_END

WB_ENTRY(jint, WB_DeoptimizeMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  int result = 0;
  CHECK_JNI_EXCEPTION_(env, result);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code;
  if (is_osr) {
    // Mark all OSR compiled versions for deoptimization
    while ((code = mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false, true)) != NULL) {
      code->mark_for_deoptimization();
      ++result;
    }
  } else {
    code = mh->code();
  }
  if (code != NULL) {
    code->mark_for_deoptimization();
    ++result;
  }
  result += CodeCache::mark_for_deoptimization(mh());
  if (result > 0) {
    VM_Deoptimize op;
    VMThread::execute(&op);
  }
  return result;
WB_END

WB_ENTRY(jboolean, WB_IsMethodCompiled(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
  if (code == NULL) {
    return JNI_FALSE;
  }
  return (code->is_alive() && !code->is_marked_for_deoptimization());
WB_END

WB_ENTRY(jboolean, WB_IsMethodCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  if (is_osr) {
    return CompilationPolicy::can_be_osr_compiled(mh, comp_level);
  } else {
    return CompilationPolicy::can_be_compiled(mh, comp_level);
  }
WB_END

WB_ENTRY(jboolean, WB_IsMethodQueuedForCompilation(JNIEnv* env, jobject o, jobject method))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  MutexLockerEx mu(Compile_lock);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  return mh->queued_for_compilation();
WB_END

WB_ENTRY(jint, WB_GetMethodCompilationLevel(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, CompLevel_none);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
  return (code != NULL ? code->comp_level() : CompLevel_none);
WB_END

WB_ENTRY(void, WB_MakeMethodNotCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION(env);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  if (is_osr) {
    mh->set_not_osr_compilable(comp_level, true /* report */, "WhiteBox");
  } else {
    mh->set_not_compilable(comp_level, true /* report */, "WhiteBox");
  }
WB_END

WB_ENTRY(jint, WB_GetMethodEntryBci(JNIEnv* env, jobject o, jobject method))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, InvocationEntryBci);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false);
  return (code != NULL && code->is_osr_method() ? code->osr_entry_bci() : InvocationEntryBci);
WB_END

WB_ENTRY(jboolean, WB_TestSetDontInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  bool result = mh->dont_inline();
  mh->set_dont_inline(value == JNI_TRUE);
  return result;
WB_END

WB_ENTRY(jint, WB_GetCompileQueueSize(JNIEnv* env, jobject o, jint comp_level))
  if (comp_level == CompLevel_any) {
    return CompileBroker::queue_size(CompLevel_full_optimization) /* C2 */ +
           CompileBroker::queue_size(CompLevel_full_profile) /* C1 */;
  } else {
    return CompileBroker::queue_size(comp_level);
  }
WB_END

WB_ENTRY(jboolean, WB_TestSetForceInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  bool result = mh->force_inline();
  mh->set_force_inline(value == JNI_TRUE);
  return result;
WB_END

WB_ENTRY(jboolean, WB_EnqueueMethodForCompilation(JNIEnv* env, jobject o, jobject method, jint comp_level, jint bci))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* nm = CompileBroker::compile_method(mh, bci, comp_level, mh, mh->invocation_count(), "WhiteBox", THREAD);
  MutexLockerEx mu(Compile_lock);
  return (mh->queued_for_compilation() || nm != NULL);
WB_END

class VM_WhiteBoxOperation : public VM_Operation {
 public:
  VM_WhiteBoxOperation() { }
  VMOp_Type type() const { return VMOp_WhiteBoxOperation; }
  bool allow_nested_vm_operations() const { return true; }
};

class AlwaysFalseClosure : public BoolObjectClosure {
 public:
  bool do_object_b(oop p) { return false; }
};

static AlwaysFalseClosure always_false;

class VM_WhiteBoxCleanMethodData : public VM_WhiteBoxOperation {
 public:
  VM_WhiteBoxCleanMethodData(MethodData* mdo) : _mdo(mdo) { }
  void doit() {
    _mdo->clean_method_data(&always_false);
  }
 private:
  MethodData* _mdo;
};

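// Resets the profiling and compilation state of a method: re-initializes its
// MethodData and MethodCounters and clears the not-compilable flags.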
WB_ENTRY(void, WB_ClearMethodState(JNIEnv* env, jobject o, jobject method))
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION(env);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  MutexLockerEx mu(Compile_lock);
  MethodData* mdo = mh->method_data();
  MethodCounters* mcs = mh->method_counters();

  if (mdo != NULL) {
    mdo->init();
    ResourceMark rm;
    int arg_count = mdo->method()->size_of_parameters();
    for (int i = 0; i < arg_count; i++) {
      mdo->set_arg_modified(i, 0);
    }
    VM_WhiteBoxCleanMethodData op(mdo);
    VMThread::execute(&op);
  }

  mh->clear_not_c1_compilable();
  mh->clear_not_c2_compilable();
  mh->clear_not_c2_osr_compilable();
  NOT_PRODUCT(mh->set_compiled_invocation_count(0));
  if (mcs != NULL) {
    mcs->backedge_counter()->init();
    mcs->invocation_counter()->init();
    mcs->set_interpreter_invocation_count(0);
    mcs->set_interpreter_throwout_count(0);

#ifdef TIERED
    mcs->set_rate(0.0F);
    mh->set_prev_event_count(0);
    mh->set_prev_time(0);
#endif
  }
WB_END

template <typename T>
static bool GetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value, bool (*TAt)(const char*, T*)) {
  if (name == NULL) {
    return false;
  }
  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
  const char* flag_name = env->GetStringUTFChars(name, NULL);
  bool result = (*TAt)(flag_name, value);
  env->ReleaseStringUTFChars(name, flag_name);
  return result;
}

template <typename T>
static bool SetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value, bool (*TAtPut)(const char*, T*, Flag::Flags)) {
  if (name == NULL) {
    return false;
  }
  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
  const char* flag_name = env->GetStringUTFChars(name, NULL);
  bool result = (*TAtPut)(flag_name, value, Flag::INTERNAL);
  env->ReleaseStringUTFChars(name, flag_name);
  return result;
}

template <typename T>
static jobject box(JavaThread* thread, JNIEnv* env, Symbol* name, Symbol* sig, T value) {
  ResourceMark rm(thread);
  jclass clazz = env->FindClass(name->as_C_string());
  CHECK_JNI_EXCEPTION_(env, NULL);
  jmethodID methodID = env->GetStaticMethodID(clazz,
        vmSymbols::valueOf_name()->as_C_string(),
        sig->as_C_string());
  CHECK_JNI_EXCEPTION_(env, NULL);
  jobject result = env->CallStaticObjectMethod(clazz, methodID, value);
  CHECK_JNI_EXCEPTION_(env, NULL);
  return result;
}

static jobject booleanBox(JavaThread* thread, JNIEnv* env, jboolean value) {
  return box(thread, env, vmSymbols::java_lang_Boolean(), vmSymbols::Boolean_valueOf_signature(), value);
}
static jobject integerBox(JavaThread* thread, JNIEnv* env, jint value) {
  return box(thread, env, vmSymbols::java_lang_Integer(), vmSymbols::Integer_valueOf_signature(), value);
}
static jobject longBox(JavaThread* thread, JNIEnv* env, jlong value) {
  return box(thread, env, vmSymbols::java_lang_Long(), vmSymbols::Long_valueOf_signature(), value);
}
/* static jobject floatBox(JavaThread* thread, JNIEnv* env, jfloat value) {
  return box(thread, env, vmSymbols::java_lang_Float(), vmSymbols::Float_valueOf_signature(), value);
}*/
static jobject doubleBox(JavaThread* thread, JNIEnv* env, jdouble value) {
  return box(thread, env, vmSymbols::java_lang_Double(), vmSymbols::Double_valueOf_signature(), value);
}

WB_ENTRY(jobject, WB_GetBooleanVMFlag(JNIEnv* env, jobject o, jstring name))
  bool result;
  if (GetVMFlag <bool> (thread, env, name, &result, &CommandLineFlags::boolAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return booleanBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetIntxVMFlag(JNIEnv* env, jobject o, jstring name))
  intx result;
  if (GetVMFlag <intx> (thread, env, name, &result, &CommandLineFlags::intxAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetUintxVMFlag(JNIEnv* env, jobject o, jstring name))
  uintx result;
  if (GetVMFlag <uintx> (thread, env, name, &result, &CommandLineFlags::uintxAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetUint64VMFlag(JNIEnv* env, jobject o, jstring name))
  uint64_t result;
  if (GetVMFlag <uint64_t> (thread, env, name, &result, &CommandLineFlags::uint64_tAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetSizeTVMFlag(JNIEnv* env, jobject o, jstring name))
  uintx result;
  if (GetVMFlag <size_t> (thread, env, name, &result, &CommandLineFlags::size_tAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return longBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jobject, WB_GetDoubleVMFlag(JNIEnv* env, jobject o, jstring name))
  double result;
  if (GetVMFlag <double> (thread, env, name, &result, &CommandLineFlags::doubleAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    return doubleBox(thread, env, result);
  }
  return NULL;
WB_END

WB_ENTRY(jstring, WB_GetStringVMFlag(JNIEnv* env, jobject o, jstring name))
  ccstr ccstrResult;
  if (GetVMFlag <ccstr> (thread, env, name, &ccstrResult, &CommandLineFlags::ccstrAt)) {
    ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
    jstring result = env->NewStringUTF(ccstrResult);
    CHECK_JNI_EXCEPTION_(env, NULL);
    return result;
  }
  return NULL;
WB_END

WB_ENTRY(void, WB_SetBooleanVMFlag(JNIEnv* env, jobject o, jstring name, jboolean value))
  bool result = value == JNI_TRUE ? true : false;
  SetVMFlag <bool> (thread, env, name, &result, &CommandLineFlags::boolAtPut);
WB_END

WB_ENTRY(void, WB_SetIntxVMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  intx result = value;
  SetVMFlag <intx> (thread, env, name, &result, &CommandLineFlags::intxAtPut);
WB_END

WB_ENTRY(void, WB_SetUintxVMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  uintx result = value;
  SetVMFlag <uintx> (thread, env, name, &result, &CommandLineFlags::uintxAtPut);
WB_END

WB_ENTRY(void, WB_SetUint64VMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  uint64_t result = value;
  SetVMFlag <uint64_t> (thread, env, name, &result, &CommandLineFlags::uint64_tAtPut);
WB_END

WB_ENTRY(void, WB_SetSizeTVMFlag(JNIEnv* env, jobject o, jstring name, jlong value))
  size_t result = value;
  SetVMFlag <size_t> (thread, env, name, &result, &CommandLineFlags::size_tAtPut);
WB_END

WB_ENTRY(void, WB_SetDoubleVMFlag(JNIEnv* env, jobject o, jstring name, jdouble value))
  double result = value;
  SetVMFlag <double> (thread, env, name, &result, &CommandLineFlags::doubleAtPut);
WB_END

WB_ENTRY(void, WB_SetStringVMFlag(JNIEnv* env, jobject o, jstring name, jstring value))
  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
  const char* ccstrValue = (value == NULL) ? NULL : env->GetStringUTFChars(value, NULL);
  ccstr ccstrResult = ccstrValue;
  bool needFree;
  {
    ThreadInVMfromNative ttvfn(thread); // back to VM
    needFree = SetVMFlag <ccstr> (thread, env, name, &ccstrResult, &CommandLineFlags::ccstrAtPut);
  }
  if (value != NULL) {
    env->ReleaseStringUTFChars(value, ccstrValue);
  }
  if (needFree) {
    FREE_C_HEAP_ARRAY(char, ccstrResult, mtInternal);
  }
WB_END


WB_ENTRY(jboolean, WB_IsInStringTable(JNIEnv* env, jobject o, jstring javaString))
  ResourceMark rm(THREAD);
  int len;
  jchar* name = java_lang_String::as_unicode_string(JNIHandles::resolve(javaString), len, CHECK_false);
  return (StringTable::lookup(name, len) != NULL);
WB_END

WB_ENTRY(void, WB_FullGC(JNIEnv* env, jobject o))
  Universe::heap()->collector_policy()->set_should_clear_all_soft_refs(true);
  Universe::heap()->collect(GCCause::_last_ditch_collection);
#if INCLUDE_ALL_GCS
  if (UseG1GC) {
    // Needs to be cleared explicitly for G1
    Universe::heap()->collector_policy()->set_should_clear_all_soft_refs(false);
  }
#endif // INCLUDE_ALL_GCS
WB_END

WB_ENTRY(void, WB_YoungGC(JNIEnv* env, jobject o))
  Universe::heap()->collect(GCCause::_wb_young_gc);
WB_END

WB_ENTRY(void, WB_ReadReservedMemory(JNIEnv* env, jobject o))
  // static+volatile in order to force the read to happen
  // (not be eliminated by the compiler)
  static char c;
  static volatile char* p;

  p = os::reserve_memory(os::vm_allocation_granularity(), NULL, 0);
  if (p == NULL) {
    THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Failed to reserve memory");
  }

  c = *p;
WB_END

WB_ENTRY(jstring, WB_GetCPUFeatures(JNIEnv* env, jobject o))
  const char* cpu_features = VM_Version::cpu_features();
  ThreadToNativeFromVM ttn(thread);
  jstring features_string = env->NewStringUTF(cpu_features);

  CHECK_JNI_EXCEPTION_(env, NULL);

  return features_string;
WB_END


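// Returns a two-element Object[] describing the method's (OSR) nmethod:
// [0] = Integer compilation level, [1] = byte[] copy of its instructions.
// Returns NULL if the method has no nmethod.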
WB_ENTRY(jobjectArray, WB_GetNMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
  ResourceMark rm(THREAD);
  jmethodID jmid = reflected_method_to_jmid(thread, env, method);
  CHECK_JNI_EXCEPTION_(env, NULL);
  methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
  nmethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
  jobjectArray result = NULL;
  if (code == NULL) {
    return result;
  }
  int insts_size = code->insts_size();

  ThreadToNativeFromVM ttn(thread);
  jclass clazz = env->FindClass(vmSymbols::java_lang_Object()->as_C_string());
  CHECK_JNI_EXCEPTION_(env, NULL);
  result = env->NewObjectArray(2, clazz, NULL);
  if (result == NULL) {
    return result;
  }

  jobject obj = integerBox(thread, env, code->comp_level());
  CHECK_JNI_EXCEPTION_(env, NULL);
  env->SetObjectArrayElement(result, 0, obj);

  jbyteArray insts = env->NewByteArray(insts_size);
  CHECK_JNI_EXCEPTION_(env, NULL);
  env->SetByteArrayRegion(insts, 0, insts_size, (jbyte*) code->insts_begin());
  env->SetObjectArrayElement(result, 1, insts);

  return result;
WB_END

WB_ENTRY(jlong, WB_GetThreadStackSize(JNIEnv* env, jobject o))
  return (jlong) Thread::current()->stack_size();
WB_END

WB_ENTRY(jlong, WB_GetThreadRemainingStackSize(JNIEnv* env, jobject o))
  JavaThread* t = JavaThread::current();
  return (jlong) t->stack_available(os::current_stack_pointer()) - (jlong) StackShadowPages * os::vm_page_size();
WB_END

int WhiteBox::array_bytes_to_length(size_t bytes) {
  return Array<u1>::bytes_to_length(bytes);
}

WB_ENTRY(jlong, WB_AllocateMetaspace(JNIEnv* env, jobject wb, jobject class_loader, jlong size))
  if (size < 0) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(),
        err_msg("WB_AllocateMetaspace: size is negative: " JLONG_FORMAT, size));
  }

  oop class_loader_oop = JNIHandles::resolve(class_loader);
  ClassLoaderData* cld = class_loader_oop != NULL
      ? java_lang_ClassLoader::loader_data(class_loader_oop)
      : ClassLoaderData::the_null_class_loader_data();

  void* metadata = MetadataFactory::new_writeable_array<u1>(cld, WhiteBox::array_bytes_to_length((size_t)size), thread);

  return (jlong)(uintptr_t)metadata;
WB_END

WB_ENTRY(void, WB_FreeMetaspace(JNIEnv* env, jobject wb, jobject class_loader, jlong addr, jlong size))
  oop class_loader_oop = JNIHandles::resolve(class_loader);
  ClassLoaderData* cld = class_loader_oop != NULL
      ? java_lang_ClassLoader::loader_data(class_loader_oop)
      : ClassLoaderData::the_null_class_loader_data();

  MetadataFactory::free_array(cld, (Array<u1>*)(uintptr_t)addr);
WB_END

WB_ENTRY(jlong, WB_IncMetaspaceCapacityUntilGC(JNIEnv* env, jobject wb, jlong inc))
  if (inc < 0) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(),
        err_msg("WB_IncMetaspaceCapacityUntilGC: inc is negative: " JLONG_FORMAT, inc));
  }

  jlong max_size_t = (jlong) ((size_t) -1);
  if (inc > max_size_t) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(),
        err_msg("WB_IncMetaspaceCapacityUntilGC: inc does not fit in size_t: " JLONG_FORMAT, inc));
  }

  size_t new_cap_until_GC = 0;
  size_t aligned_inc = align_size_down((size_t) inc, Metaspace::commit_alignment());
  bool success = MetaspaceGC::inc_capacity_until_GC(aligned_inc, &new_cap_until_GC);
  if (!success) {
    THROW_MSG_0(vmSymbols::java_lang_IllegalStateException(),
                "WB_IncMetaspaceCapacityUntilGC: could not increase capacity until GC "
                "due to contention with another thread");
  }
  return (jlong) new_cap_until_GC;
WB_END

WB_ENTRY(jlong, WB_MetaspaceCapacityUntilGC(JNIEnv* env, jobject wb))
  return (jlong) MetaspaceGC::capacity_until_GC();
WB_END

// Some convenience methods to deal with objects from java
int WhiteBox::offset_for_field(const char* field_name, oop object,
    Symbol* signature_symbol) {
  assert(field_name != NULL && strlen(field_name) > 0, "Field name not valid");
  Thread* THREAD = Thread::current();

  // Get the class of our object
  Klass* arg_klass = object->klass();
  // Turn it into an instance-klass
  InstanceKlass* ik = InstanceKlass::cast(arg_klass);

  // Create symbols to look for in the class
  TempNewSymbol name_symbol = SymbolTable::lookup(field_name, (int) strlen(field_name),
      THREAD);

  // To be filled in with an offset of the field we're looking for
  fieldDescriptor fd;

  Klass* res = ik->find_field(name_symbol, signature_symbol, &fd);
  if (res == NULL) {
    tty->print_cr("Invalid layout of %s at %s", ik->external_name(),
        name_symbol->as_C_string());
    fatal("Invalid layout of preloaded class");
  }

  // Return the offset of the field we've found
  int dest_offset = fd.offset();

  return dest_offset;
}


const char* WhiteBox::lookup_jstring(const char* field_name, oop object) {
  int offset = offset_for_field(field_name, object,
      vmSymbols::string_signature());
  oop string = object->obj_field(offset);
  if (string == NULL) {
    return NULL;
  }
  const char* ret = java_lang_String::as_utf8_string(string);
  return ret;
}

bool WhiteBox::lookup_bool(const char* field_name, oop object) {
  int offset =
      offset_for_field(field_name, object, vmSymbols::bool_signature());
  bool ret = (object->bool_field(offset) == JNI_TRUE);
  return ret;
}

void WhiteBox::register_methods(JNIEnv* env, jclass wbclass, JavaThread* thread, JNINativeMethod* method_array, int method_count) {
  ResourceMark rm;
  ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI

  // Register the natives one by one so we can catch exceptions per method
  jclass no_such_method_error_klass = env->FindClass(vmSymbols::java_lang_NoSuchMethodError()->as_C_string());
  CHECK_JNI_EXCEPTION(env);
  for (int i = 0, n = method_count; i < n; ++i) {
    // Skip dummy entries
    if (method_array[i].fnPtr == NULL) continue;
    if (env->RegisterNatives(wbclass, &method_array[i], 1) != 0) {
      jthrowable throwable_obj = env->ExceptionOccurred();
      if (throwable_obj != NULL) {
        env->ExceptionClear();
        if (env->IsInstanceOf(throwable_obj, no_such_method_error_klass)) {
          // NoSuchMethodError is thrown when a method can't be found or a method is not native.
          // Ignoring the exception since it is not preventing use of other WhiteBox methods.
          tty->print_cr("Warning: 'NoSuchMethodError' on register of sun.hotspot.WhiteBox::%s%s",
              method_array[i].name, method_array[i].signature);
        }
      } else {
        // Registration failed unexpectedly.
        tty->print_cr("Warning: unexpected error on register of sun.hotspot.WhiteBox::%s%s. All methods will be unregistered",
            method_array[i].name, method_array[i].signature);
        env->UnregisterNatives(wbclass);
        break;
      }
    }
  }
}

#define CC (char*)

static JNINativeMethod methods[] = {
  {CC"getObjectAddress",   CC"(Ljava/lang/Object;)J", (void*)&WB_GetObjectAddress  },
  {CC"getObjectSize",      CC"(Ljava/lang/Object;)J", (void*)&WB_GetObjectSize     },
  {CC"isObjectInOldGen",   CC"(Ljava/lang/Object;)Z", (void*)&WB_isObjectInOldGen  },
  {CC"getHeapOopSize",     CC"()I",                   (void*)&WB_GetHeapOopSize    },
  {CC"isClassAlive0",      CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive      },
  {CC"parseCommandLine",
      CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;",
      (void*) &WB_ParseCommandLine
  },
  {CC"getCompressedOopsMaxHeapSize", CC"()J",
      (void*)&WB_GetCompressedOopsMaxHeapSize},
  {CC"printHeapSizes",     CC"()V",                   (void*)&WB_PrintHeapSizes    },
  {CC"runMemoryUnitTests", CC"()V",                   (void*)&WB_RunMemoryUnitTests},
  {CC"readFromNoaccessArea",CC"()V",                  (void*)&WB_ReadFromNoaccessArea},
  {CC"stressVirtualSpaceResize",CC"(JJJ)I",           (void*)&WB_StressVirtualSpaceResize},
#if INCLUDE_ALL_GCS
  {CC"g1InConcurrentMark", CC"()Z",                   (void*)&WB_G1InConcurrentMark},
  {CC"g1IsHumongous",      CC"(Ljava/lang/Object;)Z", (void*)&WB_G1IsHumongous     },
  {CC"g1NumFreeRegions",   CC"()J",                   (void*)&WB_G1NumFreeRegions  },
  {CC"g1RegionSize",       CC"()I",                   (void*)&WB_G1RegionSize      },
#endif // INCLUDE_ALL_GCS
#if INCLUDE_NMT
  {CC"NMTMalloc",           CC"(J)J",                 (void*)&WB_NMTMalloc          },
  {CC"NMTMallocWithPseudoStack", CC"(JI)J",           (void*)&WB_NMTMallocWithPseudoStack},
  {CC"NMTFree",             CC"(J)V",                 (void*)&WB_NMTFree            },
  {CC"NMTReserveMemory",    CC"(J)J",                 (void*)&WB_NMTReserveMemory   },
  {CC"NMTCommitMemory",     CC"(JJ)V",                (void*)&WB_NMTCommitMemory    },
  {CC"NMTUncommitMemory",   CC"(JJ)V",                (void*)&WB_NMTUncommitMemory  },
  {CC"NMTReleaseMemory",    CC"(JJ)V",                (void*)&WB_NMTReleaseMemory   },
  {CC"NMTOverflowHashBucket", CC"(J)V",               (void*)&WB_NMTOverflowHashBucket},
  {CC"NMTIsDetailSupported",CC"()Z",                  (void*)&WB_NMTIsDetailSupported},
  {CC"NMTChangeTrackingLevel", CC"()Z",               (void*)&WB_NMTChangeTrackingLevel},
#endif // INCLUDE_NMT
  {CC"deoptimizeAll",      CC"()V",                   (void*)&WB_DeoptimizeAll     },
  {CC"deoptimizeMethod",   CC"(Ljava/lang/reflect/Executable;Z)I",
                                                      (void*)&WB_DeoptimizeMethod  },
  {CC"isMethodCompiled",   CC"(Ljava/lang/reflect/Executable;Z)Z",
                                                      (void*)&WB_IsMethodCompiled  },
  {CC"isMethodCompilable", CC"(Ljava/lang/reflect/Executable;IZ)Z",
                                                      (void*)&WB_IsMethodCompilable},
  {CC"isMethodQueuedForCompilation",
      CC"(Ljava/lang/reflect/Executable;)Z",          (void*)&WB_IsMethodQueuedForCompilation},
  {CC"makeMethodNotCompilable",
      CC"(Ljava/lang/reflect/Executable;IZ)V",        (void*)&WB_MakeMethodNotCompilable},
  {CC"testSetDontInlineMethod",
      CC"(Ljava/lang/reflect/Executable;Z)Z",         (void*)&WB_TestSetDontInlineMethod},
  {CC"getMethodCompilationLevel",
      CC"(Ljava/lang/reflect/Executable;Z)I",         (void*)&WB_GetMethodCompilationLevel},
  {CC"getMethodEntryBci",
      CC"(Ljava/lang/reflect/Executable;)I",          (void*)&WB_GetMethodEntryBci},
  {CC"getCompileQueueSize",
      CC"(I)I",                                       (void*)&WB_GetCompileQueueSize},
  {CC"testSetForceInlineMethod",
      CC"(Ljava/lang/reflect/Executable;Z)Z",         (void*)&WB_TestSetForceInlineMethod},
  {CC"enqueueMethodForCompilation",
      CC"(Ljava/lang/reflect/Executable;II)Z",        (void*)&WB_EnqueueMethodForCompilation},
  {CC"clearMethodState",
      CC"(Ljava/lang/reflect/Executable;)V",          (void*)&WB_ClearMethodState},
  {CC"setBooleanVMFlag",   CC"(Ljava/lang/String;Z)V",(void*)&WB_SetBooleanVMFlag},
  {CC"setIntxVMFlag",      CC"(Ljava/lang/String;J)V",(void*)&WB_SetIntxVMFlag},
  {CC"setUintxVMFlag",     CC"(Ljava/lang/String;J)V",(void*)&WB_SetUintxVMFlag},
  {CC"setUint64VMFlag",    CC"(Ljava/lang/String;J)V",(void*)&WB_SetUint64VMFlag},
  {CC"setSizeTVMFlag",     CC"(Ljava/lang/String;J)V",(void*)&WB_SetSizeTVMFlag},
  {CC"setDoubleVMFlag",    CC"(Ljava/lang/String;D)V",(void*)&WB_SetDoubleVMFlag},
  {CC"setStringVMFlag",    CC"(Ljava/lang/String;Ljava/lang/String;)V",
                                                      (void*)&WB_SetStringVMFlag},
  {CC"getBooleanVMFlag",   CC"(Ljava/lang/String;)Ljava/lang/Boolean;",
                                                      (void*)&WB_GetBooleanVMFlag},
  {CC"getIntxVMFlag",      CC"(Ljava/lang/String;)Ljava/lang/Long;",
                                                      (void*)&WB_GetIntxVMFlag},
  {CC"getUintxVMFlag",     CC"(Ljava/lang/String;)Ljava/lang/Long;",
                                                      (void*)&WB_GetUintxVMFlag},
  {CC"getUint64VMFlag",    CC"(Ljava/lang/String;)Ljava/lang/Long;",
                                                      (void*)&WB_GetUint64VMFlag},
  {CC"getSizeTVMFlag",     CC"(Ljava/lang/String;)Ljava/lang/Long;",
                                                      (void*)&WB_GetSizeTVMFlag},
  {CC"getDoubleVMFlag",    CC"(Ljava/lang/String;)Ljava/lang/Double;",
                                                      (void*)&WB_GetDoubleVMFlag},
  {CC"getStringVMFlag",    CC"(Ljava/lang/String;)Ljava/lang/String;",
                                                      (void*)&WB_GetStringVMFlag},
  {CC"isInStringTable",    CC"(Ljava/lang/String;)Z", (void*)&WB_IsInStringTable },
  {CC"fullGC",             CC"()V",                   (void*)&WB_FullGC },
  {CC"youngGC",            CC"()V",                   (void*)&WB_YoungGC },
  {CC"readReservedMemory", CC"()V",                   (void*)&WB_ReadReservedMemory },
  {CC"allocateMetaspace",
      CC"(Ljava/lang/ClassLoader;J)J",                (void*)&WB_AllocateMetaspace },
  {CC"freeMetaspace",
      CC"(Ljava/lang/ClassLoader;JJ)V",               (void*)&WB_FreeMetaspace },
  {CC"incMetaspaceCapacityUntilGC", CC"(J)J",         (void*)&WB_IncMetaspaceCapacityUntilGC },
  {CC"metaspaceCapacityUntilGC", CC"()J",             (void*)&WB_MetaspaceCapacityUntilGC },
  {CC"getCPUFeatures",     CC"()Ljava/lang/String;",  (void*)&WB_GetCPUFeatures },
  {CC"getNMethod",         CC"(Ljava/lang/reflect/Executable;Z)[Ljava/lang/Object;",
                                                      (void*)&WB_GetNMethod },
  {CC"getThreadStackSize", CC"()J",                   (void*)&WB_GetThreadStackSize },
  {CC"getThreadRemainingStackSize", CC"()J",          (void*)&WB_GetThreadRemainingStackSize },
};

#undef CC

JVM_ENTRY(void, JVM_RegisterWhiteBoxMethods(JNIEnv* env, jclass wbclass))
{
  if (WhiteBoxAPI) {
    // Make sure that wbclass is loaded by the null classloader
    instanceKlassHandle ikh = instanceKlassHandle(JNIHandles::resolve(wbclass)->klass());
    Handle loader(ikh->class_loader());
    if (loader.is_null()) {
      WhiteBox::register_methods(env, wbclass, thread, methods, sizeof(methods) / sizeof(methods[0]));
      WhiteBox::register_extended(env, wbclass, thread);
      WhiteBox::set_used();
    }
  }
}
JVM_END