// NOTE(review): This is NOT a compilable source file. It is a flattened
// side-by-side diff of the member-declaration section of `class Universe`
// from HotSpot's universe.hpp, collapsed onto a few physical lines.
// - The `|` character separates the two diff halves: LEFT of `|` is the
//   pre-change version; RIGHT of `|` is the post-change version, which adds
//   the `_oop_metadata_valuetype_mask` field, sets it to 1 inside
//   `set_narrow_klass_shift()` when `shift == LogKlassAlignmentInBytes`,
//   and adds the `oop_metadata_valuetype_mask()` accessor.
// - The bare numbers embedded in the text (178..219, 419..467) are the
//   original file's line numbers carried over from the diff, not code.
// - Because the first `//` on each physical line comments out everything
//   after it, none of this parses as C++ in its current single-line form.
// To recover usable source, re-extract the diff with line breaks preserved
// and keep only one half. Content below is left byte-for-byte untouched.
178 static oop _arithmetic_exception_instance; // preallocated exception object 179 static oop _virtual_machine_error_instance; // preallocated exception object 180 // The object used as an exception dummy when exceptions are thrown for 181 // the vm thread. 182 static oop _vm_exception; 183 184 // References waiting to be transferred to the ReferenceHandler 185 static oop _reference_pending_list; 186 187 // The particular choice of collected heap. 188 static CollectedHeap* _collectedHeap; 189 190 static intptr_t _non_oop_bits; 191 192 // For UseCompressedOops. 193 static struct NarrowPtrStruct _narrow_oop; 194 // For UseCompressedClassPointers. 195 static struct NarrowPtrStruct _narrow_klass; 196 static address _narrow_ptrs_base; 197 198 // array of dummy objects used with +FullGCAlot 199 debug_only(static objArrayOop _fullgc_alot_dummy_array;) 200 // index of next entry to clear 201 debug_only(static int _fullgc_alot_dummy_next;) 202 203 // Compiler/dispatch support 204 static int _base_vtable_size; // Java vtbl size of klass Object (in words) 205 206 // Initialization 207 static bool _bootstrapping; // true during genesis 208 static bool _module_initialized; // true after call_initPhase2 called 209 static bool _fully_initialized; // true after universe_init and initialize_vtables called 210 211 // the array of preallocated errors with backtraces 212 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; } 213 214 // generate an out of memory error; if possible using an error with preallocated backtrace; 215 // otherwise return the given default error. 
216 static oop gen_out_of_memory_error(oop default_err); 217 419 // For UseCompressedClassPointers 420 static address narrow_klass_base() { return _narrow_klass._base; } 421 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); } 422 static int narrow_klass_shift() { return _narrow_klass._shift; } 423 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; } 424 425 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; } 426 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; } 427 static address narrow_ptrs_base() { return _narrow_ptrs_base; } 428 429 static void print_compressed_oops_mode(outputStream* st); 430 431 // this is set in vm_version on sparc (and then reset in universe afaict) 432 static void set_narrow_oop_shift(int shift) { 433 _narrow_oop._shift = shift; 434 } 435 436 static void set_narrow_klass_shift(int shift) { 437 assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs"); 438 _narrow_klass._shift = shift; 439 } 440 441 // Reserve Java heap and determine CompressedOops mode 442 static ReservedSpace reserve_heap(size_t heap_size, size_t alignment); 443 444 // Historic gc information 445 static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; } 446 static size_t get_heap_free_at_last_gc() { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; } 447 static size_t get_heap_used_at_last_gc() { return _heap_used_at_last_gc; } 448 static void update_heap_info_at_gc(); 449 450 // Testers 451 static bool is_bootstrapping() { return _bootstrapping; } 452 static bool is_module_initialized() { return _module_initialized; } 453 static bool is_fully_initialized() { return _fully_initialized; } 454 455 static inline bool element_type_should_be_aligned(BasicType type); 456 static inline bool field_type_should_be_aligned(BasicType type); 457 static bool on_page_boundary(void* addr); 458 
static bool should_fill_in_stack_trace(Handle throwable); 459 static void check_alignment(uintx size, uintx alignment, const char* name); | 178 static oop _arithmetic_exception_instance; // preallocated exception object 179 static oop _virtual_machine_error_instance; // preallocated exception object 180 // The object used as an exception dummy when exceptions are thrown for 181 // the vm thread. 182 static oop _vm_exception; 183 184 // References waiting to be transferred to the ReferenceHandler 185 static oop _reference_pending_list; 186 187 // The particular choice of collected heap. 188 static CollectedHeap* _collectedHeap; 189 190 static intptr_t _non_oop_bits; 191 192 // For UseCompressedOops. 193 static struct NarrowPtrStruct _narrow_oop; 194 // For UseCompressedClassPointers. 195 static struct NarrowPtrStruct _narrow_klass; 196 static address _narrow_ptrs_base; 197 198 // value type using klass alignment encoded as oop metadata 199 static int _oop_metadata_valuetype_mask; 200 201 // array of dummy objects used with +FullGCAlot 202 debug_only(static objArrayOop _fullgc_alot_dummy_array;) 203 // index of next entry to clear 204 debug_only(static int _fullgc_alot_dummy_next;) 205 206 // Compiler/dispatch support 207 static int _base_vtable_size; // Java vtbl size of klass Object (in words) 208 209 // Initialization 210 static bool _bootstrapping; // true during genesis 211 static bool _module_initialized; // true after call_initPhase2 called 212 static bool _fully_initialized; // true after universe_init and initialize_vtables called 213 214 // the array of preallocated errors with backtraces 215 static objArrayOop preallocated_out_of_memory_errors() { return _preallocated_out_of_memory_error_array; } 216 217 // generate an out of memory error; if possible using an error with preallocated backtrace; 218 // otherwise return the given default error. 
219 static oop gen_out_of_memory_error(oop default_err); 220 422 // For UseCompressedClassPointers 423 static address narrow_klass_base() { return _narrow_klass._base; } 424 static bool is_narrow_klass_base(void* addr) { return (narrow_klass_base() == (address)addr); } 425 static int narrow_klass_shift() { return _narrow_klass._shift; } 426 static bool narrow_klass_use_implicit_null_checks() { return _narrow_klass._use_implicit_null_checks; } 427 428 static address* narrow_ptrs_base_addr() { return &_narrow_ptrs_base; } 429 static void set_narrow_ptrs_base(address a) { _narrow_ptrs_base = a; } 430 static address narrow_ptrs_base() { return _narrow_ptrs_base; } 431 432 static void print_compressed_oops_mode(outputStream* st); 433 434 // this is set in vm_version on sparc (and then reset in universe afaict) 435 static void set_narrow_oop_shift(int shift) { 436 _narrow_oop._shift = shift; 437 } 438 439 static void set_narrow_klass_shift(int shift) { 440 assert(shift == 0 || shift == LogKlassAlignmentInBytes, "invalid shift for klass ptrs"); 441 _narrow_klass._shift = shift; 442 if (shift == LogKlassAlignmentInBytes) { 443 _oop_metadata_valuetype_mask = 1; 444 } 445 } 446 447 static int oop_metadata_valuetype_mask() { return _oop_metadata_valuetype_mask; } 448 449 // Reserve Java heap and determine CompressedOops mode 450 static ReservedSpace reserve_heap(size_t heap_size, size_t alignment); 451 452 // Historic gc information 453 static size_t get_heap_capacity_at_last_gc() { return _heap_capacity_at_last_gc; } 454 static size_t get_heap_free_at_last_gc() { return _heap_capacity_at_last_gc - _heap_used_at_last_gc; } 455 static size_t get_heap_used_at_last_gc() { return _heap_used_at_last_gc; } 456 static void update_heap_info_at_gc(); 457 458 // Testers 459 static bool is_bootstrapping() { return _bootstrapping; } 460 static bool is_module_initialized() { return _module_initialized; } 461 static bool is_fully_initialized() { return _fully_initialized; } 462 463 static 
inline bool element_type_should_be_aligned(BasicType type); 464 static inline bool field_type_should_be_aligned(BasicType type); 465 static bool on_page_boundary(void* addr); 466 static bool should_fill_in_stack_trace(Handle throwable); 467 static void check_alignment(uintx size, uintx alignment, const char* name); |