/*
 * Copyright (c) 2013, 2015, Red Hat, Inc. and/or its affiliates.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

#include "memory/allocation.hpp"
#include "gc/g1/heapRegionBounds.inline.hpp"
#include "gc/shenandoah/brooksPointer.hpp"
#include "gc/shenandoah/shenandoahHeapRegion.hpp"
#include "gc/shenandoah/shenandoahHeap.inline.hpp"
#include "gc/shared/space.inline.hpp"
#include "memory/universe.hpp"
#include "oops/oop.inline.hpp"
#include "runtime/mutexLocker.hpp"
#include "runtime/os.hpp"
#include "runtime/safepoint.hpp"

Monitor ShenandoahHeapRegion::_mem_protect_lock(Mutex::special, "ShenandoahMemProtect_lock", true, Monitor::_safepoint_check_never);
size_t ShenandoahHeapRegion::RegionSizeShift = 0;
size_t ShenandoahHeapRegion::RegionSizeBytes = 0;

jint ShenandoahHeapRegion::initialize_heap_region(HeapWord* start,
                                                  size_t regionSizeWords, int index) {

  reserved = MemRegion((HeapWord*) start, regionSizeWords);
  ContiguousSpace::initialize(reserved, true, false);
  liveData = 0;
  _is_in_collection_set = false;
  _region_number = index;
#ifdef ASSERT
  _mem_protection_level = 1; // Off, level 1.
#endif
  _top_at_mark_start = bottom();
  _top_at_prev_mark_start = bottom();
  _top_prev_mark_bitmap = bottom();
  return JNI_OK;
}

size_t ShenandoahHeapRegion::region_number() const {
  return _region_number;
}

bool ShenandoahHeapRegion::rollback_allocation(uint size) {
  set_top(top() - size);
  return true;
}

void ShenandoahHeapRegion::clearLiveData() {
  setLiveData(0);
}

void ShenandoahHeapRegion::setLiveData(size_t s) {
  Atomic::store_ptr(s, (intptr_t*) &liveData);
}

size_t ShenandoahHeapRegion::getLiveData() const {
  return liveData;
}

size_t ShenandoahHeapRegion::garbage() const {
  assert(used() >= getLiveData() || is_humongous(), "Live Data must be a subset of used() live: "SIZE_FORMAT" used: "SIZE_FORMAT, getLiveData(), used());
  size_t result = used() - getLiveData();
  return result;
}

bool ShenandoahHeapRegion::is_in_collection_set() const {
  return _is_in_collection_set;
}

#include <sys/mman.h>

#ifdef ASSERT

void ShenandoahHeapRegion::memProtectionOn() {
  /*
  tty->print_cr("protect memory on region level: "INT32_FORMAT, _mem_protection_level);
  print(tty);
  */
  MutexLockerEx ml(&_mem_protect_lock, true);
  assert(_mem_protection_level >= 1, "invariant");

  if (--_mem_protection_level == 0) {
    if (ShenandoahVerifyWritesToFromSpace) {
      assert(! ShenandoahVerifyReadsToFromSpace, "can't verify from-space reads when verifying from-space writes");
      os::protect_memory((char*) bottom(), end() - bottom(), os::MEM_PROT_READ);
    } else {
      assert(ShenandoahVerifyReadsToFromSpace, "need to be verifying reads here");
      os::protect_memory((char*) bottom(), end() - bottom(), os::MEM_PROT_NONE);
    }
  }
}

void ShenandoahHeapRegion::memProtectionOff() {
  /*
  tty->print_cr("unprotect memory on region level: "INT32_FORMAT, _mem_protection_level);
  print(tty);
  */
  MutexLockerEx ml(&_mem_protect_lock, true);
  assert(_mem_protection_level >= 0, "invariant");
  if (_mem_protection_level++ == 0) {
    os::protect_memory((char*) bottom(), end() - bottom(), os::MEM_PROT_RW);
  }
}

#endif

void ShenandoahHeapRegion::set_is_in_collection_set(bool b) {
  assert(! (is_humongous() && b), "never ever enter a humongous region into the collection set");

  _is_in_collection_set = b;

  if (b) {
    // tty->print_cr("registering region in fast-cset");
    // print();
    ShenandoahHeap::heap()->register_region_with_in_cset_fast_test(this);
  }

#ifdef ASSERT
  if (ShenandoahVerifyWritesToFromSpace || ShenandoahVerifyReadsToFromSpace) {
    if (b) {
      memProtectionOn();
      assert(_mem_protection_level == 0, "need to be protected here");
    } else {
      assert(_mem_protection_level == 0, "need to be protected here");
      memProtectionOff();
    }
  }
#endif
}

ByteSize ShenandoahHeapRegion::is_in_collection_set_offset() {
  return byte_offset_of(ShenandoahHeapRegion, _is_in_collection_set);
}

void ShenandoahHeapRegion::print_on(outputStream* st) const {
  st->print_cr("ShenandoahHeapRegion: "PTR_FORMAT"/"SIZE_FORMAT, p2i(this), _region_number);

  if (is_in_collection_set())
    st->print("C");
  if (is_humongous_start()) {
    st->print("H");
  }
  if (is_humongous_continuation()) {
    st->print("h");
  }
  //else
  st->print(" ");

  st->print_cr("live = "SIZE_FORMAT" garbage = "SIZE_FORMAT" bottom = "PTR_FORMAT" end = "PTR_FORMAT" top = "PTR_FORMAT,
               getLiveData(), garbage(), p2i(bottom()), p2i(end()), p2i(top()));
}


class SkipUnreachableObjectToOopClosure: public ObjectClosure {
  ExtendedOopClosure* _cl;
  bool _skip_unreachable_objects;
  ShenandoahHeap* _heap;

public:
  SkipUnreachableObjectToOopClosure(ExtendedOopClosure* cl, bool skip_unreachable_objects) :
    _cl(cl), _skip_unreachable_objects(skip_unreachable_objects), _heap(ShenandoahHeap::heap()) {}

  void do_object(oop obj) {

    if ((! _skip_unreachable_objects) || _heap->is_marked_current(obj)) {
#ifdef ASSERT
      if (_skip_unreachable_objects) {
        assert(_heap->is_marked_current(obj), "obj must be live");
      }
#endif
      obj->oop_iterate(_cl);
    }

  }
};

void ShenandoahHeapRegion::object_iterate_interruptible(ObjectClosure* blk, bool allow_cancel) {
  HeapWord* p = bottom() + BrooksPointer::word_size();
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  while (p < top() && !(allow_cancel && heap->cancelled_concgc())) {
    blk->do_object(oop(p));
#ifdef ASSERT
    if (ShenandoahVerifyReadsToFromSpace) {
      memProtectionOff();
      p += oop(p)->size() + BrooksPointer::word_size();
      memProtectionOn();
    } else {
      p += oop(p)->size() + BrooksPointer::word_size();
    }
#else
    p += oop(p)->size() + BrooksPointer::word_size();
#endif
  }
}

void ShenandoahHeapRegion::marked_object_iterate(ObjectClosure* blk) {
  HeapWord* p = bottom();
  ShenandoahHeap* heap = ShenandoahHeap::heap();
  CMBitMap* bitmap = heap->next_mark_bit_map();
  while (p < top()) {
    // Step over the Brooks pointer word, then jump to the next marked object.
    p += BrooksPointer::word_size();
    p = bitmap->getNextMarkedWordAddress(p, top());
    if (p < top()) {
      oop obj = oop(p);
      assert(heap->is_marked_current(obj), "must be marked");
      assert(p >= bottom() && p < top(), "must be within region bounds");
      assert(obj->is_oop(), "sanity");
      size_t size = obj->size();
      blk->do_object(obj);
      p += size;
    }
  }
}

HeapWord* ShenandoahHeapRegion::object_iterate_careful(ObjectClosureCareful* blk) {
  HeapWord * limit = concurrent_iteration_safe_limit();
  assert(limit <= top(), "sanity check");
  for (HeapWord* p = bottom() + BrooksPointer::word_size(); p < limit;) {
    size_t size = blk->do_object_careful(oop(p));
    if (size == 0) {
      return p; // failed at p
    } else {
      p += size + BrooksPointer::word_size();
    }
  }
  return NULL; // all done
}

void ShenandoahHeapRegion::oop_iterate_skip_unreachable(ExtendedOopClosure* cl, bool skip_unreachable_objects) {
  SkipUnreachableObjectToOopClosure cl2(cl, skip_unreachable_objects);
  object_iterate_interruptible(&cl2, false);
}

void ShenandoahHeapRegion::fill_region() {
  ShenandoahHeap* sh = (ShenandoahHeap*) Universe::heap();

  if (free() > (BrooksPointer::word_size() + CollectedHeap::min_fill_size())) {
    // Reserve the Brooks forwarding-pointer slot, then fill the rest of the
    // region with a dummy object and set up its forwarding pointer.
    HeapWord* filler = allocate(BrooksPointer::word_size());
    HeapWord* obj = allocate(end() - top());
    sh->fill_with_object(obj, end() - obj);
    sh->initialize_brooks_ptr(oop(obj));
  }
}

void ShenandoahHeapRegion::set_humongous_start(bool start) {
  _humongous_start = start;
}

void ShenandoahHeapRegion::set_humongous_continuation(bool continuation) {
  _humongous_continuation = continuation;
}

bool ShenandoahHeapRegion::is_humongous() const {
  return _humongous_start || _humongous_continuation;
}

bool ShenandoahHeapRegion::is_humongous_start() const {
  return _humongous_start;
}

bool ShenandoahHeapRegion::is_humongous_continuation() const {
  return _humongous_continuation;
}

void ShenandoahHeapRegion::do_reset() {
  ContiguousSpace::initialize(reserved, true, false);
  clearLiveData();
  _humongous_start = false;
  _humongous_continuation = false;
  // _top_at_mark_start = bottom();
  _top_at_prev_mark_start = bottom();
}

void ShenandoahHeapRegion::recycle() {
  do_reset();
  set_is_in_collection_set(false);
}

void ShenandoahHeapRegion::reset() {
  assert(_mem_protection_level == 1, "needs to be unprotected here");
  do_reset();
  _is_in_collection_set = false;
}

HeapWord* ShenandoahHeapRegion::block_start_const(const void* p) const {
  assert(MemRegion(bottom(), end()).contains(p),
         "p ("PTR_FORMAT") not in space ["PTR_FORMAT", "PTR_FORMAT")",
         p2i(p), p2i(bottom()), p2i(end()));
  if (p >= top()) {
    return top();
  } else {
    // Linear walk from the bottom of the region: each step skips an object
    // plus its Brooks forwarding-pointer word.
    HeapWord* last = bottom() + BrooksPointer::word_size();
    HeapWord* cur = last;
    while (cur <= p) {
      last = cur;
      cur += oop(cur)->size() + BrooksPointer::word_size();
    }
    assert(oop(last)->is_oop(),
           PTR_FORMAT" should be an object start", p2i(last));
    return last;
  }
}

void ShenandoahHeapRegion::setup_heap_region_size(size_t initial_heap_size, size_t max_heap_size) {
  uintx region_size = ShenandoahHeapRegionSize;
  if (FLAG_IS_DEFAULT(ShenandoahHeapRegionSize)) {
    size_t average_heap_size = (initial_heap_size + max_heap_size) / 2;
    region_size = MAX2(average_heap_size / HeapRegionBounds::target_number(),
                       (uintx) HeapRegionBounds::min_size());
  }

  int region_size_log = log2_long((jlong) region_size);
  // Recalculate the region size to make sure it's a power of
  // 2. This means that region_size is the largest power of 2 that's
  // <= what we've calculated so far.
  region_size = ((uintx)1 << region_size_log);

  // Now make sure that we don't go over or under our limits.
  if (region_size < HeapRegionBounds::min_size()) {
    region_size = HeapRegionBounds::min_size();
  } else if (region_size > HeapRegionBounds::max_size()) {
    region_size = HeapRegionBounds::max_size();
  }

  // And recalculate the log.
  region_size_log = log2_long((jlong) region_size);

  // Now, set up the globals.
  guarantee(RegionSizeShift == 0, "we should only set it once");
  RegionSizeShift = region_size_log;

  guarantee(RegionSizeBytes == 0, "we should only set it once");
  RegionSizeBytes = (size_t)region_size;

  if (ShenandoahLogConfig) {
    tty->print_cr("Region size in bytes: "SIZE_FORMAT, RegionSizeBytes);
    tty->print_cr("Region size shift: "SIZE_FORMAT, RegionSizeShift);
    tty->print_cr("Initial number of regions: "SIZE_FORMAT, initial_heap_size / RegionSizeBytes);
    tty->print_cr("Maximum number of regions: "SIZE_FORMAT, max_heap_size / RegionSizeBytes);
  }
}
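
// A hedged worked example of the sizing logic above. The bound values are
// assumptions taken from G1's HeapRegionBounds (roughly a 1 MiB minimum,
// 32 MiB maximum and a target of 2048 regions); see heapRegionBounds.hpp
// for the actual constants.
//
//   initial_heap_size = 512 MiB, max_heap_size = 4096 MiB
//   average_heap_size = (512 MiB + 4096 MiB) / 2        = 2304 MiB
//   region_size       = MAX2(2304 MiB / 2048, 1 MiB)    = 1.125 MiB
//   rounded down to a power of two                      = 1 MiB
//
//   => RegionSizeShift = 20, RegionSizeBytes = 1 MiB,
//      512 regions at the initial and 4096 at the maximum heap size.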

CompactibleSpace* ShenandoahHeapRegion::next_compaction_space() const {
  return ShenandoahHeap::heap()->next_compaction_region(this);
}

void ShenandoahHeapRegion::prepare_for_compaction(CompactPoint* cp) {
  scan_and_forward(this, cp);
}

void ShenandoahHeapRegion::adjust_pointers() {
  // Check first if there is any work to do.
  if (used() == 0) {
    return;   // Nothing to do.
  }

  scan_and_adjust_pointers(this);
}

void ShenandoahHeapRegion::compact() {
  assert(!is_humongous(), "Shouldn't be compacting humongous regions");
  scan_and_compact(this);
}

void ShenandoahHeapRegion::init_top_at_mark_start() {
  _top_at_mark_start = top();
  ShenandoahHeap::heap()->set_top_at_mark_start(bottom(), top());
}

void ShenandoahHeapRegion::set_top_at_mark_start(HeapWord* top) {
  _top_at_mark_start = top;
  ShenandoahHeap::heap()->set_top_at_mark_start(bottom(), top);
}

void ShenandoahHeapRegion::reset_top_at_prev_mark_start() {
  _top_at_prev_mark_start = bottom();
}

HeapWord* ShenandoahHeapRegion::top_at_mark_start() {
  return _top_at_mark_start;
}

HeapWord* ShenandoahHeapRegion::top_at_prev_mark_start() {
  return _top_at_prev_mark_start;
}

HeapWord* ShenandoahHeapRegion::top_prev_mark_bitmap() {
  return _top_prev_mark_bitmap;
}

bool ShenandoahHeapRegion::allocated_after_prev_mark_start(HeapWord* addr) const {
  return addr >= _top_at_prev_mark_start;
}

void ShenandoahHeapRegion::swap_top_at_mark_start() {
  // Exchange the TAMS of the current and previous marking cycles and tell
  // the heap about the new current TAMS for this region.
  HeapWord* tmp = _top_at_prev_mark_start;
  _top_at_prev_mark_start = _top_at_mark_start;
  _top_at_mark_start = tmp;
  ShenandoahHeap::heap()->set_top_at_mark_start(bottom(), tmp);
}

void ShenandoahHeapRegion::set_top_prev_mark_bitmap(HeapWord* top) {
  _top_prev_mark_bitmap = top;
}

void ShenandoahHeapRegion::enter_critical() {
  assert(! SafepointSynchronize::is_at_safepoint(), "only outside safepoints");
  assert(_critical_pins >= 0, "sanity");
  Atomic::inc(&_critical_pins);
}

void ShenandoahHeapRegion::exit_critical() {
  assert(! SafepointSynchronize::is_at_safepoint(), "only outside safepoints");
  Atomic::dec(&_critical_pins);
  assert(_critical_pins >= 0, "sanity");
}

bool ShenandoahHeapRegion::is_pinned() {
  assert(_critical_pins >= 0, "sanity");
  assert(SafepointSynchronize::is_at_safepoint(), "only at safepoints");
  return _critical_pins > 0;
}