  TaskQueueEntryChunk* cur = remove_chunk_from_chunk_list();

  if (cur == NULL) {
    return false;
  }

  // Copy the chunk's entries into the caller's buffer before recycling the chunk.
  Copy::conjoint_memory_atomic(cur->data, ptr_arr, EntriesPerChunk * sizeof(G1TaskQueueEntry));

  add_chunk_to_free_list(cur);
  return true;
}

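// Reset the mark stack to the empty state: drop both chunk lists and clear the
// high water mark so the backing space is reused from the start.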
void G1CMMarkStack::set_empty() {
  _chunks_in_chunk_list = 0;
  _hwm = 0;
  _chunk_list = NULL;
  _free_list = NULL;
}

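// The root MemRegion set is backed by an array allocated up front for the
// maximum number of regions; regions are recorded with add() and later handed
// out to the concurrent root region scan.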
G1CMRootMemRegions::G1CMRootMemRegions(uint const max_regions) :
    _root_regions(NULL),
    _max_regions(max_regions),
    _num_root_regions(0),
    _claimed_root_regions(0),
    _scan_in_progress(false),
    _should_abort(false) {
  _root_regions = new MemRegion[_max_regions];
  if (_root_regions == NULL) {
    vm_exit_during_initialization("Could not allocate root MemRegion set.");
  }
}

G1CMRootMemRegions::~G1CMRootMemRegions() {
  delete[] _root_regions;
}

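// Forget all previously recorded root regions.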
void G1CMRootMemRegions::reset() {
  _num_root_regions = 0;
}

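// Record [start, end) as a root region. A slot is claimed with an atomic
// fetch-and-add so multiple threads can add regions in parallel; must be
// called at a safepoint.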
void G1CMRootMemRegions::add(HeapWord* start, HeapWord* end) {
  assert_at_safepoint();
  size_t idx = Atomic::fetch_and_add(&_num_root_regions, 1u);
  assert(idx < _max_regions, "Trying to add more root MemRegions than there is space for: " SIZE_FORMAT, _max_regions);
  assert(start != NULL && end != NULL && start <= end, "Start (" PTR_FORMAT ") should be less than or equal to "
         "end (" PTR_FORMAT ")", p2i(start), p2i(end));
  _root_regions[idx].set_start(start);
  _root_regions[idx].set_end(end);
}

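// Flag the root region scan as in progress if any root regions were recorded.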
void G1CMRootMemRegions::prepare_for_scan() {
  assert(!scan_in_progress(), "pre-condition");

  _scan_in_progress = _num_root_regions > 0;