src/share/vm/memory/referenceProcessor.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File hs-gc9 Cdiff src/share/vm/memory/referenceProcessor.hpp

src/share/vm/memory/referenceProcessor.hpp

Print this page
rev 6446 : [mq]: ref-write-new-fix

*** 97,107 ****
  oop                _next;
  HeapWord*          _referent_addr;
  oop                _referent;
  OopClosure*        _keep_alive;
  BoolObjectClosure* _is_alive;
- bool               _discovered_list_needs_post_barrier;

  DEBUG_ONLY(
  oop                _first_seen; // cyclic linked list check
  )
--- 97,106 ----
*** 111,122 ****
  )

  public:
  inline DiscoveredListIterator(DiscoveredList&    refs_list,
                                OopClosure*        keep_alive,
!                               BoolObjectClosure* is_alive,
!                               bool               discovered_list_needs_post_barrier = false):
    _refs_list(refs_list),
    _prev_next(refs_list.adr_head()),
    _prev(NULL),
    _ref(refs_list.head()),
  #ifdef ASSERT
--- 110,120 ----
  )

  public:
  inline DiscoveredListIterator(DiscoveredList&    refs_list,
                                OopClosure*        keep_alive,
!                               BoolObjectClosure* is_alive):
    _refs_list(refs_list),
    _prev_next(refs_list.adr_head()),
    _prev(NULL),
    _ref(refs_list.head()),
  #ifdef ASSERT
*** 126,137 ****
    _processed(0),
    _removed(0),
  #endif
    _next(NULL),
    _keep_alive(keep_alive),
!   _is_alive(is_alive),
!   _discovered_list_needs_post_barrier(discovered_list_needs_post_barrier)
  { }

  // End Of List.
  inline bool has_next() const { return _ref != NULL; }

--- 124,134 ----
    _processed(0),
    _removed(0),
  #endif
    _next(NULL),
    _keep_alive(keep_alive),
!   _is_alive(is_alive)
  { }

  // End Of List.
  inline bool has_next() const { return _ref != NULL; }

*** 228,245 ****
  bool        _discovering_refs;        // true when discovery enabled
  bool        _discovery_is_atomic;     // if discovery is atomic wrt
                                        // other collectors in configuration
  bool        _discovery_is_mt;         // true if reference discovery is MT.

- // If true, setting "next" field of a discovered refs list requires
- // write post barrier. (Must be true if used in a collector in which
- // elements of a discovered list may be moved during discovery: for
- // example, a collector like Garbage-First that moves objects during a
- // long-term concurrent marking phase that does weak reference
- // discovery.)
- bool        _discovered_list_needs_post_barrier;
-
  bool        _enqueuing_is_done;       // true if all weak references enqueued
  bool        _processing_is_mt;        // true during phases when
                                        // reference processing is MT.
  uint        _next_id;                 // round-robin mod _num_q counter in
                                        // support of work distribution
--- 225,234 ----
*** 380,394 ****
  const char* list_name(uint i);

  void enqueue_discovered_reflists(HeapWord* pending_list_addr, AbstractRefProcTaskExecutor* task_executor);

  protected:
- // Set the 'discovered' field of the given reference to
- // the given value - emitting post barriers depending upon
- // the value of _discovered_list_needs_post_barrier.
- void set_discovered(oop ref, oop value);
-
  // "Preclean" the given discovered reference list
  // by removing references with strongly reachable referents.
  // Currently used in support of CMS only.
  void preclean_discovered_reflist(DiscoveredList& refs_list,
                                   BoolObjectClosure* is_alive,
--- 369,378 ----
*** 425,436 ****
  // Default parameters give you a vanilla reference processor.
  ReferenceProcessor(MemRegion span,
                     bool mt_processing = false, uint mt_processing_degree = 1,
                     bool mt_discovery = false, uint mt_discovery_degree = 1,
                     bool atomic_discovery = true,
!                    BoolObjectClosure* is_alive_non_header = NULL,
!                    bool discovered_list_needs_post_barrier = false);

  // RefDiscoveryPolicy values
  enum DiscoveryPolicy {
    ReferenceBasedDiscovery = 0,
    ReferentBasedDiscovery = 1,
--- 409,419 ----
  // Default parameters give you a vanilla reference processor.
  ReferenceProcessor(MemRegion span,
                     bool mt_processing = false, uint mt_processing_degree = 1,
                     bool mt_discovery = false, uint mt_discovery_degree = 1,
                     bool atomic_discovery = true,
!                    BoolObjectClosure* is_alive_non_header = NULL);

  // RefDiscoveryPolicy values
  enum DiscoveryPolicy {
    ReferenceBasedDiscovery = 0,
    ReferentBasedDiscovery = 1,
src/share/vm/memory/referenceProcessor.hpp
Index Unified diffs Context diffs Sdiffs Patch New Old Previous File Next File