6420645: Create a vm that uses compressed oops for up to 32gb heapsizes

Compressed oops in instances, arrays, and headers. Code contributors are coleenp, phh, never, swamyv

Reviewed-by: jmasa, kamg, acorn, tbell, kvn, rasbold
Coleen Phillimore 2008-04-13 17:43:42 -04:00
parent 680ecf1611
commit 4a831d45f0
273 changed files with 6585 additions and 2993 deletions
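
For orientation before the diff: with UseCompressedOops, 64-bit heap references are stored in object fields as 32-bit offsets from a fixed heap base, shifted by the 8-byte object-alignment bits; 2^32 distinct 8-byte-aligned offsets is what yields the 32 GB limit in the title. A minimal sketch of that arithmetic, assuming an illustrative heap_base and encode/decode naming (not HotSpot's exact API):

    #include <cassert>
    #include <cstdint>

    typedef uint32_t narrowOop;          // the compressed form added by this commit

    static uintptr_t heap_base = 0;      // assumed: start of the reserved heap
    static const int kAlignShift = 3;    // 8-byte object alignment

    // Compress: subtract the base, drop the alignment bits.
    narrowOop encode(uintptr_t addr) {
      assert(((addr - heap_base) >> kAlignShift) <= 0xFFFFFFFFull);  // heap <= 32 GB
      return (narrowOop)((addr - heap_base) >> kAlignShift);
    }

    // Decompress: shift back and re-add the base.
    // 2^32 offsets * 8-byte alignment = 32 GB of addressable heap.
    uintptr_t decode(narrowOop v) {
      return heap_base + ((uintptr_t)v << kAlignShift);
    }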


@@ -28,16 +28,32 @@
// List of discovered references.
class DiscoveredList {
public:
DiscoveredList() : _head(NULL), _len(0) { }
oop head() const { return _head; }
oop* head_ptr() { return &_head; }
void set_head(oop o) { _head = o; }
bool empty() const { return _head == ReferenceProcessor::_sentinelRef; }
DiscoveredList() : _len(0), _compressed_head(0), _oop_head(NULL) { }
oop head() const {
return UseCompressedOops ? oopDesc::decode_heap_oop_not_null(_compressed_head) :
_oop_head;
}
HeapWord* adr_head() {
return UseCompressedOops ? (HeapWord*)&_compressed_head :
(HeapWord*)&_oop_head;
}
void set_head(oop o) {
if (UseCompressedOops) {
// Must compress the head ptr.
_compressed_head = oopDesc::encode_heap_oop_not_null(o);
} else {
_oop_head = o;
}
}
bool empty() const { return head() == ReferenceProcessor::sentinel_ref(); }
size_t length() { return _len; }
void set_length(size_t len) { _len = len; }
private:
// Set value depending on UseCompressedOops. This could be a template class
// but then we have to fix all the instantiations and declarations that use this class.
oop _oop_head;
narrowOop _compressed_head;
size_t _len;
oop _head;
};
oop ReferenceProcessor::_sentinelRef = NULL;
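
The comment inside DiscoveredList above notes that a template would avoid carrying both _oop_head and _compressed_head, at the cost of retyping every use site. A hypothetical sketch of that rejected shape (DiscoveredListT and T are invented names, not in the commit):

    #include <cstddef>

    // Hypothetical alternative: one head field whose width is fixed at
    // compile time. Every declaration that mentions DiscoveredList would
    // then need a template argument -- the churn the class comment avoids.
    template <class T>                   // T would be narrowOop or oop
    class DiscoveredListT {
      T      _head;
      size_t _len;
     public:
      T      head() const           { return _head; }
      void   set_head(T o)          { _head = o; }
      size_t length() const         { return _len; }
      void   set_length(size_t len) { _len = len; }
    };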
@@ -49,11 +65,11 @@ void referenceProcessor_init() {
}
void ReferenceProcessor::init_statics() {
assert(_sentinelRef == NULL, "should be initialized precsiely once");
assert(_sentinelRef == NULL, "should be initialized precisely once");
EXCEPTION_MARK;
_sentinelRef = instanceKlass::cast(
SystemDictionary::object_klass())->
allocate_permanent_instance(THREAD);
SystemDictionary::reference_klass())->
allocate_permanent_instance(THREAD);
// Initialize the master soft ref clock.
java_lang_ref_SoftReference::set_clock(os::javaTimeMillis());
@@ -69,15 +85,13 @@ void ReferenceProcessor::init_statics() {
"Unrecongnized RefDiscoveryPolicy");
}
ReferenceProcessor* ReferenceProcessor::create_ref_processor(
MemRegion span,
bool atomic_discovery,
bool mt_discovery,
BoolObjectClosure* is_alive_non_header,
int parallel_gc_threads,
bool mt_processing)
{
ReferenceProcessor*
ReferenceProcessor::create_ref_processor(MemRegion span,
bool atomic_discovery,
bool mt_discovery,
BoolObjectClosure* is_alive_non_header,
int parallel_gc_threads,
bool mt_processing) {
int mt_degree = 1;
if (parallel_gc_threads > 1) {
mt_degree = parallel_gc_threads;
@@ -93,10 +107,11 @@ ReferenceProcessor* ReferenceProcessor::create_ref_processor(
return rp;
}
ReferenceProcessor::ReferenceProcessor(MemRegion span,
bool atomic_discovery, bool mt_discovery, int mt_degree,
bool mt_processing) :
bool atomic_discovery,
bool mt_discovery,
int mt_degree,
bool mt_processing) :
_discovering_refs(false),
_enqueuing_is_done(false),
_is_alive_non_header(NULL),
@@ -114,10 +129,10 @@ ReferenceProcessor::ReferenceProcessor(MemRegion span,
_discoveredWeakRefs = &_discoveredSoftRefs[_num_q];
_discoveredFinalRefs = &_discoveredWeakRefs[_num_q];
_discoveredPhantomRefs = &_discoveredFinalRefs[_num_q];
assert(_sentinelRef != NULL, "_sentinelRef is NULL");
assert(sentinel_ref() != NULL, "_sentinelRef is NULL");
// Initialize all entries to _sentinelRef
for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
_discoveredSoftRefs[i].set_head(_sentinelRef);
_discoveredSoftRefs[i].set_head(sentinel_ref());
_discoveredSoftRefs[i].set_length(0);
}
}
@@ -134,16 +149,19 @@ void ReferenceProcessor::verify_no_references_recorded() {
void ReferenceProcessor::weak_oops_do(OopClosure* f) {
for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
f->do_oop(_discoveredSoftRefs[i].head_ptr());
if (UseCompressedOops) {
f->do_oop((narrowOop*)_discoveredSoftRefs[i].adr_head());
} else {
f->do_oop((oop*)_discoveredSoftRefs[i].adr_head());
}
}
}
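
weak_oops_do now passes the closure either a narrowOop* or an oop*, so OopClosure must expose both overloads; CountHandleClosure below stubs the narrow one with ShouldNotReachHere() because JNI handles stay uncompressed. A self-contained sketch of that pair of overloads (the names are stand-ins for the HotSpot types, not the real declarations):

    #include <cstdint>

    typedef uint32_t narrowOop;          // stand-in, as in the first sketch
    typedef void*    oop;                // stand-in for the HotSpot oop type

    struct OopClosureSketch {
      // Callers choose the overload from the runtime UseCompressedOops
      // flag, exactly as weak_oops_do does above; subclasses that can
      // never see compressed slots may assert in the narrow overload.
      virtual void do_oop(oop* p) = 0;
      virtual void do_oop(narrowOop* p) = 0;
      virtual ~OopClosureSketch() {}
    };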
void ReferenceProcessor::oops_do(OopClosure* f) {
f->do_oop(&_sentinelRef);
f->do_oop(adr_sentinel_ref());
}
void ReferenceProcessor::update_soft_ref_master_clock()
{
void ReferenceProcessor::update_soft_ref_master_clock() {
// Update (advance) the soft ref master clock field. This must be done
// after processing the soft ref list.
jlong now = os::javaTimeMillis();
@@ -164,9 +182,7 @@ void ReferenceProcessor::update_soft_ref_master_clock()
// past clock value.
}
void
ReferenceProcessor::process_discovered_references(
void ReferenceProcessor::process_discovered_references(
ReferencePolicy* policy,
BoolObjectClosure* is_alive,
OopClosure* keep_alive,
@@ -223,15 +239,13 @@ ReferenceProcessor::process_discovered_references(
}
}
#ifndef PRODUCT
// Calculate the number of jni handles.
unsigned int ReferenceProcessor::count_jni_refs()
{
uint ReferenceProcessor::count_jni_refs() {
class AlwaysAliveClosure: public BoolObjectClosure {
public:
bool do_object_b(oop obj) { return true; }
void do_object(oop obj) { assert(false, "Don't call"); }
virtual bool do_object_b(oop obj) { return true; }
virtual void do_object(oop obj) { assert(false, "Don't call"); }
};
class CountHandleClosure: public OopClosure {
@@ -239,9 +253,8 @@ unsigned int ReferenceProcessor::count_jni_refs()
int _count;
public:
CountHandleClosure(): _count(0) {}
void do_oop(oop* unused) {
_count++;
}
void do_oop(oop* unused) { _count++; }
void do_oop(narrowOop* unused) { ShouldNotReachHere(); }
int count() { return _count; }
};
CountHandleClosure global_handle_count;
@@ -262,36 +275,48 @@ void ReferenceProcessor::process_phaseJNI(BoolObjectClosure* is_alive,
#endif
JNIHandles::weak_oops_do(is_alive, keep_alive);
// Finally remember to keep sentinel around
keep_alive->do_oop(&_sentinelRef);
keep_alive->do_oop(adr_sentinel_ref());
complete_gc->do_void();
}
bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
NOT_PRODUCT(verify_ok_to_handle_reflists());
template <class T>
static bool enqueue_discovered_ref_helper(ReferenceProcessor* ref,
AbstractRefProcTaskExecutor* task_executor) {
// Remember old value of pending references list
oop* pending_list_addr = java_lang_ref_Reference::pending_list_addr();
oop old_pending_list_value = *pending_list_addr;
T* pending_list_addr = (T*)java_lang_ref_Reference::pending_list_addr();
T old_pending_list_value = *pending_list_addr;
// Enqueue references that are not made active again, and
// clear the decks for the next collection (cycle).
enqueue_discovered_reflists(pending_list_addr, task_executor);
ref->enqueue_discovered_reflists((HeapWord*)pending_list_addr, task_executor);
// Do the oop-check on pending_list_addr missed in
// enqueue_discovered_reflist. We should probably
// do a raw oop_check so that future such idempotent
// oop_stores relying on the oop-check side-effect
// may be elided automatically and safely without
// affecting correctness.
oop_store(pending_list_addr, *(pending_list_addr));
oop_store(pending_list_addr, oopDesc::load_decode_heap_oop(pending_list_addr));
// Stop treating discovered references specially.
disable_discovery();
ref->disable_discovery();
// Return true if new pending references were added
return old_pending_list_value != *pending_list_addr;
}
bool ReferenceProcessor::enqueue_discovered_references(AbstractRefProcTaskExecutor* task_executor) {
NOT_PRODUCT(verify_ok_to_handle_reflists());
if (UseCompressedOops) {
return enqueue_discovered_ref_helper<narrowOop>(this, task_executor);
} else {
return enqueue_discovered_ref_helper<oop>(this, task_executor);
}
}
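
The pattern above writes the enqueue logic once against a type parameter and picks the instantiation from the runtime flag. A minimal generic sketch of the same dispatch (the flag and helper names are illustrative, not the commit's):

    #include <cstdint>

    static bool use_compressed = true;   // assumed stand-in for UseCompressedOops

    template <class T>
    static bool swap_pending_head(T* pending_list_addr, T new_head) {
      T old = *pending_list_addr;        // body is written once against T
      *pending_list_addr = new_head;
      return old != new_head;            // report whether the list changed
    }

    bool enqueue_sketch(void* addr, uint64_t head) {
      return use_compressed
          ? swap_pending_head((uint32_t*)addr, (uint32_t)head)
          : swap_pending_head((uint64_t*)addr, (uint64_t)head);
    }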
void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
oop* pending_list_addr) {
HeapWord* pending_list_addr) {
// Given a list of refs linked through the "discovered" field
// (java.lang.ref.Reference.discovered) chain them through the
// "next" field (java.lang.ref.Reference.next) and prepend
@@ -305,19 +330,19 @@ void ReferenceProcessor::enqueue_discovered_reflist(DiscoveredList& refs_list,
// the next field and clearing it (except for the last
// non-sentinel object which is treated specially to avoid
// confusion with an active reference).
while (obj != _sentinelRef) {
while (obj != sentinel_ref()) {
assert(obj->is_instanceRef(), "should be reference object");
oop next = java_lang_ref_Reference::discovered(obj);
if (TraceReferenceGC && PrintGCDetails) {
gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
(oopDesc*) obj, (oopDesc*) next);
gclog_or_tty->print_cr(" obj " INTPTR_FORMAT "/next " INTPTR_FORMAT,
obj, next);
}
assert(*java_lang_ref_Reference::next_addr(obj) == NULL,
"The reference should not be enqueued");
if (next == _sentinelRef) { // obj is last
assert(java_lang_ref_Reference::next(obj) == NULL,
"The reference should not be enqueued");
if (next == sentinel_ref()) { // obj is last
// Swap refs_list into pending_list_addr and
// set obj's next to what we read from pending_list_addr.
oop old = (oop)Atomic::xchg_ptr(refs_list.head(), pending_list_addr);
oop old = oopDesc::atomic_exchange_oop(refs_list.head(), pending_list_addr);
// Need oop_check on pending_list_addr above;
// see special oop-check code at the end of
// enqueue_discovered_reflists() further below.
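
atomic_exchange_oop, introduced by this change, must swap a 32-bit slot under compressed oops and a pointer-width slot otherwise. A sketch of the idea using C++11 std::atomic in place of HotSpot's Atomic machinery, with the same encode/decode arithmetic as the first sketch:

    #include <atomic>
    #include <cstdint>

    static bool      use_compressed_slots = true;  // assumed runtime flag
    static uintptr_t base = 0;                     // assumed heap base

    static inline uint32_t  enc(uintptr_t a) { return (uint32_t)((a - base) >> 3); }
    static inline uintptr_t dec(uint32_t v)  { return base + ((uintptr_t)v << 3); }

    // Exchange new_value into *dest and return the previous oop, widening
    // through dec() when the slot holds a compressed value.
    uintptr_t atomic_exchange_oop_sketch(uintptr_t new_value, void* dest) {
      if (use_compressed_slots) {
        uint32_t old = reinterpret_cast<std::atomic<uint32_t>*>(dest)
                           ->exchange(enc(new_value));
        return dec(old);
      }
      return reinterpret_cast<std::atomic<uintptr_t>*>(dest)->exchange(new_value);
    }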
@@ -341,15 +366,14 @@ class RefProcEnqueueTask: public AbstractRefProcTaskExecutor::EnqueueTask {
public:
RefProcEnqueueTask(ReferenceProcessor& ref_processor,
DiscoveredList discovered_refs[],
oop* pending_list_addr,
HeapWord* pending_list_addr,
oop sentinel_ref,
int n_queues)
: EnqueueTask(ref_processor, discovered_refs,
pending_list_addr, sentinel_ref, n_queues)
{ }
virtual void work(unsigned int work_id)
{
virtual void work(unsigned int work_id) {
assert(work_id < (unsigned int)_ref_processor.num_q(), "Index out-of-bounds");
// Simplest first cut: static partitioning.
int index = work_id;
@@ -363,18 +387,18 @@ public:
};
// Enqueue references that are not made active again
void ReferenceProcessor::enqueue_discovered_reflists(oop* pending_list_addr,
void ReferenceProcessor::enqueue_discovered_reflists(HeapWord* pending_list_addr,
AbstractRefProcTaskExecutor* task_executor) {
if (_processing_is_mt && task_executor != NULL) {
// Parallel code
RefProcEnqueueTask tsk(*this, _discoveredSoftRefs,
pending_list_addr, _sentinelRef, _num_q);
pending_list_addr, sentinel_ref(), _num_q);
task_executor->execute(tsk);
} else {
// Serial code: call the parent class's implementation
for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
enqueue_discovered_reflist(_discoveredSoftRefs[i], pending_list_addr);
_discoveredSoftRefs[i].set_head(_sentinelRef);
_discoveredSoftRefs[i].set_head(sentinel_ref());
_discoveredSoftRefs[i].set_length(0);
}
}
@@ -388,14 +412,13 @@ public:
BoolObjectClosure* is_alive);
// End Of List.
inline bool has_next() const
{ return _next != ReferenceProcessor::_sentinelRef; }
inline bool has_next() const { return _next != ReferenceProcessor::sentinel_ref(); }
// Get oop to the Reference object.
inline oop obj() const { return _ref; }
inline oop obj() const { return _ref; }
// Get oop to the referent object.
inline oop referent() const { return _referent; }
inline oop referent() const { return _referent; }
// Returns true if referent is alive.
inline bool is_referent_alive() const;
@@ -417,13 +440,26 @@ public:
inline void make_active() { java_lang_ref_Reference::set_next(_ref, NULL); }
// Make the referent alive.
inline void make_referent_alive() { _keep_alive->do_oop(_referent_addr); }
inline void make_referent_alive() {
if (UseCompressedOops) {
_keep_alive->do_oop((narrowOop*)_referent_addr);
} else {
_keep_alive->do_oop((oop*)_referent_addr);
}
}
// Update the discovered field.
inline void update_discovered() { _keep_alive->do_oop(_prev_next); }
inline void update_discovered() {
// First _prev_next ref actually points into DiscoveredList (gross).
if (UseCompressedOops) {
_keep_alive->do_oop((narrowOop*)_prev_next);
} else {
_keep_alive->do_oop((oop*)_prev_next);
}
}
// NULL out referent pointer.
inline void clear_referent() { *_referent_addr = NULL; }
inline void clear_referent() { oop_store_raw(_referent_addr, NULL); }
// Statistics
NOT_PRODUCT(
@@ -436,11 +472,11 @@ private:
private:
DiscoveredList& _refs_list;
oop* _prev_next;
HeapWord* _prev_next;
oop _ref;
oop* _discovered_addr;
HeapWord* _discovered_addr;
oop _next;
oop* _referent_addr;
HeapWord* _referent_addr;
oop _referent;
OopClosure* _keep_alive;
BoolObjectClosure* _is_alive;
@@ -457,7 +493,7 @@ inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_li
OopClosure* keep_alive,
BoolObjectClosure* is_alive)
: _refs_list(refs_list),
_prev_next(refs_list.head_ptr()),
_prev_next(refs_list.adr_head()),
_ref(refs_list.head()),
#ifdef ASSERT
_first_seen(refs_list.head()),
@@ -471,19 +507,18 @@ inline DiscoveredListIterator::DiscoveredListIterator(DiscoveredList& refs_li
_is_alive(is_alive)
{ }
inline bool DiscoveredListIterator::is_referent_alive() const
{
inline bool DiscoveredListIterator::is_referent_alive() const {
return _is_alive->do_object_b(_referent);
}
inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent))
{
inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referent)) {
_discovered_addr = java_lang_ref_Reference::discovered_addr(_ref);
assert(_discovered_addr && (*_discovered_addr)->is_oop_or_null(),
oop discovered = java_lang_ref_Reference::discovered(_ref);
assert(_discovered_addr && discovered->is_oop_or_null(),
"discovered field is bad");
_next = *_discovered_addr;
_next = discovered;
_referent_addr = java_lang_ref_Reference::referent_addr(_ref);
_referent = *_referent_addr;
_referent = java_lang_ref_Reference::referent(_ref);
assert(Universe::heap()->is_in_reserved_or_null(_referent),
"Wrong oop found in java.lang.Reference object");
assert(allow_null_referent ?
@@ -492,32 +527,32 @@ inline void DiscoveredListIterator::load_ptrs(DEBUG_ONLY(bool allow_null_referen
"bad referent");
}
inline void DiscoveredListIterator::next()
{
inline void DiscoveredListIterator::next() {
_prev_next = _discovered_addr;
move_to_next();
}
inline void DiscoveredListIterator::remove()
{
inline void DiscoveredListIterator::remove() {
assert(_ref->is_oop(), "Dropping a bad reference");
// Clear the discovered_addr field so that the object does
// not look like it has been discovered.
*_discovered_addr = NULL;
// Remove Reference object from list.
*_prev_next = _next;
oop_store_raw(_discovered_addr, NULL);
// First _prev_next ref actually points into DiscoveredList (gross).
if (UseCompressedOops) {
// Remove Reference object from list.
oopDesc::encode_store_heap_oop_not_null((narrowOop*)_prev_next, _next);
} else {
// Remove Reference object from list.
oopDesc::store_heap_oop((oop*)_prev_next, _next);
}
NOT_PRODUCT(_removed++);
move_to_next();
}
inline void DiscoveredListIterator::move_to_next()
{
inline void DiscoveredListIterator::move_to_next() {
_ref = _next;
assert(_ref != _first_seen, "cyclic ref_list found");
NOT_PRODUCT(_processed++);
}
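
_prev_next in the iterator holds the address of the link that points at the current Reference, which is what lets remove() unlink in O(1) without re-walking the list (and why the first _prev_next "points into DiscoveredList", per the comment further up). The generic linked-list idiom, sketched standalone:

    #include <cstddef>

    struct Node { Node* next; };

    // Unlink the current node given the address of the link that points
    // to it; no second traversal is needed. DiscoveredListIterator's
    // _prev_next plays the role of prev_next here.
    void unlink(Node** prev_next) {
      Node* cur = *prev_next;
      *prev_next = cur->next;          // splice the list around cur
      cur->next = NULL;                // detach, as remove() clears discovered
    }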
// NOTE: process_phase*() are largely similar, and at a high level
// merely iterate over the extant list applying a predicate to
// each of its elements and possibly removing that element from the
@@ -531,13 +566,13 @@ inline void DiscoveredListIterator::move_to_next()
// referents are not alive, but that should be kept alive for policy reasons.
// Keep alive the transitive closure of all such referents.
void
ReferenceProcessor::process_phase1(DiscoveredList& refs_list_addr,
ReferenceProcessor::process_phase1(DiscoveredList& refs_list,
ReferencePolicy* policy,
BoolObjectClosure* is_alive,
OopClosure* keep_alive,
VoidClosure* complete_gc) {
assert(policy != NULL, "Must have a non-NULL policy");
DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
// Decide which softly reachable refs should be kept alive.
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(!discovery_is_atomic() /* allow_null_referent */));
@@ -545,7 +580,7 @@ ReferenceProcessor::process_phase1(DiscoveredList& refs_list_addr,
if (referent_is_dead && !policy->should_clear_reference(iter.obj())) {
if (TraceReferenceGC) {
gclog_or_tty->print_cr("Dropping reference (" INTPTR_FORMAT ": %s" ") by policy",
(address)iter.obj(), iter.obj()->blueprint()->internal_name());
iter.obj(), iter.obj()->blueprint()->internal_name());
}
// Make the Reference object active again
iter.make_active();
@@ -570,20 +605,19 @@ ReferenceProcessor::process_phase1(DiscoveredList& refs_list_addr,
// Traverse the list and remove any Refs that are not active, or
// whose referents are either alive or NULL.
void
ReferenceProcessor::pp2_work(DiscoveredList& refs_list_addr,
ReferenceProcessor::pp2_work(DiscoveredList& refs_list,
BoolObjectClosure* is_alive,
OopClosure* keep_alive)
{
OopClosure* keep_alive) {
assert(discovery_is_atomic(), "Error");
DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
DEBUG_ONLY(oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());)
assert(*next_addr == NULL, "Should not discover inactive Reference");
DEBUG_ONLY(oop next = java_lang_ref_Reference::next(iter.obj());)
assert(next == NULL, "Should not discover inactive Reference");
if (iter.is_referent_alive()) {
if (TraceReferenceGC) {
gclog_or_tty->print_cr("Dropping strongly reachable reference (" INTPTR_FORMAT ": %s)",
(address)iter.obj(), iter.obj()->blueprint()->internal_name());
iter.obj(), iter.obj()->blueprint()->internal_name());
}
// The referent is reachable after all.
// Update the referent pointer as necessary: Note that this
@@ -605,25 +639,28 @@ ReferenceProcessor::pp2_work(DiscoveredList& refs_list_addr,
}
void
ReferenceProcessor::pp2_work_concurrent_discovery(
DiscoveredList& refs_list_addr,
BoolObjectClosure* is_alive,
OopClosure* keep_alive,
VoidClosure* complete_gc)
{
ReferenceProcessor::pp2_work_concurrent_discovery(DiscoveredList& refs_list,
BoolObjectClosure* is_alive,
OopClosure* keep_alive,
VoidClosure* complete_gc) {
assert(!discovery_is_atomic(), "Error");
DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
HeapWord* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
oop next = java_lang_ref_Reference::next(iter.obj());
if ((iter.referent() == NULL || iter.is_referent_alive() ||
*next_addr != NULL)) {
assert((*next_addr)->is_oop_or_null(), "bad next field");
next != NULL)) {
assert(next->is_oop_or_null(), "bad next field");
// Remove Reference object from list
iter.remove();
// Trace the cohorts
iter.make_referent_alive();
keep_alive->do_oop(next_addr);
if (UseCompressedOops) {
keep_alive->do_oop((narrowOop*)next_addr);
} else {
keep_alive->do_oop((oop*)next_addr);
}
} else {
iter.next();
}
@@ -639,15 +676,15 @@ ReferenceProcessor::pp2_work_concurrent_discovery(
}
// Traverse the list and process the referents, by either
// either clearing them or keeping them (and their reachable
// clearing them or keeping them (and their reachable
// closure) alive.
void
ReferenceProcessor::process_phase3(DiscoveredList& refs_list_addr,
ReferenceProcessor::process_phase3(DiscoveredList& refs_list,
bool clear_referent,
BoolObjectClosure* is_alive,
OopClosure* keep_alive,
VoidClosure* complete_gc) {
DiscoveredListIterator iter(refs_list_addr, keep_alive, is_alive);
DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
while (iter.has_next()) {
iter.update_discovered();
iter.load_ptrs(DEBUG_ONLY(false /* allow_null_referent */));
@@ -661,7 +698,7 @@ ReferenceProcessor::process_phase3(DiscoveredList& refs_list_addr,
if (TraceReferenceGC) {
gclog_or_tty->print_cr("Adding %sreference (" INTPTR_FORMAT ": %s) as pending",
clear_referent ? "cleared " : "",
(address)iter.obj(), iter.obj()->blueprint()->internal_name());
iter.obj(), iter.obj()->blueprint()->internal_name());
}
assert(iter.obj()->is_oop(UseConcMarkSweepGC), "Adding a bad reference");
// If discovery is concurrent, we may have objects with null referents,
@@ -679,15 +716,15 @@ ReferenceProcessor::process_phase3(DiscoveredList& refs_list_addr,
}
void
ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& ref_list) {
oop obj = ref_list.head();
while (obj != _sentinelRef) {
oop* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
obj = *discovered_addr;
*discovered_addr = NULL;
ReferenceProcessor::abandon_partial_discovered_list(DiscoveredList& refs_list) {
oop obj = refs_list.head();
while (obj != sentinel_ref()) {
oop discovered = java_lang_ref_Reference::discovered(obj);
java_lang_ref_Reference::set_discovered_raw(obj, NULL);
obj = discovered;
}
ref_list.set_head(_sentinelRef);
ref_list.set_length(0);
refs_list.set_head(sentinel_ref());
refs_list.set_length(0);
}
void
@@ -777,7 +814,7 @@ void ReferenceProcessor::balance_queues(DiscoveredList ref_lists[])
// find an element to split the list on
for (size_t j = 0; j < refs_to_move; ++j) {
move_tail = new_head;
new_head = *java_lang_ref_Reference::discovered_addr(new_head);
new_head = java_lang_ref_Reference::discovered(new_head);
}
java_lang_ref_Reference::set_discovered(move_tail, ref_lists[to_idx].head());
ref_lists[to_idx].set_head(move_head);
@@ -875,17 +912,17 @@ void ReferenceProcessor::clean_up_discovered_reflist(DiscoveredList& refs_list)
size_t length = refs_list.length();
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
assert((*next_addr)->is_oop_or_null(), "bad next field");
oop next = java_lang_ref_Reference::next(iter.obj());
assert(next->is_oop_or_null(), "bad next field");
// If referent has been cleared or Reference is not active,
// drop it.
if (iter.referent() == NULL || *next_addr != NULL) {
if (iter.referent() == NULL || next != NULL) {
debug_only(
if (PrintGCDetails && TraceReferenceGC) {
gclog_or_tty->print_cr("clean_up_discovered_list: Dropping Reference: "
INTPTR_FORMAT " with next field: " INTPTR_FORMAT
" and referent: " INTPTR_FORMAT,
(address)iter.obj(), (address)*next_addr, (address)iter.referent());
iter.obj(), next, iter.referent());
}
)
// Remove Reference object from list
@@ -950,18 +987,21 @@ inline DiscoveredList* ReferenceProcessor::get_discovered_list(ReferenceType rt)
return list;
}
inline void ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& list,
oop obj, oop* discovered_addr) {
inline void
ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& refs_list,
oop obj,
HeapWord* discovered_addr) {
assert(_discovery_is_mt, "!_discovery_is_mt should have been handled by caller");
// First we must make sure this object is only enqueued once. CAS in a non null
// discovered_addr.
oop retest = (oop)Atomic::cmpxchg_ptr(list.head(), discovered_addr, NULL);
oop retest = oopDesc::atomic_compare_exchange_oop(refs_list.head(), discovered_addr,
NULL);
if (retest == NULL) {
// This thread just won the right to enqueue the object.
// We have separate lists for enqueueing so no synchronization
// is necessary.
list.set_head(obj);
list.set_length(list.length() + 1);
refs_list.set_head(obj);
refs_list.set_length(refs_list.length() + 1);
} else {
// If retest was non NULL, another thread beat us to it:
// The reference has already been discovered...
@@ -972,7 +1012,6 @@ inline void ReferenceProcessor::add_to_discovered_list_mt(DiscoveredList& list,
}
}
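
The CAS above is what makes multi-threaded discovery race-free: whichever GC thread installs a non-NULL discovered value first owns the Reference, and losers simply drop it. A sketch of that claim protocol, with std::atomic standing in for atomic_compare_exchange_oop:

    #include <atomic>
    #include <cstdint>

    // Returns true if this thread won the race to discover the object:
    // it swapped new_head into a discovered slot that was still NULL (0).
    bool claim_discovered(std::atomic<uintptr_t>* discovered_addr,
                          uintptr_t new_head) {
      uintptr_t expected = 0;          // NULL marks an undiscovered Reference
      return discovered_addr->compare_exchange_strong(expected, new_head);
      // On failure another thread already linked the object; the caller
      // drops it, as the else-branch above does.
    }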
// We mention two of several possible choices here:
// #0: if the reference object is not in the "originating generation"
// (or part of the heap being collected, indicated by our "span"
@@ -1006,8 +1045,8 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
return false;
}
// We only enqueue active references.
oop* next_addr = java_lang_ref_Reference::next_addr(obj);
if (*next_addr != NULL) {
oop next = java_lang_ref_Reference::next(obj);
if (next != NULL) {
return false;
}
@@ -1034,14 +1073,14 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
}
}
oop* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
assert(discovered_addr != NULL && (*discovered_addr)->is_oop_or_null(),
"bad discovered field");
if (*discovered_addr != NULL) {
HeapWord* discovered_addr = java_lang_ref_Reference::discovered_addr(obj);
oop discovered = java_lang_ref_Reference::discovered(obj);
assert(discovered->is_oop_or_null(), "bad discovered field");
if (discovered != NULL) {
// The reference has already been discovered...
if (TraceReferenceGC) {
gclog_or_tty->print_cr("Already enqueued reference (" INTPTR_FORMAT ": %s)",
(oopDesc*)obj, obj->blueprint()->internal_name());
obj, obj->blueprint()->internal_name());
}
if (RefDiscoveryPolicy == ReferentBasedDiscovery) {
// assumes that an object is not processed twice;
@@ -1088,7 +1127,7 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
if (_discovery_is_mt) {
add_to_discovered_list_mt(*list, obj, discovered_addr);
} else {
*discovered_addr = list->head();
oop_store_raw(discovered_addr, list->head());
list->set_head(obj);
list->set_length(list->length() + 1);
}
@@ -1106,7 +1145,7 @@ bool ReferenceProcessor::discover_reference(oop obj, ReferenceType rt) {
oop referent = java_lang_ref_Reference::referent(obj);
if (PrintGCDetails) {
gclog_or_tty->print_cr("Enqueued reference (" INTPTR_FORMAT ": %s)",
(oopDesc*) obj, obj->blueprint()->internal_name());
obj, obj->blueprint()->internal_name());
}
assert(referent->is_oop(), "Enqueued a bad referent");
}
@@ -1181,17 +1220,20 @@ void ReferenceProcessor::preclean_discovered_references(
// are not active (have a non-NULL next field). NOTE: For this to work
// correctly, refs discovery can not be happening concurrently with this
// step.
void ReferenceProcessor::preclean_discovered_reflist(
DiscoveredList& refs_list, BoolObjectClosure* is_alive,
OopClosure* keep_alive, VoidClosure* complete_gc, YieldClosure* yield) {
void
ReferenceProcessor::preclean_discovered_reflist(DiscoveredList& refs_list,
BoolObjectClosure* is_alive,
OopClosure* keep_alive,
VoidClosure* complete_gc,
YieldClosure* yield) {
DiscoveredListIterator iter(refs_list, keep_alive, is_alive);
size_t length = refs_list.length();
while (iter.has_next()) {
iter.load_ptrs(DEBUG_ONLY(true /* allow_null_referent */));
oop* next_addr = java_lang_ref_Reference::next_addr(iter.obj());
oop obj = iter.obj();
oop next = java_lang_ref_Reference::next(obj);
if (iter.referent() == NULL || iter.is_referent_alive() ||
*next_addr != NULL) {
next != NULL) {
// The referent has been cleared, or is alive, or the Reference is not
// active; we need to trace and mark its cohort.
if (TraceReferenceGC) {
@@ -1203,7 +1245,13 @@ void ReferenceProcessor::preclean_discovered_reflist(
--length;
// Keep alive its cohort.
iter.make_referent_alive();
keep_alive->do_oop(next_addr);
if (UseCompressedOops) {
narrowOop* next_addr = (narrowOop*)java_lang_ref_Reference::next_addr(obj);
keep_alive->do_oop(next_addr);
} else {
oop* next_addr = (oop*)java_lang_ref_Reference::next_addr(obj);
keep_alive->do_oop(next_addr);
}
} else {
iter.next();
}
@@ -1241,7 +1289,7 @@ void ReferenceProcessor::verify_ok_to_handle_reflists() {
#endif
void ReferenceProcessor::verify() {
guarantee(_sentinelRef != NULL && _sentinelRef->is_oop(), "Lost _sentinelRef");
guarantee(sentinel_ref() != NULL && sentinel_ref()->is_oop(), "Lost _sentinelRef");
}
#ifndef PRODUCT
@@ -1249,12 +1297,12 @@ void ReferenceProcessor::clear_discovered_references() {
guarantee(!_discovering_refs, "Discovering refs?");
for (int i = 0; i < _num_q * subclasses_of_ref; i++) {
oop obj = _discoveredSoftRefs[i].head();
while (obj != _sentinelRef) {
while (obj != sentinel_ref()) {
oop next = java_lang_ref_Reference::discovered(obj);
java_lang_ref_Reference::set_discovered(obj, (oop) NULL);
obj = next;
}
_discoveredSoftRefs[i].set_head(_sentinelRef);
_discoveredSoftRefs[i].set_head(sentinel_ref());
_discoveredSoftRefs[i].set_length(0);
}
}