6458402: 3 jvmti tests fail with CMS and +ExplicitGCInvokesConcurrent

Make JvmtiGCMark safe to run non-safepoint and instrument CMS

Reviewed-by: ysr, dcubed
Keith McGuigan 2011-01-10 17:14:53 -05:00
parent 6215ab50b3
commit ae65c6240f
16 changed files with 157 additions and 456 deletions
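
The crux of the change sits in the header hunk at the end of the diff: the DTrace-only DTraceGCProbeMarker and the per-operation JvmtiGC*Marker / notify_gc_begin() / notify_gc_end() pairs are folded into a single stack-allocated SvcGCMarker built around a JvmtiGCMarker that, per the summary above, is now safe to run outside a safepoint, which is what lets the CMS checkpoint operations be instrumented too. As a rough, self-contained illustration of the RAII shape involved (not HotSpot code: the printf stubs, the reduced JvmtiGCMarker, and main() are placeholders), something like this captures the ordering of the notifications:

    #include <cstdio>

    // Stand-ins for VM_GC_Operation::notify_gc_begin/notify_gc_end (assumption:
    // reduced to logging so the sketch compiles on its own).
    static void notify_gc_begin(bool full) { std::printf("notify_gc_begin(full=%d)\n", (int)full); }
    static void notify_gc_end()            { std::printf("notify_gc_end()\n"); }

    // Stub for the real JvmtiGCMarker, which posts the JVMTI
    // GarbageCollectionStart/GarbageCollectionFinish events.
    class JvmtiGCMarker {
     public:
      JvmtiGCMarker()  { std::printf("jvmti: GarbageCollectionStart\n"); }
      ~JvmtiGCMarker() { std::printf("jvmti: GarbageCollectionFinish\n"); }
    };

    // Same shape as the SvcGCMarker added below (minus the StackObj base):
    // the member is constructed before, and destroyed after, the begin/end calls.
    class SvcGCMarker {
     private:
      JvmtiGCMarker _jgcm;
     public:
      typedef enum { MINOR, FULL, OTHER } reason_type;
      SvcGCMarker(reason_type reason) { notify_gc_begin(reason == FULL); }
      ~SvcGCMarker()                  { notify_gc_end(); }
    };

    int main() {
      SvcGCMarker sgcm(SvcGCMarker::FULL);   // one marker per GC operation or checkpoint
      std::printf("... collection work ...\n");
      return 0;                              // unwinding posts the end/finish notifications
    }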

@@ -3478,6 +3478,7 @@ void CMSCollector::checkpointRootsInitial(bool asynch) {
assert(_collectorState == InitialMarking, "Wrong collector state");
check_correct_thread_executing();
TraceCMSMemoryManagerStats tms(_collectorState);
ReferenceProcessor* rp = ref_processor();
SpecializationStats::clear();
assert(_restart_addr == NULL, "Control point invariant");
@@ -5940,11 +5941,6 @@ void CMSCollector::refProcessingWork(bool asynch, bool clear_all_soft_refs) {
}
rp->verify_no_references_recorded();
assert(!rp->discovery_enabled(), "should have been disabled");
// JVMTI object tagging is based on JNI weak refs. If any of these
// refs were cleared then JVMTI needs to update its maps and
// maybe post ObjectFrees to agents.
JvmtiExport::cms_ref_processing_epilogue();
}
#ifndef PRODUCT
@@ -6305,6 +6301,7 @@ void CMSCollector::do_CMS_operation(CMS_op_type op) {
switch (op) {
case CMS_op_checkpointRootsInitial: {
SvcGCMarker sgcm(SvcGCMarker::OTHER);
checkpointRootsInitial(true); // asynch
if (PrintGC) {
_cmsGen->printOccupancy("initial-mark");
@@ -6312,6 +6309,7 @@ void CMSCollector::do_CMS_operation(CMS_op_type op) {
break;
}
case CMS_op_checkpointRootsFinal: {
SvcGCMarker sgcm(SvcGCMarker::OTHER);
checkpointRootsFinal(true, // asynch
false, // !clear_all_soft_refs
false); // !init_mark_was_synchronous

@@ -31,6 +31,7 @@
#include "gc_implementation/g1/g1RemSet.hpp"
#include "gc_implementation/g1/heapRegionRemSet.hpp"
#include "gc_implementation/g1/heapRegionSeq.inline.hpp"
#include "gc_implementation/shared/vmGCOperations.hpp"
#include "memory/genOopClosures.inline.hpp"
#include "memory/referencePolicy.hpp"
#include "memory/resourceArea.hpp"
@@ -1142,6 +1143,8 @@ void ConcurrentMark::checkpointRootsFinal(bool clear_all_soft_refs) {
return;
}
SvcGCMarker sgcm(SvcGCMarker::OTHER);
if (VerifyDuringGC) {
HandleMark hm; // handle scope
gclog_or_tty->print(" VerifyDuringGC:(before)");

@@ -1192,7 +1192,7 @@ bool G1CollectedHeap::do_collection(bool explicit_gc,
return false;
}
DTraceGCProbeMarker gc_probe_marker(true /* full */);
SvcGCMarker sgcm(SvcGCMarker::FULL);
ResourceMark rm;
if (PrintHeapAtGC) {
@@ -3214,7 +3214,7 @@ G1CollectedHeap::do_collection_pause_at_safepoint(double target_pause_time_ms) {
return false;
}
DTraceGCProbeMarker gc_probe_marker(false /* full */);
SvcGCMarker sgcm(SvcGCMarker::MINOR);
ResourceMark rm;
if (PrintHeapAtGC) {

@@ -38,7 +38,6 @@ VM_G1CollectForAllocation::VM_G1CollectForAllocation(
}
void VM_G1CollectForAllocation::doit() {
JvmtiGCForAllocationMarker jgcm;
G1CollectedHeap* g1h = G1CollectedHeap::heap();
_result = g1h->satisfy_failed_allocation(_word_size, &_pause_succeeded);
assert(_result == NULL || _pause_succeeded,
@@ -46,7 +45,6 @@ void VM_G1CollectForAllocation::doit() {
}
void VM_G1CollectFull::doit() {
JvmtiGCFullMarker jgcm;
G1CollectedHeap* g1h = G1CollectedHeap::heap();
GCCauseSetter x(g1h, _gc_cause);
g1h->do_full_collection(false /* clear_all_soft_refs */);
@@ -72,7 +70,6 @@ VM_G1IncCollectionPause::VM_G1IncCollectionPause(
}
void VM_G1IncCollectionPause::doit() {
JvmtiGCForAllocationMarker jgcm;
G1CollectedHeap* g1h = G1CollectedHeap::heap();
assert(!_should_initiate_conc_mark ||
((_gc_cause == GCCause::_gc_locker && GCLockerInvokesConcurrent) ||

@@ -42,8 +42,7 @@ VM_ParallelGCFailedAllocation::VM_ParallelGCFailedAllocation(size_t size,
}
void VM_ParallelGCFailedAllocation::doit() {
JvmtiGCForAllocationMarker jgcm;
notify_gc_begin(false);
SvcGCMarker sgcm(SvcGCMarker::MINOR);
ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "must be a ParallelScavengeHeap");
@@ -54,8 +53,6 @@ void VM_ParallelGCFailedAllocation::doit() {
if (_result == NULL && GC_locker::is_active_and_needs_gc()) {
set_gc_locked();
}
notify_gc_end();
}
VM_ParallelGCFailedPermanentAllocation::VM_ParallelGCFailedPermanentAllocation(size_t size,
@@ -67,8 +64,7 @@ VM_ParallelGCFailedPermanentAllocation::VM_ParallelGCFailedPermanentAllocation(s
}
void VM_ParallelGCFailedPermanentAllocation::doit() {
JvmtiGCFullMarker jgcm;
notify_gc_begin(true);
SvcGCMarker sgcm(SvcGCMarker::FULL);
ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
assert(heap->kind() == CollectedHeap::ParallelScavengeHeap, "must be a ParallelScavengeHeap");
@@ -78,7 +74,6 @@ void VM_ParallelGCFailedPermanentAllocation::doit() {
if (_result == NULL && GC_locker::is_active_and_needs_gc()) {
set_gc_locked();
}
notify_gc_end();
}
// Only used for System.gc() calls
@@ -91,8 +86,7 @@ VM_ParallelGCSystemGC::VM_ParallelGCSystemGC(unsigned int gc_count,
}
void VM_ParallelGCSystemGC::doit() {
JvmtiGCFullMarker jgcm;
notify_gc_begin(true);
SvcGCMarker sgcm(SvcGCMarker::FULL);
ParallelScavengeHeap* heap = (ParallelScavengeHeap*)Universe::heap();
assert(heap->kind() == CollectedHeap::ParallelScavengeHeap,
@@ -106,5 +100,4 @@ void VM_ParallelGCSystemGC::doit() {
} else {
heap->invoke_full_gc(false);
}
notify_gc_end();
}

@@ -31,7 +31,6 @@
#include "memory/oopFactory.hpp"
#include "oops/instanceKlass.hpp"
#include "oops/instanceRefKlass.hpp"
#include "prims/jvmtiExport.hpp"
#include "runtime/handles.inline.hpp"
#include "runtime/init.hpp"
#include "runtime/interfaceSupport.hpp"
@@ -40,6 +39,7 @@
#ifndef SERIALGC
#include "gc_implementation/g1/g1CollectedHeap.inline.hpp"
#endif
HS_DTRACE_PROBE_DECL1(hotspot, gc__begin, bool);
HS_DTRACE_PROBE_DECL(hotspot, gc__end);
@@ -158,8 +158,7 @@ void VM_GC_HeapInspection::doit() {
void VM_GenCollectForAllocation::doit() {
JvmtiGCForAllocationMarker jgcm;
notify_gc_begin(false);
SvcGCMarker sgcm(SvcGCMarker::MINOR);
GenCollectedHeap* gch = GenCollectedHeap::heap();
GCCauseSetter gccs(gch, _gc_cause);
@@ -169,22 +168,19 @@ void VM_GenCollectForAllocation::doit() {
if (_res == NULL && GC_locker::is_active_and_needs_gc()) {
set_gc_locked();
}
notify_gc_end();
}
void VM_GenCollectFull::doit() {
JvmtiGCFullMarker jgcm;
notify_gc_begin(true);
SvcGCMarker sgcm(SvcGCMarker::FULL);
GenCollectedHeap* gch = GenCollectedHeap::heap();
GCCauseSetter gccs(gch, _gc_cause);
gch->do_full_collection(gch->must_clear_all_soft_refs(), _max_level);
notify_gc_end();
}
void VM_GenCollectForPermanentAllocation::doit() {
JvmtiGCForAllocationMarker jgcm;
notify_gc_begin(true);
SvcGCMarker sgcm(SvcGCMarker::FULL);
SharedHeap* heap = (SharedHeap*)Universe::heap();
GCCauseSetter gccs(heap, _gc_cause);
switch (heap->kind()) {
@@ -209,5 +205,4 @@ void VM_GenCollectForPermanentAllocation::doit() {
if (_res == NULL && GC_locker::is_active_and_needs_gc()) {
set_gc_locked();
}
notify_gc_end();
}

@@ -30,6 +30,7 @@
#include "runtime/jniHandles.hpp"
#include "runtime/synchronizer.hpp"
#include "runtime/vm_operations.hpp"
#include "prims/jvmtiExport.hpp"
// The following class hierarchy represents
// a set of operations (VM_Operation) related to GC.
@@ -209,13 +210,17 @@ class VM_GenCollectForPermanentAllocation: public VM_GC_Operation {
HeapWord* result() const { return _res; }
};
class DTraceGCProbeMarker : public StackObj {
public:
DTraceGCProbeMarker(bool full) {
VM_GC_Operation::notify_gc_begin(full);
class SvcGCMarker : public StackObj {
private:
JvmtiGCMarker _jgcm;
public:
typedef enum { MINOR, FULL, OTHER } reason_type;
SvcGCMarker(reason_type reason ) {
VM_GC_Operation::notify_gc_begin(reason == FULL);
}
~DTraceGCProbeMarker() {
~SvcGCMarker() {
VM_GC_Operation::notify_gc_end();
}
};
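
Taken with the call-site hunks above, each GC VM operation now declares a single stack-allocated SvcGCMarker instead of pairing a JvmtiGC*Marker with explicit notify_gc_begin() / notify_gc_end() calls; because the end notification lives in the destructor, it no longer has to be repeated by hand at the tail of every doit() body. Reconstructed from its hunk above (showing only the resulting side), VM_GenCollectFull::doit() reduces to:

    void VM_GenCollectFull::doit() {
      SvcGCMarker sgcm(SvcGCMarker::FULL);   // replaces JvmtiGCFullMarker + notify_gc_begin(true)
      GenCollectedHeap* gch = GenCollectedHeap::heap();
      GCCauseSetter gccs(gch, _gc_cause);
      gch->do_full_collection(gch->must_clear_all_soft_refs(), _max_level);
    }                                        // ~SvcGCMarker() posts notify_gc_end() here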