8056240: Investigate increased GC remark time after class unloading changes in CRM Fuse
Reviewed-by: mgerdin, coleenp, bdelsart
parent 1055ae0436
commit d410f19961

19 changed files with 506 additions and 68 deletions
@@ -31,25 +31,23 @@
 #include "runtime/synchronizer.hpp"
 #include "runtime/thread.hpp"
 #include "services/threadService.hpp"
-#include "utilities/growableArray.hpp"
+#include "utilities/chunkedList.hpp"

+volatile MetadataOnStackBuffer* MetadataOnStackMark::_used_buffers = NULL;
+volatile MetadataOnStackBuffer* MetadataOnStackMark::_free_buffers = NULL;

-// Keep track of marked on-stack metadata so it can be cleared.
-GrowableArray<Metadata*>* _marked_objects = NULL;
 NOT_PRODUCT(bool MetadataOnStackMark::_is_active = false;)

 // Walk metadata on the stack and mark it so that redefinition doesn't delete
 // it. Class unloading also walks the previous versions and might try to
 // delete it, so this class is used by class unloading also.
-MetadataOnStackMark::MetadataOnStackMark(bool has_redefined_a_class) {
+MetadataOnStackMark::MetadataOnStackMark(bool visit_code_cache) {
   assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
+  assert(_used_buffers == NULL, "sanity check");
   NOT_PRODUCT(_is_active = true;)
-  if (_marked_objects == NULL) {
-    _marked_objects = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(1000, true);
-  }

   Threads::metadata_do(Metadata::mark_on_stack);
-  if (has_redefined_a_class) {
+  if (visit_code_cache) {
     CodeCache::alive_nmethods_do(nmethod::mark_on_stack);
   }
   CompileBroker::mark_on_stack();
@@ -62,15 +60,93 @@ MetadataOnStackMark::~MetadataOnStackMark() {
   // Unmark everything that was marked. Can't do the same walk because
   // redefine classes messes up the code cache so the set of methods
   // might not be the same.
-  for (int i = 0; i < _marked_objects->length(); i++) {
-    _marked_objects->at(i)->set_on_stack(false);
+
+  retire_buffer_for_thread(Thread::current());
+
+  MetadataOnStackBuffer* buffer = const_cast<MetadataOnStackBuffer*>(_used_buffers);
+  while (buffer != NULL) {
+    // Clear on stack state for all metadata.
+    size_t size = buffer->size();
+    for (size_t i = 0; i < size; i++) {
+      Metadata* md = buffer->at(i);
+      md->set_on_stack(false);
+    }
+
+    MetadataOnStackBuffer* next = buffer->next_used();
+
+    // Move the buffer to the free list.
+    buffer->clear();
+    buffer->set_next_used(NULL);
+    buffer->set_next_free(const_cast<MetadataOnStackBuffer*>(_free_buffers));
+    _free_buffers = buffer;
+
+    // Step to next used buffer.
+    buffer = next;
   }
-  _marked_objects->clear(); // reuse growable array for next time.
+
+  _used_buffers = NULL;
+
   NOT_PRODUCT(_is_active = false;)
 }

-// Record which objects are marked so we can unmark the same objects.
-void MetadataOnStackMark::record(Metadata* m) {
-  assert(_is_active, "metadata on stack marking is active");
-  _marked_objects->push(m);
+void MetadataOnStackMark::retire_buffer(MetadataOnStackBuffer* buffer) {
+  if (buffer == NULL) {
+    return;
+  }
+
+  MetadataOnStackBuffer* old_head;
+
+  do {
+    old_head = const_cast<MetadataOnStackBuffer*>(_used_buffers);
+    buffer->set_next_used(old_head);
+  } while (Atomic::cmpxchg_ptr(buffer, &_used_buffers, old_head) != old_head);
+}
+
+void MetadataOnStackMark::retire_buffer_for_thread(Thread* thread) {
+  retire_buffer(thread->metadata_on_stack_buffer());
+  thread->set_metadata_on_stack_buffer(NULL);
+}
+
+bool MetadataOnStackMark::has_buffer_for_thread(Thread* thread) {
+  return thread->metadata_on_stack_buffer() != NULL;
+}
+
+MetadataOnStackBuffer* MetadataOnStackMark::allocate_buffer() {
+  MetadataOnStackBuffer* allocated;
+  MetadataOnStackBuffer* new_head;
+
+  do {
+    allocated = const_cast<MetadataOnStackBuffer*>(_free_buffers);
+    if (allocated == NULL) {
+      break;
+    }
+    new_head = allocated->next_free();
+  } while (Atomic::cmpxchg_ptr(new_head, &_free_buffers, allocated) != allocated);
+
+  if (allocated == NULL) {
+    allocated = new MetadataOnStackBuffer();
+  }
+
+  assert(!allocated->is_full(), err_msg("Should not be full: " PTR_FORMAT, p2i(allocated)));
+
+  return allocated;
+}
+
+// Record which objects are marked so we can unmark the same objects.
+void MetadataOnStackMark::record(Metadata* m, Thread* thread) {
+  assert(_is_active, "metadata on stack marking is active");
+
+  MetadataOnStackBuffer* buffer = thread->metadata_on_stack_buffer();
+
+  if (buffer != NULL && buffer->is_full()) {
+    retire_buffer(buffer);
+    buffer = NULL;
+  }
+
+  if (buffer == NULL) {
+    buffer = allocate_buffer();
+    thread->set_metadata_on_stack_buffer(buffer);
+  }
+
+  buffer->push(m);
 }
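The patch replaces the single shared GrowableArray of marked metadata with per-thread MetadataOnStackBuffer chunks: each thread records into its own buffer and retires full buffers onto a global used list with a compare-and-swap loop, so marking threads no longer serialize on one array during the remark pause. The sketch below is a minimal, standalone illustration of that lock-free (Treiber-stack) push/pop pattern; it uses std::atomic from standard C++ rather than HotSpot's Atomic::cmpxchg_ptr, and the Buffer struct, its capacity, and the function names are placeholders for illustration, not the actual MetadataOnStackBuffer/ChunkedList API.

// lockfree_buffer_lists.cpp -- illustrative sketch only, not HotSpot code.
#include <atomic>
#include <cstddef>

struct Buffer {
  static const std::size_t capacity = 1024;  // placeholder chunk size
  void*       slots[capacity];
  std::size_t used = 0;
  Buffer*     next = nullptr;                // link for the lock-free lists
};

// Retire a full buffer onto a shared list, analogous to retire_buffer():
// read the head, link the buffer in front of it, and retry the CAS until
// no other thread has changed the head in between.
void retire(std::atomic<Buffer*>& used_list, Buffer* buffer) {
  Buffer* old_head = used_list.load(std::memory_order_relaxed);
  do {
    buffer->next = old_head;
  } while (!used_list.compare_exchange_weak(old_head, buffer,
                                            std::memory_order_release,
                                            std::memory_order_relaxed));
}

// Grab a buffer for reuse, analogous to allocate_buffer() popping from the
// free list; returns nullptr when the list is empty and the caller should
// allocate a fresh buffer instead.
Buffer* take_free(std::atomic<Buffer*>& free_list) {
  Buffer* head = free_list.load(std::memory_order_acquire);
  while (head != nullptr &&
         !free_list.compare_exchange_weak(head, head->next,
                                          std::memory_order_acquire,
                                          std::memory_order_acquire)) {
    // compare_exchange_weak reloaded head on failure; try again.
  }
  return head;
}

A general-purpose lock-free pop like take_free() would normally also need ABA protection; in this patch that appears unnecessary because buffers are only pushed back onto the free list in the destructor, which runs in the VM thread at a safepoint.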