6863023: need non-perm oops in code cache for JSR 292
Summary: Make a special root-list for those few nmethods which might contain non-perm oops.
Reviewed-by: twisti, kvn, never, jmasa, ysr
commit e261aecad8 (parent 1cf5b7ae11)
74 changed files with 979 additions and 279 deletions
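The heart of the change: nmethods whose embedded oops may point at scavengable (non-perm) objects are chained onto a dedicated side list, and a young-generation scavenge walks only that short list instead of every oop in the code cache. A minimal standalone C++ sketch of that idea follows; the class and member names are invented for illustration and are not the HotSpot API.

#include <cassert>
#include <cstdio>

// Illustrative stand-ins, not the HotSpot classes.
struct Method {
  bool    on_root_list = false;
  Method* root_link    = nullptr;   // intrusive link, like nmethod::_scavenge_root_link
};

class CodeCacheSketch {
  Method* _scavenge_roots = nullptr;   // like CodeCache::_scavenge_root_nmethods
public:
  // Registered once, when a freshly compiled method is found to embed young oops.
  void add_scavenge_root(Method* m) {
    assert(!m->on_root_list && "already registered");
    m->on_root_list = true;
    m->root_link = _scavenge_roots;
    _scavenge_roots = m;
  }
  // A young-generation collection walks only this short list,
  // not every entry in the code cache.
  template <typename Closure>
  void scavenge_roots_do(Closure f) {
    for (Method* cur = _scavenge_roots; cur != nullptr; cur = cur->root_link)
      f(cur);
  }
};

int main() {
  Method a, b;
  CodeCacheSketch cache;
  cache.add_scavenge_root(&a);
  cache.add_scavenge_root(&b);
  cache.scavenge_roots_do([](Method* m) { std::printf("visiting %p\n", (void*)m); });
}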
@@ -175,6 +175,8 @@ class CodeBlob VALUE_OBJ_CLASS_SPEC {
                             OopClosure* keep_alive,
                             bool unloading_occurred);
   virtual void oops_do(OopClosure* f) = 0;
+  // (All CodeBlob subtypes other than NMethod currently have
+  // an empty oops_do() method.

   // OopMap for frame
   OopMapSet* oop_maps() const { return _oop_maps; }
@@ -95,6 +95,7 @@ CodeHeap * CodeCache::_heap = new CodeHeap();
 int CodeCache::_number_of_blobs = 0;
 int CodeCache::_number_of_nmethods_with_dependencies = 0;
 bool CodeCache::_needs_cache_clean = false;
+nmethod* CodeCache::_scavenge_root_nmethods = NULL;


 CodeBlob* CodeCache::first() {
@@ -148,10 +149,7 @@ CodeBlob* CodeCache::allocate(int size) {
     }
   }
   verify_if_often();
-  if (PrintCodeCache2) {  // Need to add a new flag
-    ResourceMark rm;
-    tty->print_cr("CodeCache allocation: addr: " INTPTR_FORMAT ", size: 0x%x\n", cb, size);
-  }
+  print_trace("allocation", cb, size);
   return cb;
 }

@@ -159,10 +157,7 @@ void CodeCache::free(CodeBlob* cb) {
   assert_locked_or_safepoint(CodeCache_lock);
   verify_if_often();

-  if (PrintCodeCache2) {  // Need to add a new flag
-    ResourceMark rm;
-    tty->print_cr("CodeCache free: addr: " INTPTR_FORMAT ", size: 0x%x\n", cb, cb->size());
-  }
+  print_trace("free", cb);
   if (cb->is_nmethod() && ((nmethod *)cb)->has_dependencies()) {
     _number_of_nmethods_with_dependencies--;
   }
@@ -260,14 +255,148 @@ void CodeCache::do_unloading(BoolObjectClosure* is_alive,
   }
 }

-void CodeCache::oops_do(OopClosure* f) {
+void CodeCache::blobs_do(CodeBlobClosure* f) {
   assert_locked_or_safepoint(CodeCache_lock);
   FOR_ALL_ALIVE_BLOBS(cb) {
-    cb->oops_do(f);
+    f->do_code_blob(cb);
+
+#ifdef ASSERT
+    if (cb->is_nmethod())
+      ((nmethod*)cb)->verify_scavenge_root_oops();
+#endif //ASSERT
   }
 }

+// Walk the list of methods which might contain non-perm oops.
+void CodeCache::scavenge_root_nmethods_do(CodeBlobClosure* f) {
+  assert_locked_or_safepoint(CodeCache_lock);
+  debug_only(mark_scavenge_root_nmethods());
+
+  for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
+    debug_only(cur->clear_scavenge_root_marked());
+    assert(cur->scavenge_root_not_marked(), "");
+    assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
+
+    bool is_live = (!cur->is_zombie() && !cur->is_unloaded());
+#ifndef PRODUCT
+    if (TraceScavenge) {
+      cur->print_on(tty, is_live ? "scavenge root" : "dead scavenge root"); tty->cr();
+    }
+#endif //PRODUCT
+    if (is_live)
+      // Perform cur->oops_do(f), maybe just once per nmethod.
+      f->do_code_blob(cur);
+  }
+
+  // Check for stray marks.
+  debug_only(verify_perm_nmethods(NULL));
+}
+
+void CodeCache::add_scavenge_root_nmethod(nmethod* nm) {
+  assert_locked_or_safepoint(CodeCache_lock);
+  nm->set_on_scavenge_root_list();
+  nm->set_scavenge_root_link(_scavenge_root_nmethods);
+  set_scavenge_root_nmethods(nm);
+  print_trace("add_scavenge_root", nm);
+}
+
+void CodeCache::drop_scavenge_root_nmethod(nmethod* nm) {
+  assert_locked_or_safepoint(CodeCache_lock);
+  print_trace("drop_scavenge_root", nm);
+  nmethod* last = NULL;
+  nmethod* cur = scavenge_root_nmethods();
+  while (cur != NULL) {
+    nmethod* next = cur->scavenge_root_link();
+    if (cur == nm) {
+      if (last != NULL)
+            last->set_scavenge_root_link(next);
+      else  set_scavenge_root_nmethods(next);
+      nm->set_scavenge_root_link(NULL);
+      nm->clear_on_scavenge_root_list();
+      return;
+    }
+    last = cur;
+    cur = next;
+  }
+  assert(false, "should have been on list");
+}
+
+void CodeCache::prune_scavenge_root_nmethods() {
+  assert_locked_or_safepoint(CodeCache_lock);
+  debug_only(mark_scavenge_root_nmethods());
+
+  nmethod* last = NULL;
+  nmethod* cur = scavenge_root_nmethods();
+  while (cur != NULL) {
+    nmethod* next = cur->scavenge_root_link();
+    debug_only(cur->clear_scavenge_root_marked());
+    assert(cur->scavenge_root_not_marked(), "");
+    assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
+
+    if (!cur->is_zombie() && !cur->is_unloaded()
+        && cur->detect_scavenge_root_oops()) {
+      // Keep it.  Advance 'last' to prevent deletion.
+      last = cur;
+    } else {
+      // Prune it from the list, so we don't have to look at it any more.
+      print_trace("prune_scavenge_root", cur);
+      cur->set_scavenge_root_link(NULL);
+      cur->clear_on_scavenge_root_list();
+      if (last != NULL)
+            last->set_scavenge_root_link(next);
+      else  set_scavenge_root_nmethods(next);
+    }
+    cur = next;
+  }
+
+  // Check for stray marks.
+  debug_only(verify_perm_nmethods(NULL));
+}
+
+#ifndef PRODUCT
+void CodeCache::asserted_non_scavengable_nmethods_do(CodeBlobClosure* f) {
+  // While we are here, verify the integrity of the list.
+  mark_scavenge_root_nmethods();
+  for (nmethod* cur = scavenge_root_nmethods(); cur != NULL; cur = cur->scavenge_root_link()) {
+    assert(cur->on_scavenge_root_list(), "else shouldn't be on this list");
+    cur->clear_scavenge_root_marked();
+  }
+  verify_perm_nmethods(f);
+}
+
+// Temporarily mark nmethods that are claimed to be on the non-perm list.
+void CodeCache::mark_scavenge_root_nmethods() {
+  FOR_ALL_ALIVE_BLOBS(cb) {
+    if (cb->is_nmethod()) {
+      nmethod *nm = (nmethod*)cb;
+      assert(nm->scavenge_root_not_marked(), "clean state");
+      if (nm->on_scavenge_root_list())
+        nm->set_scavenge_root_marked();
+    }
+  }
+}
+
+// If the closure is given, run it on the unlisted nmethods.
+// Also make sure that the effects of mark_scavenge_root_nmethods is gone.
+void CodeCache::verify_perm_nmethods(CodeBlobClosure* f_or_null) {
+  FOR_ALL_ALIVE_BLOBS(cb) {
+    bool call_f = (f_or_null != NULL);
+    if (cb->is_nmethod()) {
+      nmethod *nm = (nmethod*)cb;
+      assert(nm->scavenge_root_not_marked(), "must be already processed");
+      if (nm->on_scavenge_root_list())
+        call_f = false;  // don't show this one to the client
+      nm->verify_scavenge_root_oops();
+    } else {
+      call_f = false;  // not an nmethod
+    }
+    if (call_f)  f_or_null->do_code_blob(cb);
+  }
+}
+#endif //PRODUCT
+
 void CodeCache::gc_prologue() {
+  assert(!nmethod::oops_do_marking_is_active(), "oops_do_marking_epilogue must be called");
 }


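Both drop_scavenge_root_nmethod and prune_scavenge_root_nmethods above unlink entries from a singly linked list while iterating, carrying a trailing "last" pointer so the predecessor link (or the list head) can be patched. A standalone sketch of that pattern, with invented names rather than HotSpot code:

#include <cstdio>

struct Node {
  int   id;
  bool  keep;
  Node* link;
};

// Remove every node whose 'keep' flag is false, in one pass.
// 'last' trails 'cur' so the predecessor's link (or the head) can be patched.
Node* prune(Node* head) {
  Node* last = nullptr;
  Node* cur  = head;
  while (cur != nullptr) {
    Node* next = cur->link;
    if (cur->keep) {
      last = cur;                 // keep it; advance the trailing pointer
    } else {
      cur->link = nullptr;        // unlink the pruned node
      if (last != nullptr) last->link = next;
      else                 head       = next;
    }
    cur = next;
  }
  return head;
}

int main() {
  Node c{3, true,  nullptr};
  Node b{2, false, &c};
  Node a{1, true,  &b};
  Node* head = prune(&a);
  for (Node* n = head; n != nullptr; n = n->link)
    std::printf("kept %d\n", n->id);   // prints 1 then 3
}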
@@ -285,6 +414,8 @@ void CodeCache::gc_epilogue() {
     cb->fix_oop_relocations();
   }
   set_needs_cache_clean(false);
+  prune_scavenge_root_nmethods();
+  assert(!nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
 }


@@ -508,6 +639,14 @@ void CodeCache::verify_if_often() {
   }
 }

+void CodeCache::print_trace(const char* event, CodeBlob* cb, int size) {
+  if (PrintCodeCache2) {  // Need to add a new flag
+    ResourceMark rm;
+    if (size == 0)  size = cb->size();
+    tty->print_cr("CodeCache %s: addr: " INTPTR_FORMAT ", size: 0x%x", event, cb, size);
+  }
+}
+
 void CodeCache::print_internals() {
   int nmethodCount = 0;
   int runtimeStubCount = 0;
@@ -45,8 +45,13 @@ class CodeCache : AllStatic {
   static int _number_of_blobs;
   static int _number_of_nmethods_with_dependencies;
   static bool _needs_cache_clean;
+  static nmethod* _scavenge_root_nmethods;  // linked via nm->scavenge_root_link()

   static void verify_if_often() PRODUCT_RETURN;
+
+  static void mark_scavenge_root_nmethods() PRODUCT_RETURN;
+  static void verify_perm_nmethods(CodeBlobClosure* f_or_null) PRODUCT_RETURN;
+
  public:

   // Initialization
@@ -61,6 +66,7 @@ class CodeCache : AllStatic {
   static void flush();                          // flushes all CodeBlobs
   static bool contains(void *p);                // returns whether p is included
   static void blobs_do(void f(CodeBlob* cb));   // iterates over all CodeBlobs
+  static void blobs_do(CodeBlobClosure* f);     // iterates over all CodeBlobs
   static void nmethods_do(void f(nmethod* nm)); // iterates over all nmethods

   // Lookup
@@ -106,12 +112,24 @@ class CodeCache : AllStatic {
   static void do_unloading(BoolObjectClosure* is_alive,
                            OopClosure* keep_alive,
                            bool unloading_occurred);
-  static void oops_do(OopClosure* f);
+  static void oops_do(OopClosure* f) {
+    CodeBlobToOopClosure oopc(f, /*do_marking=*/ false);
+    blobs_do(&oopc);
+  }
+  static void asserted_non_scavengable_nmethods_do(CodeBlobClosure* f = NULL) PRODUCT_RETURN;
+  static void scavenge_root_nmethods_do(CodeBlobClosure* f);
+
+  static nmethod* scavenge_root_nmethods()            { return _scavenge_root_nmethods; }
+  static void set_scavenge_root_nmethods(nmethod* nm) { _scavenge_root_nmethods = nm; }
+  static void add_scavenge_root_nmethod(nmethod* nm);
+  static void drop_scavenge_root_nmethod(nmethod* nm);
+  static void prune_scavenge_root_nmethods();

   // Printing/debugging
   static void print()   PRODUCT_RETURN;          // prints summary
   static void print_internals();
   static void verify();                          // verifies the code cache
+  static void print_trace(const char* event, CodeBlob* cb, int size = 0) PRODUCT_RETURN;

   // The full limits of the codeCache
   static address low_bound() { return (address) _heap->low_boundary(); }
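The new inline CodeCache::oops_do above reuses blobs_do by wrapping the oop closure in a code-blob closure (CodeBlobToOopClosure in the diff). A simplified standalone sketch of that adapter shape, using invented types rather than the HotSpot classes:

#include <cstdio>
#include <vector>

// Simplified stand-ins for the two closure interfaces.
struct OopClosure      { virtual void do_oop(void** p) = 0; virtual ~OopClosure() = default; };
struct Blob            { std::vector<void*> oops; };
struct CodeBlobClosure { virtual void do_code_blob(Blob* b) = 0; virtual ~CodeBlobClosure() = default; };

// Adapter: turns "visit every oop" into "visit every blob, then its oops".
struct BlobToOopClosure : CodeBlobClosure {
  OopClosure* _cl;
  explicit BlobToOopClosure(OopClosure* cl) : _cl(cl) {}
  void do_code_blob(Blob* b) override {
    for (void*& p : b->oops) _cl->do_oop(&p);
  }
};

struct Cache {
  std::vector<Blob*> blobs;
  void blobs_do(CodeBlobClosure* f) { for (Blob* b : blobs) f->do_code_blob(b); }
  // oops_do is just blobs_do wrapped in the adapter, as in the diff above.
  void oops_do(OopClosure* f) { BlobToOopClosure adapter(f); blobs_do(&adapter); }
};

struct PrintOop : OopClosure {
  void do_oop(void** p) override { std::printf("oop slot %p\n", (void*)p); }
};

int main() {
  Blob b1{{nullptr, nullptr}};
  Cache c{{&b1}};
  PrintOop p;
  c.oops_do(&p);
}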
@@ -299,7 +299,7 @@ void DebugInformationRecorder::describe_scope(int pc_offset,
   stream()->write_int(sender_stream_offset);

   // serialize scope
-  jobject method_enc = (method == NULL)? NULL: method->encoding();
+  jobject method_enc = (method == NULL)? NULL: method->constant_encoding();
   stream()->write_int(oop_recorder()->find_index(method_enc));
   stream()->write_bci(bci);
   assert(method == NULL ||
@@ -302,7 +302,7 @@ void Dependencies::encode_content_bytes() {
       bytes.write_byte(code_byte);
       for (int j = 0; j < stride; j++) {
         if (j == skipj)  continue;
-        bytes.write_int(_oop_recorder->find_index(deps->at(i+j)->encoding()));
+        bytes.write_int(_oop_recorder->find_index(deps->at(i+j)->constant_encoding()));
       }
     }
   }
@@ -581,10 +581,13 @@ nmethod::nmethod(
   debug_only(No_Safepoint_Verifier nsv;)
   assert_locked_or_safepoint(CodeCache_lock);

-  NOT_PRODUCT(_has_debug_info = false; )
+  NOT_PRODUCT(_has_debug_info = false);
+  _oops_do_mark_link       = NULL;
   _method                  = method;
   _entry_bci               = InvocationEntryBci;
-  _link                    = NULL;
+  _osr_link                = NULL;
+  _scavenge_root_link      = NULL;
+  _scavenge_root_state     = 0;
   _compiler                = NULL;
   // We have no exception handler or deopt handler make the
   // values something that will never match a pc like the nmethod vtable entry
@@ -618,7 +621,7 @@ nmethod::nmethod(
   _stack_traversal_mark    = 0;

   code_buffer->copy_oops_to(this);
-  debug_only(check_store();)
+  debug_only(verify_scavenge_root_oops());
   CodeCache::commit(this);
   VTune::create_nmethod(this);
 }
@@ -668,10 +671,13 @@ nmethod::nmethod(
   debug_only(No_Safepoint_Verifier nsv;)
   assert_locked_or_safepoint(CodeCache_lock);

-  NOT_PRODUCT(_has_debug_info = false; )
+  NOT_PRODUCT(_has_debug_info = false);
+  _oops_do_mark_link       = NULL;
   _method                  = method;
   _entry_bci               = InvocationEntryBci;
-  _link                    = NULL;
+  _osr_link                = NULL;
+  _scavenge_root_link      = NULL;
+  _scavenge_root_state     = 0;
   _compiler                = NULL;
   // We have no exception handler or deopt handler make the
   // values something that will never match a pc like the nmethod vtable entry
@@ -703,7 +709,7 @@ nmethod::nmethod(
   _stack_traversal_mark    = 0;

   code_buffer->copy_oops_to(this);
-  debug_only(check_store();)
+  debug_only(verify_scavenge_root_oops());
   CodeCache::commit(this);
   VTune::create_nmethod(this);
 }
@@ -770,12 +776,15 @@ nmethod::nmethod(
   debug_only(No_Safepoint_Verifier nsv;)
   assert_locked_or_safepoint(CodeCache_lock);

-  NOT_PRODUCT(_has_debug_info = false; )
+  NOT_PRODUCT(_has_debug_info = false);
+  _oops_do_mark_link       = NULL;
   _method                  = method;
   _compile_id              = compile_id;
   _comp_level              = comp_level;
   _entry_bci               = entry_bci;
-  _link                    = NULL;
+  _osr_link                = NULL;
+  _scavenge_root_link      = NULL;
+  _scavenge_root_state     = 0;
   _compiler                = compiler;
   _orig_pc_offset          = orig_pc_offset;
 #ifdef HAVE_DTRACE_H
@@ -813,7 +822,10 @@ nmethod::nmethod(
     code_buffer->copy_oops_to(this);
     debug_info->copy_to(this);
     dependencies->copy_to(this);
-    debug_only(check_store();)
+    if (ScavengeRootsInCode && detect_scavenge_root_oops()) {
+      CodeCache::add_scavenge_root_nmethod(this);
+    }
+    debug_only(verify_scavenge_root_oops());

     CodeCache::commit(this);

@@ -902,23 +914,30 @@ void nmethod::print_on(outputStream* st, const char* title) const {
   if (st != NULL) {
     ttyLocker ttyl;
     // Print a little tag line that looks like +PrintCompilation output:
-    st->print("%3d%c %s",
+    int tlen = (int) strlen(title);
+    bool do_nl = false;
+    if (tlen > 0 && title[tlen-1] == '\n') { tlen--; do_nl = true; }
+    st->print("%3d%c %.*s",
               compile_id(),
               is_osr_method() ? '%' :
               method() != NULL &&
               is_native_method() ? 'n' : ' ',
-              title);
+              tlen, title);
 #ifdef TIERED
     st->print(" (%d) ", comp_level());
 #endif // TIERED
     if (WizardMode) st->print(" (" INTPTR_FORMAT ")", this);
-    if (method() != NULL) {
-      method()->print_short_name(st);
+    if (Universe::heap()->is_gc_active() && method() != NULL) {
+      st->print("(method)");
+    } else if (method() != NULL) {
+      method()->print_short_name(st);
       if (is_osr_method())
         st->print(" @ %d", osr_entry_bci());
       if (method()->code_size() > 0)
         st->print(" (%d bytes)", method()->code_size());
     }
+
+    if (do_nl) st->cr();
   }
 }

@@ -1033,6 +1052,7 @@ void nmethod::cleanup_inline_caches() {
   }
 }

+// This is a private interface with the sweeper.
 void nmethod::mark_as_seen_on_stack() {
   assert(is_not_entrant(), "must be a non-entrant method");
   set_stack_traversal_mark(NMethodSweeper::traversal_count());
@@ -1077,7 +1097,8 @@ void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
                   " unloadable], methodOop(" INTPTR_FORMAT
                   "), cause(" INTPTR_FORMAT ")",
                   this, (address)_method, (address)cause);
-    cause->klass()->print();
+    if (!Universe::heap()->is_gc_active())
+      cause->klass()->print();
   }
   // If _method is already NULL the methodOop is about to be unloaded,
   // so we don't have to break the cycle. Note that it is possible to
@@ -1105,7 +1126,8 @@ void nmethod::make_unloaded(BoolObjectClosure* is_alive, oop cause) {
   // The methodOop is gone at this point
   assert(_method == NULL, "Tautology");

-  set_link(NULL);
+  set_osr_link(NULL);
+  //set_scavenge_root_link(NULL); // done by prune_scavenge_root_nmethods
   NMethodSweeper::notify(this);
 }

@@ -1291,6 +1313,10 @@ void nmethod::flush() {
     ec = next;
   }

+  if (on_scavenge_root_list()) {
+    CodeCache::drop_scavenge_root_nmethod(this);
+  }
+
   ((CodeBlob*)(this))->flush();

   CodeCache::free(this);
@@ -1350,7 +1376,10 @@ bool nmethod::can_unload(BoolObjectClosure* is_alive,
       return false;
     }
   }
-  assert(unloading_occurred, "Inconsistency in unloading");
+  // If ScavengeRootsInCode is true, an nmethod might be unloaded
+  // simply because one of its constant oops has gone dead.
+  // No actual classes need to be unloaded in order for this to occur.
+  assert(unloading_occurred || ScavengeRootsInCode, "Inconsistency in unloading");
   make_unloaded(is_alive, obj);
   return true;
 }
@@ -1558,12 +1587,108 @@ void nmethod::oops_do(OopClosure* f) {
   }

   // Scopes
+  // This includes oop constants not inlined in the code stream.
   for (oop* p = oops_begin(); p < oops_end(); p++) {
     if (*p == Universe::non_oop_word())  continue;  // skip non-oops
     f->do_oop(p);
   }
 }

+#define NMETHOD_SENTINEL ((nmethod*)badAddress)
+
+nmethod* volatile nmethod::_oops_do_mark_nmethods;
+
+// An nmethod is "marked" if its _mark_link is set non-null.
+// Even if it is the end of the linked list, it will have a non-null link value,
+// as long as it is on the list.
+// This code must be MP safe, because it is used from parallel GC passes.
+bool nmethod::test_set_oops_do_mark() {
+  assert(nmethod::oops_do_marking_is_active(), "oops_do_marking_prologue must be called");
+  nmethod* observed_mark_link = _oops_do_mark_link;
+  if (observed_mark_link == NULL) {
+    // Claim this nmethod for this thread to mark.
+    observed_mark_link = (nmethod*)
+      Atomic::cmpxchg_ptr(NMETHOD_SENTINEL, &_oops_do_mark_link, NULL);
+    if (observed_mark_link == NULL) {
+
+      // Atomically append this nmethod (now claimed) to the head of the list:
+      nmethod* observed_mark_nmethods = _oops_do_mark_nmethods;
+      for (;;) {
+        nmethod* required_mark_nmethods = observed_mark_nmethods;
+        _oops_do_mark_link = required_mark_nmethods;
+        observed_mark_nmethods = (nmethod*)
+          Atomic::cmpxchg_ptr(this, &_oops_do_mark_nmethods, required_mark_nmethods);
+        if (observed_mark_nmethods == required_mark_nmethods)
+          break;
+      }
+      // Mark was clear when we first saw this guy.
+      NOT_PRODUCT(if (TraceScavenge) print_on(tty, "oops_do, mark\n"));
+      return false;
+    }
+  }
+  // On fall through, another racing thread marked this nmethod before we did.
+  return true;
+}
+
+void nmethod::oops_do_marking_prologue() {
+  NOT_PRODUCT(if (TraceScavenge) tty->print_cr("[oops_do_marking_prologue"));
+  assert(_oops_do_mark_nmethods == NULL, "must not call oops_do_marking_prologue twice in a row");
+  // We use cmpxchg_ptr instead of regular assignment here because the user
+  // may fork a bunch of threads, and we need them all to see the same state.
+  void* observed = Atomic::cmpxchg_ptr(NMETHOD_SENTINEL, &_oops_do_mark_nmethods, NULL);
+  guarantee(observed == NULL, "no races in this sequential code");
+}
+
+void nmethod::oops_do_marking_epilogue() {
+  assert(_oops_do_mark_nmethods != NULL, "must not call oops_do_marking_epilogue twice in a row");
+  nmethod* cur = _oops_do_mark_nmethods;
+  while (cur != NMETHOD_SENTINEL) {
+    assert(cur != NULL, "not NULL-terminated");
+    nmethod* next = cur->_oops_do_mark_link;
+    cur->_oops_do_mark_link = NULL;
+    NOT_PRODUCT(if (TraceScavenge) cur->print_on(tty, "oops_do, unmark\n"));
+    cur = next;
+  }
+  void* required = _oops_do_mark_nmethods;
+  void* observed = Atomic::cmpxchg_ptr(NULL, &_oops_do_mark_nmethods, required);
+  guarantee(observed == required, "no races in this sequential code");
+  NOT_PRODUCT(if (TraceScavenge) tty->print_cr("oops_do_marking_epilogue]"));
+}
+
+class DetectScavengeRoot: public OopClosure {
+  bool _detected_scavenge_root;
+public:
+  DetectScavengeRoot() : _detected_scavenge_root(false)
+  { NOT_PRODUCT(_print_nm = NULL); }
+  bool detected_scavenge_root() { return _detected_scavenge_root; }
+  virtual void do_oop(oop* p) {
+    if ((*p) != NULL && (*p)->is_scavengable()) {
+      NOT_PRODUCT(maybe_print(p));
+      _detected_scavenge_root = true;
+    }
+  }
+  virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+
+#ifndef PRODUCT
+  nmethod* _print_nm;
+  void maybe_print(oop* p) {
+    if (_print_nm == NULL)  return;
+    if (!_detected_scavenge_root)  _print_nm->print_on(tty, "new scavenge root");
+    tty->print_cr(""PTR_FORMAT"[offset=%d] detected non-perm oop "PTR_FORMAT" (found at "PTR_FORMAT")",
+                  _print_nm, (int)((intptr_t)p - (intptr_t)_print_nm),
+                  (intptr_t)(*p), (intptr_t)p);
+    (*p)->print();
+  }
+#endif //PRODUCT
+};
+
+bool nmethod::detect_scavenge_root_oops() {
+  DetectScavengeRoot detect_scavenge_root;
+  NOT_PRODUCT(if (TraceScavenge) detect_scavenge_root._print_nm = this);
+  oops_do(&detect_scavenge_root);
+  return detect_scavenge_root.detected_scavenge_root();
+}
+
 // Method that knows how to preserve outgoing arguments at call. This method must be
 // called with a frame corresponding to a Java invoke
 void nmethod::preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map, OopClosure* f) {
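test_set_oops_do_mark above claims each nmethod at most once per GC pass and pushes claimed nmethods onto a global list, using compare-and-swap so parallel GC threads never claim or enqueue the same nmethod twice. A portable standalone sketch of that claim-then-push pattern with std::atomic follows; HotSpot itself uses Atomic::cmpxchg_ptr and raw fields, and the names here are invented:

#include <atomic>
#include <cstdio>

struct Node {
  // nullptr means "not yet claimed in this pass"; any non-null value means claimed.
  std::atomic<Node*> mark_link{nullptr};
};

// Sentinel keeps even the last element's link non-null while it is on the list.
static Node  sentinel_node;
static Node* const SENTINEL = &sentinel_node;

static std::atomic<Node*> g_marked_head{SENTINEL};

// Returns false the first time some thread claims 'n' in this pass, true otherwise.
bool test_set_mark(Node* n) {
  Node* expected = nullptr;
  // Claim: nullptr -> SENTINEL.  Exactly one thread wins this exchange.
  if (n->mark_link.compare_exchange_strong(expected, SENTINEL)) {
    // Lock-free push of the claimed node onto the global list.
    Node* head = g_marked_head.load();
    do {
      n->mark_link.store(head);
    } while (!g_marked_head.compare_exchange_weak(head, n));
    return false;  // we were first
  }
  return true;     // another thread already marked it
}

int main() {
  Node a;
  std::printf("first call:  %d\n", (int)test_set_mark(&a));  // 0: newly marked
  std::printf("second call: %d\n", (int)test_set_mark(&a));  // 1: already marked
}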
@@ -1878,6 +2003,24 @@ bool nmethod::is_deopt_pc(address pc) {
 // -----------------------------------------------------------------------------
 // Verification

+class VerifyOopsClosure: public OopClosure {
+  nmethod* _nm;
+  bool     _ok;
+public:
+  VerifyOopsClosure(nmethod* nm) : _nm(nm), _ok(true) { }
+  bool ok() { return _ok; }
+  virtual void do_oop(oop* p) {
+    if ((*p) == NULL || (*p)->is_oop())  return;
+    if (_ok) {
+      _nm->print_nmethod(true);
+      _ok = false;
+    }
+    tty->print_cr("*** non-oop "PTR_FORMAT" found at "PTR_FORMAT" (offset %d)",
+                  (intptr_t)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
+  }
+  virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+};
+
 void nmethod::verify() {

   // Hmm. OSR methods can be deopted but not marked as zombie or not_entrant
@@ -1911,6 +2054,11 @@ void nmethod::verify() {
     }
   }

+  VerifyOopsClosure voc(this);
+  oops_do(&voc);
+  assert(voc.ok(), "embedded oops must be OK");
+  verify_scavenge_root_oops();
+
   verify_scopes();
 }

@@ -1974,19 +2122,34 @@ void nmethod::verify_scopes() {
 // Non-product code
 #ifndef PRODUCT

-void nmethod::check_store() {
-  // Make sure all oops in the compiled code are tenured
-
-  RelocIterator iter(this);
-  while (iter.next()) {
-    if (iter.type() == relocInfo::oop_type) {
-      oop_Relocation* reloc = iter.oop_reloc();
-      oop obj = reloc->oop_value();
-      if (obj != NULL && !obj->is_perm()) {
-        fatal("must be permanent oop in compiled code");
-      }
-    }
-  }
-}
+class DebugScavengeRoot: public OopClosure {
+  nmethod* _nm;
+  bool     _ok;
+public:
+  DebugScavengeRoot(nmethod* nm) : _nm(nm), _ok(true) { }
+  bool ok() { return _ok; }
+  virtual void do_oop(oop* p) {
+    if ((*p) == NULL || !(*p)->is_scavengable())  return;
+    if (_ok) {
+      _nm->print_nmethod(true);
+      _ok = false;
+    }
+    tty->print_cr("*** non-perm oop "PTR_FORMAT" found at "PTR_FORMAT" (offset %d)",
+                  (intptr_t)(*p), (intptr_t)p, (int)((intptr_t)p - (intptr_t)_nm));
+    (*p)->print();
+  }
+  virtual void do_oop(narrowOop* p) { ShouldNotReachHere(); }
+};
+
+void nmethod::verify_scavenge_root_oops() {
+  if (!on_scavenge_root_list()) {
+    // Actually look inside, to verify the claim that it's clean.
+    DebugScavengeRoot debug_scavenge_root(this);
+    oops_do(&debug_scavenge_root);
+    if (!debug_scavenge_root.ok())
+      fatal("found an unadvertised bad non-perm oop in the code cache");
+  }
+  assert(scavenge_root_not_marked(), "");
+}

 #endif // PRODUCT
@@ -2019,6 +2182,7 @@ void nmethod::print() const {
     if (is_not_entrant()) tty->print("not_entrant ");
     if (is_zombie()) tty->print("zombie ");
     if (is_unloaded()) tty->print("unloaded ");
+    if (on_scavenge_root_list()) tty->print("scavenge_root ");
     tty->print_cr("}:");
   }
   if (size () > 0) tty->print_cr(" total in heap [" INTPTR_FORMAT "," INTPTR_FORMAT "] = %d",
@@ -125,6 +125,7 @@ class xmlStream;
 class nmethod : public CodeBlob {
   friend class VMStructs;
   friend class NMethodSweeper;
+  friend class CodeCache;  // non-perm oops
  private:
   // Shared fields for all nmethod's
   static int _zombie_instruction_size;
@@ -132,7 +133,12 @@ class nmethod : public CodeBlob {
   methodOop _method;
   int       _entry_bci;          // != InvocationEntryBci if this nmethod is an on-stack replacement method

-  nmethod*  _link;               // To support simple linked-list chaining of nmethods
+  // To support simple linked-list chaining of nmethods:
+  nmethod*  _osr_link;           // from instanceKlass::osr_nmethods_head
+  nmethod*  _scavenge_root_link; // from CodeCache::scavenge_root_nmethods
+
+  static nmethod* volatile _oops_do_mark_nmethods;
+  nmethod*        volatile _oops_do_mark_link;

   AbstractCompiler* _compiler;   // The compiler which compiled this nmethod

@@ -174,6 +180,8 @@ class nmethod : public CodeBlob {
   // used by jvmti to track if an unload event has been posted for this nmethod.
   bool _unload_reported;

+  jbyte _scavenge_root_state;
+
   NOT_PRODUCT(bool _has_debug_info; )

   // Nmethod Flushing lock (if non-zero, then the nmethod is not removed)
@@ -242,7 +250,6 @@ class nmethod : public CodeBlob {

   // helper methods
   void* operator new(size_t size, int nmethod_size);
-  void check_store();

   const char* reloc_string_for(u_char* begin, u_char* end);
   void make_not_entrant_or_zombie(int state);
@@ -407,6 +414,24 @@ class nmethod : public CodeBlob {
   int  version() const                          { return flags.version; }
   void set_version(int v);

+  // Non-perm oop support
+  bool on_scavenge_root_list() const            { return (_scavenge_root_state & 1) != 0; }
+ protected:
+  enum { npl_on_list = 0x01, npl_marked = 0x10 };
+  void set_on_scavenge_root_list()              { _scavenge_root_state = npl_on_list; }
+  void clear_on_scavenge_root_list()            { _scavenge_root_state = 0; }
+  // assertion-checking and pruning logic uses the bits of _scavenge_root_state
+#ifndef PRODUCT
+  void set_scavenge_root_marked()               { _scavenge_root_state |= npl_marked; }
+  void clear_scavenge_root_marked()             { _scavenge_root_state &= ~npl_marked; }
+  bool scavenge_root_not_marked()               { return (_scavenge_root_state &~ npl_on_list) == 0; }
+  // N.B. there is no positive marked query, and we only use the not_marked query for asserts.
+#endif //PRODUCT
+  nmethod* scavenge_root_link() const           { return _scavenge_root_link; }
+  void     set_scavenge_root_link(nmethod *n)   { _scavenge_root_link = n; }
+
+ public:
+
   // Sweeper support
   long stack_traversal_mark()                   { return _stack_traversal_mark; }
   void set_stack_traversal_mark(long l)         { _stack_traversal_mark = l; }
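The _scavenge_root_state byte above packs two facts: bit 0x01 says the nmethod is on the scavenge-root list, and bit 0x10 is a temporary mark used only by the debug-build consistency checks. A small standalone sketch of that encoding, with invented names:

#include <cassert>
#include <cstdint>

class ScavengeRootState {
  enum : std::uint8_t { ON_LIST = 0x01, MARKED = 0x10 };  // like npl_on_list / npl_marked
  std::uint8_t _state = 0;
public:
  bool on_list() const    { return (_state & ON_LIST) != 0; }
  void set_on_list()      { _state = ON_LIST; }
  void clear_on_list()    { _state = 0; }

  // The mark bit is set while verifying the list and must be cleared again.
  void set_marked()       { _state = static_cast<std::uint8_t>(_state | MARKED); }
  void clear_marked()     { _state = static_cast<std::uint8_t>(_state & ~MARKED); }
  bool not_marked() const { return (_state & ~ON_LIST) == 0; }  // no bits besides "on list"
};

int main() {
  ScavengeRootState s;
  assert(!s.on_list() && s.not_marked());
  s.set_on_list();
  s.set_marked();                       // temporarily marked during a consistency check
  assert(s.on_list() && !s.not_marked());
  s.clear_marked();
  assert(s.on_list() && s.not_marked());
  return 0;
}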
@@ -425,8 +450,8 @@ class nmethod : public CodeBlob {
   int     osr_entry_bci() const                 { assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod"); return _entry_bci; }
   address osr_entry() const                     { assert(_entry_bci != InvocationEntryBci, "wrong kind of nmethod"); return _osr_entry_point; }
   void    invalidate_osr_method();
-  nmethod* link() const                         { return _link; }
-  void     set_link(nmethod *n)                 { _link = n; }
+  nmethod* osr_link() const                     { return _osr_link; }
+  void     set_osr_link(nmethod *n)             { _osr_link = n; }

   // tells whether frames described by this nmethod can be deoptimized
   // note: native wrappers cannot be deoptimized.
@@ -467,6 +492,14 @@ class nmethod : public CodeBlob {
   void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map,
                                      OopClosure* f);
   void oops_do(OopClosure* f);
+  bool detect_scavenge_root_oops();
+  void verify_scavenge_root_oops() PRODUCT_RETURN;
+
+  bool test_set_oops_do_mark();
+  static void oops_do_marking_prologue();
+  static void oops_do_marking_epilogue();
+  static bool oops_do_marking_is_active() { return _oops_do_mark_nmethods != NULL; }
+  DEBUG_ONLY(bool test_oops_do_mark() { return _oops_do_mark_link != NULL; })

   // ScopeDesc for an instruction
   ScopeDesc* scope_desc_at(address pc);