7009641: Don't fail VM when CodeCache is full

Allocation in the code cache returns NULL instead of failing the entire VM.
Reviewed-by: kvn, iveresov
parent c368a33bf7
commit a2889becd9
7 changed files with 54 additions and 9 deletions
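Before the hunks themselves, here is a minimal, self-contained C++ sketch of the pattern this commit adopts: the allocator reports exhaustion by returning NULL and the caller degrades gracefully instead of exiting the VM. All names below (Stub, allocate_stub, install_stub, cache_full) are illustrative only, not HotSpot identifiers.

#include <cstdio>
#include <cstdlib>

struct Stub { int index; };

// Pretend code-cache allocator: hands back NULL when the cache is exhausted
// instead of terminating the process.
static Stub* allocate_stub(int index, bool cache_full) {
  if (cache_full) {
    return NULL;                      // previously this path was a fatal VM error
  }
  Stub* s = static_cast<Stub*>(std::malloc(sizeof(Stub)));
  if (s != NULL) {
    s->index = index;
  }
  return s;
}

// The caller propagates the failure upward instead of aborting.
static bool install_stub(int index, bool cache_full) {
  Stub* s = allocate_stub(index, cache_full);
  if (s == NULL) {
    return false;
  }
  std::printf("installed stub %d\n", s->index);
  std::free(s);
  return true;
}

int main() {
  if (!install_stub(7, /*cache_full=*/true)) {
    std::printf("code cache full: degrade gracefully, the VM keeps running\n");
  }
  return install_stub(7, /*cache_full=*/false) ? 0 : 1;
}

The diff below applies this idea at three levels: stub allocation (vtableStubs), the inline-cache transition (compiledIC), and the IC-miss handler (sharedRuntime).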
@@ -52,6 +52,11 @@ extern "C" void bad_compiled_vtable_index(JavaThread* thread, oopDesc* receiver,
 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
   const int sparc_code_length = VtableStub::pd_code_size_limit(true);
   VtableStub* s = new(sparc_code_length) VtableStub(true, vtable_index);
+  // Can be NULL if there is no free space in the code cache.
+  if (s == NULL) {
+    return NULL;
+  }
+
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), sparc_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);

@@ -125,6 +130,11 @@ VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
 VtableStub* VtableStubs::create_itable_stub(int itable_index) {
   const int sparc_code_length = VtableStub::pd_code_size_limit(false);
   VtableStub* s = new(sparc_code_length) VtableStub(false, itable_index);
+  // Can be NULL if there is no free space in the code cache.
+  if (s == NULL) {
+    return NULL;
+  }
+
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), sparc_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);

@@ -58,6 +58,11 @@ extern "C" void bad_compiled_vtable_index(JavaThread* thread, oop receiver, int
 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
   const int i486_code_length = VtableStub::pd_code_size_limit(true);
   VtableStub* s = new(i486_code_length) VtableStub(true, vtable_index);
+  // Can be NULL if there is no free space in the code cache.
+  if (s == NULL) {
+    return NULL;
+  }
+
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), i486_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);

@@ -132,6 +137,11 @@ VtableStub* VtableStubs::create_itable_stub(int itable_index) {
   // add code here, bump the code stub size returned by pd_code_size_limit!
   const int i486_code_length = VtableStub::pd_code_size_limit(false);
   VtableStub* s = new(i486_code_length) VtableStub(false, itable_index);
+  // Can be NULL if there is no free space in the code cache.
+  if (s == NULL) {
+    return NULL;
+  }
+
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), i486_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);

@@ -49,6 +49,11 @@ extern "C" void bad_compiled_vtable_index(JavaThread* thread,
 VtableStub* VtableStubs::create_vtable_stub(int vtable_index) {
   const int amd64_code_length = VtableStub::pd_code_size_limit(true);
   VtableStub* s = new(amd64_code_length) VtableStub(true, vtable_index);
+  // Can be NULL if there is no free space in the code cache.
+  if (s == NULL) {
+    return NULL;
+  }
+
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), amd64_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);

@@ -126,6 +131,11 @@ VtableStub* VtableStubs::create_itable_stub(int itable_index) {
   // returned by pd_code_size_limit!
   const int amd64_code_length = VtableStub::pd_code_size_limit(false);
   VtableStub* s = new(amd64_code_length) VtableStub(false, itable_index);
+  // Can be NULL if there is no free space in the code cache.
+  if (s == NULL) {
+    return NULL;
+  }
+
   ResourceMark rm;
   CodeBuffer cb(s->entry_point(), amd64_code_length);
   MacroAssembler* masm = new MacroAssembler(&cb);

@@ -160,7 +160,7 @@ address CompiledIC::stub_address() const {
 // High-level access to an inline cache. Guaranteed to be MT-safe.
 
 
-void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
+bool CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS) {
   assert(CompiledIC_lock->is_locked() || SafepointSynchronize::is_at_safepoint(), "");
   assert(!is_optimized(), "cannot set an optimized virtual call to megamorphic");
   assert(is_call_to_compiled() || is_call_to_interpreted(), "going directly to megamorphic?");

@@ -170,8 +170,10 @@ void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecod
     assert(bytecode == Bytecodes::_invokeinterface, "");
     int itable_index = call_info->itable_index();
     entry = VtableStubs::find_itable_stub(itable_index);
+    if (entry == false) {
+      return false;
+    }
 #ifdef ASSERT
-    assert(entry != NULL, "entry not computed");
     int index = call_info->resolved_method()->itable_index();
     assert(index == itable_index, "CallInfo pre-computes this");
 #endif //ASSERT

@@ -184,6 +186,9 @@ void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecod
     int vtable_index = call_info->vtable_index();
     assert(call_info->resolved_klass()->verify_vtable_index(vtable_index), "sanity check");
     entry = VtableStubs::find_vtable_stub(vtable_index);
+    if (entry == NULL) {
+      return false;
+    }
     InlineCacheBuffer::create_transition_stub(this, NULL, entry);
   }
 

@@ -200,6 +205,7 @@ void CompiledIC::set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecod
   // race because the IC entry was complete when we safepointed so
   // cleaning it immediately is harmless.
   // assert(is_megamorphic(), "sanity check");
+  return true;
 }
 
 
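For orientation, here is a toy model of the policy that this compiledIC.cpp change and the sharedRuntime.cpp hunk further down implement together: if the transition to the megamorphic state cannot obtain its stub, set_to_megamorphic reports failure and the caller resets the inline cache to clean, so a later call simply retries resolution. The types and the stub_available flag are assumptions for illustration, not HotSpot code.

#include <cstdio>

enum ICState { CLEAN, MONOMORPHIC, MEGAMORPHIC };

struct InlineCache {
  ICState state;
  InlineCache() : state(CLEAN) {}

  // Returns true if successful and false otherwise, mirroring the contract the
  // patch documents in compiledIC.hpp; 'stub_available' stands in for a
  // successful code-cache allocation.
  bool set_to_megamorphic(bool stub_available) {
    if (!stub_available) {
      return false;                 // leave the current state untouched
    }
    state = MEGAMORPHIC;
    return true;
  }

  void set_to_clean() { state = CLEAN; }
};

int main() {
  InlineCache ic;
  ic.state = MONOMORPHIC;
  if (!ic.set_to_megamorphic(/*stub_available=*/false)) {
    ic.set_to_clean();              // a later call will simply retry resolution
  }
  std::printf("state after failed transition: %d (0 == CLEAN)\n", static_cast<int>(ic.state));
  return 0;
}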
@@ -226,7 +226,10 @@ class CompiledIC: public ResourceObj {
   //
   void set_to_clean(); // Can only be called during a safepoint operation
   void set_to_monomorphic(CompiledICInfo& info);
-  void set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS);
+
+  // Returns true if successful and false otherwise. The call can fail if memory
+  // allocation in the code cache fails.
+  bool set_to_megamorphic(CallInfo* call_info, Bytecodes::Code bytecode, TRAPS);
 
   static void compute_monomorphic_entry(methodHandle method, KlassHandle receiver_klass,
                                         bool is_optimized, bool static_bound, CompiledICInfo& info, TRAPS);

@@ -46,12 +46,9 @@ address VtableStub::_chunk = NULL;
 address VtableStub::_chunk_end = NULL;
 VMReg VtableStub::_receiver_location = VMRegImpl::Bad();
 
-static int num_vtable_chunks = 0;
-
 
 void* VtableStub::operator new(size_t size, int code_size) throw() {
   assert(size == sizeof(VtableStub), "mismatched size");
-  num_vtable_chunks++;
   // compute real VtableStub size (rounded to nearest word)
   const int real_size = round_to(code_size + sizeof(VtableStub), wordSize);
   // malloc them in chunks to minimize header overhead

@@ -60,7 +57,7 @@ void* VtableStub::operator new(size_t size, int code_size) throw() {
   const int bytes = chunk_factor * real_size + pd_code_alignment();
   BufferBlob* blob = BufferBlob::create("vtable chunks", bytes);
   if (blob == NULL) {
-    vm_exit_out_of_memory(bytes, OOM_MALLOC_ERROR, "CodeCache: no room for vtable chunks");
+    return NULL;
   }
   _chunk = blob->content_begin();
   _chunk_end = _chunk + bytes;

@@ -121,6 +118,12 @@ address VtableStubs::find_stub(bool is_vtable_stub, int vtable_index) {
     } else {
       s = create_itable_stub(vtable_index);
     }
+
+    // Creation of vtable or itable can fail if there is not enough free space in the code cache.
+    if (s == NULL) {
+      return NULL;
+    }
+
     enter(is_vtable_stub, vtable_index, s);
     if (PrintAdapterHandlers) {
       tty->print_cr("Decoding VtableStub %s[%d]@%d",

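The NULL checks after new(code_length) VtableStub(...) in the platform hunks above work because of a C++ rule this operator new relies on: when an allocation function with a non-throwing exception specification returns a null pointer, the new-expression evaluates to NULL and no constructor runs. Below is a stand-alone sketch of that mechanism; the types are illustrative, not HotSpot code, and it spells noexcept where the HotSpot source writes throw().

#include <cstdio>
#include <cstdlib>
#include <new>

struct Stub {
  int index;
  explicit Stub(int i) : index(i) { std::printf("constructed stub %d\n", i); }

  // Non-throwing allocation: returning NULL tells the new-expression to yield
  // NULL without running the constructor.
  void* operator new(std::size_t size, int code_size) noexcept {
    if (code_size > 4096) {        // stand-in for "no free space in the code cache"
      return NULL;
    }
    return std::malloc(size + code_size);
  }
  // Matching placement delete (used only if the constructor threw) and the
  // ordinary delete used by normal destruction.
  void operator delete(void* p, int) { std::free(p); }
  void operator delete(void* p) { std::free(p); }
};

int main() {
  Stub* ok   = new (128) Stub(1);       // allocation succeeds, constructor runs
  Stub* full = new (1 << 20) Stub(2);   // operator new returns NULL, no construction
  std::printf("full == NULL: %s\n", full == NULL ? "yes" : "no");
  delete ok;
  return 0;
}

This is why each create_vtable_stub/create_itable_stub can simply test the result of the new-expression for NULL and bail out.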
@@ -1506,8 +1506,11 @@ methodHandle SharedRuntime::handle_ic_miss_helper(JavaThread *thread, TRAPS) {
                                      info, CHECK_(methodHandle()));
         inline_cache->set_to_monomorphic(info);
       } else if (!inline_cache->is_megamorphic() && !inline_cache->is_clean()) {
-        // Change to megamorphic
-        inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
+        // Potential change to megamorphic
+        bool successful = inline_cache->set_to_megamorphic(&call_info, bc, CHECK_(methodHandle()));
+        if (!successful) {
+          inline_cache->set_to_clean();
+        }
       } else {
         // Either clean or megamorphic
       }