6939930: exception unwind changes in 6919934 hurts compilation speed
Reviewed-by: twisti
commit 98ee92e724 (parent a1446b775d)
14 changed files with 208 additions and 101 deletions

@@ -388,6 +388,60 @@ int LIR_Assembler::emit_exception_handler() {
}


// Emit the code to remove the frame from the stack in the exception
// unwind path.
int LIR_Assembler::emit_unwind_handler() {
#ifndef PRODUCT
if (CommentedAssembly) {
_masm->block_comment("Unwind handler");
}
#endif

int offset = code_offset();

// Fetch the exception from TLS and clear out exception related thread state
__ ld_ptr(G2_thread, in_bytes(JavaThread::exception_oop_offset()), O0);
__ st_ptr(G0, G2_thread, in_bytes(JavaThread::exception_oop_offset()));
__ st_ptr(G0, G2_thread, in_bytes(JavaThread::exception_pc_offset()));

__ bind(_unwind_handler_entry);
__ verify_not_null_oop(O0);
if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
__ mov(O0, I0); // Preserve the exception
}

// Preform needed unlocking
MonitorExitStub* stub = NULL;
if (method()->is_synchronized()) {
monitor_address(0, FrameMap::I1_opr);
stub = new MonitorExitStub(FrameMap::I1_opr, true, 0);
__ unlock_object(I3, I2, I1, *stub->entry());
__ bind(*stub->continuation());
}

if (compilation()->env()->dtrace_method_probes()) {
jobject2reg(method()->constant_encoding(), O0);
__ call(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), relocInfo::runtime_call_type);
__ delayed()->nop();
}

if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
__ mov(I0, O0); // Restore the exception
}

// dispatch to the unwind logic
__ call(Runtime1::entry_for(Runtime1::unwind_exception_id), relocInfo::runtime_call_type);
__ delayed()->nop();

// Emit the slow path assembly
if (stub != NULL) {
stub->emit_code(this);
}

return offset;
}


int LIR_Assembler::emit_deopt_handler() {
// if the last instruction is a call (typically to do a throw which
// is coming at the end after block reordering) the return address

@@ -2050,16 +2104,12 @@ int LIR_Assembler::shift_amount(BasicType t) {
}


void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info, bool unwind) {
void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info) {
assert(exceptionOop->as_register() == Oexception, "should match");
assert(unwind || exceptionPC->as_register() == Oissuing_pc, "should match");
assert(exceptionPC->as_register() == Oissuing_pc, "should match");

info->add_register_oop(exceptionOop);

if (unwind) {
__ call(Runtime1::entry_for(Runtime1::unwind_exception_id), relocInfo::runtime_call_type);
__ delayed()->nop();
} else {
// reuse the debug info from the safepoint poll for the throw op itself
address pc_for_athrow = __ pc();
int pc_for_athrow_offset = __ offset();

@@ -2070,6 +2120,13 @@ void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmit
__ call(Runtime1::entry_for(Runtime1::handle_exception_id), relocInfo::runtime_call_type);
__ delayed()->nop();
}


void LIR_Assembler::unwind_op(LIR_Opr exceptionOop) {
assert(exceptionOop->as_register() == Oexception, "should match");

__ br(Assembler::always, false, Assembler::pt, _unwind_handler_entry);
__ delayed()->nop();
}

@@ -2358,7 +2415,7 @@ void LIR_Assembler::emit_alloc_array(LIR_OpAllocArray* op) {
if (UseSlowPath ||
(!UseFastNewObjectArray && (op->type() == T_OBJECT || op->type() == T_ARRAY)) ||
(!UseFastNewTypeArray && (op->type() != T_OBJECT && op->type() != T_ARRAY))) {
__ br(Assembler::always, false, Assembler::pn, *op->stub()->entry());
__ br(Assembler::always, false, Assembler::pt, *op->stub()->entry());
__ delayed()->nop();
} else {
__ allocate_array(op->obj()->as_register(),

@@ -455,6 +455,60 @@ int LIR_Assembler::emit_exception_handler() {
}


// Emit the code to remove the frame from the stack in the exception
// unwind path.
int LIR_Assembler::emit_unwind_handler() {
#ifndef PRODUCT
if (CommentedAssembly) {
_masm->block_comment("Unwind handler");
}
#endif

int offset = code_offset();

// Fetch the exception from TLS and clear out exception related thread state
__ get_thread(rsi);
__ movptr(rax, Address(rsi, JavaThread::exception_oop_offset()));
__ movptr(Address(rsi, JavaThread::exception_oop_offset()), (int32_t)NULL_WORD);
__ movptr(Address(rsi, JavaThread::exception_pc_offset()), (int32_t)NULL_WORD);

__ bind(_unwind_handler_entry);
__ verify_not_null_oop(rax);
if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
__ mov(rsi, rax); // Preserve the exception
}

// Preform needed unlocking
MonitorExitStub* stub = NULL;
if (method()->is_synchronized()) {
monitor_address(0, FrameMap::rax_opr);
stub = new MonitorExitStub(FrameMap::rax_opr, true, 0);
__ unlock_object(rdi, rbx, rax, *stub->entry());
__ bind(*stub->continuation());
}

if (compilation()->env()->dtrace_method_probes()) {
__ movoop(Address(rsp, 0), method()->constant_encoding());
__ call(RuntimeAddress(CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit)));
}

if (method()->is_synchronized() || compilation()->env()->dtrace_method_probes()) {
__ mov(rax, rsi); // Restore the exception
}

// remove the activation and dispatch to the unwind handler
__ remove_frame(initial_frame_size_in_bytes());
__ jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));

// Emit the slow path assembly
if (stub != NULL) {
stub->emit_code(this);
}

return offset;
}


int LIR_Assembler::emit_deopt_handler() {
// if the last instruction is a call (typically to do a throw which
// is coming at the end after block reordering) the return address

@@ -2795,16 +2849,15 @@ void LIR_Assembler::emit_static_call_stub() {
}


void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info, bool unwind) {
void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info) {
assert(exceptionOop->as_register() == rax, "must match");
assert(unwind || exceptionPC->as_register() == rdx, "must match");
assert(exceptionPC->as_register() == rdx, "must match");

// exception object is not added to oop map by LinearScan
// (LinearScan assumes that no oops are in fixed registers)
info->add_register_oop(exceptionOop);
Runtime1::StubID unwind_id;

if (!unwind) {
// get current pc information
// pc is only needed if the method has an exception handler, the unwind code does not need it.
int pc_for_athrow_offset = __ offset();

@@ -2820,17 +2873,19 @@ void LIR_Assembler::throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmit
unwind_id = Runtime1::handle_exception_nofpu_id;
}
__ call(RuntimeAddress(Runtime1::entry_for(unwind_id)));
} else {
// remove the activation
__ remove_frame(initial_frame_size_in_bytes());
__ jump(RuntimeAddress(Runtime1::entry_for(Runtime1::unwind_exception_id)));
}

// enough room for two byte trap
__ nop();
}


void LIR_Assembler::unwind_op(LIR_Opr exceptionOop) {
assert(exceptionOop->as_register() == rax, "must match");

__ jmp(_unwind_handler_entry);
}


void LIR_Assembler::shift_op(LIR_Code code, LIR_Opr left, LIR_Opr count, LIR_Opr dest, LIR_Opr tmp) {

// optimized version for linear scan:
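
The shape of the change in the two port-specific hunks above: a throw site with nothing left to dispatch to used to emit its own call into Runtime1::unwind_exception_id (and, on x86, its own frame removal), while the new unwind_op() just branches to _unwind_handler_entry, which emit_unwind_handler() binds once per method. A minimal, self-contained C++ model of that trade-off; the emit_* helpers and the instruction strings are illustrative stand-ins, not HotSpot code:

// Illustrative model only: per-site unwind code vs. one shared unwind handler.
#include <cstdio>
#include <string>
#include <vector>

struct CodeBuffer {
    std::vector<std::string> insns;
    void emit(const std::string& s) { insns.push_back(s); }
};

// Old shape (sketch): every throw site carries the full unwind sequence.
void emit_site_with_inline_unwind(CodeBuffer& cb) {
    cb.emit("mov  exception -> arg0");
    cb.emit("call Runtime1::unwind_exception_id");
    cb.emit("nop  ; delay slot");
}

// New shape (sketch): a throw site is just a branch to the shared handler.
void emit_site_with_shared_handler(CodeBuffer& cb) {
    cb.emit("ba   _unwind_handler_entry");
}

// Emitted once per method, after the method body.
void emit_shared_unwind_handler(CodeBuffer& cb) {
    cb.emit("_unwind_handler_entry:");
    cb.emit("  unlock / dtrace notification if the method needs it");
    cb.emit("  remove_frame");
    cb.emit("  jump Runtime1::unwind_exception_id");
}

int main() {
    const int throw_sites = 8;
    CodeBuffer per_site, shared;

    for (int i = 0; i < throw_sites; i++) emit_site_with_inline_unwind(per_site);
    for (int i = 0; i < throw_sites; i++) emit_site_with_shared_handler(shared);
    emit_shared_unwind_handler(shared);

    std::printf("per-site unwind code: %zu lines\n", per_site.insns.size());
    std::printf("shared handler code:  %zu lines\n", shared.insns.size());
    return 0;
}

Because the unlocking and dtrace notification now live in the one shared handler, they also no longer need to be modelled as extra blocks in the IR; the GraphBuilder hunks further down remove that synthetic handler block.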

@@ -40,6 +40,7 @@ public:
Exceptions, // Offset where exception handler lives
Deopt, // Offset where deopt handler lives
DeoptMH, // Offset where MethodHandle deopt handler lives
UnwindHandler, // Offset to default unwind handler
max_Entries };

// special value to note codeBlobs where profile (forte) stack walking is

@@ -59,6 +60,7 @@ public:
_values[Exceptions ] = -1;
_values[Deopt ] = -1;
_values[DeoptMH ] = -1;
_values[UnwindHandler ] = -1;
}

int value(Entries e) { return _values[e]; }
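
The two hunks above add an UnwindHandler slot to the CodeOffsets table and initialize it to -1, the table's "not emitted" sentinel, which the nmethod constructor checks further down. A stand-alone sketch of that sentinel convention, not the HotSpot class itself:

// Stand-alone sketch of an offsets table with -1 as the "not present" sentinel;
// the names mirror the diff above but this is not the HotSpot CodeOffsets class.
#include <cstdio>

struct Offsets {
    enum Entries { Exceptions, Deopt, DeoptMH, UnwindHandler, max_Entries };
    int _values[max_Entries];

    Offsets() {
        for (int i = 0; i < max_Entries; i++) _values[i] = -1;  // nothing emitted yet
    }
    void set_value(Entries e, int off) { _values[e] = off; }
    int  value(Entries e) const        { return _values[e]; }
};

int main() {
    Offsets offsets;
    offsets.set_value(Offsets::UnwindHandler, 0x188);  // hypothetical code offset

    // Consumers only derive an address when the handler was actually emitted.
    if (offsets.value(Offsets::UnwindHandler) != -1) {
        std::printf("unwind handler at code offset %d\n",
                    offsets.value(Offsets::UnwindHandler));
    }
    return 0;
}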

@@ -229,6 +229,10 @@ void Compilation::emit_code_epilog(LIR_Assembler* assembler) {
code_offsets->set_value(CodeOffsets::DeoptMH, assembler->emit_deopt_handler());
CHECK_BAILOUT();

// Emit the handler to remove the activation from the stack and
// dispatch to the caller.
offsets()->set_value(CodeOffsets::UnwindHandler, assembler->emit_unwind_handler());

// done
masm()->flush();
}

@@ -829,12 +829,8 @@ void GraphBuilder::ScopeData::setup_jsr_xhandlers() {
// should be left alone since there can be only one and all code
// should dispatch to the same one.
XHandler* h = handlers->handler_at(i);
if (h->handler_bci() != SynchronizationEntryBCI) {
assert(h->handler_bci() != SynchronizationEntryBCI, "must be real");
h->set_entry_block(block_at(h->handler_bci()));
} else {
assert(h->entry_block()->is_set(BlockBegin::default_exception_handler_flag),
"should be the synthetic unlock block");
}
}
_jsr_xhandlers = handlers;
}

@@ -2867,19 +2863,6 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
_initial_state = state_at_entry();
start_block->merge(_initial_state);

// setup an exception handler to do the unlocking and/or
// notification and unwind the frame.
BlockBegin* sync_handler = new BlockBegin(-1);
sync_handler->set(BlockBegin::exception_entry_flag);
sync_handler->set(BlockBegin::is_on_work_list_flag);
sync_handler->set(BlockBegin::default_exception_handler_flag);

ciExceptionHandler* desc = new ciExceptionHandler(method()->holder(), 0, method()->code_size(), -1, 0);
XHandler* h = new XHandler(desc);
h->set_entry_block(sync_handler);
scope_data()->xhandlers()->append(h);
scope_data()->set_has_handler();

// complete graph
_vmap = new ValueMap();
scope->compute_lock_stack_size();

@@ -2930,19 +2913,6 @@ GraphBuilder::GraphBuilder(Compilation* compilation, IRScope* scope)
}
CHECK_BAILOUT();

if (sync_handler && sync_handler->state() != NULL) {
Value lock = NULL;
if (method()->is_synchronized()) {
lock = method()->is_static() ? new Constant(new InstanceConstant(method()->holder()->java_mirror())) :
_initial_state->local_at(0);

sync_handler->state()->unlock();
sync_handler->state()->lock(scope, lock);

}
fill_sync_handler(lock, sync_handler, true);
}

_start = setup_start_block(osr_bci, start_block, _osr_entry, _initial_state);

eliminate_redundant_phis(_start);

@@ -1628,11 +1628,10 @@ LEAF(BlockBegin, StateSplit)
backward_branch_target_flag = 1 << 4,
is_on_work_list_flag = 1 << 5,
was_visited_flag = 1 << 6,
default_exception_handler_flag = 1 << 8, // identify block which represents the default exception handler
parser_loop_header_flag = 1 << 9, // set by parser to identify blocks where phi functions can not be created on demand
critical_edge_split_flag = 1 << 10, // set for all blocks that are introduced when critical edges are split
linear_scan_loop_header_flag = 1 << 11, // set during loop-detection for LinearScan
linear_scan_loop_end_flag = 1 << 12 // set during loop-detection for LinearScan
parser_loop_header_flag = 1 << 7, // set by parser to identify blocks where phi functions can not be created on demand
critical_edge_split_flag = 1 << 8, // set for all blocks that are introduced when critical edges are split
linear_scan_loop_header_flag = 1 << 9, // set during loop-detection for LinearScan
linear_scan_loop_end_flag = 1 << 10 // set during loop-detection for LinearScan
};

void set(Flag f) { _flags |= f; }

@@ -626,8 +626,7 @@ void LIR_OpVisitState::visit(LIR_Op* op) {
break;
}

case lir_throw:
case lir_unwind: {
case lir_throw: {
assert(op->as_Op2() != NULL, "must be");
LIR_Op2* op2 = (LIR_Op2*)op;

@@ -639,6 +638,17 @@ void LIR_OpVisitState::visit(LIR_Op* op) {
break;
}

case lir_unwind: {
assert(op->as_Op1() != NULL, "must be");
LIR_Op1* op1 = (LIR_Op1*)op;

assert(op1->_info == NULL, "no info");
assert(op1->_opr->is_valid(), "exception oop"); do_input(op1->_opr);
assert(op1->_result->is_illegal(), "no result");

break;
}


case lir_tan:
case lir_sin:

@@ -801,6 +801,7 @@ enum LIR_Code {
, lir_monaddr
, lir_roundfp
, lir_safepoint
, lir_unwind
, end_op1
, begin_op2
, lir_cmp

@@ -830,7 +831,6 @@ enum LIR_Code {
, lir_ushr
, lir_alloc_array
, lir_throw
, lir_unwind
, lir_compare_to
, end_op2
, begin_op3

@@ -1827,8 +1827,12 @@ class LIR_List: public CompilationResourceObj {
void logical_xor (LIR_Opr left, LIR_Opr right, LIR_Opr dst) { append(new LIR_Op2(lir_logic_xor, left, right, dst)); }

void null_check(LIR_Opr opr, CodeEmitInfo* info) { append(new LIR_Op1(lir_null_check, opr, info)); }
void throw_exception(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info) { append(new LIR_Op2(lir_throw, exceptionPC, exceptionOop, LIR_OprFact::illegalOpr, info)); }
void unwind_exception(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info) { append(new LIR_Op2(lir_unwind, exceptionPC, exceptionOop, LIR_OprFact::illegalOpr, info)); }
void throw_exception(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info) {
append(new LIR_Op2(lir_throw, exceptionPC, exceptionOop, LIR_OprFact::illegalOpr, info));
}
void unwind_exception(LIR_Opr exceptionOop) {
append(new LIR_Op1(lir_unwind, exceptionOop));
}

void compare_to (LIR_Opr left, LIR_Opr right, LIR_Opr dst) {
append(new LIR_Op2(lir_compare_to, left, right, dst));
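
In the LIR_Code hunks above, lir_unwind moves out of the two-operand range (begin_op2..end_op2) and into the one-operand range, and unwind_exception() correspondingly shrinks to a single exception-oop operand with no exception PC or CodeEmitInfo. A compact sketch of how such begin/end markers classify an opcode; the enum values below are illustrative only, not the real LIR_Code layout:

// Illustrative enum only: shows how begin/end markers decide whether the
// visitor treats an opcode as a one-operand or a two-operand LIR op.
#include <cstdio>

enum Code {
    begin_op1, op_null_check, op_safepoint, op_unwind, end_op1,
    begin_op2, op_cmp, op_throw, end_op2
};

int operand_count(Code c) {
    if (c > begin_op1 && c < end_op1) return 1;   // handled as a one-operand op
    if (c > begin_op2 && c < end_op2) return 2;   // handled as a two-operand op
    return 0;
}

int main() {
    std::printf("unwind operands: %d\n", operand_count(op_unwind));  // 1: exception oop only
    std::printf("throw  operands: %d\n", operand_count(op_throw));   // 2: exception pc + oop
    return 0;
}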

@@ -552,6 +552,10 @@ void LIR_Assembler::emit_op1(LIR_Op1* op) {
monitor_address(op->in_opr()->as_constant_ptr()->as_jint(), op->result_opr());
break;

case lir_unwind:
unwind_op(op->in_opr());
break;

default:
Unimplemented();
break;

@@ -707,8 +711,7 @@ void LIR_Assembler::emit_op2(LIR_Op2* op) {
break;

case lir_throw:
case lir_unwind:
throw_op(op->in_opr1(), op->in_opr2(), op->info(), op->code() == lir_unwind);
throw_op(op->in_opr1(), op->in_opr2(), op->info());
break;

default:

@@ -39,6 +39,8 @@ class LIR_Assembler: public CompilationResourceObj {
Instruction* _pending_non_safepoint;
int _pending_non_safepoint_offset;

Label _unwind_handler_entry;

#ifdef ASSERT
BlockList _branch_target_blocks;
void check_no_unbound_labels();

@@ -134,6 +136,7 @@ class LIR_Assembler: public CompilationResourceObj {

// code patterns
int emit_exception_handler();
int emit_unwind_handler();
void emit_exception_entries(ExceptionInfoList* info_list);
int emit_deopt_handler();

@@ -217,7 +220,8 @@ class LIR_Assembler: public CompilationResourceObj {

void build_frame();

void throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info, bool unwind);
void throw_op(LIR_Opr exceptionPC, LIR_Opr exceptionOop, CodeEmitInfo* info);
void unwind_op(LIR_Opr exceptionOop);
void monitor_address(int monitor_ix, LIR_Opr dst);

void align_backward_branch_target();

@@ -1765,35 +1765,17 @@ void LIRGenerator::do_Throw(Throw* x) {
__ null_check(exception_opr, new CodeEmitInfo(info, true));
}

if (compilation()->env()->jvmti_can_post_on_exceptions() &&
!block()->is_set(BlockBegin::default_exception_handler_flag)) {
if (compilation()->env()->jvmti_can_post_on_exceptions()) {
// we need to go through the exception lookup path to get JVMTI
// notification done
unwind = false;
}

assert(!block()->is_set(BlockBegin::default_exception_handler_flag) || unwind,
"should be no more handlers to dispatch to");

if (compilation()->env()->dtrace_method_probes() &&
block()->is_set(BlockBegin::default_exception_handler_flag)) {
// notify that this frame is unwinding
BasicTypeList signature;
signature.append(T_INT); // thread
signature.append(T_OBJECT); // methodOop
LIR_OprList* args = new LIR_OprList();
args->append(getThreadPointer());
LIR_Opr meth = new_register(T_OBJECT);
__ oop2reg(method()->constant_encoding(), meth);
args->append(meth);
call_runtime(&signature, args, CAST_FROM_FN_PTR(address, SharedRuntime::dtrace_method_exit), voidType, NULL);
}

// move exception oop into fixed register
__ move(exception_opr, exceptionOopOpr());

if (unwind) {
__ unwind_exception(LIR_OprFact::illegalOpr, exceptionOopOpr(), info);
__ unwind_exception(exceptionOopOpr());
} else {
__ throw_exception(exceptionPcOpr(), exceptionOopOpr(), info);
}
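
With the dtrace call and the default_exception_handler_flag tests removed from do_Throw, the remaining decision is only whether to emit lir_throw or lir_unwind. A condensed model of that choice as it appears above, assuming unwind starts out true only when no handler in this method can catch the exception (that precondition is not visible in this hunk); the parameter and helper names are hypothetical:

// Condensed model of the remaining choice in do_Throw; the boolean parameters
// are hypothetical inputs, not real LIRGenerator fields.
#include <cstdio>

enum ThrowKind { emit_lir_throw, emit_lir_unwind };

ThrowKind select_throw_op(bool any_handler_in_method, bool jvmti_post_on_exceptions) {
    // The lookup path (lir_throw) is needed when a handler in this method might
    // catch the exception, or when JVMTI wants to be notified of it.
    bool unwind = !any_handler_in_method && !jvmti_post_on_exceptions;
    return unwind ? emit_lir_unwind : emit_lir_throw;
}

int main() {
    std::printf("%s\n", select_throw_op(false, false) == emit_lir_unwind ? "unwind" : "throw");
    std::printf("%s\n", select_throw_op(true,  false) == emit_lir_unwind ? "unwind" : "throw");
    std::printf("%s\n", select_throw_op(false, true ) == emit_lir_unwind ? "unwind" : "throw");
    return 0;
}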

@@ -685,6 +685,7 @@ nmethod::nmethod(
_exception_offset = 0;
_deoptimize_offset = 0;
_deoptimize_mh_offset = 0;
_unwind_handler_offset = -1;
_trap_offset = offsets->value(CodeOffsets::Dtrace_trap);
_orig_pc_offset = 0;
_stub_offset = data_offset();

@@ -798,6 +799,11 @@ nmethod::nmethod(
_exception_offset = _stub_offset + offsets->value(CodeOffsets::Exceptions);
_deoptimize_offset = _stub_offset + offsets->value(CodeOffsets::Deopt);
_deoptimize_mh_offset = _stub_offset + offsets->value(CodeOffsets::DeoptMH);
if (offsets->value(CodeOffsets::UnwindHandler) != -1) {
_unwind_handler_offset = instructions_offset() + offsets->value(CodeOffsets::UnwindHandler);
} else {
_unwind_handler_offset = -1;
}
_consts_offset = instructions_offset() + code_buffer->total_offset_of(code_buffer->consts()->start());
_scopes_data_offset = data_offset();
_scopes_pcs_offset = _scopes_data_offset + round_to(debug_info->data_size (), oopSize);

@@ -154,6 +154,9 @@ class nmethod : public CodeBlob {
// All deoptee's at a MethodHandle call site will resume execution
// at this location described by this offset.
int _deoptimize_mh_offset;
// Offset of the unwind handler if it exists
int _unwind_handler_offset;

#ifdef HAVE_DTRACE_H
int _trap_offset;
#endif // def HAVE_DTRACE_H

@@ -341,6 +344,7 @@ class nmethod : public CodeBlob {
address exception_begin () const { return header_begin() + _exception_offset ; }
address deopt_handler_begin () const { return header_begin() + _deoptimize_offset ; }
address deopt_mh_handler_begin() const { return header_begin() + _deoptimize_mh_offset ; }
address unwind_handler_begin () const { return _unwind_handler_offset != -1 ? (header_begin() + _unwind_handler_offset) : NULL; }
address stub_begin () const { return header_begin() + _stub_offset ; }
address stub_end () const { return header_begin() + _consts_offset ; }
address consts_begin () const { return header_begin() + _consts_offset ; }

@@ -473,6 +473,13 @@ address SharedRuntime::compute_compiled_exc_handler(nmethod* nm, address ret_pc,
t = table.entry_for(catch_pco, -1, 0);
}

#ifdef COMPILER1
if (t == NULL && nm->is_compiled_by_c1()) {
assert(nm->unwind_handler_begin() != NULL, "");
return nm->unwind_handler_begin();
}
#endif

if (t == NULL) {
tty->print_cr("MISSING EXCEPTION HANDLER for pc " INTPTR_FORMAT " and handler bci %d", ret_pc, handler_bci);
tty->print_cr(" Exception:");
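
The sharedRuntime hunk above is the consumer of the new per-method handler: when the exception handler table has no entry for the faulting pc and the code was produced by C1, the lookup returns nm->unwind_handler_begin() instead of failing. A self-contained sketch of that fallback using stand-in types, not the real nmethod or ExceptionHandlerTable:

// Stand-in types only; sketches the fallback from the handler table to the
// per-method unwind handler, as the COMPILER1 block above does for C1 code.
#include <cstdio>
#include <map>

struct FakeNmethod {
    std::map<int, int> handler_table;     // catch pc offset -> handler offset
    int unwind_handler_offset = -1;       // -1 means no unwind handler emitted
    bool compiled_by_c1 = true;

    const int* entry_for(int catch_pco) const {
        auto it = handler_table.find(catch_pco);
        return it == handler_table.end() ? nullptr : &it->second;
    }
};

int compute_handler(const FakeNmethod& nm, int catch_pco) {
    if (const int* t = nm.entry_for(catch_pco)) return *t;
    // No table entry: a C1 method dispatches to its shared unwind handler.
    if (nm.compiled_by_c1 && nm.unwind_handler_offset != -1) return nm.unwind_handler_offset;
    return -1;  // caller would report a missing exception handler
}

int main() {
    FakeNmethod nm;
    nm.handler_table[0x40] = 0x120;
    nm.unwind_handler_offset = 0x188;

    std::printf("pc offset 0x40 -> handler offset %d\n", compute_handler(nm, 0x40));
    std::printf("pc offset 0x90 -> handler offset %d\n", compute_handler(nm, 0x90));
    return 0;
}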