8263709: Cleanup THREAD/TRAPS/CHECK usage in JRT_ENTRY routines

Reviewed-by: coleenp, hseigel, iklam, kvn
This commit is contained in:
David Holmes 2021-04-15 02:20:31 +00:00
parent f4c3efd9cf
commit 79bff21b43
23 changed files with 906 additions and 919 deletions

View file

@@ -159,24 +159,24 @@ void Deoptimization::UnrollBlock::print() {
// analysis, the method was changed from JRT_LEAF to JRT_BLOCK_ENTRY.
// The actual reallocation of previously eliminated objects occurs in realloc_objects,
// which is called from the method fetch_unroll_info_helper below.
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* thread, int exec_mode))
JRT_BLOCK_ENTRY(Deoptimization::UnrollBlock*, Deoptimization::fetch_unroll_info(JavaThread* current, int exec_mode))
// fetch_unroll_info() is called at the beginning of the deoptimization
// handler. Note this fact before we start generating temporary frames
// that can confuse an asynchronous stack walker. This counter is
// decremented at the end of unpack_frames().
if (TraceDeoptimization) {
tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(thread));
tty->print_cr("Deoptimizing thread " INTPTR_FORMAT, p2i(current));
}
thread->inc_in_deopt_handler();
current->inc_in_deopt_handler();
if (exec_mode == Unpack_exception) {
// When we get here, a callee has thrown an exception into a deoptimized
// frame. That throw might have deferred stack watermark checking until
// after unwinding. So we deal with such deferred requests here.
StackWatermarkSet::after_unwind(thread);
StackWatermarkSet::after_unwind(current);
}
return fetch_unroll_info_helper(thread, exec_mode);
return fetch_unroll_info_helper(current, exec_mode);
JRT_END
#if COMPILER2_OR_JVMCI
@@ -223,6 +223,7 @@ static bool rematerialize_objects(JavaThread* thread, int exec_mode, CompiledMet
realloc_failures = Deoptimization::realloc_objects(thread, &deoptee, &map, objects, CHECK_AND_CLEAR_(true));
deoptimized_objects = true;
} else {
JavaThread* current = thread; // For JRT_BLOCK
JRT_BLOCK
realloc_failures = Deoptimization::realloc_objects(thread, &deoptee, &map, objects, THREAD);
JRT_END
@@ -325,11 +326,11 @@ bool Deoptimization::deoptimize_objects_internal(JavaThread* thread, GrowableArr
#endif // COMPILER2_OR_JVMCI
// This is factored, since it is both called from a JRT_LEAF (deoptimization) and a JRT_ENTRY (uncommon_trap)
Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* thread, int exec_mode) {
Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread* current, int exec_mode) {
// When we get here we are about to unwind the deoptee frame. In order to
// catch not yet safe to use frames, the following stack watermark barrier
// poll will make such frames safe to use.
StackWatermarkSet::before_unwind(thread);
StackWatermarkSet::before_unwind(current);
// Note: there is a safepoint safety issue here. No matter whether we enter
// via vanilla deopt or uncommon trap we MUST NOT stop at a safepoint once
@@ -337,29 +338,29 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
//
// Allocate our special deoptimization ResourceMark
DeoptResourceMark* dmark = new DeoptResourceMark(thread);
assert(thread->deopt_mark() == NULL, "Pending deopt!");
thread->set_deopt_mark(dmark);
DeoptResourceMark* dmark = new DeoptResourceMark(current);
assert(current->deopt_mark() == NULL, "Pending deopt!");
current->set_deopt_mark(dmark);
frame stub_frame = thread->last_frame(); // Makes stack walkable as side effect
RegisterMap map(thread, true);
RegisterMap dummy_map(thread, false);
frame stub_frame = current->last_frame(); // Makes stack walkable as side effect
RegisterMap map(current, true);
RegisterMap dummy_map(current, false);
// Now get the deoptee with a valid map
frame deoptee = stub_frame.sender(&map);
// Set the deoptee nmethod
assert(thread->deopt_compiled_method() == NULL, "Pending deopt!");
assert(current->deopt_compiled_method() == NULL, "Pending deopt!");
CompiledMethod* cm = deoptee.cb()->as_compiled_method_or_null();
thread->set_deopt_compiled_method(cm);
current->set_deopt_compiled_method(cm);
if (VerifyStack) {
thread->validate_frame_layout();
current->validate_frame_layout();
}
// Create a growable array of VFrames where each VFrame represents an inlined
// Java frame. This storage is allocated with the usual system arena.
assert(deoptee.is_compiled_frame(), "Wrong frame type");
GrowableArray<compiledVFrame*>* chunk = new GrowableArray<compiledVFrame*>(10);
vframe* vf = vframe::new_vframe(&deoptee, &map, thread);
vframe* vf = vframe::new_vframe(&deoptee, &map, current);
while (!vf->is_top()) {
assert(vf->is_compiled_frame(), "Wrong frame type");
chunk->push(compiledVFrame::cast(vf));
@@ -378,13 +379,13 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
if (jvmci_enabled COMPILER2_PRESENT( || (DoEscapeAnalysis && EliminateAllocations)
|| EliminateAutoBox || EnableVectorAggressiveReboxing )) {
bool unused;
realloc_failures = rematerialize_objects(thread, exec_mode, cm, deoptee, map, chunk, unused);
realloc_failures = rematerialize_objects(current, exec_mode, cm, deoptee, map, chunk, unused);
}
#endif // COMPILER2_OR_JVMCI
// Revoke biases, done with in java state.
// No safepoints allowed after this
revoke_from_deopt_handler(thread, deoptee, &map);
revoke_from_deopt_handler(current, deoptee, &map);
// Ensure that no safepoint is taken after pointers have been stored
// in fields of rematerialized objects. If a safepoint occurs from here on
@@ -394,9 +395,9 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
#if COMPILER2_OR_JVMCI
if ((jvmci_enabled COMPILER2_PRESENT( || ((DoEscapeAnalysis || EliminateNestedLocks) && EliminateLocks) ))
&& !EscapeBarrier::objs_are_deoptimized(thread, deoptee.id())) {
&& !EscapeBarrier::objs_are_deoptimized(current, deoptee.id())) {
bool unused;
restore_eliminated_locks(thread, chunk, realloc_failures, deoptee, exec_mode, unused);
restore_eliminated_locks(current, chunk, realloc_failures, deoptee, exec_mode, unused);
}
#endif // COMPILER2_OR_JVMCI
@@ -413,21 +414,21 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
guarantee(exceptionObject() != NULL, "exception oop can not be null");
}
vframeArray* array = create_vframeArray(thread, deoptee, &map, chunk, realloc_failures);
vframeArray* array = create_vframeArray(current, deoptee, &map, chunk, realloc_failures);
#if COMPILER2_OR_JVMCI
if (realloc_failures) {
pop_frames_failed_reallocs(thread, array);
pop_frames_failed_reallocs(current, array);
}
#endif
assert(thread->vframe_array_head() == NULL, "Pending deopt!");
thread->set_vframe_array_head(array);
assert(current->vframe_array_head() == NULL, "Pending deopt!");
current->set_vframe_array_head(array);
// Now that the vframeArray has been created if we have any deferred local writes
// added by jvmti then we can free up that structure as the data is now in the
// vframeArray
JvmtiDeferredUpdates::delete_updates_for_frame(thread, array->original().id());
JvmtiDeferredUpdates::delete_updates_for_frame(current, array->original().id());
// Compute the caller frame based on the sender sp of stub_frame and stored frame sizes info.
CodeBlob* cb = stub_frame.cb();
@@ -473,8 +474,8 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
// PopFrame requires that the preserved incoming arguments from the recently-popped topmost
// activation be put back on the expression stack of the caller for reexecution
if (JvmtiExport::can_pop_frame() && thread->popframe_forcing_deopt_reexecution()) {
popframe_extra_args = in_words(thread->popframe_preserved_args_size_in_words());
if (JvmtiExport::can_pop_frame() && current->popframe_forcing_deopt_reexecution()) {
popframe_extra_args = in_words(current->popframe_preserved_args_size_in_words());
}
// Find the current pc for sender of the deoptee. Since the sender may have been deoptimized
@@ -491,7 +492,7 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
// frame.
bool caller_was_method_handle = false;
if (deopt_sender.is_interpreted_frame()) {
methodHandle method(thread, deopt_sender.interpreter_frame_method());
methodHandle method(current, deopt_sender.interpreter_frame_method());
Bytecode_invoke cur = Bytecode_invoke_check(method, deopt_sender.interpreter_frame_bci());
if (cur.is_invokedynamic() || cur.is_invokehandle()) {
// Method handle invokes may involve fairly arbitrary chains of
@@ -539,7 +540,7 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
// Compute whether the root vframe returns a float or double value.
BasicType return_type;
{
methodHandle method(thread, array->element(0)->method());
methodHandle method(current, array->element(0)->method());
Bytecode_invoke invoke = Bytecode_invoke_check(method, array->element(0)->bci());
return_type = invoke.is_valid() ? invoke.result_type() : T_ILLEGAL;
}
@@ -578,21 +579,21 @@ Deoptimization::UnrollBlock* Deoptimization::fetch_unroll_info_helper(JavaThread
#if INCLUDE_JVMCI
if (exceptionObject() != NULL) {
thread->set_exception_oop(exceptionObject());
current->set_exception_oop(exceptionObject());
exec_mode = Unpack_exception;
}
#endif
if (thread->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
assert(thread->has_pending_exception(), "should have thrown OOME");
thread->set_exception_oop(thread->pending_exception());
thread->clear_pending_exception();
if (current->frames_to_pop_failed_realloc() > 0 && exec_mode != Unpack_uncommon_trap) {
assert(current->has_pending_exception(), "should have thrown OOME");
current->set_exception_oop(current->pending_exception());
current->clear_pending_exception();
exec_mode = Unpack_exception;
}
#if INCLUDE_JVMCI
if (thread->frames_to_pop_failed_realloc() > 0) {
thread->set_pending_monitorenter(false);
if (current->frames_to_pop_failed_realloc() > 0) {
current->set_pending_monitorenter(false);
}
#endif
@@ -1919,30 +1920,30 @@ static void post_deoptimization_event(CompiledMethod* nm,
#endif // INCLUDE_JFR
JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint trap_request)) {
HandleMark hm(thread);
JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* current, jint trap_request)) {
HandleMark hm(current);
// uncommon_trap() is called at the beginning of the uncommon trap
// handler. Note this fact before we start generating temporary frames
// that can confuse an asynchronous stack walker. This counter is
// decremented at the end of unpack_frames().
thread->inc_in_deopt_handler();
current->inc_in_deopt_handler();
// We need to update the map if we have biased locking.
#if INCLUDE_JVMCI
// JVMCI might need to get an exception from the stack, which in turn requires the register map to be valid
RegisterMap reg_map(thread, true);
RegisterMap reg_map(current, true);
#else
RegisterMap reg_map(thread, UseBiasedLocking);
RegisterMap reg_map(current, UseBiasedLocking);
#endif
frame stub_frame = thread->last_frame();
frame stub_frame = current->last_frame();
frame fr = stub_frame.sender(&reg_map);
// Make sure the calling nmethod is not getting deoptimized and removed
// before we are done with it.
nmethodLocker nl(fr.pc());
// Log a message
Events::log_deopt_message(thread, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
Events::log_deopt_message(current, "Uncommon trap: trap_request=" PTR32_FORMAT " fr.pc=" INTPTR_FORMAT " relative=" INTPTR_FORMAT,
trap_request, p2i(fr.pc()), fr.pc() - fr.cb()->code_begin());
{
@@ -1955,7 +1956,7 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
#endif
jint unloaded_class_index = trap_request_index(trap_request); // CP idx or -1
vframe* vf = vframe::new_vframe(&fr, &reg_map, thread);
vframe* vf = vframe::new_vframe(&fr, &reg_map, current);
compiledVFrame* cvf = compiledVFrame::cast(vf);
CompiledMethod* nm = cvf->code();
@@ -1971,23 +1972,23 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
);
}
methodHandle trap_method(THREAD, trap_scope->method());
methodHandle trap_method(current, trap_scope->method());
int trap_bci = trap_scope->bci();
#if INCLUDE_JVMCI
jlong speculation = thread->pending_failed_speculation();
jlong speculation = current->pending_failed_speculation();
if (nm->is_compiled_by_jvmci() && nm->is_nmethod()) { // Exclude AOTed methods
nm->as_nmethod()->update_speculation(thread);
nm->as_nmethod()->update_speculation(current);
} else {
assert(speculation == 0, "There should not be a speculation for methods compiled by non-JVMCI compilers");
}
if (trap_bci == SynchronizationEntryBCI) {
trap_bci = 0;
thread->set_pending_monitorenter(true);
current->set_pending_monitorenter(true);
}
if (reason == Deoptimization::Reason_transfer_to_interpreter) {
thread->set_pending_transfer_to_interpreter(true);
current->set_pending_transfer_to_interpreter(true);
}
#endif
@@ -2002,7 +2003,7 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
methodHandle profiled_method;
#if INCLUDE_JVMCI
if (nm->is_compiled_by_jvmci()) {
profiled_method = methodHandle(THREAD, nm->method());
profiled_method = methodHandle(current, nm->method());
} else {
profiled_method = trap_method;
}
@@ -2011,12 +2012,12 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
#endif
MethodData* trap_mdo =
get_method_data(thread, profiled_method, create_if_missing);
get_method_data(current, profiled_method, create_if_missing);
JFR_ONLY(post_deoptimization_event(nm, trap_method(), trap_bci, trap_bc, reason, action);)
// Log a message
Events::log_deopt_message(thread, "Uncommon trap: reason=%s action=%s pc=" INTPTR_FORMAT " method=%s @ %d %s",
Events::log_deopt_message(current, "Uncommon trap: reason=%s action=%s pc=" INTPTR_FORMAT " method=%s @ %d %s",
trap_reason_name(reason), trap_action_name(action), p2i(fr.pc()),
trap_method->name_and_sig_as_C_string(), trap_bci, nm->compiler_name());
@@ -2039,7 +2040,7 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
Symbol* class_name = NULL;
bool unresolved = false;
if (unloaded_class_index >= 0) {
constantPoolHandle constants (THREAD, trap_method->constants());
constantPoolHandle constants (current, trap_method->constants());
if (constants->tag_at(unloaded_class_index).is_unresolved_klass()) {
class_name = constants->klass_name_at(unloaded_class_index);
unresolved = true;
@@ -2116,7 +2117,7 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
// Load class if necessary
if (unloaded_class_index >= 0) {
constantPoolHandle constants(THREAD, trap_method->constants());
constantPoolHandle constants(current, trap_method->constants());
load_class_by_index(constants, unloaded_class_index, THREAD);
}
@@ -2228,7 +2229,7 @@ JRT_ENTRY(void, Deoptimization::uncommon_trap_inner(JavaThread* thread, jint tra
bool inc_recompile_count = false;
ProfileData* pdata = NULL;
if (ProfileTraps && CompilerConfig::is_c2_or_jvmci_compiler_enabled() && update_trap_state && trap_mdo != NULL) {
assert(trap_mdo == get_method_data(thread, profiled_method, false), "sanity");
assert(trap_mdo == get_method_data(current, profiled_method, false), "sanity");
uint this_trap_count = 0;
bool maybe_prior_trap = false;
bool maybe_prior_recompile = false;
@@ -2469,7 +2470,7 @@ Deoptimization::update_method_data_from_interpreter(MethodData* trap_mdo, int tr
ignore_maybe_prior_recompile);
}
Deoptimization::UnrollBlock* Deoptimization::uncommon_trap(JavaThread* thread, jint trap_request, jint exec_mode) {
Deoptimization::UnrollBlock* Deoptimization::uncommon_trap(JavaThread* current, jint trap_request, jint exec_mode) {
// Enable WXWrite: current function is called from methods compiled by C2 directly
MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXWrite, thread));
@@ -2479,10 +2480,10 @@ Deoptimization::UnrollBlock* Deoptimization::uncommon_trap(JavaThread* thread, j
// Still in Java no safepoints
{
// This enters VM and may safepoint
uncommon_trap_inner(thread, trap_request);
uncommon_trap_inner(current, trap_request);
}
HandleMark hm(thread);
return fetch_unroll_info_helper(thread, exec_mode);
HandleMark hm(current);
return fetch_unroll_info_helper(current, exec_mode);
}
// Local derived constants.