Mirror of https://github.com/openjdk/jdk.git, synced 2025-08-26 22:34:27 +02:00
8017317: PPC64 (part 7): cppInterpreter: implement support for biased locking
Reviewed-by: kvn, dholmes

parent ac0c6f1e84
commit 1e0a321895

1 changed file with 295 additions and 147 deletions
@@ -36,6 +36,7 @@
 #include "oops/objArrayKlass.hpp"
 #include "oops/oop.inline.hpp"
 #include "prims/jvmtiExport.hpp"
+#include "runtime/biasedLocking.hpp"
 #include "runtime/frame.inline.hpp"
 #include "runtime/handles.inline.hpp"
 #include "runtime/interfaceSupport.hpp"
@@ -679,114 +680,97 @@ BytecodeInterpreter::run(interpreterState istate) {

       // lock method if synchronized
       if (METHOD->is_synchronized()) {
         // oop rcvr = locals[0].j.r;
         oop rcvr;
         if (METHOD->is_static()) {
           rcvr = METHOD->constants()->pool_holder()->java_mirror();
         } else {
           rcvr = LOCALS_OBJECT(0);
           VERIFY_OOP(rcvr);
         }
         // The initial monitor is ours for the taking
-        BasicObjectLock* mon = &istate->monitor_base()[-1];
-        oop monobj = mon->obj();
-        assert(mon->obj() == rcvr, "method monitor mis-initialized");
-
-        bool success = UseBiasedLocking;
-        if (UseBiasedLocking) {
-          markOop mark = rcvr->mark();
-          if (mark->has_bias_pattern()) {
-            // The bias pattern is present in the object's header. Need to check
-            // whether the bias owner and the epoch are both still current.
-            intptr_t xx = ((intptr_t) THREAD) ^ (intptr_t) mark;
-            xx = (intptr_t) rcvr->klass()->prototype_header() ^ xx;
-            intptr_t yy = (xx & ~((int) markOopDesc::age_mask_in_place));
-            if (yy != 0 ) {
-              // At this point we know that the header has the bias pattern and
-              // that we are not the bias owner in the current epoch. We need to
-              // figure out more details about the state of the header in order to
-              // know what operations can be legally performed on the object's
-              // header.
-
-              // If the low three bits in the xor result aren't clear, that means
-              // the prototype header is no longer biased and we have to revoke
-              // the bias on this object.
-
-              if (yy & markOopDesc::biased_lock_mask_in_place == 0 ) {
-                // Biasing is still enabled for this data type. See whether the
-                // epoch of the current bias is still valid, meaning that the epoch
-                // bits of the mark word are equal to the epoch bits of the
-                // prototype header. (Note that the prototype header's epoch bits
-                // only change at a safepoint.) If not, attempt to rebias the object
-                // toward the current thread. Note that we must be absolutely sure
-                // that the current epoch is invalid in order to do this because
-                // otherwise the manipulations it performs on the mark word are
-                // illegal.
-                if (yy & markOopDesc::epoch_mask_in_place == 0) {
-                  // The epoch of the current bias is still valid but we know nothing
-                  // about the owner; it might be set or it might be clear. Try to
-                  // acquire the bias of the object using an atomic operation. If this
-                  // fails we will go in to the runtime to revoke the object's bias.
-                  // Note that we first construct the presumed unbiased header so we
-                  // don't accidentally blow away another thread's valid bias.
-                  intptr_t unbiased = (intptr_t) mark & (markOopDesc::biased_lock_mask_in_place |
-                                                         markOopDesc::age_mask_in_place |
-                                                         markOopDesc::epoch_mask_in_place);
-                  if (Atomic::cmpxchg_ptr((intptr_t)THREAD | unbiased, (intptr_t*) rcvr->mark_addr(), unbiased) != unbiased) {
-                    CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
-                  }
-                } else {
-                  try_rebias:
-                  // At this point we know the epoch has expired, meaning that the
-                  // current "bias owner", if any, is actually invalid. Under these
-                  // circumstances _only_, we are allowed to use the current header's
-                  // value as the comparison value when doing the cas to acquire the
-                  // bias in the current epoch. In other words, we allow transfer of
-                  // the bias from one thread to another directly in this situation.
-                  xx = (intptr_t) rcvr->klass()->prototype_header() | (intptr_t) THREAD;
-                  if (Atomic::cmpxchg_ptr((intptr_t)THREAD | (intptr_t) rcvr->klass()->prototype_header(),
-                                          (intptr_t*) rcvr->mark_addr(),
-                                          (intptr_t) mark) != (intptr_t) mark) {
-                    CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
-                  }
-                }
-              } else {
-                try_revoke_bias:
-                // The prototype mark in the klass doesn't have the bias bit set any
-                // more, indicating that objects of this data type are not supposed
-                // to be biased any more. We are going to try to reset the mark of
-                // this object to the prototype value and fall through to the
-                // CAS-based locking scheme. Note that if our CAS fails, it means
-                // that another thread raced us for the privilege of revoking the
-                // bias of this particular object, so it's okay to continue in the
-                // normal locking code.
-                //
-                xx = (intptr_t) rcvr->klass()->prototype_header() | (intptr_t) THREAD;
-                if (Atomic::cmpxchg_ptr(rcvr->klass()->prototype_header(),
-                                        (intptr_t*) rcvr->mark_addr(),
-                                        mark) == mark) {
-                  // (*counters->revoked_lock_entry_count_addr())++;
-                  success = false;
-                }
-              }
-            }
-          } else {
-            cas_label:
-            success = false;
-          }
-        }
-        if (!success) {
-          markOop displaced = rcvr->mark()->set_unlocked();
-          mon->lock()->set_displaced_header(displaced);
-          if (Atomic::cmpxchg_ptr(mon, rcvr->mark_addr(), displaced) != displaced) {
-            // Is it simple recursive case?
-            if (THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
-              mon->lock()->set_displaced_header(NULL);
-            } else {
-              CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
-            }
-          }
-        }
+        // Monitor not filled in frame manager any longer as this caused race condition with biased locking.
+        BasicObjectLock* mon = &istate->monitor_base()[-1];
+        mon->set_obj(rcvr);
+        bool success = false;
+        uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;
+        markOop mark = rcvr->mark();
+        intptr_t hash = (intptr_t) markOopDesc::no_hash;
+        // Implies UseBiasedLocking.
+        if (mark->has_bias_pattern()) {
+          uintptr_t thread_ident;
+          uintptr_t anticipated_bias_locking_value;
+          thread_ident = (uintptr_t)istate->thread();
+          anticipated_bias_locking_value =
+            (((uintptr_t)rcvr->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &
+            ~((uintptr_t) markOopDesc::age_mask_in_place);
+
+          if (anticipated_bias_locking_value == 0) {
+            // Already biased towards this thread, nothing to do.
+            if (PrintBiasedLockingStatistics) {
+              (* BiasedLocking::biased_lock_entry_count_addr())++;
+            }
+            success = true;
+          } else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {
+            // Try to revoke bias.
+            markOop header = rcvr->klass()->prototype_header();
+            if (hash != markOopDesc::no_hash) {
+              header = header->copy_set_hash(hash);
+            }
+            if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), mark) == mark) {
+              if (PrintBiasedLockingStatistics)
+                (*BiasedLocking::revoked_lock_entry_count_addr())++;
+            }
+          } else if ((anticipated_bias_locking_value & epoch_mask_in_place) != 0) {
+            // Try to rebias.
+            markOop new_header = (markOop) ( (intptr_t) rcvr->klass()->prototype_header() | thread_ident);
+            if (hash != markOopDesc::no_hash) {
+              new_header = new_header->copy_set_hash(hash);
+            }
+            if (Atomic::cmpxchg_ptr((void*)new_header, rcvr->mark_addr(), mark) == mark) {
+              if (PrintBiasedLockingStatistics) {
+                (* BiasedLocking::rebiased_lock_entry_count_addr())++;
+              }
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
+            }
+            success = true;
+          } else {
+            // Try to bias towards thread in case object is anonymously biased.
+            markOop header = (markOop) ((uintptr_t) mark &
+                                        ((uintptr_t)markOopDesc::biased_lock_mask_in_place |
+                                         (uintptr_t)markOopDesc::age_mask_in_place | epoch_mask_in_place));
+            if (hash != markOopDesc::no_hash) {
+              header = header->copy_set_hash(hash);
+            }
+            markOop new_header = (markOop) ((uintptr_t) header | thread_ident);
+            // Debugging hint.
+            DEBUG_ONLY(mon->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)
+            if (Atomic::cmpxchg_ptr((void*)new_header, rcvr->mark_addr(), header) == header) {
+              if (PrintBiasedLockingStatistics) {
+                (* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;
+              }
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
+            }
+            success = true;
+          }
+        }
+
+        // Traditional lightweight locking.
+        if (!success) {
+          markOop displaced = rcvr->mark()->set_unlocked();
+          mon->lock()->set_displaced_header(displaced);
+          bool call_vm = UseHeavyMonitors;
+          if (call_vm || Atomic::cmpxchg_ptr(mon, rcvr->mark_addr(), displaced) != displaced) {
+            // Is it simple recursive case?
+            if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
+              mon->lock()->set_displaced_header(NULL);
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, mon), handle_exception);
+            }
+          }
+        }
       }
       THREAD->clr_do_not_unlock();
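
The same anticipated_bias_locking_value test drives every locking site touched by this change. As a reading aid, here is a minimal standalone sketch (not part of the patch) of how the single XOR-and-mask classifies the header into the four cases the interpreter handles; the constants are an assumed 64-bit mark-word layout (lock bits [1:0], biased bit [2], age [6:3], epoch [8:7], owning thread above), not markOopDesc's real definitions.

// Standalone sketch, not HotSpot code.
#include <cstdint>
#include <cstdio>

const uintptr_t biased_lock_mask_in_place = 0x7;       // lock bits + biased bit (assumed)
const uintptr_t age_mask_in_place         = 0xf << 3;  // assumed age field
const uintptr_t epoch_mask_in_place       = 0x3 << 7;  // assumed epoch field
const uintptr_t biased_lock_pattern       = 0x5;       // assumed "biased" low-bit pattern

// Mirrors the patch: (((prototype_header | thread) ^ mark) & ~age_mask).
uintptr_t anticipated_bias_locking_value(uintptr_t prototype_header,
                                         uintptr_t thread_ident,
                                         uintptr_t mark) {
  return ((prototype_header | thread_ident) ^ mark) & ~age_mask_in_place;
}

// Same branch order as the interpreter code above.
const char* classify(uintptr_t v) {
  if (v == 0)                        return "already biased to this thread: nothing to do";
  if (v & biased_lock_mask_in_place) return "class no longer biasable: try to revoke the bias";
  if (v & epoch_mask_in_place)       return "bias epoch expired: try to rebias with a CAS on the mark";
  return "anonymously biased: try to install this thread with a CAS";
}

int main() {
  uintptr_t thread = 0x7f0000001200;          // fake JavaThread address (low bits clear)
  uintptr_t proto  = biased_lock_pattern;     // class allows biasing, epoch 0

  uintptr_t biased_to_us = proto | thread;    // mark already carries our thread
  uintptr_t anonymous    = proto;             // biased bit set, no owner yet
  uintptr_t stale_epoch  = proto | thread | (0x1 << 7);

  printf("%s\n", classify(anticipated_bias_locking_value(proto, thread, biased_to_us)));
  printf("%s\n", classify(anticipated_bias_locking_value(proto, thread, anonymous)));
  printf("%s\n", classify(anticipated_bias_locking_value(proto, thread, stale_epoch)));
  return 0;
}

Because the age bits are masked out, a header that differs from prototype_header | thread only in its age still counts as "already biased to this thread".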
@@ -881,15 +865,84 @@ BytecodeInterpreter::run(interpreterState istate) {
         BasicObjectLock* entry = (BasicObjectLock*) istate->stack_base();
         assert(entry->obj() == NULL, "Frame manager didn't allocate the monitor");
         entry->set_obj(lockee);
-        markOop displaced = lockee->mark()->set_unlocked();
-        entry->lock()->set_displaced_header(displaced);
-        if (Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {
-          // Is it simple recursive case?
-          if (THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
-            entry->lock()->set_displaced_header(NULL);
-          } else {
-            CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
-          }
-        }
+        bool success = false;
+        uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;
+
+        markOop mark = lockee->mark();
+        intptr_t hash = (intptr_t) markOopDesc::no_hash;
+        // implies UseBiasedLocking
+        if (mark->has_bias_pattern()) {
+          uintptr_t thread_ident;
+          uintptr_t anticipated_bias_locking_value;
+          thread_ident = (uintptr_t)istate->thread();
+          anticipated_bias_locking_value =
+            (((uintptr_t)lockee->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &
+            ~((uintptr_t) markOopDesc::age_mask_in_place);
+
+          if (anticipated_bias_locking_value == 0) {
+            // already biased towards this thread, nothing to do
+            if (PrintBiasedLockingStatistics) {
+              (* BiasedLocking::biased_lock_entry_count_addr())++;
+            }
+            success = true;
+          } else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {
+            // try revoke bias
+            markOop header = lockee->klass()->prototype_header();
+            if (hash != markOopDesc::no_hash) {
+              header = header->copy_set_hash(hash);
+            }
+            if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), mark) == mark) {
+              if (PrintBiasedLockingStatistics) {
+                (*BiasedLocking::revoked_lock_entry_count_addr())++;
+              }
+            }
+          } else if ((anticipated_bias_locking_value & epoch_mask_in_place) != 0) {
+            // try rebias
+            markOop new_header = (markOop) ( (intptr_t) lockee->klass()->prototype_header() | thread_ident);
+            if (hash != markOopDesc::no_hash) {
+              new_header = new_header->copy_set_hash(hash);
+            }
+            if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), mark) == mark) {
+              if (PrintBiasedLockingStatistics) {
+                (* BiasedLocking::rebiased_lock_entry_count_addr())++;
+              }
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
+            }
+            success = true;
+          } else {
+            // try to bias towards thread in case object is anonymously biased
+            markOop header = (markOop) ((uintptr_t) mark & ((uintptr_t)markOopDesc::biased_lock_mask_in_place |
+                                                            (uintptr_t)markOopDesc::age_mask_in_place | epoch_mask_in_place));
+            if (hash != markOopDesc::no_hash) {
+              header = header->copy_set_hash(hash);
+            }
+            markOop new_header = (markOop) ((uintptr_t) header | thread_ident);
+            // debugging hint
+            DEBUG_ONLY(entry->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)
+            if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), header) == header) {
+              if (PrintBiasedLockingStatistics) {
+                (* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;
+              }
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
+            }
+            success = true;
+          }
+        }
+
+        // traditional lightweight locking
+        if (!success) {
+          markOop displaced = lockee->mark()->set_unlocked();
+          entry->lock()->set_displaced_header(displaced);
+          bool call_vm = UseHeavyMonitors;
+          if (call_vm || Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {
+            // Is it simple recursive case?
+            if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
+              entry->lock()->set_displaced_header(NULL);
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
+            }
+          }
+        }
         UPDATE_PC_AND_TOS(1, -1);
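
The "anonymously biased" arm above is the only branch that installs the current thread directly. Below is a small standalone model of that CAS (assumed constants, std::atomic standing in for Atomic::cmpxchg_ptr on the mark word; not the patch itself), showing why a racing thread falls back to the runtime slow path.

// Standalone model, not HotSpot code.
#include <atomic>
#include <cstdint>
#include <cstdio>

const uintptr_t biased_lock_mask_in_place = 0x7;       // assumed bit layout
const uintptr_t age_mask_in_place         = 0xf << 3;
const uintptr_t epoch_mask_in_place       = 0x3 << 7;
const uintptr_t biased_lock_pattern       = 0x5;

bool try_acquire_anonymous_bias(std::atomic<uintptr_t>& mark_word, uintptr_t thread_ident) {
  uintptr_t mark = mark_word.load();
  // Presumed unowned header: bias pattern + age + epoch, owner bits zero.
  uintptr_t header = mark & (biased_lock_mask_in_place | age_mask_in_place | epoch_mask_in_place);
  uintptr_t new_header = header | thread_ident;
  uintptr_t expected = header;
  // Fails when the mark no longer equals the unowned header, i.e. another
  // thread already made itself the bias owner; the interpreter then calls
  // InterpreterRuntime::monitorenter instead.
  return mark_word.compare_exchange_strong(expected, new_header);
}

int main() {
  std::atomic<uintptr_t> mark(biased_lock_pattern);   // anonymously biased object
  uintptr_t me    = 0x7f0000001200;                   // fake thread identities
  uintptr_t other = 0x7f0000002a00;
  printf("first thread:  %s\n", try_acquire_anonymous_bias(mark, me)    ? "bias acquired" : "slow path");
  printf("second thread: %s\n", try_acquire_anonymous_bias(mark, other) ? "bias acquired" : "slow path");
  return 0;
}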
@@ -1700,14 +1753,87 @@ run:
       }
       if (entry != NULL) {
         entry->set_obj(lockee);
-        markOop displaced = lockee->mark()->set_unlocked();
-        entry->lock()->set_displaced_header(displaced);
-        if (Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {
-          // Is it simple recursive case?
-          if (THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
-            entry->lock()->set_displaced_header(NULL);
-          } else {
-            CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
-          }
-        }
+        int success = false;
+        uintptr_t epoch_mask_in_place = (uintptr_t)markOopDesc::epoch_mask_in_place;
+
+        markOop mark = lockee->mark();
+        intptr_t hash = (intptr_t) markOopDesc::no_hash;
+        // implies UseBiasedLocking
+        if (mark->has_bias_pattern()) {
+          uintptr_t thread_ident;
+          uintptr_t anticipated_bias_locking_value;
+          thread_ident = (uintptr_t)istate->thread();
+          anticipated_bias_locking_value =
+            (((uintptr_t)lockee->klass()->prototype_header() | thread_ident) ^ (uintptr_t)mark) &
+            ~((uintptr_t) markOopDesc::age_mask_in_place);
+
+          if (anticipated_bias_locking_value == 0) {
+            // already biased towards this thread, nothing to do
+            if (PrintBiasedLockingStatistics) {
+              (* BiasedLocking::biased_lock_entry_count_addr())++;
+            }
+            success = true;
+          }
+          else if ((anticipated_bias_locking_value & markOopDesc::biased_lock_mask_in_place) != 0) {
+            // try revoke bias
+            markOop header = lockee->klass()->prototype_header();
+            if (hash != markOopDesc::no_hash) {
+              header = header->copy_set_hash(hash);
+            }
+            if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), mark) == mark) {
+              if (PrintBiasedLockingStatistics)
+                (*BiasedLocking::revoked_lock_entry_count_addr())++;
+            }
+          }
+          else if ((anticipated_bias_locking_value & epoch_mask_in_place) !=0) {
+            // try rebias
+            markOop new_header = (markOop) ( (intptr_t) lockee->klass()->prototype_header() | thread_ident);
+            if (hash != markOopDesc::no_hash) {
+              new_header = new_header->copy_set_hash(hash);
+            }
+            if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), mark) == mark) {
+              if (PrintBiasedLockingStatistics)
+                (* BiasedLocking::rebiased_lock_entry_count_addr())++;
+            }
+            else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
+            }
+            success = true;
+          }
+          else {
+            // try to bias towards thread in case object is anonymously biased
+            markOop header = (markOop) ((uintptr_t) mark & ((uintptr_t)markOopDesc::biased_lock_mask_in_place |
+                                                            (uintptr_t)markOopDesc::age_mask_in_place |
+                                                            epoch_mask_in_place));
+            if (hash != markOopDesc::no_hash) {
+              header = header->copy_set_hash(hash);
+            }
+            markOop new_header = (markOop) ((uintptr_t) header | thread_ident);
+            // debugging hint
+            DEBUG_ONLY(entry->lock()->set_displaced_header((markOop) (uintptr_t) 0xdeaddead);)
+            if (Atomic::cmpxchg_ptr((void*)new_header, lockee->mark_addr(), header) == header) {
+              if (PrintBiasedLockingStatistics)
+                (* BiasedLocking::anonymously_biased_lock_entry_count_addr())++;
+            }
+            else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
+            }
+            success = true;
+          }
+        }
+
+        // traditional lightweight locking
+        if (!success) {
+          markOop displaced = lockee->mark()->set_unlocked();
+          entry->lock()->set_displaced_header(displaced);
+          bool call_vm = UseHeavyMonitors;
+          if (call_vm || Atomic::cmpxchg_ptr(entry, lockee->mark_addr(), displaced) != displaced) {
+            // Is it simple recursive case?
+            if (!call_vm && THREAD->is_lock_owned((address) displaced->clear_lock_bits())) {
+              entry->lock()->set_displaced_header(NULL);
+            } else {
+              CALL_VM(InterpreterRuntime::monitorenter(THREAD, entry), handle_exception);
+            }
+          }
+        }
         UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
@@ -1729,12 +1855,15 @@ run:
       BasicLock* lock = most_recent->lock();
       markOop header = lock->displaced_header();
       most_recent->set_obj(NULL);
-      // If it isn't recursive we either must swap old header or call the runtime
-      if (header != NULL) {
-        if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
-          // restore object for the slow case
-          most_recent->set_obj(lockee);
-          CALL_VM(InterpreterRuntime::monitorexit(THREAD, most_recent), handle_exception);
-        }
-      }
+      if (!lockee->mark()->has_bias_pattern()) {
+        bool call_vm = UseHeavyMonitors;
+        // If it isn't recursive we either must swap old header or call the runtime
+        if (header != NULL || call_vm) {
+          if (call_vm || Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
+            // restore object for the slow case
+            most_recent->set_obj(lockee);
+            CALL_VM(InterpreterRuntime::monitorexit(THREAD, most_recent), handle_exception);
+          }
+        }
+      }
       UPDATE_PC_AND_TOS_AND_CONTINUE(1, -1);
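
The monitorexit hunk above adds one new rule: if the mark word still carries the bias pattern there is no displaced header to restore, so the unlock touches nothing. Here is a standalone model of the resulting three-way decision (simplified constants and std::atomic in place of HotSpot's types; a sketch, not the patch).

// Standalone model, not HotSpot code.
#include <atomic>
#include <cstdint>
#include <cstdio>

const uintptr_t biased_lock_mask    = 0x7;   // assumed low-bit layout
const uintptr_t biased_lock_pattern = 0x5;

struct Object { std::atomic<uintptr_t> mark; };

bool has_bias_pattern(uintptr_t mark) {
  return (mark & biased_lock_mask) == biased_lock_pattern;
}

void monitor_exit(Object& obj, uintptr_t basic_lock_addr, uintptr_t displaced_header,
                  bool use_heavy_monitors) {
  if (has_bias_pattern(obj.mark.load())) {
    printf("biased: nothing to undo\n");          // new case added by this patch
    return;
  }
  if (displaced_header != 0 || use_heavy_monitors) {
    uintptr_t expected = basic_lock_addr;          // mark should still point at our BasicLock
    if (use_heavy_monitors ||
        !obj.mark.compare_exchange_strong(expected, displaced_header)) {
      printf("slow path: runtime monitorexit\n");
      return;
    }
    printf("fast path: displaced header restored\n");
    return;
  }
  printf("recursive unlock: nothing to do\n");     // displaced header was NULL
}

int main() {
  uintptr_t basic_lock = 0x7f00deadbe00;   // fake stack address, lock bits 00 ("locked")
  Object lightweight;
  lightweight.mark = basic_lock;
  monitor_exit(lightweight, basic_lock, /*displaced_header=*/0x1, /*use_heavy_monitors=*/false);

  Object biased;
  biased.mark = biased_lock_pattern;
  monitor_exit(biased, basic_lock, 0x1, false);
  return 0;
}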
@@ -2678,15 +2807,18 @@ handle_return:
             BasicLock* lock = end->lock();
             markOop header = lock->displaced_header();
             end->set_obj(NULL);
-            // If it isn't recursive we either must swap old header or call the runtime
-            if (header != NULL) {
-              if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
-                // restore object for the slow case
-                end->set_obj(lockee);
-                {
-                  // Prevent any HandleMarkCleaner from freeing our live handles
-                  HandleMark __hm(THREAD);
-                  CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, end));
-                }
-              }
-            }
+
+            if (!lockee->mark()->has_bias_pattern()) {
+              // If it isn't recursive we either must swap old header or call the runtime
+              if (header != NULL) {
+                if (Atomic::cmpxchg_ptr(header, lockee->mark_addr(), lock) != lock) {
+                  // restore object for the slow case
+                  end->set_obj(lockee);
+                  {
+                    // Prevent any HandleMarkCleaner from freeing our live handles
+                    HandleMark __hm(THREAD);
+                    CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, end));
+                  }
+                }
+              }
+            }
@@ -2735,23 +2867,37 @@ handle_return:
              illegal_state_oop = THREAD->pending_exception();
              THREAD->clear_pending_exception();
            }
+          } else if (UseHeavyMonitors) {
+            {
+              // Prevent any HandleMarkCleaner from freeing our live handles.
+              HandleMark __hm(THREAD);
+              CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
+            }
+            if (THREAD->has_pending_exception()) {
+              if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
+              THREAD->clear_pending_exception();
+            }
           } else {
             BasicLock* lock = base->lock();
             markOop header = lock->displaced_header();
             base->set_obj(NULL);
-            // If it isn't recursive we either must swap old header or call the runtime
-            if (header != NULL) {
-              if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), lock) != lock) {
-                // restore object for the slow case
-                base->set_obj(rcvr);
-                {
-                  // Prevent any HandleMarkCleaner from freeing our live handles
-                  HandleMark __hm(THREAD);
-                  CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
-                }
-                if (THREAD->has_pending_exception()) {
-                  if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
-                  THREAD->clear_pending_exception();
-                }
-              }
-            }
+
+            if (!rcvr->mark()->has_bias_pattern()) {
+              base->set_obj(NULL);
+              // If it isn't recursive we either must swap old header or call the runtime
+              if (header != NULL) {
+                if (Atomic::cmpxchg_ptr(header, rcvr->mark_addr(), lock) != lock) {
+                  // restore object for the slow case
+                  base->set_obj(rcvr);
+                  {
+                    // Prevent any HandleMarkCleaner from freeing our live handles
+                    HandleMark __hm(THREAD);
+                    CALL_VM_NOCHECK(InterpreterRuntime::monitorexit(THREAD, base));
+                  }
+                  if (THREAD->has_pending_exception()) {
+                    if (!suppress_error) illegal_state_oop = THREAD->pending_exception();
+                    THREAD->clear_pending_exception();
+                  }
+                }
+              }
+            }
@@ -2759,6 +2905,8 @@ handle_return:
         }
       }
     }
+    // Clear the do_not_unlock flag now.
+    THREAD->clr_do_not_unlock();

     //
     // Notify jvmti/jvmdi
@@ -3130,9 +3278,9 @@ BytecodeInterpreter::print() {
 }

 extern "C" {
   void PI(uintptr_t arg) {
     ((BytecodeInterpreter*)arg)->print();
   }
 }
 #endif // PRODUCT