Mirror of https://github.com/openjdk/jdk.git (synced 2025-09-18 01:54:47 +02:00)
8165808: Add release barriers when allocating objects with concurrent collection

Add release_set_klass, use in slow-path allocators.
Reviewed-by: jmasa, dholmes

parent 94bbcbd378
commit b77d0de3d9

4 changed files with 51 additions and 40 deletions
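Background for the change (an illustration, not part of the patch): with a concurrent collector, a GC thread may start scanning an object as soon as its klass field becomes non-NULL, so the stores that initialize the object (mark word, array/class length, zeroing) must be ordered before the klass is published. A minimal sketch of that publication pattern in portable C++, using a std::atomic field as a stand-in for HotSpot's _klass slot; all names below are hypothetical:

#include <atomic>
#include <cstring>

struct FakeObject {                        // stand-in for an object header
  int length = 0;                          // e.g. array length
  char payload[64];                        // body that must be zeroed first
  std::atomic<const void*> klass{nullptr}; // object is "parsable" once non-NULL
};

void allocate_and_publish(FakeObject* obj, const void* klass) {
  obj->length = 16;                                    // initialize header fields
  std::memset(obj->payload, 0, sizeof(obj->payload));  // zero the body
  // Release store: everything above happens-before any thread that later
  // observes a non-NULL klass via an acquire load.
  obj->klass.store(klass, std::memory_order_release);
}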
collectedHeap.hpp

@@ -304,9 +304,6 @@ class CollectedHeap : public CHeapObj<mtInternal> {
   inline static oop array_allocate_nozero(KlassHandle klass, int size, int length, TRAPS);
   inline static oop class_allocate(KlassHandle klass, int size, TRAPS);
 
-  inline static void post_allocation_install_obj_klass(KlassHandle klass,
-                                                       oop obj);
-
   // Raw memory allocation facilities
   // The obj and array allocate methods are covers for these methods.
   // mem_allocate() should never be
collectedHeap.inline.hpp

@@ -41,14 +41,22 @@
 // Inline allocation implementations.
 
 void CollectedHeap::post_allocation_setup_common(KlassHandle klass,
-                                                 HeapWord* obj) {
-  post_allocation_setup_no_klass_install(klass, obj);
-  post_allocation_install_obj_klass(klass, oop(obj));
+                                                 HeapWord* obj_ptr) {
+  post_allocation_setup_no_klass_install(klass, obj_ptr);
+  oop obj = (oop)obj_ptr;
+#if ! INCLUDE_ALL_GCS
+  obj->set_klass(klass());
+#else
+  // Need a release store to ensure array/class length, mark word, and
+  // object zeroing are visible before setting the klass non-NULL, for
+  // concurrent collectors.
+  obj->release_set_klass(klass());
+#endif
 }
 
 void CollectedHeap::post_allocation_setup_no_klass_install(KlassHandle klass,
-                                                           HeapWord* objPtr) {
-  oop obj = (oop)objPtr;
+                                                           HeapWord* obj_ptr) {
+  oop obj = (oop)obj_ptr;
 
   assert(obj != NULL, "NULL object pointer");
   if (UseBiasedLocking && (klass() != NULL)) {
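A side note on the #if ! INCLUDE_ALL_GCS split above (sketch only, not HotSpot code): builds without concurrent collectors keep the cheaper plain store, while builds that include them pay for the release barrier. The same selection in portable C++, with HAS_CONCURRENT_GC standing in as a hypothetical build flag:

#include <atomic>

struct Header {
  std::atomic<const void*> klass{nullptr};  // stand-in for the _klass field
  int length = 0;                           // stand-in for array length etc.
};

// Publish an initialized object header. HAS_CONCURRENT_GC is a hypothetical
// build flag playing the role of INCLUDE_ALL_GCS in the hunk above.
inline void publish_klass(Header* h, const void* k) {
#if !defined(HAS_CONCURRENT_GC)
  // Only stop-the-world collectors: nothing can observe the object while the
  // mutator is still initializing it, so a relaxed store is enough.
  h->klass.store(k, std::memory_order_relaxed);
#else
  // A concurrent collector may read the header as soon as klass != nullptr,
  // so the earlier stores (length, zeroing) must be ordered before it.
  h->klass.store(k, std::memory_order_release);
#endif
}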
@@ -59,18 +67,6 @@ void CollectedHeap::post_allocation_setup_no_klass_install(KlassHandle klass,
   }
 }
 
-void CollectedHeap::post_allocation_install_obj_klass(KlassHandle klass,
-                                                      oop obj) {
-  // These asserts are kind of complicated because of klassKlass
-  // and the beginning of the world.
-  assert(klass() != NULL || !Universe::is_fully_initialized(), "NULL klass");
-  assert(klass() == NULL || klass()->is_klass(), "not a klass");
-  assert(obj != NULL, "NULL object pointer");
-  obj->set_klass(klass());
-  assert(!Universe::is_fully_initialized() || obj->klass() != NULL,
-         "missing klass");
-}
-
 // Support for jvmti and dtrace
 inline void post_allocation_notify(KlassHandle klass, oop obj, int size) {
   // support low memory notifications (no-op if not enabled)
@@ -88,25 +84,26 @@ inline void post_allocation_notify(KlassHandle klass, oop obj, int size) {
 }
 
 void CollectedHeap::post_allocation_setup_obj(KlassHandle klass,
-                                              HeapWord* obj,
+                                              HeapWord* obj_ptr,
                                               int size) {
-  post_allocation_setup_common(klass, obj);
+  post_allocation_setup_common(klass, obj_ptr);
+  oop obj = (oop)obj_ptr;
   assert(Universe::is_bootstrapping() ||
-         !((oop)obj)->is_array(), "must not be an array");
+         !obj->is_array(), "must not be an array");
   // notify jvmti and dtrace
-  post_allocation_notify(klass, (oop)obj, size);
+  post_allocation_notify(klass, obj, size);
 }
 
 void CollectedHeap::post_allocation_setup_class(KlassHandle klass,
-                                                HeapWord* obj,
+                                                HeapWord* obj_ptr,
                                                 int size) {
-  // Set oop_size field before setting the _klass field
-  // in post_allocation_setup_common() because the klass field
-  // indicates that the object is parsable by concurrent GC.
-  oop new_cls = (oop)obj;
+  // Set oop_size field before setting the _klass field because a
+  // non-NULL _klass field indicates that the object is parsable by
+  // concurrent GC.
+  oop new_cls = (oop)obj_ptr;
   assert(size > 0, "oop_size must be positive.");
   java_lang_Class::set_oop_size(new_cls, size);
-  post_allocation_setup_common(klass, obj);
+  post_allocation_setup_common(klass, obj_ptr);
   assert(Universe::is_bootstrapping() ||
          !new_cls->is_array(), "must not be an array");
   // notify jvmti and dtrace
@@ -114,15 +111,15 @@ void CollectedHeap::post_allocation_setup_class(KlassHandle klass,
 }
 
 void CollectedHeap::post_allocation_setup_array(KlassHandle klass,
-                                                HeapWord* obj,
+                                                HeapWord* obj_ptr,
                                                 int length) {
-  // Set array length before setting the _klass field
-  // in post_allocation_setup_common() because the klass field
-  // indicates that the object is parsable by concurrent GC.
+  // Set array length before setting the _klass field because a
+  // non-NULL klass field indicates that the object is parsable by
+  // concurrent GC.
   assert(length >= 0, "length should be non-negative");
-  ((arrayOop)obj)->set_length(length);
-  post_allocation_setup_common(klass, obj);
-  oop new_obj = (oop)obj;
+  ((arrayOop)obj_ptr)->set_length(length);
+  post_allocation_setup_common(klass, obj_ptr);
+  oop new_obj = (oop)obj_ptr;
   assert(new_obj->is_array(), "must be an array");
   // notify jvmti and dtrace (must be after length is set for dtrace)
   post_allocation_notify(klass, new_obj, new_obj->size());
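The comments in the two hunks above describe the contract this change relies on: a concurrent scanner treats a non-NULL klass as "the object is parsable" and only then reads its size or length. A rough sketch of that reader side, pairing an acquire load with the release store shown earlier (hypothetical names, not HotSpot code):

#include <atomic>

struct FakeObject {
  int length = 0;
  std::atomic<const void*> klass{nullptr};
};

// Concurrent scanner: only touch the object once it has been published.
int scan_length_or_skip(const FakeObject* obj) {
  // Acquire load pairs with the allocator's release store of klass.
  if (obj->klass.load(std::memory_order_acquire) == nullptr) {
    return -1;  // not yet parsable; skip for now
  }
  // Safe: the release/acquire pair guarantees length was set before
  // klass became non-NULL.
  return obj->length;
}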
oop.hpp

@@ -87,6 +87,7 @@ class oopDesc {
   inline narrowKlass* compressed_klass_addr();
 
   inline void set_klass(Klass* k);
+  inline void release_set_klass(Klass* k);
 
   // For klass field compression
   inline int klass_gap() const;
oop.inline.hpp

@@ -129,10 +129,14 @@ narrowKlass* oopDesc::compressed_klass_addr() {
   return &_metadata._compressed_klass;
 }
 
+#define CHECK_SET_KLASS(k)                                                \
+  do {                                                                    \
+    assert(Universe::is_bootstrapping() || k != NULL, "NULL Klass");      \
+    assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass"); \
+  } while (0)
+
 void oopDesc::set_klass(Klass* k) {
-  // since klasses are promoted no store check is needed
-  assert(Universe::is_bootstrapping() || k != NULL, "must be a real Klass*");
-  assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass*");
+  CHECK_SET_KLASS(k);
   if (UseCompressedClassPointers) {
     *compressed_klass_addr() = Klass::encode_klass_not_null(k);
   } else {
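An aside on the macro shape (illustration only, not part of the patch): wrapping the two asserts in do { ... } while (0) makes CHECK_SET_KLASS expand to a single statement, so it composes safely with unbraced if/else. A minimal standalone example of the same idiom, with hypothetical names and the standard single-argument assert:

#include <cassert>
#include <cstdio>

// Hypothetical multi-statement macro using the same do { ... } while (0)
// wrapper as CHECK_SET_KLASS above.
#define CHECK_POSITIVE(x)              \
  do {                                 \
    assert((x) > 0);                   \
    std::printf("checked %d\n", (x));  \
  } while (0)

int main() {
  int n = 3;
  if (n > 0)
    CHECK_POSITIVE(n);          // expands to one statement; the trailing ';' is fine
  else
    std::printf("skipped\n");   // without the wrapper, this 'else' would not parse
  return 0;
}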
@@ -140,6 +144,18 @@ void oopDesc::set_klass(Klass* k) {
   }
 }
 
+void oopDesc::release_set_klass(Klass* k) {
+  CHECK_SET_KLASS(k);
+  if (UseCompressedClassPointers) {
+    OrderAccess::release_store(compressed_klass_addr(),
+                               Klass::encode_klass_not_null(k));
+  } else {
+    OrderAccess::release_store_ptr(klass_addr(), k);
+  }
+}
+
+#undef CHECK_SET_KLASS
+
 int oopDesc::klass_gap() const {
   return *(int*)(((intptr_t)this) + klass_gap_offset_in_bytes());
 }