8278598: AlignmentReserve is repeatedly reinitialized

Reviewed-by: ayang, iwalulya
Author: Thomas Schatzl
Date:   2022-06-02 17:16:24 +00:00
Commit: 1fcbaa4116 (parent: e51ca1dee3)
5 changed files, 23 insertions(+), 22 deletions(-)

View file: gc/shared/collectedHeap.cpp

@@ -60,6 +60,7 @@
 class ClassLoaderData;
 
+size_t CollectedHeap::_lab_alignment_reserve = ~(size_t)0;
 Klass* CollectedHeap::_filler_object_klass = NULL;
 size_t CollectedHeap::_filler_array_max_size = 0;
 size_t CollectedHeap::_stack_chunk_max_size = 0;
@@ -242,6 +243,14 @@ CollectedHeap::CollectedHeap() :
   _gc_cause(GCCause::_no_gc),
   _gc_lastcause(GCCause::_no_gc)
 {
+  // If the minimum object size is greater than MinObjAlignment, we can
+  // end up with a shard at the end of the buffer that's smaller than
+  // the smallest object. We can't allow that because the buffer must
+  // look like it's full of objects when we retire it, so we make
+  // sure we have enough space for a filler int array object.
+  size_t min_size = min_dummy_object_size();
+  _lab_alignment_reserve = min_size > (size_t)MinObjAlignment ? align_object_size(min_size) : 0;
+
   const size_t max_len = size_t(arrayOopDesc::max_array_length(T_INT));
   const size_t elements_per_word = HeapWordSize / sizeof(jint);
   _filler_array_max_size = align_object_size(filler_array_hdr_size() +
@@ -506,11 +515,6 @@ void CollectedHeap::fill_with_dummy_object(HeapWord* start, HeapWord* end, bool
   CollectedHeap::fill_with_object(start, end, zap);
 }
 
-size_t CollectedHeap::tlab_alloc_reserve() const {
-  size_t min_size = min_dummy_object_size();
-  return min_size > (size_t)MinObjAlignment ? align_object_size(min_size) : 0;
-}
-
 HeapWord* CollectedHeap::allocate_new_tlab(size_t min_size,
                                            size_t requested_size,
                                            size_t* actual_size) {

View file: gc/shared/collectedHeap.hpp

@@ -114,6 +114,8 @@ class CollectedHeap : public CHeapObj<mtGC> {
   bool _is_gc_active;
 
+  // (Minimum) Alignment reserve for TLABs and PLABs.
+  static size_t _lab_alignment_reserve;
   // Used for filler objects (static, but initialized in ctor).
   static size_t _filler_array_max_size;
@@ -312,7 +314,10 @@ class CollectedHeap : public CHeapObj<mtGC> {
     return oopDesc::header_size();
   }
 
-  size_t tlab_alloc_reserve() const;
+  static size_t lab_alignment_reserve() {
+    assert(_lab_alignment_reserve != ~(size_t)0, "uninitialized");
+    return _lab_alignment_reserve;
+  }
 
   // Some heaps may offer a contiguous region for shared non-blocking
   // allocation, via inlined code (by exporting the address of the top and

View file: gc/shared/plab.cpp

@@ -34,7 +34,7 @@
 size_t PLAB::min_size() {
   // Make sure that we return something that is larger than AlignmentReserve
-  return align_object_size(MAX2(MinTLABSize / HeapWordSize, (size_t)oopDesc::header_size())) + AlignmentReserve;
+  return align_object_size(MAX2(MinTLABSize / HeapWordSize, (size_t)oopDesc::header_size())) + CollectedHeap::lab_alignment_reserve();
 }
 
 size_t PLAB::max_size() {
@@ -45,19 +45,11 @@ PLAB::PLAB(size_t desired_plab_sz_) :
   _word_sz(desired_plab_sz_), _bottom(NULL), _top(NULL),
   _end(NULL), _hard_end(NULL), _allocated(0), _wasted(0), _undo_wasted(0)
 {
-  AlignmentReserve = Universe::heap()->tlab_alloc_reserve();
-  assert(min_size() > AlignmentReserve,
+  assert(min_size() > CollectedHeap::lab_alignment_reserve(),
          "Minimum PLAB size " SIZE_FORMAT " must be larger than alignment reserve " SIZE_FORMAT " "
-         "to be able to contain objects", min_size(), AlignmentReserve);
+         "to be able to contain objects", min_size(), CollectedHeap::lab_alignment_reserve());
 }
 
-// If the minimum object size is greater than MinObjAlignment, we can
-// end up with a shard at the end of the buffer that's smaller than
-// the smallest object. We can't allow that because the buffer must
-// look like it's full of objects when we retire it, so we make
-// sure we have enough space for a filler int array object.
-size_t PLAB::AlignmentReserve;
-
 void PLAB::flush_and_retire_stats(PLABStats* stats) {
   // Retire the last allocation buffer.
   size_t unused = retire_internal();

View file: gc/shared/plab.hpp

@@ -25,6 +25,7 @@
 #ifndef SHARE_GC_SHARED_PLAB_HPP
 #define SHARE_GC_SHARED_PLAB_HPP
 
+#include "gc/shared/collectedHeap.hpp"
 #include "gc/shared/gcUtil.hpp"
 #include "memory/allocation.hpp"
 #include "utilities/globalDefinitions.hpp"
@@ -46,7 +47,6 @@ protected:
   size_t _wasted; // in HeapWord units
   size_t _undo_wasted;
   char tail[32];
-  static size_t AlignmentReserve;
 
   // Force future allocations to fail and queries for contains()
   // to return false. Returns the amount of unused space in this PLAB.
@@ -73,7 +73,7 @@ public:
   // Must get initialized with "set_buf" for an allocation to succeed.
   PLAB(size_t word_sz);
 
-  static size_t size_required_for_allocation(size_t word_size) { return word_size + AlignmentReserve; }
+  static size_t size_required_for_allocation(size_t word_size) { return word_size + CollectedHeap::lab_alignment_reserve(); }
 
   // Minimum PLAB size.
   static size_t min_size();
@@ -117,13 +117,13 @@ public:
   // Sets the space of the buffer to be [buf, space+word_sz()).
   void set_buf(HeapWord* buf, size_t new_word_sz) {
-    assert(new_word_sz > AlignmentReserve, "Too small");
+    assert(new_word_sz > CollectedHeap::lab_alignment_reserve(), "Too small");
     _word_sz = new_word_sz;
 
     _bottom = buf;
     _top = _bottom;
     _hard_end = _bottom + word_sz();
-    _end = _hard_end - AlignmentReserve;
+    _end = _hard_end - CollectedHeap::lab_alignment_reserve();
     assert(_end >= _top, "Negative buffer");
     // In support of ergonomic sizing
     _allocated += word_sz();

View file: gc/shared/threadLocalAllocBuffer.cpp

@@ -471,7 +471,7 @@ void ThreadLocalAllocStats::publish() {
 }
 
 size_t ThreadLocalAllocBuffer::end_reserve() {
-  size_t reserve_size = Universe::heap()->tlab_alloc_reserve();
+  size_t reserve_size = CollectedHeap::lab_alignment_reserve();
   return MAX2(reserve_size, (size_t)_reserve_for_allocation_prefetch);
 }