8270803: Reduce CDS API verbosity

Reviewed-by: minqi, ccheung
Ioi Lam 2021-07-21 03:52:19 +00:00
parent 6346793c64
commit 7dd19af259
17 changed files with 196 additions and 208 deletions


@@ -1080,16 +1080,16 @@ void ArchiveBuilder::write_archive(FileMapInfo* mapinfo,
 bitmap_size_in_bytes);
 if (closed_heap_regions != NULL) {
-_total_closed_heap_region_size = mapinfo->write_archive_heap_regions(
+_total_closed_heap_region_size = mapinfo->write_heap_regions(
 closed_heap_regions,
 closed_heap_oopmaps,
-MetaspaceShared::first_closed_archive_heap_region,
-MetaspaceShared::max_closed_archive_heap_region);
-_total_open_heap_region_size = mapinfo->write_archive_heap_regions(
+MetaspaceShared::first_closed_heap_region,
+MetaspaceShared::max_closed_heap_region);
+_total_open_heap_region_size = mapinfo->write_heap_regions(
 open_heap_regions,
 open_heap_oopmaps,
-MetaspaceShared::first_open_archive_heap_region,
-MetaspaceShared::max_open_archive_heap_region);
+MetaspaceShared::first_open_heap_region,
+MetaspaceShared::max_open_heap_region);
 }
 print_region_stats(mapinfo, closed_heap_regions, open_heap_regions);
@@ -1155,12 +1155,12 @@ void ArchiveBuilder::print_bitmap_region_stats(size_t size, size_t total_size) {
 size, size/double(total_size)*100.0, size);
 }
-void ArchiveBuilder::print_heap_region_stats(GrowableArray<MemRegion> *heap_mem,
+void ArchiveBuilder::print_heap_region_stats(GrowableArray<MemRegion>* regions,
 const char *name, size_t total_size) {
-int arr_len = heap_mem == NULL ? 0 : heap_mem->length();
+int arr_len = regions == NULL ? 0 : regions->length();
 for (int i = 0; i < arr_len; i++) {
-char* start = (char*)heap_mem->at(i).start();
-size_t size = heap_mem->at(i).byte_size();
+char* start = (char*)regions->at(i).start();
+size_t size = regions->at(i).byte_size();
 char* top = start + size;
 log_debug(cds)("%s%d space: " SIZE_FORMAT_W(9) " [ %4.1f%% of total] out of " SIZE_FORMAT_W(9) " bytes [100.0%% used] at " INTPTR_FORMAT,
 name, i, size, size/double(total_size)*100.0, size, p2i(start));


@@ -216,7 +216,7 @@ private:
 GrowableArray<MemRegion>* closed_heap_regions,
 GrowableArray<MemRegion>* open_heap_regions);
 void print_bitmap_region_stats(size_t size, size_t total_size);
-void print_heap_region_stats(GrowableArray<MemRegion> *heap_mem,
+void print_heap_region_stats(GrowableArray<MemRegion>* regions,
 const char *name, size_t total_size);
 // For global access.


@@ -307,12 +307,12 @@ void ReadClosure::do_tag(int tag) {
 void ReadClosure::do_oop(oop *p) {
 narrowOop o = CompressedOops::narrow_oop_cast(nextPtr());
-if (CompressedOops::is_null(o) || !HeapShared::open_archive_heap_region_mapped()) {
+if (CompressedOops::is_null(o) || !HeapShared::open_regions_mapped()) {
 *p = NULL;
 } else {
 assert(HeapShared::is_heap_object_archiving_allowed(),
 "Archived heap object is not allowed");
-assert(HeapShared::open_archive_heap_region_mapped(),
+assert(HeapShared::open_regions_mapped(),
 "Open archive heap region is not mapped");
 *p = HeapShared::decode_from_archive(o);
 }


@@ -1406,13 +1406,13 @@ char* FileMapInfo::write_bitmap_region(const CHeapBitMap* ptrmap,
 // Write out the given archive heap memory regions. GC code combines multiple
 // consecutive archive GC regions into one MemRegion whenever possible and
-// produces the 'heap_mem' array.
+// produces the 'regions' array.
 //
 // If the archive heap memory size is smaller than a single dump time GC region
 // size, there is only one MemRegion in the array.
 //
 // If the archive heap memory size is bigger than one dump time GC region size,
-// the 'heap_mem' array may contain more than one consolidated MemRegions. When
+// the 'regions' array may contain more than one consolidated MemRegions. When
 // the first/bottom archive GC region is a partial GC region (with the empty
 // portion at the higher address within the region), one MemRegion is used for
 // the bottom partial archive GC region. The rest of the consecutive archive
@@ -1435,12 +1435,12 @@ char* FileMapInfo::write_bitmap_region(const CHeapBitMap* ptrmap,
 // ^^^
 // |
 // +-- gap
-size_t FileMapInfo::write_archive_heap_regions(GrowableArray<MemRegion> *heap_mem,
+size_t FileMapInfo::write_heap_regions(GrowableArray<MemRegion>* regions,
 GrowableArray<ArchiveHeapOopmapInfo>* oopmaps,
 int first_region_id, int max_num_regions) {
 assert(max_num_regions <= 2, "Only support maximum 2 memory regions");
-int arr_len = heap_mem == NULL ? 0 : heap_mem->length();
+int arr_len = regions == NULL ? 0 : regions->length();
 if (arr_len > max_num_regions) {
 fail_stop("Unable to write archive heap memory regions: "
 "number of memory regions exceeds maximum due to fragmentation. "
@@ -1454,8 +1454,8 @@ size_t FileMapInfo::write_archive_heap_regions(GrowableArray<MemRegion> *heap_me
 char* start = NULL;
 size_t size = 0;
 if (i < arr_len) {
-start = (char*)heap_mem->at(i).start();
-size = heap_mem->at(i).byte_size();
+start = (char*)regions->at(i).start();
+size = regions->at(i).byte_size();
 total_size += size;
 }
@@ -1766,14 +1766,14 @@ address FileMapInfo::decode_start_address(FileMapRegion* spc, bool with_current_
 }
 }
-static MemRegion *closed_archive_heap_ranges = NULL;
-static MemRegion *open_archive_heap_ranges = NULL;
-static int num_closed_archive_heap_ranges = 0;
-static int num_open_archive_heap_ranges = 0;
+static MemRegion *closed_heap_regions = NULL;
+static MemRegion *open_heap_regions = NULL;
+static int num_closed_heap_regions = 0;
+static int num_open_heap_regions = 0;
 #if INCLUDE_CDS_JAVA_HEAP
 bool FileMapInfo::has_heap_regions() {
-return (space_at(MetaspaceShared::first_closed_archive_heap_region)->used() > 0);
+return (space_at(MetaspaceShared::first_closed_heap_region)->used() > 0);
 }
 // Returns the address range of the archived heap regions computed using the
@@ -1784,7 +1784,7 @@ MemRegion FileMapInfo::get_heap_regions_range_with_current_oop_encoding_mode() {
 address start = (address) max_uintx;
 address end = NULL;
-for (int i = MetaspaceShared::first_closed_archive_heap_region;
+for (int i = MetaspaceShared::first_closed_heap_region;
 i <= MetaspaceShared::last_valid_region;
 i++) {
 FileMapRegion* si = space_at(i);
@@ -1899,7 +1899,7 @@ void FileMapInfo::map_heap_regions_impl() {
 log_info(cds)("CDS heap data relocation delta = " INTX_FORMAT " bytes", delta);
 HeapShared::init_narrow_oop_decoding(narrow_oop_base() + delta, narrow_oop_shift());
-FileMapRegion* si = space_at(MetaspaceShared::first_closed_archive_heap_region);
+FileMapRegion* si = space_at(MetaspaceShared::first_closed_heap_region);
 address relocated_closed_heap_region_bottom = start_address_as_decoded_from_archive(si);
 if (!is_aligned(relocated_closed_heap_region_bottom, HeapRegion::GrainBytes)) {
 // Align the bottom of the closed archive heap regions at G1 region boundary.
@@ -1918,20 +1918,19 @@ void FileMapInfo::map_heap_regions_impl() {
 assert(is_aligned(relocated_closed_heap_region_bottom, HeapRegion::GrainBytes),
 "must be");
-// Map the closed_archive_heap regions, GC does not write into the regions.
-if (map_heap_data(&closed_archive_heap_ranges,
-MetaspaceShared::first_closed_archive_heap_region,
-MetaspaceShared::max_closed_archive_heap_region,
-&num_closed_archive_heap_ranges)) {
-HeapShared::set_closed_archive_heap_region_mapped();
-// Now, map open_archive heap regions, GC can write into the regions.
-if (map_heap_data(&open_archive_heap_ranges,
-MetaspaceShared::first_open_archive_heap_region,
-MetaspaceShared::max_open_archive_heap_region,
-&num_open_archive_heap_ranges,
-true /* open */)) {
-HeapShared::set_open_archive_heap_region_mapped();
+// Map the closed heap regions: GC does not write into these regions.
+if (map_heap_regions(MetaspaceShared::first_closed_heap_region,
+MetaspaceShared::max_closed_heap_region,
+/*is_open_archive=*/ false,
+&closed_heap_regions, &num_closed_heap_regions)) {
+HeapShared::set_closed_regions_mapped();
+// Now, map the open heap regions: GC can write into these regions.
+if (map_heap_regions(MetaspaceShared::first_open_heap_region,
+MetaspaceShared::max_open_heap_region,
+/*is_open_archive=*/ true,
+&open_heap_regions, &num_open_heap_regions)) {
+HeapShared::set_open_regions_mapped();
 HeapShared::set_roots(header()->heap_obj_roots());
 }
 }
@@ -1942,19 +1941,19 @@ void FileMapInfo::map_heap_regions() {
 map_heap_regions_impl();
 }
-if (!HeapShared::closed_archive_heap_region_mapped()) {
-assert(closed_archive_heap_ranges == NULL &&
-num_closed_archive_heap_ranges == 0, "sanity");
+if (!HeapShared::closed_regions_mapped()) {
+assert(closed_heap_regions == NULL &&
+num_closed_heap_regions == 0, "sanity");
 }
-if (!HeapShared::open_archive_heap_region_mapped()) {
-assert(open_archive_heap_ranges == NULL && num_open_archive_heap_ranges == 0, "sanity");
+if (!HeapShared::open_regions_mapped()) {
+assert(open_heap_regions == NULL && num_open_heap_regions == 0, "sanity");
 MetaspaceShared::disable_full_module_graph();
 }
 }
-bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
-int max, int* num, bool is_open_archive) {
+bool FileMapInfo::map_heap_regions(int first, int max, bool is_open_archive,
+MemRegion** regions_ret, int* num_regions_ret) {
 MemRegion* regions = MemRegion::create_array(max, mtInternal);
 struct Cleanup {
@@ -1966,7 +1965,7 @@ bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
 } cleanup(regions, max);
 FileMapRegion* si;
-int region_num = 0;
+int num_regions = 0;
 for (int i = first;
 i < first + max; i++) {
@@ -1974,26 +1973,26 @@ bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
 size_t size = si->used();
 if (size > 0) {
 HeapWord* start = (HeapWord*)start_address_as_decoded_from_archive(si);
-regions[region_num] = MemRegion(start, size / HeapWordSize);
-region_num ++;
+regions[num_regions] = MemRegion(start, size / HeapWordSize);
+num_regions ++;
 log_info(cds)("Trying to map heap data: region[%d] at " INTPTR_FORMAT ", size = " SIZE_FORMAT_W(8) " bytes",
 i, p2i(start), size);
 }
 }
-if (region_num == 0) {
+if (num_regions == 0) {
 return false; // no archived java heap data
 }
-// Check that ranges are within the java heap
-if (!G1CollectedHeap::heap()->check_archive_addresses(regions, region_num)) {
+// Check that regions are within the java heap
+if (!G1CollectedHeap::heap()->check_archive_addresses(regions, num_regions)) {
 log_info(cds)("UseSharedSpaces: Unable to allocate region, range is not within java heap.");
 return false;
 }
 // allocate from java heap
 if (!G1CollectedHeap::heap()->alloc_archive_regions(
-regions, region_num, is_open_archive)) {
+regions, num_regions, is_open_archive)) {
 log_info(cds)("UseSharedSpaces: Unable to allocate region, java heap range is already in use.");
 return false;
 }
@@ -2001,7 +2000,7 @@ bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
 // Map the archived heap data. No need to call MemTracker::record_virtual_memory_type()
 // for mapped regions as they are part of the reserved java heap, which is
 // already recorded.
-for (int i = 0; i < region_num; i++) {
+for (int i = 0; i < num_regions; i++) {
 si = space_at(first + i);
 char* addr = (char*)regions[i].start();
 char* base = os::map_memory(_fd, _full_path, si->file_offset(),
@@ -2009,7 +2008,7 @@ bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
 si->allow_exec());
 if (base == NULL || base != addr) {
 // dealloc the regions from java heap
-dealloc_archive_heap_regions(regions, region_num);
+dealloc_heap_regions(regions, num_regions);
 log_info(cds)("UseSharedSpaces: Unable to map at required address in java heap. "
 INTPTR_FORMAT ", size = " SIZE_FORMAT " bytes",
 p2i(addr), regions[i].byte_size());
@@ -2018,7 +2017,7 @@ bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
 if (VerifySharedSpaces && !region_crc_check(addr, regions[i].byte_size(), si->crc())) {
 // dealloc the regions from java heap
-dealloc_archive_heap_regions(regions, region_num);
+dealloc_heap_regions(regions, num_regions);
 log_info(cds)("UseSharedSpaces: mapped heap regions are corrupt");
 return false;
 }
@@ -2026,36 +2025,36 @@ bool FileMapInfo::map_heap_data(MemRegion **heap_mem, int first,
 cleanup._aborted = false;
 // the shared heap data is mapped successfully
-*heap_mem = regions;
-*num = region_num;
+*regions_ret = regions;
+*num_regions_ret = num_regions;
 return true;
 }
-void FileMapInfo::patch_archived_heap_embedded_pointers() {
+void FileMapInfo::patch_heap_embedded_pointers() {
 if (!_heap_pointers_need_patching) {
 return;
 }
 log_info(cds)("patching heap embedded pointers");
-patch_archived_heap_embedded_pointers(closed_archive_heap_ranges,
-num_closed_archive_heap_ranges,
-MetaspaceShared::first_closed_archive_heap_region);
-patch_archived_heap_embedded_pointers(open_archive_heap_ranges,
-num_open_archive_heap_ranges,
-MetaspaceShared::first_open_archive_heap_region);
+patch_heap_embedded_pointers(closed_heap_regions,
+num_closed_heap_regions,
+MetaspaceShared::first_closed_heap_region);
+patch_heap_embedded_pointers(open_heap_regions,
+num_open_heap_regions,
+MetaspaceShared::first_open_heap_region);
 }
-void FileMapInfo::patch_archived_heap_embedded_pointers(MemRegion* ranges, int num_ranges,
+void FileMapInfo::patch_heap_embedded_pointers(MemRegion* regions, int num_regions,
 int first_region_idx) {
 char* bitmap_base = map_bitmap_region();
 if (bitmap_base == NULL) {
 return;
 }
-for (int i=0; i<num_ranges; i++) {
+for (int i=0; i<num_regions; i++) {
 FileMapRegion* si = space_at(i + first_region_idx);
-HeapShared::patch_archived_heap_embedded_pointers(
-ranges[i],
+HeapShared::patch_embedded_pointers(
+regions[i],
 (address)(space_at(MetaspaceShared::bm)->mapped_base()) + si->oopmap_offset(),
 si->oopmap_size_in_bits());
 }
@@ -2066,19 +2065,19 @@ void FileMapInfo::patch_archived_heap_embedded_pointers(MemRegion* ranges, int n
 void FileMapInfo::fixup_mapped_heap_regions() {
 assert(vmClasses::Object_klass_loaded(), "must be");
 // If any closed regions were found, call the fill routine to make them parseable.
-// Note that closed_archive_heap_ranges may be non-NULL even if no ranges were found.
-if (num_closed_archive_heap_ranges != 0) {
-assert(closed_archive_heap_ranges != NULL,
-"Null closed_archive_heap_ranges array with non-zero count");
-G1CollectedHeap::heap()->fill_archive_regions(closed_archive_heap_ranges,
-num_closed_archive_heap_ranges);
+// Note that closed_heap_regions may be non-NULL even if no regions were found.
+if (num_closed_heap_regions != 0) {
+assert(closed_heap_regions != NULL,
+"Null closed_heap_regions array with non-zero count");
+G1CollectedHeap::heap()->fill_archive_regions(closed_heap_regions,
+num_closed_heap_regions);
 }
 // do the same for mapped open archive heap regions
-if (num_open_archive_heap_ranges != 0) {
-assert(open_archive_heap_ranges != NULL, "NULL open_archive_heap_ranges array with non-zero count");
-G1CollectedHeap::heap()->fill_archive_regions(open_archive_heap_ranges,
-num_open_archive_heap_ranges);
+if (num_open_heap_regions != 0) {
+assert(open_heap_regions != NULL, "NULL open_heap_regions array with non-zero count");
+G1CollectedHeap::heap()->fill_archive_regions(open_heap_regions,
+num_open_heap_regions);
 // Populate the open archive regions' G1BlockOffsetTableParts. That ensures
 // fast G1BlockOffsetTablePart::block_start operations for any given address
@@ -2089,15 +2088,15 @@ void FileMapInfo::fixup_mapped_heap_regions() {
 // regions, because objects in closed archive regions never reference objects
 // outside the closed archive regions and they are immutable. So we never
 // need their BOT during garbage collection.
-G1CollectedHeap::heap()->populate_archive_regions_bot_part(open_archive_heap_ranges,
-num_open_archive_heap_ranges);
+G1CollectedHeap::heap()->populate_archive_regions_bot_part(open_heap_regions,
+num_open_heap_regions);
 }
 }
 // dealloc the archive regions from java heap
-void FileMapInfo::dealloc_archive_heap_regions(MemRegion* regions, int num) {
+void FileMapInfo::dealloc_heap_regions(MemRegion* regions, int num) {
 if (num > 0) {
-assert(regions != NULL, "Null archive ranges array with non-zero count");
+assert(regions != NULL, "Null archive regions array with non-zero count");
 G1CollectedHeap::heap()->dealloc_archive_regions(regions, num);
 }
 }
@@ -2361,10 +2360,10 @@ void FileMapInfo::stop_sharing_and_unmap(const char* msg) {
 }
 // Dealloc the archive heap regions only without unmapping. The regions are part
 // of the java heap. Unmapping of the heap regions are managed by GC.
-map_info->dealloc_archive_heap_regions(open_archive_heap_ranges,
-num_open_archive_heap_ranges);
-map_info->dealloc_archive_heap_regions(closed_archive_heap_ranges,
-num_closed_archive_heap_ranges);
+map_info->dealloc_heap_regions(open_heap_regions,
+num_open_heap_regions);
+map_info->dealloc_heap_regions(closed_heap_regions,
+num_closed_heap_regions);
 } else if (DumpSharedSpaces) {
 fail_stop("%s", msg);
 }


@@ -460,7 +460,7 @@ public:
 GrowableArray<ArchiveHeapOopmapInfo>* closed_oopmaps,
 GrowableArray<ArchiveHeapOopmapInfo>* open_oopmaps,
 size_t &size_in_bytes);
-size_t write_archive_heap_regions(GrowableArray<MemRegion> *heap_mem,
+size_t write_heap_regions(GrowableArray<MemRegion>* regions,
 GrowableArray<ArchiveHeapOopmapInfo>* oopmaps,
 int first_region_id, int max_num_regions);
 void write_bytes(const void* buffer, size_t count);
@@ -470,8 +470,8 @@ public:
 void unmap_regions(int regions[], int num_regions);
 void map_heap_regions() NOT_CDS_JAVA_HEAP_RETURN;
 void fixup_mapped_heap_regions() NOT_CDS_JAVA_HEAP_RETURN;
-void patch_archived_heap_embedded_pointers() NOT_CDS_JAVA_HEAP_RETURN;
-void patch_archived_heap_embedded_pointers(MemRegion* ranges, int num_ranges,
+void patch_heap_embedded_pointers() NOT_CDS_JAVA_HEAP_RETURN;
+void patch_heap_embedded_pointers(MemRegion* regions, int num_regions,
 int first_region_idx) NOT_CDS_JAVA_HEAP_RETURN;
 bool has_heap_regions() NOT_CDS_JAVA_HEAP_RETURN_(false);
 MemRegion get_heap_regions_range_with_current_oop_encoding_mode() NOT_CDS_JAVA_HEAP_RETURN_(MemRegion());
@@ -570,10 +570,10 @@ public:
 GrowableArray<const char*>* rp_array) NOT_CDS_RETURN_(false);
 bool validate_boot_class_paths() NOT_CDS_RETURN_(false);
 bool validate_app_class_paths(int shared_app_paths_len) NOT_CDS_RETURN_(false);
-bool map_heap_data(MemRegion **heap_mem, int first, int max, int* num,
-bool is_open = false) NOT_CDS_JAVA_HEAP_RETURN_(false);
+bool map_heap_regions(int first, int max, bool is_open_archive,
+MemRegion** regions_ret, int* num_regions_ret) NOT_CDS_JAVA_HEAP_RETURN_(false);
 bool region_crc_check(char* buf, size_t size, int expected_crc) NOT_CDS_RETURN_(false);
-void dealloc_archive_heap_regions(MemRegion* regions, int num) NOT_CDS_JAVA_HEAP_RETURN;
+void dealloc_heap_regions(MemRegion* regions, int num) NOT_CDS_JAVA_HEAP_RETURN;
 void map_heap_regions_impl() NOT_CDS_JAVA_HEAP_RETURN;
 char* map_bitmap_region();
 MapArchiveResult map_region(int i, intx addr_delta, char* mapped_base_address, ReservedSpace rs);


@@ -67,9 +67,8 @@
 #if INCLUDE_CDS_JAVA_HEAP
-bool HeapShared::_closed_archive_heap_region_mapped = false;
-bool HeapShared::_open_archive_heap_region_mapped = false;
-bool HeapShared::_archive_heap_region_fixed = false;
+bool HeapShared::_closed_regions_mapped = false;
+bool HeapShared::_open_regions_mapped = false;
 address HeapShared::_narrow_oop_base;
 int HeapShared::_narrow_oop_shift;
 DumpedInternedStrings *HeapShared::_dumped_interned_strings = NULL;
@@ -122,10 +121,9 @@ OopHandle HeapShared::_roots;
 // Java heap object archiving support
 //
 ////////////////////////////////////////////////////////////////
-void HeapShared::fixup_mapped_heap_regions() {
+void HeapShared::fixup_mapped_regions() {
 FileMapInfo *mapinfo = FileMapInfo::current_info();
 mapinfo->fixup_mapped_heap_regions();
-set_archive_heap_region_fixed();
 if (is_mapped()) {
 _roots = OopHandle(Universe::vm_global(), decode_from_archive(_roots_narrow));
 if (!MetaspaceShared::use_full_module_graph()) {
@@ -213,7 +211,7 @@ objArrayOop HeapShared::roots() {
 void HeapShared::set_roots(narrowOop roots) {
 assert(UseSharedSpaces, "runtime only");
-assert(open_archive_heap_region_mapped(), "must be");
+assert(open_regions_mapped(), "must be");
 _roots_narrow = roots;
 }
@@ -238,7 +236,7 @@ oop HeapShared::get_root(int index, bool clear) {
 void HeapShared::clear_root(int index) {
 assert(index >= 0, "sanity");
 assert(UseSharedSpaces, "must be");
-if (open_archive_heap_region_mapped()) {
+if (open_regions_mapped()) {
 if (log_is_enabled(Debug, cds, heap)) {
 oop old = roots()->obj_at(index);
 log_debug(cds, heap)("Clearing root %d: was " PTR_FORMAT, index, p2i(old));
@@ -247,7 +245,7 @@ void HeapShared::clear_root(int index) {
 }
 }
-oop HeapShared::archive_heap_object(oop obj) {
+oop HeapShared::archive_object(oop obj) {
 assert(DumpSharedSpaces, "dump-time only");
 oop ao = find_archived_heap_object(obj);
@@ -333,8 +331,8 @@ void HeapShared::run_full_gc_in_vm_thread() {
 }
 }
-void HeapShared::archive_java_heap_objects(GrowableArray<MemRegion>* closed,
-GrowableArray<MemRegion>* open) {
+void HeapShared::archive_objects(GrowableArray<MemRegion>* closed_regions,
+GrowableArray<MemRegion>* open_regions) {
 G1HeapVerifier::verify_ready_for_archiving();
@@ -347,10 +345,10 @@ void HeapShared::archive_java_heap_objects(GrowableArray<MemRegion>* closed,
 log_info(cds)("Heap range = [" PTR_FORMAT " - " PTR_FORMAT "]",
 p2i(CompressedOops::begin()), p2i(CompressedOops::end()));
 log_info(cds)("Dumping objects to closed archive heap region ...");
-copy_closed_archive_heap_objects(closed);
+copy_closed_objects(closed_regions);
 log_info(cds)("Dumping objects to open archive heap region ...");
-copy_open_archive_heap_objects(open);
+copy_open_objects(open_regions);
 destroy_archived_object_cache();
 }
@@ -358,8 +356,7 @@ void HeapShared::archive_java_heap_objects(GrowableArray<MemRegion>* closed,
 G1HeapVerifier::verify_archive_regions();
 }
-void HeapShared::copy_closed_archive_heap_objects(
-GrowableArray<MemRegion> * closed_archive) {
+void HeapShared::copy_closed_objects(GrowableArray<MemRegion>* closed_regions) {
 assert(is_heap_object_archiving_allowed(), "Cannot archive java heap objects");
 G1CollectedHeap::heap()->begin_archive_alloc_range();
@@ -372,12 +369,11 @@ void HeapShared::copy_closed_archive_heap_objects(
 true /* is_closed_archive */,
 false /* is_full_module_graph */);
-G1CollectedHeap::heap()->end_archive_alloc_range(closed_archive,
+G1CollectedHeap::heap()->end_archive_alloc_range(closed_regions,
 os::vm_allocation_granularity());
 }
-void HeapShared::copy_open_archive_heap_objects(
-GrowableArray<MemRegion> * open_archive) {
+void HeapShared::copy_open_objects(GrowableArray<MemRegion>* open_regions) {
 assert(is_heap_object_archiving_allowed(), "Cannot archive java heap objects");
 G1CollectedHeap::heap()->begin_archive_alloc_range(true /* open */);
@@ -400,7 +396,7 @@ void HeapShared::copy_open_archive_heap_objects(
 copy_roots();
-G1CollectedHeap::heap()->end_archive_alloc_range(open_archive,
+G1CollectedHeap::heap()->end_archive_alloc_range(open_regions,
 os::vm_allocation_granularity());
 }
@@ -908,7 +904,7 @@ class WalkOopAndArchiveClosure: public BasicOopIterateClosure {
 }
 };
-void HeapShared::check_closed_archive_heap_region_object(InstanceKlass* k) {
+void HeapShared::check_closed_region_object(InstanceKlass* k) {
 // Check fields in the object
 for (JavaFieldStream fs(k); !fs.done(); fs.next()) {
 if (!fs.access_flags().is_static()) {
@@ -990,7 +986,7 @@ oop HeapShared::archive_reachable_objects_from(int level,
 bool record_klasses_only = (archived_obj != NULL);
 if (archived_obj == NULL) {
 ++_num_new_archived_objs;
-archived_obj = archive_heap_object(orig_obj);
+archived_obj = archive_object(orig_obj);
 if (archived_obj == NULL) {
 // Skip archiving the sub-graph referenced from the current entry field.
 ResourceMark rm;
@@ -1031,7 +1027,7 @@ oop HeapShared::archive_reachable_objects_from(int level,
 subgraph_info, orig_obj, archived_obj);
 orig_obj->oop_iterate(&walker);
 if (is_closed_archive && orig_k->is_instance_klass()) {
-check_closed_archive_heap_region_object(InstanceKlass::cast(orig_k));
+check_closed_region_object(InstanceKlass::cast(orig_k));
 }
 return archived_obj;
 }
@@ -1433,7 +1429,9 @@ class PatchEmbeddedPointers: public BitMapClosure {
 }
 };
-void HeapShared::patch_archived_heap_embedded_pointers(MemRegion region, address oopmap,
+// Patch all the non-null pointers that are embedded in the archived heap objects
+// in this region
+void HeapShared::patch_embedded_pointers(MemRegion region, address oopmap,
 size_t oopmap_size_in_bits) {
 BitMapView bm((BitMap::bm_word_t*)oopmap, oopmap_size_in_bits);


@@ -143,9 +143,8 @@ class HeapShared: AllStatic {
 private:
 #if INCLUDE_CDS_JAVA_HEAP
-static bool _closed_archive_heap_region_mapped;
-static bool _open_archive_heap_region_mapped;
-static bool _archive_heap_region_fixed;
+static bool _closed_regions_mapped;
+static bool _open_regions_mapped;
 static DumpedInternedStrings *_dumped_interned_strings;
 public:
@@ -200,7 +199,7 @@ private:
 static DumpTimeKlassSubGraphInfoTable* _dump_time_subgraph_info_table;
 static RunTimeKlassSubGraphInfoTable _run_time_subgraph_info_table;
-static void check_closed_archive_heap_region_object(InstanceKlass* k);
+static void check_closed_region_object(InstanceKlass* k);
 static void archive_object_subgraphs(ArchivableStaticFieldInfo fields[],
 int num,
@@ -297,21 +296,14 @@ private:
 }
 static oop find_archived_heap_object(oop obj);
-static oop archive_heap_object(oop obj);
+static oop archive_object(oop obj);
 static void archive_klass_objects();
-static void set_archive_heap_region_fixed() {
-_archive_heap_region_fixed = true;
-}
-static bool archive_heap_region_fixed() {
-return _archive_heap_region_fixed;
-}
-static void archive_java_heap_objects(GrowableArray<MemRegion> *closed,
-GrowableArray<MemRegion> *open);
-static void copy_closed_archive_heap_objects(GrowableArray<MemRegion> * closed_archive);
-static void copy_open_archive_heap_objects(GrowableArray<MemRegion> * open_archive);
+static void archive_objects(GrowableArray<MemRegion>* closed_regions,
+GrowableArray<MemRegion>* open_regions);
+static void copy_closed_objects(GrowableArray<MemRegion>* closed_regions);
+static void copy_open_objects(GrowableArray<MemRegion>* open_regions);
 static oop archive_reachable_objects_from(int level,
 KlassSubGraphInfo* subgraph_info,
@@ -357,32 +349,32 @@ private:
 }
 static bool is_heap_region(int idx) {
-CDS_JAVA_HEAP_ONLY(return (idx >= MetaspaceShared::first_closed_archive_heap_region &&
-idx <= MetaspaceShared::last_open_archive_heap_region);)
+CDS_JAVA_HEAP_ONLY(return (idx >= MetaspaceShared::first_closed_heap_region &&
+idx <= MetaspaceShared::last_open_heap_region);)
 NOT_CDS_JAVA_HEAP_RETURN_(false);
 }
-static void set_closed_archive_heap_region_mapped() {
-CDS_JAVA_HEAP_ONLY(_closed_archive_heap_region_mapped = true;)
+static void set_closed_regions_mapped() {
+CDS_JAVA_HEAP_ONLY(_closed_regions_mapped = true;)
 NOT_CDS_JAVA_HEAP_RETURN;
 }
-static bool closed_archive_heap_region_mapped() {
-CDS_JAVA_HEAP_ONLY(return _closed_archive_heap_region_mapped;)
+static bool closed_regions_mapped() {
+CDS_JAVA_HEAP_ONLY(return _closed_regions_mapped;)
 NOT_CDS_JAVA_HEAP_RETURN_(false);
 }
-static void set_open_archive_heap_region_mapped() {
-CDS_JAVA_HEAP_ONLY(_open_archive_heap_region_mapped = true;)
+static void set_open_regions_mapped() {
+CDS_JAVA_HEAP_ONLY(_open_regions_mapped = true;)
 NOT_CDS_JAVA_HEAP_RETURN;
 }
-static bool open_archive_heap_region_mapped() {
-CDS_JAVA_HEAP_ONLY(return _open_archive_heap_region_mapped;)
+static bool open_regions_mapped() {
+CDS_JAVA_HEAP_ONLY(return _open_regions_mapped;)
 NOT_CDS_JAVA_HEAP_RETURN_(false);
 }
 static bool is_mapped() {
-return closed_archive_heap_region_mapped() && open_archive_heap_region_mapped();
+return closed_regions_mapped() && open_regions_mapped();
 }
-static void fixup_mapped_heap_regions() NOT_CDS_JAVA_HEAP_RETURN;
+static void fixup_mapped_regions() NOT_CDS_JAVA_HEAP_RETURN;
 inline static bool is_archived_object(oop p) NOT_CDS_JAVA_HEAP_RETURN_(false);
@@ -397,7 +389,7 @@ private:
 static void init_narrow_oop_decoding(address base, int shift) NOT_CDS_JAVA_HEAP_RETURN;
-static void patch_archived_heap_embedded_pointers(MemRegion mem, address oopmap,
+static void patch_embedded_pointers(MemRegion region, address oopmap,
 size_t oopmap_in_bits) NOT_CDS_JAVA_HEAP_RETURN;
 static void init_for_dumping(TRAPS) NOT_CDS_JAVA_HEAP_RETURN;


@@ -114,7 +114,7 @@ bool MetaspaceShared::_use_full_module_graph = true;
 // [5] SymbolTable, StringTable, SystemDictionary, and a few other read-only data
 // are copied into the ro region as read-only tables.
 //
-// The ca0/ca1 and oa0/oa1 regions are populated inside HeapShared::archive_java_heap_objects.
+// The ca0/ca1 and oa0/oa1 regions are populated inside HeapShared::archive_objects.
 // Their layout is independent of the rw/ro regions.
 static DumpRegion _symbol_region("symbols");
@@ -403,15 +403,15 @@ void MetaspaceShared::rewrite_nofast_bytecodes_and_calculate_fingerprints(Thread
 class VM_PopulateDumpSharedSpace : public VM_GC_Operation {
 private:
-GrowableArray<MemRegion> *_closed_archive_heap_regions;
-GrowableArray<MemRegion> *_open_archive_heap_regions;
-GrowableArray<ArchiveHeapOopmapInfo> *_closed_archive_heap_oopmaps;
-GrowableArray<ArchiveHeapOopmapInfo> *_open_archive_heap_oopmaps;
+GrowableArray<MemRegion> *_closed_heap_regions;
+GrowableArray<MemRegion> *_open_heap_regions;
+GrowableArray<ArchiveHeapOopmapInfo> *_closed_heap_oopmaps;
+GrowableArray<ArchiveHeapOopmapInfo> *_open_heap_oopmaps;
 void dump_java_heap_objects(GrowableArray<Klass*>* klasses) NOT_CDS_JAVA_HEAP_RETURN;
-void dump_archive_heap_oopmaps() NOT_CDS_JAVA_HEAP_RETURN;
-void dump_archive_heap_oopmaps(GrowableArray<MemRegion>* regions,
+void dump_heap_oopmaps() NOT_CDS_JAVA_HEAP_RETURN;
+void dump_heap_oopmaps(GrowableArray<MemRegion>* regions,
 GrowableArray<ArchiveHeapOopmapInfo>* oopmaps);
 void dump_shared_symbol_table(GrowableArray<Symbol*>* symbols) {
 log_info(cds)("Dumping symbol table ...");
@@ -423,10 +423,10 @@ public:
 VM_PopulateDumpSharedSpace() :
 VM_GC_Operation(0 /* total collections, ignored */, GCCause::_archive_time_gc),
-_closed_archive_heap_regions(NULL),
-_open_archive_heap_regions(NULL),
-_closed_archive_heap_oopmaps(NULL),
-_open_archive_heap_oopmaps(NULL) {}
+_closed_heap_regions(NULL),
+_open_heap_regions(NULL),
+_closed_heap_oopmaps(NULL),
+_open_heap_oopmaps(NULL) {}
 bool skip_operation() const { return false; }
@@ -472,7 +472,7 @@ char* VM_PopulateDumpSharedSpace::dump_read_only_tables() {
 MetaspaceShared::serialize(&wc);
 // Write the bitmaps for patching the archive heap regions
-dump_archive_heap_oopmaps();
+dump_heap_oopmaps();
 return start;
 }
@@ -530,10 +530,10 @@ void VM_PopulateDumpSharedSpace::doit() {
 mapinfo->set_cloned_vtables(cloned_vtables);
 mapinfo->open_for_write();
 builder.write_archive(mapinfo,
-_closed_archive_heap_regions,
-_open_archive_heap_regions,
-_closed_archive_heap_oopmaps,
-_open_archive_heap_oopmaps);
+_closed_heap_regions,
+_open_heap_regions,
+_closed_heap_oopmaps,
+_open_heap_oopmaps);
 if (PrintSystemDictionaryAtExit) {
 SystemDictionary::print();
@@ -825,26 +825,25 @@ void VM_PopulateDumpSharedSpace::dump_java_heap_objects(GrowableArray<Klass*>* k
 }
 // The closed and open archive heap space has maximum two regions.
-// See FileMapInfo::write_archive_heap_regions() for details.
-_closed_archive_heap_regions = new GrowableArray<MemRegion>(2);
-_open_archive_heap_regions = new GrowableArray<MemRegion>(2);
-HeapShared::archive_java_heap_objects(_closed_archive_heap_regions,
-_open_archive_heap_regions);
+// See FileMapInfo::write_heap_regions() for details.
+_closed_heap_regions = new GrowableArray<MemRegion>(2);
+_open_heap_regions = new GrowableArray<MemRegion>(2);
+HeapShared::archive_objects(_closed_heap_regions, _open_heap_regions);
 ArchiveBuilder::OtherROAllocMark mark;
 HeapShared::write_subgraph_info_table();
 }
-void VM_PopulateDumpSharedSpace::dump_archive_heap_oopmaps() {
+void VM_PopulateDumpSharedSpace::dump_heap_oopmaps() {
 if (HeapShared::is_heap_object_archiving_allowed()) {
-_closed_archive_heap_oopmaps = new GrowableArray<ArchiveHeapOopmapInfo>(2);
-dump_archive_heap_oopmaps(_closed_archive_heap_regions, _closed_archive_heap_oopmaps);
-_open_archive_heap_oopmaps = new GrowableArray<ArchiveHeapOopmapInfo>(2);
-dump_archive_heap_oopmaps(_open_archive_heap_regions, _open_archive_heap_oopmaps);
+_closed_heap_oopmaps = new GrowableArray<ArchiveHeapOopmapInfo>(2);
+dump_heap_oopmaps(_closed_heap_regions, _closed_heap_oopmaps);
+_open_heap_oopmaps = new GrowableArray<ArchiveHeapOopmapInfo>(2);
+dump_heap_oopmaps(_open_heap_regions, _open_heap_oopmaps);
 }
 }
-void VM_PopulateDumpSharedSpace::dump_archive_heap_oopmaps(GrowableArray<MemRegion>* regions,
+void VM_PopulateDumpSharedSpace::dump_heap_oopmaps(GrowableArray<MemRegion>* regions,
 GrowableArray<ArchiveHeapOopmapInfo>* oopmaps) {
 for (int i=0; i<regions->length(); i++) {
 ResourceBitMap oopmap = HeapShared::calculate_oopmap(regions->at(i));
@@ -1400,7 +1399,7 @@ void MetaspaceShared::initialize_shared_spaces() {
 // Initialize the run-time symbol table.
 SymbolTable::create_table();
-static_mapinfo->patch_archived_heap_embedded_pointers();
+static_mapinfo->patch_heap_embedded_pointers();
 // Close the mapinfo file
 static_mapinfo->close();


@@ -58,21 +58,21 @@ class MetaspaceShared : AllStatic {
 public:
 enum {
 // core archive spaces
-rw = 0, // read-write shared space in the heap
-ro = 1, // read-only shared space in the heap
+rw = 0, // read-write shared space
+ro = 1, // read-only shared space
 bm = 2, // relocation bitmaps (freed after file mapping is finished)
 num_core_region = 2, // rw and ro
 num_non_heap_spaces = 3, // rw and ro and bm
 // mapped java heap regions
-first_closed_archive_heap_region = bm + 1,
-max_closed_archive_heap_region = 2,
-last_closed_archive_heap_region = first_closed_archive_heap_region + max_closed_archive_heap_region - 1,
-first_open_archive_heap_region = last_closed_archive_heap_region + 1,
-max_open_archive_heap_region = 2,
-last_open_archive_heap_region = first_open_archive_heap_region + max_open_archive_heap_region - 1,
-last_valid_region = last_open_archive_heap_region,
+first_closed_heap_region = bm + 1,
+max_closed_heap_region = 2,
+last_closed_heap_region = first_closed_heap_region + max_closed_heap_region - 1,
+first_open_heap_region = last_closed_heap_region + 1,
+max_open_heap_region = 2,
+last_open_heap_region = first_open_heap_region + max_open_heap_region - 1,
+last_valid_region = last_open_heap_region,
 n_regions = last_valid_region + 1 // total number of regions
 };
@@ -104,7 +104,7 @@ public:
 static void initialize_shared_spaces() NOT_CDS_RETURN;
 // Return true if given address is in the shared metaspace regions (i.e., excluding any
-// mapped shared heap regions.)
+// mapped heap regions.)
 static bool is_in_shared_metaspace(const void* p) {
 return MetaspaceObj::is_shared((const MetaspaceObj*)p);
 }


@@ -905,7 +905,7 @@ void java_lang_Class::fixup_mirror(Klass* k, TRAPS) {
 }
 if (k->is_shared() && k->has_archived_mirror_index()) {
-if (HeapShared::open_archive_heap_region_mapped()) {
+if (HeapShared::open_regions_mapped()) {
 bool present = restore_archived_mirror(k, Handle(), Handle(), Handle(), CHECK);
 assert(present, "Missing archived mirror for %s", k->external_name());
 return;
@@ -1156,7 +1156,7 @@ void java_lang_Class::archive_basic_type_mirrors() {
 oop m = Universe::_mirrors[t].resolve();
 if (m != NULL) {
 // Update the field at _array_klass_offset to point to the relocated array klass.
-oop archived_m = HeapShared::archive_heap_object(m);
+oop archived_m = HeapShared::archive_object(m);
 assert(archived_m != NULL, "sanity");
 Klass *ak = (Klass*)(archived_m->metadata_field(_array_klass_offset));
 assert(ak != NULL || t == T_VOID, "should not be NULL");
@@ -1215,7 +1215,7 @@ oop java_lang_Class::archive_mirror(Klass* k) {
 }
 // Now start archiving the mirror object
-oop archived_mirror = HeapShared::archive_heap_object(mirror);
+oop archived_mirror = HeapShared::archive_object(mirror);
 if (archived_mirror == NULL) {
 return NULL;
 }


@@ -724,11 +724,11 @@ oop StringTable::create_archived_string(oop s) {
 oop new_s = NULL;
 typeArrayOop v = java_lang_String::value_no_keepalive(s);
-typeArrayOop new_v = (typeArrayOop)HeapShared::archive_heap_object(v);
+typeArrayOop new_v = (typeArrayOop)HeapShared::archive_object(v);
 if (new_v == NULL) {
 return NULL;
 }
-new_s = HeapShared::archive_heap_object(s);
+new_s = HeapShared::archive_object(s);
 if (new_s == NULL) {
 return NULL;
 }
@@ -779,7 +779,7 @@ void StringTable::serialize_shared_table_header(SerializeClosure* soc) {
 if (soc->writing()) {
 // Sanity. Make sure we don't use the shared table at dump time
 _shared_table.reset();
-} else if (!HeapShared::closed_archive_heap_region_mapped()) {
+} else if (!HeapShared::closed_regions_mapped()) {
 _shared_table.reset();
 }
 }


@@ -1654,7 +1654,7 @@ void SystemDictionaryShared::update_archived_mirror_native_pointers_for(LambdaPr
 }
 void SystemDictionaryShared::update_archived_mirror_native_pointers() {
-if (!HeapShared::open_archive_heap_region_mapped()) {
+if (!HeapShared::open_regions_mapped()) {
 return;
 }
 if (MetaspaceShared::relocation_delta() == 0) {


@@ -133,13 +133,13 @@ void vmClasses::resolve_all(TRAPS) {
 // ConstantPool::restore_unshareable_info (restores the archived
 // resolved_references array object).
 //
-// HeapShared::fixup_mapped_heap_regions() fills the empty
+// HeapShared::fixup_mapped_regions() fills the empty
 // spaces in the archived heap regions and may use
 // vmClasses::Object_klass(), so we can do this only after
 // Object_klass is resolved. See the above resolve_through()
 // call. No mirror objects are accessed/restored in the above call.
 // Mirrors are restored after java.lang.Class is loaded.
-HeapShared::fixup_mapped_heap_regions();
+HeapShared::fixup_mapped_regions();
 // Initialize the constant pool for the Object_class
 assert(Object_klass()->is_shared(), "must be");


@@ -433,7 +433,7 @@ void Universe::genesis(TRAPS) {
 void Universe::initialize_basic_type_mirrors(TRAPS) {
 #if INCLUDE_CDS_JAVA_HEAP
 if (UseSharedSpaces &&
-HeapShared::open_archive_heap_region_mapped() &&
+HeapShared::open_regions_mapped() &&
 _mirrors[T_INT].resolve() != NULL) {
 assert(HeapShared::is_heap_object_archiving_allowed(), "Sanity");


@@ -287,10 +287,10 @@ void ConstantPool::archive_resolved_references() {
 }
 }
-oop archived = HeapShared::archive_heap_object(rr);
+oop archived = HeapShared::archive_object(rr);
 // If the resolved references array is not archived (too large),
 // the 'archived' object is NULL. No need to explicitly check
-// the return value of archive_heap_object here. At runtime, the
+// the return value of archive_object() here. At runtime, the
 // resolved references will be created using the normal process
 // when there is no archived value.
 _cache->set_archived_references(archived);
@@ -347,7 +347,7 @@ void ConstantPool::restore_unshareable_info(TRAPS) {
 if (vmClasses::Object_klass_loaded()) {
 ClassLoaderData* loader_data = pool_holder()->class_loader_data();
 #if INCLUDE_CDS_JAVA_HEAP
-if (HeapShared::open_archive_heap_region_mapped() &&
+if (HeapShared::open_regions_mapped() &&
 _cache->archived_references() != NULL) {
 oop archived = _cache->archived_references();
 // Create handle for the archived resolved reference array object


@@ -604,7 +604,7 @@ void Klass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protec
 if (this->has_archived_mirror_index()) {
 ResourceMark rm(THREAD);
 log_debug(cds, mirror)("%s has raw archived mirror", external_name());
-if (HeapShared::open_archive_heap_region_mapped()) {
+if (HeapShared::open_regions_mapped()) {
 bool present = java_lang_Class::restore_archived_mirror(this, loader, module_handle,
 protection_domain,
 CHECK);


@@ -1941,7 +1941,7 @@ WB_ENTRY(jboolean, WB_IsSharedClass(JNIEnv* env, jobject wb, jclass clazz))
 WB_END
 WB_ENTRY(jboolean, WB_AreSharedStringsIgnored(JNIEnv* env))
-return !HeapShared::closed_archive_heap_region_mapped();
+return !HeapShared::closed_regions_mapped();
 WB_END
 WB_ENTRY(jobject, WB_GetResolvedReferences(JNIEnv* env, jobject wb, jclass clazz))
@@ -1966,7 +1966,7 @@ WB_ENTRY(void, WB_LinkClass(JNIEnv* env, jobject wb, jclass clazz))
 WB_END
 WB_ENTRY(jboolean, WB_AreOpenArchiveHeapObjectsMapped(JNIEnv* env))
-return HeapShared::open_archive_heap_region_mapped();
+return HeapShared::open_regions_mapped();
 WB_END
 WB_ENTRY(jboolean, WB_IsCDSIncluded(JNIEnv* env))