Merge
commit 892e74a87e

59 changed files with 2231 additions and 714 deletions
@@ -2590,7 +2590,7 @@ void ClassFileParser::parse_classfile_sourcefile_attribute(TRAPS) {
     valid_symbol_at(sourcefile_index),
     "Invalid SourceFile attribute at constant pool index %u in class file %s",
     sourcefile_index, CHECK);
-  set_class_sourcefile(_cp->symbol_at(sourcefile_index));
+  set_class_sourcefile_index(sourcefile_index);
 }

@@ -2728,7 +2728,7 @@ void ClassFileParser::parse_classfile_signature_attribute(TRAPS) {
     valid_symbol_at(signature_index),
     "Invalid constant pool index %u in Signature attribute in class file %s",
     signature_index, CHECK);
-  set_class_generic_signature(_cp->symbol_at(signature_index));
+  set_class_generic_signature_index(signature_index);
 }

 void ClassFileParser::parse_classfile_bootstrap_methods_attribute(u4 attribute_byte_length, TRAPS) {

@@ -2975,13 +2975,11 @@ void ClassFileParser::parse_classfile_attributes(ClassFileParser::ClassAnnotationCollector
 void ClassFileParser::apply_parsed_class_attributes(instanceKlassHandle k) {
   if (_synthetic_flag)
     k->set_is_synthetic();
-  if (_sourcefile != NULL) {
-    _sourcefile->increment_refcount();
-    k->set_source_file_name(_sourcefile);
+  if (_sourcefile_index != 0) {
+    k->set_source_file_name_index(_sourcefile_index);
   }
-  if (_generic_signature != NULL) {
-    _generic_signature->increment_refcount();
-    k->set_generic_signature(_generic_signature);
+  if (_generic_signature_index != 0) {
+    k->set_generic_signature_index(_generic_signature_index);
   }
   if (_sde_buffer != NULL) {
     k->set_source_debug_extension(_sde_buffer, _sde_length);

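Why the change pays off: storing a u2 constant-pool index instead of a retained Symbol* removes the refcount traffic visible in the removed lines above. A minimal sketch of how a name can still be materialized on demand (the accessor shape is hypothetical, assuming the klass can reach its constant pool):

    // Sketch only -- not taken from this patch.
    Symbol* source_file_name(InstanceKlass* ik) {
      u2 idx = ik->source_file_name_index();   // 0 == no SourceFile attribute
      return (idx == 0) ? NULL : ik->constants()->symbol_at(idx);
    }
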
@@ -62,8 +62,8 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
   bool _synthetic_flag;
   int _sde_length;
   char* _sde_buffer;
-  Symbol* _sourcefile;
-  Symbol* _generic_signature;
+  u2 _sourcefile_index;
+  u2 _generic_signature_index;

   // Metadata created before the instance klass is created.  Must be deallocated
   // if not transferred to the InstanceKlass upon successful class loading

@@ -81,16 +81,16 @@ class ClassFileParser VALUE_OBJ_CLASS_SPEC {
   Array<AnnotationArray*>* _fields_type_annotations;
   InstanceKlass* _klass;  // InstanceKlass once created.

-  void set_class_synthetic_flag(bool x)        { _synthetic_flag = x; }
-  void set_class_sourcefile(Symbol* x)         { _sourcefile = x; }
-  void set_class_generic_signature(Symbol* x)  { _generic_signature = x; }
-  void set_class_sde_buffer(char* x, int len)  { _sde_buffer = x; _sde_length = len; }
+  void set_class_synthetic_flag(bool x)           { _synthetic_flag = x; }
+  void set_class_sourcefile_index(u2 x)           { _sourcefile_index = x; }
+  void set_class_generic_signature_index(u2 x)    { _generic_signature_index = x; }
+  void set_class_sde_buffer(char* x, int len)     { _sde_buffer = x; _sde_length = len; }

   void init_parsed_class_attributes(ClassLoaderData* loader_data) {
     _loader_data = loader_data;
     _synthetic_flag = false;
-    _sourcefile = NULL;
-    _generic_signature = NULL;
+    _sourcefile_index = 0;
+    _generic_signature_index = 0;
     _sde_buffer = NULL;
     _sde_length = 0;
     // initialize the other flags too:

@@ -262,6 +262,7 @@ void G1MonitoringSupport::update_sizes() {
     old_collection_counters()->update_all();
     young_collection_counters()->update_all();
     MetaspaceCounters::update_performance_counters();
+    CompressedClassSpaceCounters::update_performance_counters();
   }
 }

@@ -216,6 +216,7 @@ void ParallelScavengeHeap::update_counters() {
   young_gen()->update_counters();
   old_gen()->update_counters();
   MetaspaceCounters::update_performance_counters();
+  CompressedClassSpaceCounters::update_performance_counters();
 }

 size_t ParallelScavengeHeap::capacity() const {

@@ -362,15 +362,12 @@ bool FileMapInfo::remap_shared_readonly_as_readwrite() {
 ReservedSpace FileMapInfo::reserve_shared_memory() {
   struct FileMapInfo::FileMapHeader::space_info* si = &_header._space[0];
   char* requested_addr = si->_base;
-  size_t alignment = os::vm_allocation_granularity();

-  size_t size = align_size_up(SharedReadOnlySize + SharedReadWriteSize +
-                              SharedMiscDataSize + SharedMiscCodeSize,
-                              alignment);
+  size_t size = FileMapInfo::shared_spaces_size();

   // Reserve the space first, then map otherwise map will go right over some
   // other reserved memory (like the code cache).
-  ReservedSpace rs(size, alignment, false, requested_addr);
+  ReservedSpace rs(size, os::vm_allocation_granularity(), false, requested_addr);
   if (!rs.is_reserved()) {
     fail_continue(err_msg("Unable to reserve shared space at required address " INTPTR_FORMAT, requested_addr));
     return rs;

@@ -559,3 +556,19 @@ void FileMapInfo::print_shared_spaces() {
                si->_base, si->_base + si->_used);
   }
 }
+
+// Unmap mapped regions of shared space.
+void FileMapInfo::stop_sharing_and_unmap(const char* msg) {
+  FileMapInfo *map_info = FileMapInfo::current_info();
+  if (map_info) {
+    map_info->fail_continue(msg);
+    for (int i = 0; i < MetaspaceShared::n_regions; i++) {
+      if (map_info->_header._space[i]._base != NULL) {
+        map_info->unmap_region(i);
+        map_info->_header._space[i]._base = NULL;
+      }
+    }
+  } else if (DumpSharedSpaces) {
+    fail_stop(msg, NULL);
+  }
+}

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -150,6 +150,15 @@ public:
   // Return true if given address is in the mapped shared space.
   bool is_in_shared_space(const void* p) NOT_CDS_RETURN_(false);
   void print_shared_spaces() NOT_CDS_RETURN;
+
+  static size_t shared_spaces_size() {
+    return align_size_up(SharedReadOnlySize + SharedReadWriteSize +
+                         SharedMiscDataSize + SharedMiscCodeSize,
+                         os::vm_allocation_granularity());
+  }
+
+  // Stop CDS sharing and unmap CDS regions.
+  static void stop_sharing_and_unmap(const char* msg);
 };

 #endif // SHARE_VM_MEMORY_FILEMAP_HPP

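shared_spaces_size() centralizes the size computation that reserve_shared_memory() previously repeated inline (see the filemap.cpp hunk above). For reference, a sketch of the align_size_up() semantics it relies on, assuming the alignment is a power of two as os::vm_allocation_granularity() is:

    // Illustrative sketch of the rounding helper, not part of the patch.
    inline size_t align_size_up_sketch(size_t size, size_t alignment) {
      return (size + alignment - 1) & ~(alignment - 1);  // round up to a multiple
    }
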
@@ -1211,6 +1211,7 @@ void GenCollectedHeap::gc_epilogue(bool full) {
   }

   MetaspaceCounters::update_performance_counters();
+  CompressedClassSpaceCounters::update_performance_counters();

   always_do_update_barrier = UseConcMarkSweepGC;
 };

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it

@@ -118,9 +118,12 @@ bool CodeHeap::reserve(size_t reserved_size, size_t committed_size,
   _number_of_committed_segments = size_to_segments(_memory.committed_size());
   _number_of_reserved_segments  = size_to_segments(_memory.reserved_size());
   assert(_number_of_reserved_segments >= _number_of_committed_segments, "just checking");
+  const size_t reserved_segments_alignment = MAX2((size_t)os::vm_page_size(), granularity);
+  const size_t reserved_segments_size = align_size_up(_number_of_reserved_segments, reserved_segments_alignment);
+  const size_t committed_segments_size = align_to_page_size(_number_of_committed_segments);

   // reserve space for _segmap
-  if (!_segmap.initialize(align_to_page_size(_number_of_reserved_segments), align_to_page_size(_number_of_committed_segments))) {
+  if (!_segmap.initialize(reserved_segments_size, committed_segments_size)) {
     return false;
   }

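The fix here is that the reserved _segmap size is now aligned to MAX2(page size, allocation granularity) rather than page size alone. An illustration with assumed platform values (not taken from the patch): on Windows, os::vm_allocation_granularity() is typically 64 KB while os::vm_page_size() is 4 KB, so

    // reserved_segments_alignment = MAX2(4 KB, 64 KB) = 64 KB   (assumed values)
    // and the _segmap reservation is rounded up to a 64 KB multiple,
    // satisfying the stricter of the two constraints.
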
@@ -35,6 +35,7 @@
 #include "memory/resourceArea.hpp"
 #include "memory/universe.hpp"
+#include "runtime/globals.hpp"
 #include "runtime/java.hpp"
 #include "runtime/mutex.hpp"
 #include "runtime/orderAccess.hpp"
 #include "services/memTracker.hpp"

@@ -54,6 +55,8 @@ size_t const allocation_from_dictionary_limit = 64 * K;

 MetaWord* last_allocated = 0;

+size_t Metaspace::_class_metaspace_size;
+
 // Used in declarations in SpaceManager and ChunkManager
 enum ChunkIndex {
   ZeroIndex = 0,

@@ -261,10 +264,6 @@ class VirtualSpaceNode : public CHeapObj<mtClass> {
   // count of chunks contained in this VirtualSpace
   uintx _container_count;

-  // Convenience functions for logical bottom and end
-  MetaWord* bottom() const { return (MetaWord*) _virtual_space.low(); }
-  MetaWord* end() const { return (MetaWord*) _virtual_space.high(); }
-
   // Convenience functions to access the _virtual_space
   char* low()  const { return virtual_space()->low(); }
   char* high() const { return virtual_space()->high(); }

@@ -284,6 +283,10 @@ class VirtualSpaceNode : public CHeapObj<mtClass> {
   VirtualSpaceNode(ReservedSpace rs) : _top(NULL), _next(NULL), _rs(rs), _container_count(0) {}
   ~VirtualSpaceNode();

+  // Convenience functions for logical bottom and end
+  MetaWord* bottom() const { return (MetaWord*) _virtual_space.low(); }
+  MetaWord* end() const { return (MetaWord*) _virtual_space.high(); }
+
   // address of next available space in _virtual_space;
   // Accessors
   VirtualSpaceNode* next() { return _next; }

@@ -1313,7 +1316,8 @@ bool MetaspaceGC::should_expand(VirtualSpaceList* vsl, size_t word_size) {

   // Class virtual space should always be expanded.  Call GC for the other
   // metadata virtual space.
-  if (vsl == Metaspace::class_space_list()) return true;
+  if (Metaspace::using_class_space() &&
+      (vsl == Metaspace::class_space_list())) return true;

   // If this is part of an allocation after a GC, expand
   // unconditionally.

@@ -2257,7 +2261,7 @@ void SpaceManager::deallocate(MetaWord* p, size_t word_size) {
   size_t raw_word_size = get_raw_word_size(word_size);
   size_t min_size = TreeChunk<Metablock, FreeList>::min_size();
   assert(raw_word_size >= min_size,
-         err_msg("Should not deallocate dark matter " SIZE_FORMAT, word_size));
+         err_msg("Should not deallocate dark matter " SIZE_FORMAT "<" SIZE_FORMAT, word_size, min_size));
   block_freelists()->return_block(p, raw_word_size);
 }

@@ -2374,7 +2378,7 @@ MetaWord* SpaceManager::allocate_work(size_t word_size) {
   if (result == NULL) {
     result = grow_and_allocate(word_size);
   }
-  if (result > 0) {
+  if (result != 0) {
     inc_used_metrics(word_size);
     assert(result != (MetaWord*) chunks_in_use(MediumIndex),
            "Head of the list is being allocated");

@@ -2476,15 +2480,13 @@ void SpaceManager::mangle_freed_chunks() {
 size_t MetaspaceAux::_allocated_capacity_words[] = {0, 0};
 size_t MetaspaceAux::_allocated_used_words[] = {0, 0};

+size_t MetaspaceAux::free_bytes(Metaspace::MetadataType mdtype) {
+  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
+  return list == NULL ? 0 : list->free_bytes();
+}
+
 size_t MetaspaceAux::free_bytes() {
-  size_t result = 0;
-  if (Metaspace::class_space_list() != NULL) {
-    result = result + Metaspace::class_space_list()->free_bytes();
-  }
-  if (Metaspace::space_list() != NULL) {
-    result = result + Metaspace::space_list()->free_bytes();
-  }
-  return result;
+  return free_bytes(Metaspace::ClassType) + free_bytes(Metaspace::NonClassType);
 }

 void MetaspaceAux::dec_capacity(Metaspace::MetadataType mdtype, size_t words) {

@@ -2549,6 +2551,9 @@ size_t MetaspaceAux::free_in_bytes(Metaspace::MetadataType mdtype) {
 }

 size_t MetaspaceAux::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
+  if ((mdtype == Metaspace::ClassType) && !Metaspace::using_class_space()) {
+    return 0;
+  }
   // Don't count the space in the freelists.  That space will be
   // added to the capacity calculation as needed.
   size_t capacity = 0;

@@ -2563,18 +2568,18 @@ size_t MetaspaceAux::capacity_bytes_slow(Metaspace::MetadataType mdtype) {
 }

 size_t MetaspaceAux::reserved_in_bytes(Metaspace::MetadataType mdtype) {
-  size_t reserved = (mdtype == Metaspace::ClassType) ?
-                        Metaspace::class_space_list()->virtual_space_total() :
-                        Metaspace::space_list()->virtual_space_total();
-  return reserved * BytesPerWord;
+  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
+  return list == NULL ? 0 : list->virtual_space_total();
 }

 size_t MetaspaceAux::min_chunk_size() { return Metaspace::first_chunk_word_size(); }

 size_t MetaspaceAux::free_chunks_total(Metaspace::MetadataType mdtype) {
-  ChunkManager* chunk = (mdtype == Metaspace::ClassType) ?
-                            Metaspace::class_space_list()->chunk_manager() :
-                            Metaspace::space_list()->chunk_manager();
+  VirtualSpaceList* list = Metaspace::get_space_list(mdtype);
+  if (list == NULL) {
+    return 0;
+  }
+  ChunkManager* chunk = list->chunk_manager();
   chunk->slow_verify();
   return chunk->free_chunks_total();
 }

@@ -2615,7 +2620,6 @@ void MetaspaceAux::print_metaspace_change(size_t prev_metadata_used) {

 // This is printed when PrintGCDetails
 void MetaspaceAux::print_on(outputStream* out) {
-  Metaspace::MetadataType ct = Metaspace::ClassType;
   Metaspace::MetadataType nct = Metaspace::NonClassType;

   out->print_cr(" Metaspace total "

@@ -2629,12 +2633,15 @@ void MetaspaceAux::print_on(outputStream* out) {
                 allocated_capacity_bytes(nct)/K,
                 allocated_used_bytes(nct)/K,
                 reserved_in_bytes(nct)/K);
-  out->print_cr("  class space "
-                SIZE_FORMAT "K, used " SIZE_FORMAT "K,"
-                " reserved " SIZE_FORMAT "K",
-                allocated_capacity_bytes(ct)/K,
-                allocated_used_bytes(ct)/K,
-                reserved_in_bytes(ct)/K);
+  if (Metaspace::using_class_space()) {
+    Metaspace::MetadataType ct = Metaspace::ClassType;
+    out->print_cr("  class space "
+                  SIZE_FORMAT "K, used " SIZE_FORMAT "K,"
+                  " reserved " SIZE_FORMAT "K",
+                  allocated_capacity_bytes(ct)/K,
+                  allocated_used_bytes(ct)/K,
+                  reserved_in_bytes(ct)/K);
+  }
 }

 // Print information for class space and data space separately.

@@ -2659,13 +2666,37 @@ void MetaspaceAux::print_on(outputStream* out, Metaspace::MetadataType mdtype) {
   assert(!SafepointSynchronize::is_at_safepoint() || used_and_free == capacity_bytes, "Accounting is wrong");
 }

-// Print total fragmentation for class and data metaspaces separately
-void MetaspaceAux::print_waste(outputStream* out) {
-
-  size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
-  size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;
+// Print total fragmentation for class metaspaces
+void MetaspaceAux::print_class_waste(outputStream* out) {
+  assert(Metaspace::using_class_space(), "class metaspace not used");
+  size_t cls_specialized_waste = 0, cls_small_waste = 0, cls_medium_waste = 0;
+  size_t cls_specialized_count = 0, cls_small_count = 0, cls_medium_count = 0, cls_humongous_count = 0;
+  ClassLoaderDataGraphMetaspaceIterator iter;
+  while (iter.repeat()) {
+    Metaspace* msp = iter.get_next();
+    if (msp != NULL) {
+      cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
+      cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
+      cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
+      cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
+      cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
+      cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
+      cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
+    }
+  }
+  out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
+                SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
+                SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
+                "large count " SIZE_FORMAT,
+                cls_specialized_count, cls_specialized_waste,
+                cls_small_count, cls_small_waste,
+                cls_medium_count, cls_medium_waste, cls_humongous_count);
+}
+
+// Print total fragmentation for data and class metaspaces separately
+void MetaspaceAux::print_waste(outputStream* out) {
+  size_t specialized_waste = 0, small_waste = 0, medium_waste = 0;
+  size_t specialized_count = 0, small_count = 0, medium_count = 0, humongous_count = 0;

   ClassLoaderDataGraphMetaspaceIterator iter;
   while (iter.repeat()) {

@@ -2678,14 +2709,6 @@ void MetaspaceAux::print_waste(outputStream* out) {
       medium_waste += msp->vsm()->sum_waste_in_chunks_in_use(MediumIndex);
       medium_count += msp->vsm()->sum_count_in_chunks_in_use(MediumIndex);
       humongous_count += msp->vsm()->sum_count_in_chunks_in_use(HumongousIndex);
-
-      cls_specialized_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SpecializedIndex);
-      cls_specialized_count += msp->class_vsm()->sum_count_in_chunks_in_use(SpecializedIndex);
-      cls_small_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(SmallIndex);
-      cls_small_count += msp->class_vsm()->sum_count_in_chunks_in_use(SmallIndex);
-      cls_medium_waste += msp->class_vsm()->sum_waste_in_chunks_in_use(MediumIndex);
-      cls_medium_count += msp->class_vsm()->sum_count_in_chunks_in_use(MediumIndex);
-      cls_humongous_count += msp->class_vsm()->sum_count_in_chunks_in_use(HumongousIndex);
     }
   }
   out->print_cr("Total fragmentation waste (words) doesn't count free space");

@@ -2695,13 +2718,9 @@ void MetaspaceAux::print_waste(outputStream* out) {
                 "large count " SIZE_FORMAT,
                 specialized_count, specialized_waste, small_count,
                 small_waste, medium_count, medium_waste, humongous_count);
-  out->print_cr(" class: " SIZE_FORMAT " specialized(s) " SIZE_FORMAT ", "
-                SIZE_FORMAT " small(s) " SIZE_FORMAT ", "
-                SIZE_FORMAT " medium(s) " SIZE_FORMAT ", "
-                "large count " SIZE_FORMAT,
-                cls_specialized_count, cls_specialized_waste,
-                cls_small_count, cls_small_waste,
-                cls_medium_count, cls_medium_waste, cls_humongous_count);
+  if (Metaspace::using_class_space()) {
+    print_class_waste(out);
+  }
 }

 // Dump global metaspace things from the end of ClassLoaderDataGraph

@@ -2714,7 +2733,9 @@ void MetaspaceAux::dump(outputStream* out) {

 void MetaspaceAux::verify_free_chunks() {
   Metaspace::space_list()->chunk_manager()->verify();
-  Metaspace::class_space_list()->chunk_manager()->verify();
+  if (Metaspace::using_class_space()) {
+    Metaspace::class_space_list()->chunk_manager()->verify();
+  }
 }

 void MetaspaceAux::verify_capacity() {

@@ -2776,7 +2797,9 @@ Metaspace::Metaspace(Mutex* lock, MetaspaceType type) {

 Metaspace::~Metaspace() {
   delete _vsm;
-  delete _class_vsm;
+  if (using_class_space()) {
+    delete _class_vsm;
+  }
 }

 VirtualSpaceList* Metaspace::_space_list = NULL;

@@ -2784,9 +2807,123 @@ VirtualSpaceList* Metaspace::_class_space_list = NULL;

 #define VIRTUALSPACEMULTIPLIER 2

+#ifdef _LP64
+void Metaspace::set_narrow_klass_base_and_shift(address metaspace_base, address cds_base) {
+  // Figure out the narrow_klass_base and the narrow_klass_shift.  The
+  // narrow_klass_base is the lower of the metaspace base and the cds base
+  // (if cds is enabled).  The narrow_klass_shift depends on the distance
+  // between the lower base and higher address.
+  address lower_base;
+  address higher_address;
+  if (UseSharedSpaces) {
+    higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
+                          (address)(metaspace_base + class_metaspace_size()));
+    lower_base = MIN2(metaspace_base, cds_base);
+  } else {
+    higher_address = metaspace_base + class_metaspace_size();
+    lower_base = metaspace_base;
+  }
+  Universe::set_narrow_klass_base(lower_base);
+  if ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint) {
+    Universe::set_narrow_klass_shift(0);
+  } else {
+    assert(!UseSharedSpaces, "Cannot shift with UseSharedSpaces");
+    Universe::set_narrow_klass_shift(LogKlassAlignmentInBytes);
+  }
+}
+
+// Return TRUE if the specified metaspace_base and cds_base are close enough
+// to work with compressed klass pointers.
+bool Metaspace::can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base) {
+  assert(cds_base != 0 && UseSharedSpaces, "Only use with CDS");
+  assert(UseCompressedKlassPointers, "Only use with CompressedKlassPtrs");
+  address lower_base = MIN2((address)metaspace_base, cds_base);
+  address higher_address = MAX2((address)(cds_base + FileMapInfo::shared_spaces_size()),
+                                (address)(metaspace_base + class_metaspace_size()));
+  return ((uint64_t)(higher_address - lower_base) < (uint64_t)max_juint);
+}
+
+// Try to allocate the metaspace at the requested addr.
+void Metaspace::allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base) {
+  assert(using_class_space(), "called improperly");
+  assert(UseCompressedKlassPointers, "Only use with CompressedKlassPtrs");
+  assert(class_metaspace_size() < KlassEncodingMetaspaceMax,
+         "Metaspace size is too big");
+
+  ReservedSpace metaspace_rs = ReservedSpace(class_metaspace_size(),
+                                             os::vm_allocation_granularity(),
+                                             false, requested_addr, 0);
+  if (!metaspace_rs.is_reserved()) {
+    if (UseSharedSpaces) {
+      // Keep trying to allocate the metaspace, increasing the requested_addr
+      // by 1GB each time, until we reach an address that will no longer allow
+      // use of CDS with compressed klass pointers.
+      char *addr = requested_addr;
+      while (!metaspace_rs.is_reserved() && (addr + 1*G > addr) &&
+             can_use_cds_with_metaspace_addr(addr + 1*G, cds_base)) {
+        addr = addr + 1*G;
+        metaspace_rs = ReservedSpace(class_metaspace_size(),
+                                     os::vm_allocation_granularity(), false, addr, 0);
+      }
+    }
+
+    // If no successful allocation then try to allocate the space anywhere.  If
+    // that fails then OOM doom.  At this point we cannot try allocating the
+    // metaspace as if UseCompressedKlassPointers is off because too much
+    // initialization has happened that depends on UseCompressedKlassPointers.
+    // So, UseCompressedKlassPointers cannot be turned off at this point.
+    if (!metaspace_rs.is_reserved()) {
+      metaspace_rs = ReservedSpace(class_metaspace_size(),
+                                   os::vm_allocation_granularity(), false);
+      if (!metaspace_rs.is_reserved()) {
+        vm_exit_during_initialization(err_msg("Could not allocate metaspace: %d bytes",
+                                              class_metaspace_size()));
+      }
+    }
+  }
+
+  // If we got here then the metaspace got allocated.
+  MemTracker::record_virtual_memory_type((address)metaspace_rs.base(), mtClass);
+
+  // Verify that we can use shared spaces.  Otherwise, turn off CDS.
+  if (UseSharedSpaces && !can_use_cds_with_metaspace_addr(metaspace_rs.base(), cds_base)) {
+    FileMapInfo::stop_sharing_and_unmap(
+        "Could not allocate metaspace at a compatible address");
+  }
+
+  set_narrow_klass_base_and_shift((address)metaspace_rs.base(),
+                                  UseSharedSpaces ? (address)cds_base : 0);
+
+  initialize_class_space(metaspace_rs);
+
+  if (PrintCompressedOopsMode || (PrintMiscellaneous && Verbose)) {
+    gclog_or_tty->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: " SIZE_FORMAT,
+                           Universe::narrow_klass_base(), Universe::narrow_klass_shift());
+    gclog_or_tty->print_cr("Metaspace Size: " SIZE_FORMAT " Address: " PTR_FORMAT " Req Addr: " PTR_FORMAT,
+                           class_metaspace_size(), metaspace_rs.base(), requested_addr);
+  }
+}
+
+// For UseCompressedKlassPointers the class space is reserved above the top of
+// the Java heap.  The argument passed in is at the base of the compressed space.
+void Metaspace::initialize_class_space(ReservedSpace rs) {
+  // The reserved space size may be bigger because of alignment, esp with UseLargePages
+  assert(rs.size() >= ClassMetaspaceSize,
+         err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), ClassMetaspaceSize));
+  assert(using_class_space(), "Must be using class space");
+  _class_space_list = new VirtualSpaceList(rs);
+}
+
+#endif
+
 void Metaspace::global_initialize() {
   // Initialize the alignment for shared spaces.
   int max_alignment = os::vm_page_size();
+  size_t cds_total = 0;
+
+  set_class_metaspace_size(align_size_up(ClassMetaspaceSize,
+                                         os::vm_allocation_granularity()));

   MetaspaceShared::set_max_alignment(max_alignment);

   if (DumpSharedSpaces) {

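A worked example of the shift decision in set_narrow_klass_base_and_shift(); every address below is invented purely to make the arithmetic concrete:

    // Hypothetical layout, illustrative only:
    //   lower_base     = 0x0000000800000000  (MIN2 of metaspace base and cds base)
    //   higher_address = 0x00000008c0000000  (highest end of class space / archive)
    //   span           = 0xc0000000 (3 GB) < max_juint
    //   => narrow_klass_shift = 0: a narrow klass is a plain 32-bit offset from lower_base.
    // If the span reached 4 GB, the shift would become LogKlassAlignmentInBytes,
    // which the assert forbids when UseSharedSpaces is on, since the archive is
    // dumped with a zero shift (see the "Set the shift to zero" hunk below).
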
@@ -2798,15 +2935,31 @@ void Metaspace::global_initialize() {
     // Initialize with the sum of the shared space sizes.  The read-only
     // and read write metaspace chunks will be allocated out of this and the
     // remainder is the misc code and data chunks.
-    size_t total = align_size_up(SharedReadOnlySize + SharedReadWriteSize +
-                                 SharedMiscDataSize + SharedMiscCodeSize,
-                                 os::vm_allocation_granularity());
-    size_t word_size = total/wordSize;
-    _space_list = new VirtualSpaceList(word_size);
+    cds_total = FileMapInfo::shared_spaces_size();
+    _space_list = new VirtualSpaceList(cds_total/wordSize);
+
+#ifdef _LP64
+    // Set the compressed klass pointer base so that decoding of these pointers works
+    // properly when creating the shared archive.
+    assert(UseCompressedOops && UseCompressedKlassPointers,
+           "UseCompressedOops and UseCompressedKlassPointers must be set");
+    Universe::set_narrow_klass_base((address)_space_list->current_virtual_space()->bottom());
+    if (TraceMetavirtualspaceAllocation && Verbose) {
+      gclog_or_tty->print_cr("Setting_narrow_klass_base to Address: " PTR_FORMAT,
+                             _space_list->current_virtual_space()->bottom());
+    }
+
+    // Set the shift to zero.
+    assert(class_metaspace_size() < (uint64_t)(max_juint) - cds_total,
+           "CDS region is too large");
+    Universe::set_narrow_klass_shift(0);
+#endif
+
   } else {
     // If using shared space, open the file that contains the shared space
     // and map in the memory before initializing the rest of metaspace (so
     // the addresses don't conflict)
     address cds_address = NULL;
     if (UseSharedSpaces) {
       FileMapInfo* mapinfo = new FileMapInfo();
       memset(mapinfo, 0, sizeof(FileMapInfo));

@@ -2821,8 +2974,22 @@ void Metaspace::global_initialize() {
       assert(!mapinfo->is_open() && !UseSharedSpaces,
              "archive file not closed or shared spaces not disabled.");
     }
+    cds_total = FileMapInfo::shared_spaces_size();
+    cds_address = (address)mapinfo->region_base(0);
   }

+#ifdef _LP64
+  // If UseCompressedKlassPointers is set then allocate the metaspace area
+  // above the heap and above the CDS area (if it exists).
+  if (using_class_space()) {
+    if (UseSharedSpaces) {
+      allocate_metaspace_compressed_klass_ptrs((char *)(cds_address + cds_total), cds_address);
+    } else {
+      allocate_metaspace_compressed_klass_ptrs((char *)CompressedKlassPointersBase, 0);
+    }
+  }
+#endif
+
   // Initialize these before initializing the VirtualSpaceList
   _first_chunk_word_size = InitialBootClassLoaderMetaspaceSize / BytesPerWord;
   _first_chunk_word_size = align_word_size_up(_first_chunk_word_size);

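The 64-bit address-space layout this code arranges, sketched with illustrative placement (proportions are not meaningful):

    //  low addresses                                             high addresses
    //  [ Java heap ] ... [ CDS archive @ cds_address, cds_total bytes ][ class metaspace ]
    // With UseSharedSpaces, requested_addr = cds_address + cds_total asks for the
    // class space directly above the archive so one narrow-klass base can cover
    // both; without CDS the request starts at CompressedKlassPointersBase.
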
@@ -2840,39 +3007,28 @@ void Metaspace::global_initialize() {
   }
 }

-// For UseCompressedKlassPointers the class space is reserved as a piece of the
-// Java heap because the compression algorithm is the same for each.  The
-// argument passed in is at the top of the compressed space
-void Metaspace::initialize_class_space(ReservedSpace rs) {
-  // The reserved space size may be bigger because of alignment, esp with UseLargePages
-  assert(rs.size() >= ClassMetaspaceSize,
-         err_msg(SIZE_FORMAT " != " UINTX_FORMAT, rs.size(), ClassMetaspaceSize));
-  _class_space_list = new VirtualSpaceList(rs);
-}
-
-void Metaspace::initialize(Mutex* lock,
-                           MetaspaceType type) {
+void Metaspace::initialize(Mutex* lock, MetaspaceType type) {

   assert(space_list() != NULL,
          "Metadata VirtualSpaceList has not been initialized");

-  _vsm = new SpaceManager(Metaspace::NonClassType, lock, space_list());
+  _vsm = new SpaceManager(NonClassType, lock, space_list());
   if (_vsm == NULL) {
     return;
   }
   size_t word_size;
   size_t class_word_size;
-  vsm()->get_initial_chunk_sizes(type,
-                                 &word_size,
-                                 &class_word_size);
+  vsm()->get_initial_chunk_sizes(type, &word_size, &class_word_size);

-  assert(class_space_list() != NULL,
-         "Class VirtualSpaceList has not been initialized");
+  if (using_class_space()) {
+    assert(class_space_list() != NULL,
+           "Class VirtualSpaceList has not been initialized");

-  // Allocate SpaceManager for classes.
-  _class_vsm = new SpaceManager(Metaspace::ClassType, lock, class_space_list());
-  if (_class_vsm == NULL) {
-    return;
+    // Allocate SpaceManager for classes.
+    _class_vsm = new SpaceManager(ClassType, lock, class_space_list());
+    if (_class_vsm == NULL) {
+      return;
+    }
   }

   MutexLockerEx cl(SpaceManager::expand_lock(), Mutex::_no_safepoint_check_flag);

@@ -2906,7 +3064,8 @@ size_t Metaspace::align_word_size_up(size_t word_size) {
 }

 MetaWord* Metaspace::allocate(size_t word_size, MetadataType mdtype) {
   // DumpSharedSpaces doesn't use class metadata area (yet)
-  if (mdtype == ClassType && !DumpSharedSpaces) {
+  // Also, don't use class_vsm() unless UseCompressedKlassPointers is true.
+  if (mdtype == ClassType && using_class_space()) {
     return class_vsm()->allocate(word_size);
   } else {
     return vsm()->allocate(word_size);

@@ -2937,14 +3096,19 @@ char* Metaspace::bottom() const {
 }

 size_t Metaspace::used_words_slow(MetadataType mdtype) const {
-  // return vsm()->allocated_used_words();
-  return mdtype == ClassType ? class_vsm()->sum_used_in_chunks_in_use() :
-                               vsm()->sum_used_in_chunks_in_use();  // includes overhead!
+  if (mdtype == ClassType) {
+    return using_class_space() ? class_vsm()->sum_used_in_chunks_in_use() : 0;
+  } else {
+    return vsm()->sum_used_in_chunks_in_use();  // includes overhead!
+  }
 }

 size_t Metaspace::free_words(MetadataType mdtype) const {
-  return mdtype == ClassType ? class_vsm()->sum_free_in_chunks_in_use() :
-                               vsm()->sum_free_in_chunks_in_use();
+  if (mdtype == ClassType) {
+    return using_class_space() ? class_vsm()->sum_free_in_chunks_in_use() : 0;
+  } else {
+    return vsm()->sum_free_in_chunks_in_use();
+  }
 }

 // Space capacity in the Metaspace.  It includes

@@ -2953,8 +3117,11 @@ size_t Metaspace::free_words(MetadataType mdtype) const {
 // in the space available in the dictionary which
 // is already counted in some chunk.
 size_t Metaspace::capacity_words_slow(MetadataType mdtype) const {
-  return mdtype == ClassType ? class_vsm()->sum_capacity_in_chunks_in_use() :
-                               vsm()->sum_capacity_in_chunks_in_use();
+  if (mdtype == ClassType) {
+    return using_class_space() ? class_vsm()->sum_capacity_in_chunks_in_use() : 0;
+  } else {
+    return vsm()->sum_capacity_in_chunks_in_use();
+  }
 }

 size_t Metaspace::used_bytes_slow(MetadataType mdtype) const {

@@ -2977,8 +3144,8 @@ void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
 #endif
     return;
   }
-  if (is_class) {
-    class_vsm()->deallocate(ptr, word_size);
+  if (is_class && using_class_space()) {
+    class_vsm()->deallocate(ptr, word_size);
   } else {
     vsm()->deallocate(ptr, word_size);
   }

@@ -2992,7 +3159,7 @@ void Metaspace::deallocate(MetaWord* ptr, size_t word_size, bool is_class) {
 #endif
     return;
   }
-  if (is_class) {
+  if (is_class && using_class_space()) {
     class_vsm()->deallocate(ptr, word_size);
   } else {
     vsm()->deallocate(ptr, word_size);

@@ -3101,14 +3268,18 @@ void Metaspace::purge() {
   MutexLockerEx cl(SpaceManager::expand_lock(),
                    Mutex::_no_safepoint_check_flag);
   space_list()->purge();
-  class_space_list()->purge();
+  if (using_class_space()) {
+    class_space_list()->purge();
+  }
 }

 void Metaspace::print_on(outputStream* out) const {
   // Print both class virtual space counts and metaspace.
   if (Verbose) {
-      vsm()->print_on(out);
+    vsm()->print_on(out);
+    if (using_class_space()) {
       class_vsm()->print_on(out);
+    }
   }
 }

@@ -3122,17 +3293,21 @@ bool Metaspace::contains(const void * ptr) {
   // be needed. Note, locking this can cause inversion problems with the
   // caller in MetaspaceObj::is_metadata() function.
   return space_list()->contains(ptr) ||
-         class_space_list()->contains(ptr);
+         (using_class_space() && class_space_list()->contains(ptr));
 }

 void Metaspace::verify() {
   vsm()->verify();
-  class_vsm()->verify();
+  if (using_class_space()) {
+    class_vsm()->verify();
+  }
 }

 void Metaspace::dump(outputStream* const out) const {
   out->print_cr("\nVirtual space manager: " INTPTR_FORMAT, vsm());
   vsm()->dump(out);
-  out->print_cr("\nClass space manager: " INTPTR_FORMAT, class_vsm());
-  class_vsm()->dump(out);
+  if (using_class_space()) {
+    out->print_cr("\nClass space manager: " INTPTR_FORMAT, class_vsm());
+    class_vsm()->dump(out);
+  }
 }

@@ -105,6 +105,16 @@ class Metaspace : public CHeapObj<mtClass> {
   // Align up the word size to the allocation word size
   static size_t align_word_size_up(size_t);

+  // Aligned size of the metaspace.
+  static size_t _class_metaspace_size;
+
+  static size_t class_metaspace_size() {
+    return _class_metaspace_size;
+  }
+  static void set_class_metaspace_size(size_t metaspace_size) {
+    _class_metaspace_size = metaspace_size;
+  }
+
   static size_t _first_chunk_word_size;
   static size_t _first_class_chunk_word_size;

@@ -126,11 +136,26 @@ class Metaspace : public CHeapObj<mtClass> {

   static VirtualSpaceList* space_list()       { return _space_list; }
   static VirtualSpaceList* class_space_list() { return _class_space_list; }
+  static VirtualSpaceList* get_space_list(MetadataType mdtype) {
+    assert(mdtype != MetadataTypeCount, "MetadaTypeCount can't be used as mdtype");
+    return mdtype == ClassType ? class_space_list() : space_list();
+  }

   // This is used by DumpSharedSpaces only, where only _vsm is used. So we will
   // maintain a single list for now.
   void record_allocation(void* ptr, MetaspaceObj::Type type, size_t word_size);

+#ifdef _LP64
+  static void set_narrow_klass_base_and_shift(address metaspace_base, address cds_base);
+
+  // Returns true if can use CDS with metaspace allocated as specified address.
+  static bool can_use_cds_with_metaspace_addr(char* metaspace_base, address cds_base);
+
+  static void allocate_metaspace_compressed_klass_ptrs(char* requested_addr, address cds_base);
+
+  static void initialize_class_space(ReservedSpace rs);
+#endif
+
   class AllocRecord : public CHeapObj<mtClass> {
    public:
     AllocRecord(address ptr, MetaspaceObj::Type type, int byte_size)

@@ -151,7 +176,6 @@ class Metaspace : public CHeapObj<mtClass> {

   // Initialize globals for Metaspace
   static void global_initialize();
-  static void initialize_class_space(ReservedSpace rs);

   static size_t first_chunk_word_size() { return _first_chunk_word_size; }
   static size_t first_class_chunk_word_size() { return _first_class_chunk_word_size; }

@@ -172,8 +196,6 @@ class Metaspace : public CHeapObj<mtClass> {
   MetaWord* expand_and_allocate(size_t size,
                                 MetadataType mdtype);

-  static bool is_initialized() { return _class_space_list != NULL; }
-
   static bool contains(const void *ptr);
   void dump(outputStream* const out) const;

@@ -190,11 +212,16 @@ class Metaspace : public CHeapObj<mtClass> {
   };

   void iterate(AllocRecordClosure *closure);

+  // Return TRUE only if UseCompressedKlassPointers is True and DumpSharedSpaces is False.
+  static bool using_class_space() {
+    return NOT_LP64(false) LP64_ONLY(UseCompressedKlassPointers && !DumpSharedSpaces);
+  }
+
 };

 class MetaspaceAux : AllStatic {
   static size_t free_chunks_total(Metaspace::MetadataType mdtype);
-  static size_t free_chunks_total_in_bytes(Metaspace::MetadataType mdtype);

  public:
   // Statistics for class space and data space in metaspace.

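using_class_space() is built from HotSpot's conditional-compilation macros; roughly how it reads after preprocessing (a sketch, assuming the standard NOT_LP64/LP64_ONLY definitions):

    // 32-bit build:  static bool using_class_space() { return false; }
    // 64-bit build:  static bool using_class_space() {
    //                  return UseCompressedKlassPointers && !DumpSharedSpaces;
    //                }
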
@@ -238,13 +265,15 @@ class MetaspaceAux : AllStatic {
   // Used by MetaspaceCounters
   static size_t free_chunks_total();
   static size_t free_chunks_total_in_bytes();
+  static size_t free_chunks_total_in_bytes(Metaspace::MetadataType mdtype);

   static size_t allocated_capacity_words(Metaspace::MetadataType mdtype) {
     return _allocated_capacity_words[mdtype];
   }
   static size_t allocated_capacity_words() {
-    return _allocated_capacity_words[Metaspace::ClassType] +
-           _allocated_capacity_words[Metaspace::NonClassType];
+    return _allocated_capacity_words[Metaspace::NonClassType] +
+           (Metaspace::using_class_space() ?
+            _allocated_capacity_words[Metaspace::ClassType] : 0);
   }
   static size_t allocated_capacity_bytes(Metaspace::MetadataType mdtype) {
     return allocated_capacity_words(mdtype) * BytesPerWord;

@@ -257,8 +286,9 @@ class MetaspaceAux : AllStatic {
     return _allocated_used_words[mdtype];
   }
   static size_t allocated_used_words() {
-    return _allocated_used_words[Metaspace::ClassType] +
-           _allocated_used_words[Metaspace::NonClassType];
+    return _allocated_used_words[Metaspace::NonClassType] +
+           (Metaspace::using_class_space() ?
+            _allocated_used_words[Metaspace::ClassType] : 0);
   }
   static size_t allocated_used_bytes(Metaspace::MetadataType mdtype) {
     return allocated_used_words(mdtype) * BytesPerWord;

@@ -268,6 +298,7 @@ class MetaspaceAux : AllStatic {
   }

   static size_t free_bytes();
+  static size_t free_bytes(Metaspace::MetadataType mdtype);

   // Total capacity in all Metaspaces
   static size_t capacity_bytes_slow() {

@@ -300,6 +331,7 @@ class MetaspaceAux : AllStatic {
   static void print_on(outputStream * out);
   static void print_on(outputStream * out, Metaspace::MetadataType mdtype);

+  static void print_class_waste(outputStream* out);
   static void print_waste(outputStream* out);
   static void dump(outputStream* out);
   static void verify_free_chunks();

@@ -25,11 +25,47 @@
 #include "precompiled.hpp"
 #include "memory/metaspaceCounters.hpp"
 #include "memory/resourceArea.hpp"
+#include "runtime/globals.hpp"
+#include "runtime/perfData.hpp"
+#include "utilities/exceptions.hpp"

-MetaspaceCounters* MetaspaceCounters::_metaspace_counters = NULL;
+class MetaspacePerfCounters: public CHeapObj<mtInternal> {
+  friend class VMStructs;
+  PerfVariable* _capacity;
+  PerfVariable* _used;
+  PerfVariable* _max_capacity;
+
+  PerfVariable* create_variable(const char *ns, const char *name, size_t value, TRAPS) {
+    const char *path = PerfDataManager::counter_name(ns, name);
+    return PerfDataManager::create_variable(SUN_GC, path, PerfData::U_Bytes, value, THREAD);
+  }
+
+  void create_constant(const char *ns, const char *name, size_t value, TRAPS) {
+    const char *path = PerfDataManager::counter_name(ns, name);
+    PerfDataManager::create_constant(SUN_GC, path, PerfData::U_Bytes, value, THREAD);
+  }
+
+ public:
+  MetaspacePerfCounters(const char* ns, size_t min_capacity, size_t curr_capacity, size_t max_capacity, size_t used) {
+    EXCEPTION_MARK;
+    ResourceMark rm;
+
+    create_constant(ns, "minCapacity", min_capacity, THREAD);
+    _capacity = create_variable(ns, "capacity", curr_capacity, THREAD);
+    _max_capacity = create_variable(ns, "maxCapacity", max_capacity, THREAD);
+    _used = create_variable(ns, "used", used, THREAD);
+  }
+
+  void update(size_t capacity, size_t max_capacity, size_t used) {
+    _capacity->set_value(capacity);
+    _max_capacity->set_value(max_capacity);
+    _used->set_value(used);
+  }
+};
+
+MetaspacePerfCounters* MetaspaceCounters::_perf_counters = NULL;

-size_t MetaspaceCounters::calc_total_capacity() {
+size_t MetaspaceCounters::calculate_capacity() {
   // The total capacity is the sum of
   //   1) capacity of Metachunks in use by all Metaspaces
   //   2) unused space at the end of each Metachunk

@@ -39,95 +75,65 @@ size_t MetaspaceCounters::calc_total_capacity() {
   return total_capacity;
 }

-MetaspaceCounters::MetaspaceCounters() :
-    _capacity(NULL),
-    _used(NULL),
-    _max_capacity(NULL) {
-  if (UsePerfData) {
-    size_t min_capacity = MetaspaceAux::min_chunk_size();
-    size_t max_capacity = MetaspaceAux::reserved_in_bytes();
-    size_t curr_capacity = calc_total_capacity();
-    size_t used = MetaspaceAux::allocated_used_bytes();
-
-    initialize(min_capacity, max_capacity, curr_capacity, used);
-  }
-}
-
-static PerfVariable* create_ms_variable(const char *ns,
-                                        const char *name,
-                                        size_t value,
-                                        TRAPS) {
-  const char *path = PerfDataManager::counter_name(ns, name);
-  PerfVariable *result =
-      PerfDataManager::create_variable(SUN_GC, path, PerfData::U_Bytes, value,
-                                       CHECK_NULL);
-  return result;
-}
-
-static void create_ms_constant(const char *ns,
-                               const char *name,
-                               size_t value,
-                               TRAPS) {
-  const char *path = PerfDataManager::counter_name(ns, name);
-  PerfDataManager::create_constant(SUN_GC, path, PerfData::U_Bytes, value, CHECK);
-}
-
-void MetaspaceCounters::initialize(size_t min_capacity,
-                                   size_t max_capacity,
-                                   size_t curr_capacity,
-                                   size_t used) {
-
-  if (UsePerfData) {
-    EXCEPTION_MARK;
-    ResourceMark rm;
-
-    const char *ms = "metaspace";
-
-    create_ms_constant(ms, "minCapacity", min_capacity, CHECK);
-    _max_capacity = create_ms_variable(ms, "maxCapacity", max_capacity, CHECK);
-    _capacity = create_ms_variable(ms, "capacity", curr_capacity, CHECK);
-    _used = create_ms_variable(ms, "used", used, CHECK);
-  }
-}
-
-void MetaspaceCounters::update_capacity() {
-  assert(UsePerfData, "Should not be called unless being used");
-  size_t total_capacity = calc_total_capacity();
-  _capacity->set_value(total_capacity);
-}
-
-void MetaspaceCounters::update_used() {
-  assert(UsePerfData, "Should not be called unless being used");
-  size_t used_in_bytes = MetaspaceAux::allocated_used_bytes();
-  _used->set_value(used_in_bytes);
-}
-
-void MetaspaceCounters::update_max_capacity() {
-  assert(UsePerfData, "Should not be called unless being used");
-  assert(_max_capacity != NULL, "Should be initialized");
-  size_t reserved_in_bytes = MetaspaceAux::reserved_in_bytes();
-  _max_capacity->set_value(reserved_in_bytes);
-}
-
-void MetaspaceCounters::update_all() {
-  if (UsePerfData) {
-    update_used();
-    update_capacity();
-    update_max_capacity();
-  }
-}
-
 void MetaspaceCounters::initialize_performance_counters() {
   if (UsePerfData) {
-    assert(_metaspace_counters == NULL, "Should only be initialized once");
-    _metaspace_counters = new MetaspaceCounters();
+    assert(_perf_counters == NULL, "Should only be initialized once");
+
+    size_t min_capacity = MetaspaceAux::min_chunk_size();
+    size_t capacity = calculate_capacity();
+    size_t max_capacity = MetaspaceAux::reserved_in_bytes();
+    size_t used = MetaspaceAux::allocated_used_bytes();
+
+    _perf_counters = new MetaspacePerfCounters("metaspace", min_capacity, capacity, max_capacity, used);
   }
 }

 void MetaspaceCounters::update_performance_counters() {
   if (UsePerfData) {
-    assert(_metaspace_counters != NULL, "Should be initialized");
-    _metaspace_counters->update_all();
+    assert(_perf_counters != NULL, "Should be initialized");
+
+    size_t capacity = calculate_capacity();
+    size_t max_capacity = MetaspaceAux::reserved_in_bytes();
+    size_t used = MetaspaceAux::allocated_used_bytes();
+
+    _perf_counters->update(capacity, max_capacity, used);
   }
 }
+
+MetaspacePerfCounters* CompressedClassSpaceCounters::_perf_counters = NULL;
+
+size_t CompressedClassSpaceCounters::calculate_capacity() {
+  return MetaspaceAux::allocated_capacity_bytes(_class_type) +
+         MetaspaceAux::free_bytes(_class_type) +
+         MetaspaceAux::free_chunks_total_in_bytes(_class_type);
+}
+
+void CompressedClassSpaceCounters::update_performance_counters() {
+  if (UsePerfData && UseCompressedKlassPointers) {
+    assert(_perf_counters != NULL, "Should be initialized");
+
+    size_t capacity = calculate_capacity();
+    size_t max_capacity = MetaspaceAux::reserved_in_bytes(_class_type);
+    size_t used = MetaspaceAux::allocated_used_bytes(_class_type);
+
+    _perf_counters->update(capacity, max_capacity, used);
+  }
+}
+
+void CompressedClassSpaceCounters::initialize_performance_counters() {
+  if (UsePerfData) {
+    assert(_perf_counters == NULL, "Should only be initialized once");
+    const char* ns = "compressedclassspace";
+
+    if (UseCompressedKlassPointers) {
+      size_t min_capacity = MetaspaceAux::min_chunk_size();
+      size_t capacity = calculate_capacity();
+      size_t max_capacity = MetaspaceAux::reserved_in_bytes(_class_type);
+      size_t used = MetaspaceAux::allocated_used_bytes(_class_type);
+
+      _perf_counters = new MetaspacePerfCounters(ns, min_capacity, capacity, max_capacity, used);
+    } else {
+      _perf_counters = new MetaspacePerfCounters(ns, 0, 0, 0, 0);
+    }
+  }
+}

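On the names these counters publish: PerfDataManager::counter_name(ns, name) joins namespace and name with a dot, and the create_* calls register the result under the SUN_GC prefix, so the counters visible to tools like jstat should come out as below (name shapes inferred from the calls above; the patch never spells them out):

    //   sun.gc.metaspace.minCapacity   (constant)
    //   sun.gc.metaspace.capacity      (variable)
    //   sun.gc.metaspace.maxCapacity   (variable)
    //   sun.gc.metaspace.used          (variable)
    //   sun.gc.compressedclassspace.{minCapacity,capacity,maxCapacity,used}
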
@@ -25,31 +25,27 @@
 #ifndef SHARE_VM_MEMORY_METASPACECOUNTERS_HPP
 #define SHARE_VM_MEMORY_METASPACECOUNTERS_HPP

-#include "runtime/perfData.hpp"
+#include "memory/metaspace.hpp"

-class MetaspaceCounters: public CHeapObj<mtClass> {
-  friend class VMStructs;
-  PerfVariable* _capacity;
-  PerfVariable* _used;
-  PerfVariable* _max_capacity;
-  static MetaspaceCounters* _metaspace_counters;
-  void initialize(size_t min_capacity,
-                  size_t max_capacity,
-                  size_t curr_capacity,
-                  size_t used);
-  size_t calc_total_capacity();
+class MetaspacePerfCounters;
+
+class MetaspaceCounters: public AllStatic {
+  static MetaspacePerfCounters* _perf_counters;
+  static size_t calculate_capacity();
+
  public:
-  MetaspaceCounters();
-  ~MetaspaceCounters();
-
-  void update_capacity();
-  void update_used();
-  void update_max_capacity();
-
-  void update_all();
-
   static void initialize_performance_counters();
   static void update_performance_counters();
 };

+class CompressedClassSpaceCounters: public AllStatic {
+  static MetaspacePerfCounters* _perf_counters;
+  static size_t calculate_capacity();
+  static const Metaspace::MetadataType _class_type = Metaspace::ClassType;
+
+ public:
+  static void initialize_performance_counters();
+  static void update_performance_counters();
+};
+
 #endif // SHARE_VM_MEMORY_METASPACECOUNTERS_HPP

@@ -52,7 +52,6 @@ void MetaspaceShared::serialize(SerializeClosure* soc) {
   int tag = 0;
   soc->do_tag(--tag);

-  assert(!UseCompressedOops, "UseCompressedOops doesn't work with shared archive");
   // Verify the sizes of various metadata in the system.
   soc->do_tag(sizeof(Method));
   soc->do_tag(sizeof(ConstMethod));

@@ -145,8 +145,6 @@ NarrowPtrStruct Universe::_narrow_oop = { NULL, 0, true };
 NarrowPtrStruct Universe::_narrow_klass = { NULL, 0, true };
 address Universe::_narrow_ptrs_base;

-size_t Universe::_class_metaspace_size;
-
 void Universe::basic_type_classes_do(void f(Klass*)) {
   f(boolArrayKlassObj());
   f(byteArrayKlassObj());

@@ -641,6 +639,8 @@ jint universe_init() {
     return status;
   }

+  Metaspace::global_initialize();
+
   // Create memory for metadata.  Must be after initializing heap for
   // DumpSharedSpaces.
   ClassLoaderData::init_null_class_loader_data();

@@ -693,13 +693,9 @@ char* Universe::preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode) {
   if (!FLAG_IS_DEFAULT(HeapBaseMinAddress) && (mode == UnscaledNarrowOop)) {
     base = HeapBaseMinAddress;

-  // If the total size and the metaspace size are small enough to allow
-  // UnscaledNarrowOop then just use UnscaledNarrowOop.
-  } else if ((total_size <= OopEncodingHeapMax) && (mode != HeapBasedNarrowOop) &&
-      (!UseCompressedKlassPointers ||
-      (((OopEncodingHeapMax - heap_size) + Universe::class_metaspace_size()) <= KlassEncodingMetaspaceMax))) {
+  // We don't need to check the metaspace size here because it is always smaller
+  // than total_size.
+  // If the total size is small enough to allow UnscaledNarrowOop then
+  // just use UnscaledNarrowOop.
+  } else if ((total_size <= OopEncodingHeapMax) && (mode != HeapBasedNarrowOop)) {
     if ((total_size <= NarrowOopHeapMax) && (mode == UnscaledNarrowOop) &&
         (Universe::narrow_oop_shift() == 0)) {
       // Use 32-bits oops without encoding and

@@ -716,13 +712,6 @@ char* Universe::preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode) {
       base = (OopEncodingHeapMax - heap_size);
     }
   }
-
-  // See if ZeroBaseNarrowOop encoding will work for a heap based at
-  // (KlassEncodingMetaspaceMax - class_metaspace_size()).
-  } else if (UseCompressedKlassPointers && (mode != HeapBasedNarrowOop) &&
-      (Universe::class_metaspace_size() + HeapBaseMinAddress <= KlassEncodingMetaspaceMax) &&
-      (KlassEncodingMetaspaceMax + heap_size - Universe::class_metaspace_size() <= OopEncodingHeapMax)) {
-    base = (KlassEncodingMetaspaceMax - Universe::class_metaspace_size());
   } else {
     // UnscaledNarrowOop encoding didn't work, and no base was found for ZeroBasedOops or
     // HeapBasedNarrowOop encoding was requested.  So, can't reserve below 32Gb.

@@ -732,8 +721,7 @@ char* Universe::preferred_heap_base(size_t heap_size, NARROW_OOP_MODE mode) {
   // Set narrow_oop_base and narrow_oop_use_implicit_null_checks
   // used in ReservedHeapSpace() constructors.
   // The final values will be set in initialize_heap() below.
-  if ((base != 0) && ((base + heap_size) <= OopEncodingHeapMax) &&
-      (!UseCompressedKlassPointers || (base + Universe::class_metaspace_size()) <= KlassEncodingMetaspaceMax)) {
+  if ((base != 0) && ((base + heap_size) <= OopEncodingHeapMax)) {
     // Use zero based compressed oops
     Universe::set_narrow_oop_base(NULL);
     // Don't need guard page for implicit checks in indexed

@@ -816,9 +804,7 @@ jint Universe::initialize_heap() {
       tty->print("heap address: " PTR_FORMAT ", size: " SIZE_FORMAT " MB",
                  Universe::heap()->base(), Universe::heap()->reserved_region().byte_size()/M);
     }
-    if (((uint64_t)Universe::heap()->reserved_region().end() > OopEncodingHeapMax) ||
-        (UseCompressedKlassPointers &&
-        ((uint64_t)Universe::heap()->base() + Universe::class_metaspace_size() > KlassEncodingMetaspaceMax))) {
+    if (((uint64_t)Universe::heap()->reserved_region().end() > OopEncodingHeapMax)) {
       // Can't reserve heap below 32Gb.
       // keep the Universe::narrow_oop_base() set in Universe::reserve_heap()
       Universe::set_narrow_oop_shift(LogMinObjAlignmentInBytes);

@ -849,20 +835,16 @@ jint Universe::initialize_heap() {
        }
      }
    }

    if (verbose) {
      tty->cr();
      tty->cr();
    }
    if (UseCompressedKlassPointers) {
      Universe::set_narrow_klass_base(Universe::narrow_oop_base());
      Universe::set_narrow_klass_shift(MIN2(Universe::narrow_oop_shift(), LogKlassAlignmentInBytes));
    }
    Universe::set_narrow_ptrs_base(Universe::narrow_oop_base());
  }
  // Universe::narrow_oop_base() is one page below the metaspace
  // base. The actual metaspace base depends on alignment constraints
  // so we don't know its exact location here.
  assert((intptr_t)Universe::narrow_oop_base() <= (intptr_t)(Universe::heap()->base() - os::vm_page_size() - ClassMetaspaceSize) ||
  // Universe::narrow_oop_base() is one page below the heap.
  assert((intptr_t)Universe::narrow_oop_base() <= (intptr_t)(Universe::heap()->base() -
                                                             os::vm_page_size()) ||
         Universe::narrow_oop_base() == NULL, "invalid value");
  assert(Universe::narrow_oop_shift() == LogMinObjAlignmentInBytes ||
         Universe::narrow_oop_shift() == 0, "invalid value");
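After this hunk the klass encoding simply reuses the oop base, with the shift clamped to the klass alignment. A small standalone model of the base-plus-shift scheme both encodings share (illustrative names and constants, not VM code):

#include <cassert>
#include <cstdint>

// Illustrative model: encode/decode a 64-bit address against a base/shift
// pair, as set up by initialize_heap() above.
struct NarrowPtrs {
  uint64_t base;   // 0 for zero-based encoding
  int      shift;  // 0 for unscaled, else log2(alignment)
  uint64_t decode(uint32_t narrow) const {
    return base + (uint64_t(narrow) << shift);
  }
  uint32_t encode(uint64_t wide) const {
    assert(wide >= base && ((wide - base) >> shift) <= 0xffffffffULL, "out of range");
    return uint32_t((wide - base) >> shift);
  }
};

int main() {
  NarrowPtrs oops  = { 0, 3 };     // zero-based oops, 8-byte alignment
  NarrowPtrs klass = { 0, 3 };     // klass ptrs reuse the oop base/shift here
  uint64_t addr = 0x7f0000000ULL;  // some 8-byte-aligned address under 32 GB
  assert(oops.decode(oops.encode(addr)) == addr);   // round trip
  assert(klass.decode(klass.encode(addr)) == addr);
  return 0;
}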
@ -882,12 +864,7 @@ jint Universe::initialize_heap() {

// Reserve the Java heap, which is now the same for all GCs.
ReservedSpace Universe::reserve_heap(size_t heap_size, size_t alignment) {
  // Add in the class metaspace area so the classes in the headers can
  // be compressed the same as instances.
  // Need to round class space size up because it's below the heap and
  // the actual alignment depends on its size.
  Universe::set_class_metaspace_size(align_size_up(ClassMetaspaceSize, alignment));
  size_t total_reserved = align_size_up(heap_size + Universe::class_metaspace_size(), alignment);
  size_t total_reserved = align_size_up(heap_size, alignment);
  assert(!UseCompressedOops || (total_reserved <= (OopEncodingHeapMax - os::vm_page_size())),
      "heap size is too big for compressed oops");
  char* addr = Universe::preferred_heap_base(total_reserved, Universe::UnscaledNarrowOop);
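The reservation size is rounded with align_size_up(); a minimal standalone equivalent, assuming a power-of-two alignment as the VM requires:

#include <cassert>
#include <cstdint>

// Minimal stand-in for align_size_up(): round size up to the next
// multiple of a power-of-two alignment.
uint64_t align_size_up(uint64_t size, uint64_t alignment) {
  assert((alignment & (alignment - 1)) == 0, "alignment must be a power of two");
  return (size + alignment - 1) & ~(alignment - 1);
}

int main() {
  assert(align_size_up(100, 64) == 128);
  assert(align_size_up(128, 64) == 128);  // already aligned, unchanged
  return 0;
}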
@ -923,28 +900,17 @@ ReservedSpace Universe::reserve_heap(size_t heap_size, size_t alignment) {
    return total_rs;
  }

  // Split the reserved space into main Java heap and a space for
  // classes so that they can be compressed using the same algorithm
  // as compressed oops. If compress oops and compress klass ptrs are
  // used we need the meta space first: if the alignment used for
  // compressed oops is greater than the one used for compressed klass
  // ptrs, a metadata space on top of the heap could become
  // unreachable.
  ReservedSpace class_rs = total_rs.first_part(Universe::class_metaspace_size());
  ReservedSpace heap_rs = total_rs.last_part(Universe::class_metaspace_size(), alignment);
  Metaspace::initialize_class_space(class_rs);

  if (UseCompressedOops) {
    // Universe::initialize_heap() will reset this to NULL if unscaled
    // or zero-based narrow oops are actually used.
    address base = (address)(total_rs.base() - os::vm_page_size());
    Universe::set_narrow_oop_base(base);
  }
  return heap_rs;
  return total_rs;
}


// It's the caller's repsonsibility to ensure glitch-freedom
// It's the caller's responsibility to ensure glitch-freedom
// (if required).
void Universe::update_heap_info_at_gc() {
  _heap_capacity_at_last_gc = heap()->capacity();
@ -1135,6 +1101,8 @@ bool universe_post_init() {

  // Initialize performance counters for metaspaces
  MetaspaceCounters::initialize_performance_counters();
  CompressedClassSpaceCounters::initialize_performance_counters();

  MemoryService::add_metaspace_memory_pools();

  GC_locker::unlock();    // allow gc after bootstrapping

@ -75,10 +75,10 @@ class LatestMethodCache : public CHeapObj<mtClass> {
};


// For UseCompressedOops and UseCompressedKlassPointers.
// For UseCompressedOops.
struct NarrowPtrStruct {
  // Base address for oop/klass-within-java-object materialization.
  // NULL if using wide oops/klasses or zero based narrow oops/klasses.
  // Base address for oop-within-java-object materialization.
  // NULL if using wide oops or zero based narrow oops.
  address _base;
  // Number of shift bits for encoding/decoding narrow ptrs.
  // 0 if using wide ptrs or zero based unscaled narrow ptrs,
@ -106,6 +106,7 @@ class Universe: AllStatic {
  friend class SystemDictionary;
  friend class VMStructs;
  friend class VM_PopulateDumpSharedSpace;
  friend class Metaspace;

  friend jint  universe_init();
  friend void  universe2_init();

@ -184,9 +185,6 @@ class Universe: AllStatic {
  static struct NarrowPtrStruct _narrow_klass;
  static address _narrow_ptrs_base;

  // Aligned size of the metaspace.
  static size_t _class_metaspace_size;

  // array of dummy objects used with +FullGCAlot
  debug_only(static objArrayOop _fullgc_alot_dummy_array;)
  // index of next entry to clear

@ -238,15 +236,6 @@ class Universe: AllStatic {
    assert(UseCompressedOops, "no compressed ptrs?");
    _narrow_oop._use_implicit_null_checks = use;
  }
  static bool reserve_metaspace_helper(bool with_base = false);
  static ReservedHeapSpace reserve_heap_metaspace(size_t heap_size, size_t alignment, bool& contiguous);

  static size_t class_metaspace_size() {
    return _class_metaspace_size;
  }
  static void set_class_metaspace_size(size_t metaspace_size) {
    _class_metaspace_size = metaspace_size;
  }

  // Debugging
  static int _verify_count;  // number of verifies done

@ -269,7 +269,7 @@ InstanceKlass::InstanceKlass(int vtable_len,
  set_fields(NULL, 0);
  set_constants(NULL);
  set_class_loader_data(NULL);
  set_source_file_name(NULL);
  set_source_file_name_index(0);
  set_source_debug_extension(NULL, 0);
  set_array_name(NULL);
  set_inner_classes(NULL);

@ -284,7 +284,7 @@ InstanceKlass::InstanceKlass(int vtable_len,
  set_osr_nmethods_head(NULL);
  set_breakpoints(NULL);
  init_previous_versions();
  set_generic_signature(NULL);
  set_generic_signature_index(0);
  release_set_methods_jmethod_ids(NULL);
  release_set_methods_cached_itable_indices(NULL);
  set_annotations(NULL);
@ -2368,18 +2368,12 @@ void InstanceKlass::release_C_heap_structures() {
  // unreference array name derived from this class name (arrays of an unloaded
  // class can't be referenced anymore).
  if (_array_name != NULL)  _array_name->decrement_refcount();
  if (_source_file_name != NULL) _source_file_name->decrement_refcount();
  if (_source_debug_extension != NULL) FREE_C_HEAP_ARRAY(char, _source_debug_extension, mtClass);

  assert(_total_instanceKlass_count >= 1, "Sanity check");
  Atomic::dec(&_total_instanceKlass_count);
}

void InstanceKlass::set_source_file_name(Symbol* n) {
  _source_file_name = n;
  if (_source_file_name != NULL) _source_file_name->increment_refcount();
}

void InstanceKlass::set_source_debug_extension(char* array, int length) {
  if (array == NULL) {
    _source_debug_extension = NULL;

@ -201,14 +201,10 @@ class InstanceKlass: public Klass {
  // number_of_inner_classes * 4 + enclosing_method_attribute_size.
  Array<jushort>* _inner_classes;

  // Name of source file containing this klass, NULL if not specified.
  Symbol* _source_file_name;
  // the source debug extension for this klass, NULL if not specified.
  // Specified as UTF-8 string without terminating zero byte in the classfile,
  // it is stored in the instanceklass as a NULL-terminated UTF-8 string
  char* _source_debug_extension;
  // Generic signature, or null if none.
  Symbol* _generic_signature;
  // Array name derived from this class which needs unreferencing
  // if this class is unloaded.
  Symbol* _array_name;

@ -217,6 +213,12 @@ class InstanceKlass: public Klass {
  // (including inherited fields but after header_size()).
  int _nonstatic_field_size;
  int _static_field_size;     // number words used by static fields (oop and non-oop) in this klass
  // Constant pool index to the utf8 entry of the Generic signature,
  // or 0 if none.
  u2 _generic_signature_index;
  // Constant pool index to the utf8 entry for the name of source file
  // containing this klass, 0 if not specified.
  u2 _source_file_name_index;
  u2 _static_oop_field_count; // number of static oop fields in this klass
  u2 _java_fields_count;      // The number of declared Java fields
  int _nonstatic_oop_map_size;// size in words of nonstatic oop map blocks
@ -570,8 +572,16 @@ class InstanceKlass: public Klass {
  }

  // source file name
  Symbol* source_file_name() const { return _source_file_name; }
  void set_source_file_name(Symbol* n);
  Symbol* source_file_name() const {
    return (_source_file_name_index == 0) ?
        (Symbol*)NULL : _constants->symbol_at(_source_file_name_index);
  }
  u2 source_file_name_index() const {
    return _source_file_name_index;
  }
  void set_source_file_name_index(u2 sourcefile_index) {
    _source_file_name_index = sourcefile_index;
  }

  // minor and major version numbers of class file
  u2 minor_version() const { return _minor_version; }
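The accessor change above is the heart of this commit: the cached Symbol* is replaced by a u2 constant-pool index resolved on demand. A standalone model of the pattern (illustrative; ConstantPool and KlassModel below are toy stand-ins for the real classes):

#include <cassert>
#include <cstdint>
#include <string>
#include <vector>

typedef uint16_t u2;

// Toy stand-in for the VM's constant pool: index 0 is reserved/invalid.
struct ConstantPool {
  std::vector<std::string> symbols;
  const std::string* symbol_at(u2 i) const { return &symbols.at(i); }
};

// Index-based accessor in the style of the new InstanceKlass code:
// no per-klass Symbol* to refcount, just a u2 into the constant pool.
struct KlassModel {
  ConstantPool* _constants;
  u2 _source_file_name_index;
  const std::string* source_file_name() const {
    return (_source_file_name_index == 0) ?
        (const std::string*)NULL : _constants->symbol_at(_source_file_name_index);
  }
};

int main() {
  ConstantPool cp;
  cp.symbols.push_back("");          // slot 0 is reserved
  cp.symbols.push_back("Foo.java");  // slot 1
  KlassModel k = { &cp, 1 };
  assert(*k.source_file_name() == "Foo.java");
  k._source_file_name_index = 0;
  assert(k.source_file_name() == NULL);  // 0 means "not specified"
  return 0;
}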
@ -648,8 +658,16 @@ class InstanceKlass: public Klass {
  void set_initial_method_idnum(u2 value) { _idnum_allocated_count = value; }

  // generics support
  Symbol* generic_signature() const { return _generic_signature; }
  void set_generic_signature(Symbol* sig) { _generic_signature = sig; }
  Symbol* generic_signature() const {
    return (_generic_signature_index == 0) ?
        (Symbol*)NULL : _constants->symbol_at(_generic_signature_index);
  }
  u2 generic_signature_index() const {
    return _generic_signature_index;
  }
  void set_generic_signature_index(u2 sig_index) {
    _generic_signature_index = sig_index;
  }

  u2 enclosing_method_data(int offset);
  u2 enclosing_method_class_index() {

@ -704,6 +704,16 @@ class Klass : public Metadata {

  virtual void oop_verify_on(oop obj, outputStream* st);

  static bool is_null(narrowKlass obj);
  static bool is_null(Klass* obj);

  // klass encoding for klass pointer in objects.
  static narrowKlass encode_klass_not_null(Klass* v);
  static narrowKlass encode_klass(Klass* v);

  static Klass* decode_klass_not_null(narrowKlass v);
  static Klass* decode_klass(narrowKlass v);

 private:
  // barriers used by klass_oop_store
  void klass_update_barrier_set(oop v);

@ -1,5 +1,5 @@
/*
 * Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it

@ -25,6 +25,7 @@
#ifndef SHARE_VM_OOPS_KLASS_INLINE_HPP
#define SHARE_VM_OOPS_KLASS_INLINE_HPP

#include "memory/universe.hpp"
#include "oops/klass.hpp"
#include "oops/markOop.hpp"

@ -33,4 +34,41 @@ inline void Klass::set_prototype_header(markOop header) {
  _prototype_header = header;
}

inline bool Klass::is_null(Klass* obj)       { return obj == NULL; }
inline bool Klass::is_null(narrowKlass obj)  { return obj == 0; }

// Encoding and decoding for klass field.

inline bool check_klass_alignment(Klass* obj) {
  return (intptr_t)obj % KlassAlignmentInBytes == 0;
}

inline narrowKlass Klass::encode_klass_not_null(Klass* v) {
  assert(!is_null(v), "klass value can never be zero");
  assert(check_klass_alignment(v), "Address not aligned");
  int shift = Universe::narrow_klass_shift();
  uint64_t pd = (uint64_t)(pointer_delta((void*)v, Universe::narrow_klass_base(), 1));
  assert(KlassEncodingMetaspaceMax > pd, "change encoding max if new encoding");
  uint64_t result = pd >> shift;
  assert((result & CONST64(0xffffffff00000000)) == 0, "narrow klass pointer overflow");
  assert(decode_klass(result) == v, "reversibility");
  return (narrowKlass)result;
}

inline narrowKlass Klass::encode_klass(Klass* v) {
  return is_null(v) ? (narrowKlass)0 : encode_klass_not_null(v);
}

inline Klass* Klass::decode_klass_not_null(narrowKlass v) {
  assert(!is_null(v), "narrow klass value can never be zero");
  int shift = Universe::narrow_klass_shift();
  Klass* result = (Klass*)(void*)((uintptr_t)Universe::narrow_klass_base() + ((uintptr_t)v << shift));
  assert(check_klass_alignment(result), err_msg("address not aligned: " PTR_FORMAT, (void*) result));
  return result;
}

inline Klass* Klass::decode_klass(narrowKlass v) {
  return is_null(v) ? (Klass*)NULL : decode_klass_not_null(v);
}

#endif // SHARE_VM_OOPS_KLASS_INLINE_HPP

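The "reversibility" assert is the key invariant of the new encoding. A standalone model that exercises an encode/decode round trip with a 3-bit shift (illustrative; plain integers stand in for Klass* and the constants are assumptions):

#include <cassert>
#include <cstdint>

// Standalone model of the klass narrowing above: base + shift encoding of a
// 64-bit address into 32 bits, with the same reversibility check.
const uint64_t kBase  = 0x800000000ULL;  // e.g. 32 GB, like CompressedKlassPointersBase
const int      kShift = 3;               // LogKlassAlignmentInBytes

uint32_t encode_klass(uint64_t addr) {
  assert(addr % (1 << kShift) == 0 && addr >= kBase, "aligned, above base");
  uint64_t result = (addr - kBase) >> kShift;
  assert((result & 0xffffffff00000000ULL) == 0, "narrow klass pointer overflow");
  return (uint32_t)result;
}

uint64_t decode_klass(uint32_t v) {
  return kBase + ((uint64_t)v << kShift);
}

int main() {
  uint64_t addr = kBase + 0x12345678ULL * 8;         // some aligned metaspace address
  assert(decode_klass(encode_klass(addr)) == addr);  // reversibility
  return 0;
}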
@ -1,5 +1,5 @@
/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it

@ -62,7 +62,7 @@ class oopDesc {
  volatile markOop _mark;
  union _metadata {
    Klass* _klass;
    narrowOop _compressed_klass;
    narrowKlass _compressed_klass;
  } _metadata;

  // Fast access to barrier set. Must be initialized.

@ -84,7 +84,7 @@ class oopDesc {
  Klass* klass() const;
  Klass* klass_or_null() const volatile;
  Klass** klass_addr();
  narrowOop* compressed_klass_addr();
  narrowKlass* compressed_klass_addr();

  void set_klass(Klass* k);

@ -189,13 +189,6 @@ class oopDesc {
                                oop compare_value,
                                bool prebarrier = false);

  // klass encoding for klass pointer in objects.
  static narrowOop encode_klass_not_null(Klass* v);
  static narrowOop encode_klass(Klass* v);

  static Klass* decode_klass_not_null(narrowOop v);
  static Klass* decode_klass(narrowOop v);

  // Access to fields in an instanceOop through these methods.
  oop obj_field(int offset) const;
  volatile oop obj_field_volatile(int offset) const;

@ -35,7 +35,7 @@
#include "memory/specialized_oop_closures.hpp"
#include "oops/arrayKlass.hpp"
#include "oops/arrayOop.hpp"
#include "oops/klass.hpp"
#include "oops/klass.inline.hpp"
#include "oops/markOop.inline.hpp"
#include "oops/oop.hpp"
#include "runtime/atomic.hpp"
@ -70,7 +70,7 @@ inline markOop oopDesc::cas_set_mark(markOop new_mark, markOop old_mark) {

inline Klass* oopDesc::klass() const {
  if (UseCompressedKlassPointers) {
    return decode_klass_not_null(_metadata._compressed_klass);
    return Klass::decode_klass_not_null(_metadata._compressed_klass);
  } else {
    return _metadata._klass;
  }

@ -79,7 +79,7 @@ inline Klass* oopDesc::klass() const {
inline Klass* oopDesc::klass_or_null() const volatile {
  // can be NULL in CMS
  if (UseCompressedKlassPointers) {
    return decode_klass(_metadata._compressed_klass);
    return Klass::decode_klass(_metadata._compressed_klass);
  } else {
    return _metadata._klass;
  }
@ -87,7 +87,7 @@ inline Klass* oopDesc::klass_or_null() const volatile {

inline int oopDesc::klass_gap_offset_in_bytes() {
  assert(UseCompressedKlassPointers, "only applicable to compressed klass pointers");
  return oopDesc::klass_offset_in_bytes() + sizeof(narrowOop);
  return oopDesc::klass_offset_in_bytes() + sizeof(narrowKlass);
}

inline Klass** oopDesc::klass_addr() {

@ -97,9 +97,9 @@ inline Klass** oopDesc::klass_addr() {
  return (Klass**) &_metadata._klass;
}

inline narrowOop* oopDesc::compressed_klass_addr() {
inline narrowKlass* oopDesc::compressed_klass_addr() {
  assert(UseCompressedKlassPointers, "only called by compressed klass pointers");
  return (narrowOop*) &_metadata._compressed_klass;
  return &_metadata._compressed_klass;
}

inline void oopDesc::set_klass(Klass* k) {
@ -107,7 +107,7 @@ inline void oopDesc::set_klass(Klass* k) {
  assert(Universe::is_bootstrapping() || k != NULL, "must be a real Klass*");
  assert(Universe::is_bootstrapping() || k->is_klass(), "not a Klass*");
  if (UseCompressedKlassPointers) {
    *compressed_klass_addr() = encode_klass_not_null(k);
    *compressed_klass_addr() = Klass::encode_klass_not_null(k);
  } else {
    *klass_addr() = k;
  }

@ -127,7 +127,7 @@ inline void oopDesc::set_klass_to_list_ptr(oop k) {
  // This is only to be used during GC, for from-space objects, so no
  // barrier is needed.
  if (UseCompressedKlassPointers) {
    _metadata._compressed_klass = encode_heap_oop(k);  // may be null (parnew overflow handling)
    _metadata._compressed_klass = (narrowKlass)encode_heap_oop(k);  // may be null (parnew overflow handling)
  } else {
    _metadata._klass = (Klass*)(address)k;
  }

@ -136,7 +136,7 @@ inline void oopDesc::set_klass_to_list_ptr(oop k) {
inline oop oopDesc::list_ptr_from_klass() {
  // This is only to be used during GC, for from-space objects.
  if (UseCompressedKlassPointers) {
    return decode_heap_oop(_metadata._compressed_klass);
    return decode_heap_oop((narrowOop)_metadata._compressed_klass);
  } else {
    // Special case for GC
    return (oop)(address)_metadata._klass;
|
|||
// the right type and inlines the appopriate code).
|
||||
|
||||
inline bool oopDesc::is_null(oop obj) { return obj == NULL; }
|
||||
inline bool oopDesc::is_null(Klass* obj) { return obj == NULL; }
|
||||
inline bool oopDesc::is_null(narrowOop obj) { return obj == 0; }
|
||||
|
||||
// Algorithm for encoding and decoding oops from 64 bit pointers to 32 bit
|
||||
|
@ -186,9 +185,6 @@ inline bool oopDesc::is_null(narrowOop obj) { return obj == 0; }
|
|||
inline bool check_obj_alignment(oop obj) {
|
||||
return (intptr_t)obj % MinObjAlignmentInBytes == 0;
|
||||
}
|
||||
inline bool check_klass_alignment(Klass* obj) {
|
||||
return (intptr_t)obj % KlassAlignmentInBytes == 0;
|
||||
}
|
||||
|
||||
inline narrowOop oopDesc::encode_heap_oop_not_null(oop v) {
|
||||
assert(!is_null(v), "oop value can never be zero");
|
||||
|
@ -224,39 +220,6 @@ inline oop oopDesc::decode_heap_oop(narrowOop v) {
|
|||
inline oop oopDesc::decode_heap_oop_not_null(oop v) { return v; }
|
||||
inline oop oopDesc::decode_heap_oop(oop v) { return v; }
|
||||
|
||||
// Encoding and decoding for klass field. It is copied code, but someday
|
||||
// might not be the same as oop.
|
||||
|
||||
inline narrowOop oopDesc::encode_klass_not_null(Klass* v) {
|
||||
assert(!is_null(v), "klass value can never be zero");
|
||||
assert(check_klass_alignment(v), "Address not aligned");
|
||||
address base = Universe::narrow_klass_base();
|
||||
int shift = Universe::narrow_klass_shift();
|
||||
uint64_t pd = (uint64_t)(pointer_delta((void*)v, (void*)base, 1));
|
||||
assert(KlassEncodingMetaspaceMax > pd, "change encoding max if new encoding");
|
||||
uint64_t result = pd >> shift;
|
||||
assert((result & CONST64(0xffffffff00000000)) == 0, "narrow klass pointer overflow");
|
||||
assert(decode_klass(result) == v, "reversibility");
|
||||
return (narrowOop)result;
|
||||
}
|
||||
|
||||
inline narrowOop oopDesc::encode_klass(Klass* v) {
|
||||
return (is_null(v)) ? (narrowOop)0 : encode_klass_not_null(v);
|
||||
}
|
||||
|
||||
inline Klass* oopDesc::decode_klass_not_null(narrowOop v) {
|
||||
assert(!is_null(v), "narrow oop value can never be zero");
|
||||
address base = Universe::narrow_klass_base();
|
||||
int shift = Universe::narrow_klass_shift();
|
||||
Klass* result = (Klass*)(void*)((uintptr_t)base + ((uintptr_t)v << shift));
|
||||
assert(check_klass_alignment(result), err_msg("address not aligned: " PTR_FORMAT, (void*) result));
|
||||
return result;
|
||||
}
|
||||
|
||||
inline Klass* oopDesc::decode_klass(narrowOop v) {
|
||||
return is_null(v) ? (Klass*)NULL : decode_klass_not_null(v);
|
||||
}
|
||||
|
||||
// Load an oop out of the Java heap as is without decoding.
|
||||
// Called by GC to check for null before decoding.
|
||||
inline oop oopDesc::load_heap_oop(oop* p) { return *p; }
|
||||
|
|
|
@ -1,5 +1,5 @@
/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it

@ -33,6 +33,10 @@
// of B, A's representation is a prefix of B's representation.

typedef juint narrowOop; // Offset instead of address for an oop within a java object

// If compressed klass pointers then use narrowKlass.
typedef juint narrowKlass;

typedef void* OopOrNarrowOopStar;
typedef class markOopDesc* markOop;

@ -1554,6 +1554,20 @@ bool VM_RedefineClasses::rewrite_cp_refs(instanceKlassHandle scratch_class,
    return false;
  }

  // rewrite source file name index:
  u2 source_file_name_idx = scratch_class->source_file_name_index();
  if (source_file_name_idx != 0) {
    u2 new_source_file_name_idx = find_new_index(source_file_name_idx);
    scratch_class->set_source_file_name_index(new_source_file_name_idx);
  }

  // rewrite class generic signature index:
  u2 generic_signature_index = scratch_class->generic_signature_index();
  if (generic_signature_index != 0) {
    u2 new_generic_signature_index = find_new_index(generic_signature_index);
    scratch_class->set_generic_signature_index(new_generic_signature_index);
  }

  return true;
} // end rewrite_cp_refs()

@ -3370,7 +3384,8 @@ void VM_RedefineClasses::redefine_single_class(jclass the_jclass,
  // Leave arrays of jmethodIDs and itable index cache unchanged

  // Copy the "source file name" attribute from new class version
  the_class->set_source_file_name(scratch_class->source_file_name());
  the_class->set_source_file_name_index(
    scratch_class->source_file_name_index());

  // Copy the "source debug extension" attribute from new class version
  the_class->set_source_debug_extension(

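Since the attributes are now plain constant-pool indices, redefinition must remap them into the merged pool, as the first hunk above does. A toy sketch of that remapping step (find_new_index here is a hypothetical table lookup, not the real method):

#include <cassert>
#include <cstdint>
#include <map>

typedef uint16_t u2;

// Toy stand-in for VM_RedefineClasses::find_new_index(): maps an index in
// the scratch class's pool to its slot in the merged constant pool.
u2 find_new_index(const std::map<u2, u2>& remap, u2 old_index) {
  std::map<u2, u2>::const_iterator it = remap.find(old_index);
  return (it == remap.end()) ? 0 : it->second;
}

int main() {
  std::map<u2, u2> remap;
  remap[5] = 17;                      // old index 5 landed at 17 after merging
  u2 source_file_name_idx = 5;
  if (source_file_name_idx != 0) {    // 0 means "attribute not present"
    source_file_name_idx = find_new_index(remap, source_file_name_idx);
  }
  assert(source_file_name_idx == 17);
  return 0;
}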
@ -1393,10 +1393,8 @@ bool verify_object_alignment() {

inline uintx max_heap_for_compressed_oops() {
  // Avoid sign flip.
  if (OopEncodingHeapMax < ClassMetaspaceSize + os::vm_page_size()) {
    return 0;
  }
  LP64_ONLY(return OopEncodingHeapMax - ClassMetaspaceSize - os::vm_page_size());
  assert(OopEncodingHeapMax > (uint64_t)os::vm_page_size(), "Unusual page size");
  LP64_ONLY(return OopEncodingHeapMax - os::vm_page_size());
  NOT_LP64(ShouldNotReachHere(); return 0);
}

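Concretely: with a 3-bit shift, OopEncodingHeapMax is 4 GB * 8 = 32 GB, so the largest compressed-oops heap is 32 GB minus one guard page. A quick standalone check (assuming a 4 KB page size):

#include <cstdint>
#include <cstdio>

int main() {
  // 32-bit narrow oops shifted by 3 cover 4G * 8 = 32 GB of heap.
  const uint64_t OopEncodingHeapMax = (uint64_t(1) << 32) << 3;
  const uint64_t page = 4096;  // assumed page size
  printf("max compressed-oops heap: %llu bytes (~%llu GB)\n",
         (unsigned long long)(OopEncodingHeapMax - page),
         (unsigned long long)((OopEncodingHeapMax - page) >> 30));
  return 0;
}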
@ -1448,6 +1446,35 @@ void Arguments::set_use_compressed_oops() {
#endif // ZERO
}


// NOTE: set_use_compressed_klass_ptrs() must be called after calling
// set_use_compressed_oops().
void Arguments::set_use_compressed_klass_ptrs() {
#ifndef ZERO
#ifdef _LP64
  // UseCompressedOops must be on for UseCompressedKlassPointers to be on.
  if (!UseCompressedOops) {
    if (UseCompressedKlassPointers) {
      warning("UseCompressedKlassPointers requires UseCompressedOops");
    }
    FLAG_SET_DEFAULT(UseCompressedKlassPointers, false);
  } else {
    // Turn on UseCompressedKlassPointers too
    if (FLAG_IS_DEFAULT(UseCompressedKlassPointers)) {
      FLAG_SET_ERGO(bool, UseCompressedKlassPointers, true);
    }
    // Check the ClassMetaspaceSize to make sure we use compressed klass ptrs.
    if (UseCompressedKlassPointers) {
      if (ClassMetaspaceSize > KlassEncodingMetaspaceMax) {
        warning("Class metaspace size is too large for UseCompressedKlassPointers");
        FLAG_SET_DEFAULT(UseCompressedKlassPointers, false);
      }
    }
  }
#endif // _LP64
#endif // !ZERO
}

void Arguments::set_ergonomics_flags() {

  if (os::is_server_class_machine()) {
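The dependency rules above are easy to restate as a small decision function. A standalone model (toy booleans and sizes; not the VM's flag machinery, which also honors FLAG_IS_DEFAULT):

#include <cassert>

// Toy model of the ergonomics above: compressed klass pointers follow
// compressed oops, and are vetoed if the class metaspace cannot be encoded.
bool decide_compressed_klass_ptrs(bool use_compressed_oops,
                                  unsigned long long class_metaspace_size,
                                  unsigned long long klass_encoding_max) {
  if (!use_compressed_oops) return false;                      // hard requirement
  if (class_metaspace_size > klass_encoding_max) return false; // cannot encode
  return true;                                                 // default: on
}

int main() {
  const unsigned long long kMax = 32ULL << 30;  // 32 GB encoding range
  assert(decide_compressed_klass_ptrs(true, 1ULL << 30, kMax));
  assert(!decide_compressed_klass_ptrs(false, 1ULL << 30, kMax));
  assert(!decide_compressed_klass_ptrs(true, 64ULL << 30, kMax));
  return 0;
}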
@ -1470,7 +1497,8 @@ void Arguments::set_ergonomics_flags() {
    // server performance. On server class machines, keep the default
    // off unless it is asked for. Future work: either add bytecode rewriting
    // at link time, or rewrite bytecodes in non-shared methods.
    if (!DumpSharedSpaces && !RequireSharedSpaces) {
    if (!DumpSharedSpaces && !RequireSharedSpaces &&
        (FLAG_IS_DEFAULT(UseSharedSpaces) || !UseSharedSpaces)) {
      no_shared_spaces();
    }
  }
@ -1478,33 +1506,11 @@ void Arguments::set_ergonomics_flags() {
#ifndef ZERO
#ifdef _LP64
  set_use_compressed_oops();
  // UseCompressedOops must be on for UseCompressedKlassPointers to be on.
  if (!UseCompressedOops) {
    if (UseCompressedKlassPointers) {
      warning("UseCompressedKlassPointers requires UseCompressedOops");
    }
    FLAG_SET_DEFAULT(UseCompressedKlassPointers, false);
  } else {
    // Turn on UseCompressedKlassPointers too
    if (FLAG_IS_DEFAULT(UseCompressedKlassPointers)) {
      FLAG_SET_ERGO(bool, UseCompressedKlassPointers, true);
    }
    // Set the ClassMetaspaceSize to something that will not need to be
    // expanded, since it cannot be expanded.
    if (UseCompressedKlassPointers) {
      if (ClassMetaspaceSize > KlassEncodingMetaspaceMax) {
        warning("Class metaspace size is too large for UseCompressedKlassPointers");
        FLAG_SET_DEFAULT(UseCompressedKlassPointers, false);
      } else if (FLAG_IS_DEFAULT(ClassMetaspaceSize)) {
        // 100,000 classes seems like a good size, so 100M assumes around 1K
        // per klass. The vtable and oopMap is embedded so we don't have a fixed
        // size per klass. Eventually, this will be parameterized because it
        // would also be useful to determine the optimal size of the
        // systemDictionary.
        FLAG_SET_ERGO(uintx, ClassMetaspaceSize, 100*M);
      }
    }
  }

  // set_use_compressed_klass_ptrs() must be called after calling
  // set_use_compressed_oops().
  set_use_compressed_klass_ptrs();

  // Also checks that certain machines are slower with compressed oops
  // in vm_version initialization code.
#endif // _LP64
@ -2153,7 +2159,7 @@ bool Arguments::check_vm_args_consistency() {

  status = status && verify_object_alignment();

  status = status && verify_min_value(ClassMetaspaceSize, 1*M,
  status = status && verify_interval(ClassMetaspaceSize, 1*M, 3*G,
                                      "ClassMetaspaceSize");

  status = status && verify_interval(MarkStackSizeMax,
@ -3273,33 +3279,22 @@ jint Arguments::parse_options_environment_variable(const char* name, SysClassPat
}

void Arguments::set_shared_spaces_flags() {
#ifdef _LP64
  const bool must_share = DumpSharedSpaces || RequireSharedSpaces;

  // CompressedOops cannot be used with CDS. The offsets of oopmaps and
  // static fields are incorrect in the archive. With some more clever
  // initialization, this restriction can probably be lifted.
  if (UseCompressedOops) {
    if (must_share) {
      warning("disabling compressed oops because of %s",
              DumpSharedSpaces ? "-Xshare:dump" : "-Xshare:on");
      FLAG_SET_CMDLINE(bool, UseCompressedOops, false);
      FLAG_SET_CMDLINE(bool, UseCompressedKlassPointers, false);
    } else {
      // Prefer compressed oops to class data sharing
      if (UseSharedSpaces && Verbose) {
        warning("turning off use of shared archive because of compressed oops");
      }
      no_shared_spaces();
    }
  }
#endif

  if (DumpSharedSpaces) {
    if (RequireSharedSpaces) {
      warning("cannot dump shared archive while using shared archive");
    }
    UseSharedSpaces = false;
#ifdef _LP64
    if (!UseCompressedOops || !UseCompressedKlassPointers) {
      vm_exit_during_initialization(
        "Cannot dump shared archive when UseCompressedOops or UseCompressedKlassPointers is off.", NULL);
    }
  } else {
    // UseCompressedOops and UseCompressedKlassPointers must be on for UseSharedSpaces.
    if (!UseCompressedOops || !UseCompressedKlassPointers) {
      no_shared_spaces();
    }
#endif
  }
}

@ -309,6 +309,7 @@ class Arguments : AllStatic {
  static void set_g1_gc_flags();
  // GC ergonomics
  static void set_use_compressed_oops();
  static void set_use_compressed_klass_ptrs();
  static void set_ergonomics_flags();
  static void set_shared_spaces_flags();
  // limits the given memory size by the maximum amount of memory this process is

@ -3036,7 +3036,7 @@ class CommandLineFlags {
  product(uintx, MaxMetaspaceSize, max_uintx,                               \
          "Maximum size of Metaspaces (in bytes)")                          \
                                                                            \
  product(uintx, ClassMetaspaceSize, 2*M,                                   \
  product(uintx, ClassMetaspaceSize, 1*G,                                   \
          "Maximum size of InstanceKlass area in Metaspace used for "       \
          "UseCompressedKlassPointers")                                     \
                                                                            \

@ -1,5 +1,5 @@
/*
 * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved.
 * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it

@ -95,7 +95,6 @@ jint init_globals() {
  management_init();
  bytecodes_init();
  classLoader_init();
  Metaspace::global_initialize(); // must be before codeCache
  codeCache_init();
  VM_Version_init();
  os_init_globals();

@ -294,7 +294,7 @@ typedef BinaryTreeDictionary<Metablock, FreeList> MetablockTreeDictionary;
  nonstatic_field(InstanceKlass, _java_fields_count, u2)                    \
  nonstatic_field(InstanceKlass, _constants, ConstantPool*)                 \
  nonstatic_field(InstanceKlass, _class_loader_data, ClassLoaderData*)      \
  nonstatic_field(InstanceKlass, _source_file_name, Symbol*)                \
  nonstatic_field(InstanceKlass, _source_file_name_index, u2)               \
  nonstatic_field(InstanceKlass, _source_debug_extension, char*)            \
  nonstatic_field(InstanceKlass, _inner_classes, Array<jushort>*)           \
  nonstatic_field(InstanceKlass, _nonstatic_field_size, int)                \

@ -313,7 +313,7 @@ typedef BinaryTreeDictionary<Metablock, FreeList> MetablockTreeDictionary;
  nonstatic_field(InstanceKlass, _jni_ids, JNIid*)                          \
  nonstatic_field(InstanceKlass, _osr_nmethods_head, nmethod*)              \
  nonstatic_field(InstanceKlass, _breakpoints, BreakpointInfo*)             \
  nonstatic_field(InstanceKlass, _generic_signature, Symbol*)               \
  nonstatic_field(InstanceKlass, _generic_signature_index, u2)              \
  nonstatic_field(InstanceKlass, _methods_jmethod_ids, jmethodID*)          \
  nonstatic_field(InstanceKlass, _methods_cached_itable_indices, int*)      \
  volatile_nonstatic_field(InstanceKlass, _idnum_allocated_count, u2)       \

@ -231,6 +231,8 @@ const char* Abstract_VM_Version::internal_vm_info_string() {
      #define HOTSPOT_BUILD_COMPILER "Workshop 5.9"
    #elif __SUNPRO_CC == 0x5100
      #define HOTSPOT_BUILD_COMPILER "Sun Studio 12u1"
    #elif __SUNPRO_CC == 0x5120
      #define HOTSPOT_BUILD_COMPILER "Sun Studio 12u3"
    #else
      #define HOTSPOT_BUILD_COMPILER "unknown Workshop:" XSTR(__SUNPRO_CC)
    #endif

@ -362,6 +362,8 @@ const int KlassAlignment = KlassAlignmentInBytes / HeapWordSize;
// Klass encoding metaspace max size
const uint64_t KlassEncodingMetaspaceMax = (uint64_t(max_juint) + 1) << LogKlassAlignmentInBytes;

const jlong CompressedKlassPointersBase = NOT_LP64(0) LP64_ONLY(CONST64(0x800000000)); // 32*G

// Machine dependent stuff

#ifdef TARGET_ARCH_x86
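A quick sanity check of the constant: max_juint + 1 is 2^32, and shifting by LogKlassAlignmentInBytes (3 on 64-bit HotSpot, an assumption here) gives 32 GB, matching the 0x800000000 base above. In standalone form:

#include <cassert>
#include <cstdint>

int main() {
  const int LogKlassAlignmentInBytes = 3;  // assumed, as on 64-bit HotSpot
  const uint64_t max_juint = 0xffffffffULL;
  const uint64_t KlassEncodingMetaspaceMax = (max_juint + 1) << LogKlassAlignmentInBytes;
  assert(KlassEncodingMetaspaceMax == 0x800000000ULL);  // 32 GB == CompressedKlassPointersBase
  return 0;
}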