8003985: Support @Contended Annotation - JEP 142

HotSpot changes to support @Contended annotation.

Reviewed-by: coleenp, kvn, jrose
Aleksey Shipilev 2013-01-14 15:17:47 +01:00 committed by Jesper Wilhelmsson
parent ae35dfeeec
commit 0614ed6542
11 changed files with 617 additions and 92 deletions
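
For context, this is how the annotation is consumed on the Java side: a minimal sketch against JDK 8's sun.misc.Contended, with made-up class and group names. Outside the boot class path it only takes effect with -XX:-RestrictContended, per the privilege checks this patch adds below.

import sun.misc.Contended;

public class ContendedUsage {
    @Contended               // stand-alone field: padded away from all neighbors
    volatile long counter;

    @Contended("readers")    // named group: padded away from other groups,
    volatile long read1;     // but packed together with its group mates
    @Contended("readers")
    volatile long read2;
}

@Contended                   // class-level form: pads around the whole
class PaddedBox {            // field block of this class
    volatile Object ref;
}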

hotspot/src/share/vm/classfile/classFileParser.cpp

@@ -970,6 +970,12 @@ void ClassFileParser::parse_field_attributes(ClassLoaderData* loader_data,
runtime_visible_annotations_length = attribute_length;
runtime_visible_annotations = cfs->get_u1_buffer();
assert(runtime_visible_annotations != NULL, "null visible annotations");
parse_annotations(loader_data,
runtime_visible_annotations,
runtime_visible_annotations_length,
cp,
parsed_annotations,
CHECK);
cfs->skip_u1(runtime_visible_annotations_length, CHECK);
} else if (PreserveAllAnnotations && attribute_name == vmSymbols::tag_runtime_invisible_annotations()) {
runtime_invisible_annotations_length = attribute_length;
@@ -1216,19 +1222,16 @@ Array<u2>* ClassFileParser::parse_fields(ClassLoaderData* loader_data,
field->initialize(access_flags.as_short(),
name_index,
signature_index,
constantvalue_index,
0);
if (parsed_annotations.has_any_annotations())
parsed_annotations.apply_to(field);
constantvalue_index);
BasicType type = cp->basic_type_for_signature_at(signature_index);
// Remember how many oops we encountered and compute allocation type
FieldAllocationType atype = fac->update(is_static, type);
field->set_allocation_type(atype);
// The correct offset is computed later (all oop fields will be located together)
// We temporarily store the allocation type in the offset field
field->set_offset(atype);
// After field is initialized with type, we can augment it with aux info
if (parsed_annotations.has_any_annotations())
parsed_annotations.apply_to(field);
}
int index = length;
@@ -1259,17 +1262,13 @@ Array<u2>* ClassFileParser::parse_fields(ClassLoaderData* loader_data,
field->initialize(JVM_ACC_FIELD_INTERNAL,
injected[n].name_index,
injected[n].signature_index,
0,
0);
BasicType type = FieldType::basic_type(injected[n].signature());
// Remember how many oops we encountered and compute allocation type
FieldAllocationType atype = fac->update(false, type);
// The correct offset is computed later (all oop fields will be located together)
// We temporarily store the allocation type in the offset field
field->set_offset(atype);
field->set_allocation_type(atype);
index++;
}
}
@@ -1735,7 +1734,8 @@ int ClassFileParser::skip_annotation_value(u1* buffer, int limit, int index) {
}
// Sift through annotations, looking for those significant to the VM:
void ClassFileParser::parse_annotations(u1* buffer, int limit,
void ClassFileParser::parse_annotations(ClassLoaderData* loader_data,
u1* buffer, int limit,
constantPoolHandle cp,
ClassFileParser::AnnotationCollector* coll,
TRAPS) {
@@ -1752,9 +1752,12 @@ void ClassFileParser::parse_annotations(u1* buffer, int limit,
e_type_off = 7, // utf8 such as 'Ljava/lang/annotation/RetentionPolicy;'
e_con_off = 9, // utf8 payload, such as 'SOURCE', 'CLASS', 'RUNTIME'
e_size = 11, // end of 'e' annotation
c_tag_val = 'c',
c_con_off = 7, // utf8 payload, such as 'I' or 'Ljava/lang/String;'
c_tag_val = 'c', // payload is type
c_con_off = 7, // utf8 payload, such as 'I'
c_size = 9, // end of 'c' annotation
s_tag_val = 's', // payload is String
s_con_off = 7, // utf8 payload, such as 'Ljava/lang/String;'
s_size = 9,
min_size = 6 // smallest possible size (zero members)
};
while ((--nann) >= 0 && (index-2 + min_size <= limit)) {
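
To make the offsets above concrete, here is the wire layout this loop indexes into for a one-member annotation such as @Contended("group"), per JVMS 4.7.16 (offsets relative to the annotation start):

offset 0  u2  type index          e.g. 'Lsun/misc/Contended;'
offset 2  u2  member count        1
offset 4  u2  member name index   'value'
offset 6  u1  tag                 's' (s_tag_val: payload is String)
offset 7  u2  constant index      the group name (s_con_off)
offset 9  end of annotation       (s_size)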
@@ -1773,57 +1776,65 @@ void ClassFileParser::parse_annotations(u1* buffer, int limit,
}
// Here is where parsing particular annotations will take place.
AnnotationCollector::ID id = coll->annotation_index(aname);
AnnotationCollector::ID id = coll->annotation_index(loader_data, aname);
if (id == AnnotationCollector::_unknown) continue;
coll->set_annotation(id);
// If there are no values, just set the bit and move on:
if (count == 0) continue;
// For the record, here is how annotation payloads can be collected.
// Suppose we want to capture @Retention.value. Here is how:
//if (id == AnnotationCollector::_class_Retention) {
// Symbol* payload = NULL;
// if (count == 1
// && e_size == (index0 - index) // match size
// && e_tag_val == *(abase + tag_off)
// && (check_symbol_at(cp, Bytes::get_Java_u2(abase + e_type_off))
// == vmSymbols::RetentionPolicy_signature())
// && member == vmSymbols::value_name()) {
// payload = check_symbol_at(cp, Bytes::get_Java_u2(abase + e_con_off));
// }
// check_property(payload != NULL,
// "Invalid @Retention annotation at offset %u in class file %s",
// index0, CHECK);
// if (payload != NULL) {
// payload->increment_refcount();
// coll->_class_RetentionPolicy = payload;
// }
//}
if (id == AnnotationCollector::_sun_misc_Contended) {
if (count == 1
&& s_size == (index - index0) // match size
&& s_tag_val == *(abase + tag_off)
&& member == vmSymbols::value_name()) {
u2 group_index = Bytes::get_Java_u2(abase + s_con_off);
coll->set_contended_group(group_index);
} else {
coll->set_contended_group(0); // default contended group
}
coll->set_contended(true);
} else {
coll->set_contended(false);
}
}
}
ClassFileParser::AnnotationCollector::ID ClassFileParser::AnnotationCollector::annotation_index(Symbol* name) {
ClassFileParser::AnnotationCollector::ID
ClassFileParser::AnnotationCollector::annotation_index(ClassLoaderData* loader_data,
Symbol* name) {
vmSymbols::SID sid = vmSymbols::find_sid(name);
bool privileged = false;
if (loader_data->is_the_null_class_loader_data()) {
// Privileged code can use all annotations. Other code silently drops some.
privileged = true;
}
switch (sid) {
case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_ForceInline_signature):
if (_location != _in_method) break; // only allow for methods
if (!privileged) break; // only allow in privileged code
return _method_ForceInline;
case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_DontInline_signature):
if (_location != _in_method) break; // only allow for methods
if (!privileged) break; // only allow in privileged code
return _method_DontInline;
case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_LambdaForm_Compiled_signature):
if (_location != _in_method) break; // only allow for methods
if (!privileged) break; // only allow in privileged code
return _method_LambdaForm_Compiled;
case vmSymbols::VM_SYMBOL_ENUM_NAME(java_lang_invoke_LambdaForm_Hidden_signature):
if (_location != _in_method) break; // only allow for methods
if (!privileged) break; // only allow in privileged code
return _method_LambdaForm_Hidden;
case vmSymbols::VM_SYMBOL_ENUM_NAME(sun_misc_Contended_signature):
if (_location != _in_field && _location != _in_class) break; // only allow for fields and classes
if (!EnableContended || (RestrictContended && !privileged)) break; // honor privileges
return _sun_misc_Contended;
default: break;
}
return AnnotationCollector::_unknown;
}
void ClassFileParser::FieldAnnotationCollector::apply_to(FieldInfo* f) {
fatal("no field annotations yet");
if (is_contended())
f->set_contended_group(contended_group());
}
void ClassFileParser::MethodAnnotationCollector::apply_to(methodHandle m) {
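
The upshot of the privilege checks above: classes defined by the boot (null) class loader may use @Contended and the java.lang.invoke annotations freely, while for everything else @Contended is silently dropped unless RestrictContended is switched off; EnableContended turns the feature off globally. Both flags are introduced elsewhere in this changeset.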
@@ -1838,7 +1849,7 @@ void ClassFileParser::MethodAnnotationCollector::apply_to(methodHandle m) {
}
void ClassFileParser::ClassAnnotationCollector::apply_to(instanceKlassHandle k) {
fatal("no class annotations yet");
k->set_is_contended(is_contended());
}
@@ -2181,7 +2192,8 @@ methodHandle ClassFileParser::parse_method(ClassLoaderData* loader_data,
runtime_visible_annotations_length = method_attribute_length;
runtime_visible_annotations = cfs->get_u1_buffer();
assert(runtime_visible_annotations != NULL, "null visible annotations");
parse_annotations(runtime_visible_annotations,
parse_annotations(loader_data,
runtime_visible_annotations,
runtime_visible_annotations_length, cp, &parsed_annotations,
CHECK_(nullHandle));
cfs->skip_u1(runtime_visible_annotations_length, CHECK_(nullHandle));
@@ -2886,7 +2898,8 @@ void ClassFileParser::parse_classfile_attributes(ClassLoaderData* loader_data,
runtime_visible_annotations_length = attribute_length;
runtime_visible_annotations = cfs->get_u1_buffer();
assert(runtime_visible_annotations != NULL, "null visible annotations");
parse_annotations(runtime_visible_annotations,
parse_annotations(loader_data,
runtime_visible_annotations,
runtime_visible_annotations_length,
cp,
parsed_annotations,
@@ -3405,18 +3418,21 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
// Size of Java itable (in words)
itable_size = access_flags.is_interface() ? 0 : klassItable::compute_itable_size(transitive_interfaces);
// get the padding width from the option
// TODO: Ask VM about specific CPU we are running on
int pad_size = ContendedPaddingWidth;
// Field size and offset computation
int nonstatic_field_size = super_klass() == NULL ? 0 : super_klass->nonstatic_field_size();
#ifndef PRODUCT
int orig_nonstatic_field_size = 0;
#endif
int static_field_size = 0;
int next_static_oop_offset;
int next_static_double_offset;
int next_static_word_offset;
int next_static_short_offset;
int next_static_byte_offset;
int next_static_type_offset;
int next_static_padded_offset;
int next_nonstatic_oop_offset;
int next_nonstatic_double_offset;
int next_nonstatic_word_offset;
@@ -3426,11 +3442,36 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
int first_nonstatic_oop_offset;
int first_nonstatic_field_offset;
int next_nonstatic_field_offset;
int next_nonstatic_padded_offset;
// Count the contended fields by type.
int static_contended_count = 0;
int nonstatic_contended_count = 0;
FieldAllocationCount fac_contended;
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
if (fs.is_contended()) {
fac_contended.count[atype]++;
if (fs.access_flags().is_static()) {
static_contended_count++;
} else {
nonstatic_contended_count++;
}
}
}
int contended_count = static_contended_count + nonstatic_contended_count;
// Calculate the starting byte offsets
next_static_oop_offset = InstanceMirrorKlass::offset_of_static_fields();
// class is contended, pad before all the fields
if (parsed_annotations.is_contended()) {
next_static_oop_offset += pad_size;
}
next_static_double_offset = next_static_oop_offset +
(fac.count[STATIC_OOP] * heapOopSize);
((fac.count[STATIC_OOP] - fac_contended.count[STATIC_OOP]) * heapOopSize);
if ( fac.count[STATIC_DOUBLE] &&
(Universe::field_type_should_be_aligned(T_DOUBLE) ||
Universe::field_type_should_be_aligned(T_LONG)) ) {
@@ -3438,25 +3479,29 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
}
next_static_word_offset = next_static_double_offset +
(fac.count[STATIC_DOUBLE] * BytesPerLong);
((fac.count[STATIC_DOUBLE] - fac_contended.count[STATIC_DOUBLE]) * BytesPerLong);
next_static_short_offset = next_static_word_offset +
(fac.count[STATIC_WORD] * BytesPerInt);
((fac.count[STATIC_WORD] - fac_contended.count[STATIC_WORD]) * BytesPerInt);
next_static_byte_offset = next_static_short_offset +
(fac.count[STATIC_SHORT] * BytesPerShort);
next_static_type_offset = align_size_up((next_static_byte_offset +
fac.count[STATIC_BYTE] ), wordSize );
static_field_size = (next_static_type_offset -
next_static_oop_offset) / wordSize;
((fac.count[STATIC_SHORT] - fac_contended.count[STATIC_SHORT]) * BytesPerShort);
next_static_padded_offset = next_static_byte_offset +
((fac.count[STATIC_BYTE] - fac_contended.count[STATIC_BYTE]) * 1);
first_nonstatic_field_offset = instanceOopDesc::base_offset_in_bytes() +
nonstatic_field_size * heapOopSize;
// class is contended, pad before all the fields
if (parsed_annotations.is_contended()) {
first_nonstatic_field_offset += pad_size;
}
next_nonstatic_field_offset = first_nonstatic_field_offset;
unsigned int nonstatic_double_count = fac.count[NONSTATIC_DOUBLE];
unsigned int nonstatic_word_count = fac.count[NONSTATIC_WORD];
unsigned int nonstatic_short_count = fac.count[NONSTATIC_SHORT];
unsigned int nonstatic_byte_count = fac.count[NONSTATIC_BYTE];
unsigned int nonstatic_oop_count = fac.count[NONSTATIC_OOP];
unsigned int nonstatic_double_count = fac.count[NONSTATIC_DOUBLE] - fac_contended.count[NONSTATIC_DOUBLE];
unsigned int nonstatic_word_count = fac.count[NONSTATIC_WORD] - fac_contended.count[NONSTATIC_WORD];
unsigned int nonstatic_short_count = fac.count[NONSTATIC_SHORT] - fac_contended.count[NONSTATIC_SHORT];
unsigned int nonstatic_byte_count = fac.count[NONSTATIC_BYTE] - fac_contended.count[NONSTATIC_BYTE];
unsigned int nonstatic_oop_count = fac.count[NONSTATIC_OOP] - fac_contended.count[NONSTATIC_OOP];
bool super_has_nonstatic_fields =
(super_klass() != NULL && super_klass->has_nonstatic_fields());
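
As a worked example of these subtractions: a class with four plain int fields and one @Contended int has fac.count[NONSTATIC_WORD] == 5 and fac_contended.count[NONSTATIC_WORD] == 1, so the regular word block reserves room for only four of them; the contended field is placed later, in the padded region that starts at next_nonstatic_padded_offset.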
@@ -3529,12 +3574,12 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
}
if( allocation_style == 0 ) {
// Fields order: oops, longs/doubles, ints, shorts/chars, bytes
// Fields order: oops, longs/doubles, ints, shorts/chars, bytes, padded fields
next_nonstatic_oop_offset = next_nonstatic_field_offset;
next_nonstatic_double_offset = next_nonstatic_oop_offset +
(nonstatic_oop_count * heapOopSize);
} else if( allocation_style == 1 ) {
// Fields order: longs/doubles, ints, shorts/chars, bytes, oops
// Fields order: longs/doubles, ints, shorts/chars, bytes, oops, padded fields
next_nonstatic_double_offset = next_nonstatic_field_offset;
} else if( allocation_style == 2 ) {
// Fields allocation: oops fields in super and sub classes are together.
@@ -3613,27 +3658,33 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
(nonstatic_word_count * BytesPerInt);
next_nonstatic_byte_offset = next_nonstatic_short_offset +
(nonstatic_short_count * BytesPerShort);
next_nonstatic_padded_offset = next_nonstatic_byte_offset +
nonstatic_byte_count;
int notaligned_offset;
if( allocation_style == 0 ) {
notaligned_offset = next_nonstatic_byte_offset + nonstatic_byte_count;
} else { // allocation_style == 1
next_nonstatic_oop_offset = next_nonstatic_byte_offset + nonstatic_byte_count;
// let oops jump before padding with this allocation style
if( allocation_style == 1 ) {
next_nonstatic_oop_offset = next_nonstatic_padded_offset;
if( nonstatic_oop_count > 0 ) {
next_nonstatic_oop_offset = align_size_up(next_nonstatic_oop_offset, heapOopSize);
}
notaligned_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
next_nonstatic_padded_offset = next_nonstatic_oop_offset + (nonstatic_oop_count * heapOopSize);
}
next_nonstatic_type_offset = align_size_up(notaligned_offset, heapOopSize );
nonstatic_field_size = nonstatic_field_size + ((next_nonstatic_type_offset
- first_nonstatic_field_offset)/heapOopSize);
// Iterate over fields again and compute correct offsets.
// The field allocation type was temporarily stored in the offset slot.
// oop fields are located before non-oop fields (static and non-static).
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
// skip already laid out fields
if (fs.is_offset_set()) continue;
// contended fields are handled below
if (fs.is_contended()) continue;
int real_offset;
FieldAllocationType atype = (FieldAllocationType) fs.offset();
FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
// pack the rest of the fields
switch (atype) {
case STATIC_OOP:
real_offset = next_static_oop_offset;
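
A note on the switch from fs.offset() to fs.allocation_type() in this loop: layout is now multi-pass (regular fields here, contended non-statics and statics below), so the allocation type can no longer be stashed temporarily in the offset slot; it gets a dedicated slot in the field info, and is_offset_set() tells the later passes which fields are already placed.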
@@ -3722,13 +3773,225 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
fs.set_offset(real_offset);
}
// Handle the contended cases.
//
// Each contended field should not intersect the cache line with another contended field.
// In the absence of alignment information, we end up with pessimistically separating
// the fields with full-width padding.
//
// Additionally, this should not break alignment for the fields, so we round the alignment up
// for each field.
if (contended_count > 0) {
// if there is at least one contended field, we need to have pre-padding for them
if (nonstatic_contended_count > 0) {
next_nonstatic_padded_offset += pad_size;
}
// collect all contended groups
BitMap bm(cp->size());
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
// skip already laid out fields
if (fs.is_offset_set()) continue;
if (fs.is_contended()) {
bm.set_bit(fs.contended_group());
}
}
int current_group = -1;
while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
// skip already laid out fields
if (fs.is_offset_set()) continue;
// skip non-contended fields and fields from different group
if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
// handle statics below
if (fs.access_flags().is_static()) continue;
int real_offset;
FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
switch (atype) {
case NONSTATIC_BYTE:
next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, 1);
real_offset = next_nonstatic_padded_offset;
next_nonstatic_padded_offset += 1;
break;
case NONSTATIC_SHORT:
next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerShort);
real_offset = next_nonstatic_padded_offset;
next_nonstatic_padded_offset += BytesPerShort;
break;
case NONSTATIC_WORD:
next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerInt);
real_offset = next_nonstatic_padded_offset;
next_nonstatic_padded_offset += BytesPerInt;
break;
case NONSTATIC_DOUBLE:
next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, BytesPerLong);
real_offset = next_nonstatic_padded_offset;
next_nonstatic_padded_offset += BytesPerLong;
break;
case NONSTATIC_OOP:
next_nonstatic_padded_offset = align_size_up(next_nonstatic_padded_offset, heapOopSize);
real_offset = next_nonstatic_padded_offset;
next_nonstatic_padded_offset += heapOopSize;
// Create new oop map
nonstatic_oop_offsets[nonstatic_oop_map_count] = real_offset;
nonstatic_oop_counts [nonstatic_oop_map_count] = 1;
nonstatic_oop_map_count += 1;
if( first_nonstatic_oop_offset == 0 ) { // Undefined
first_nonstatic_oop_offset = real_offset;
}
break;
default:
ShouldNotReachHere();
}
if (fs.contended_group() == 0) {
// Contended group defines the equivalence class over the fields:
// the fields within the same contended group are not inter-padded.
// The only exception is default group, which does not incur the
// equivalence, and so requires intra-padding.
next_nonstatic_padded_offset += pad_size;
}
fs.set_offset(real_offset);
} // for
// Start laying out the next group.
// Note that this will effectively pad the last group in the back;
// this is expected to alleviate memory contention effects for
// subclass fields and/or adjacent object.
// If this was the default group, the padding is already in place.
if (current_group != 0) {
next_nonstatic_padded_offset += pad_size;
}
}
// handle static fields
// if there is at least one contended field, we need to have pre-padding for them
if (static_contended_count > 0) {
next_static_padded_offset += pad_size;
}
current_group = -1;
while ((current_group = (int)bm.get_next_one_offset(current_group + 1)) != (int)bm.size()) {
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
// skip already laid out fields
if (fs.is_offset_set()) continue;
// skip non-contended fields and fields from different group
if (!fs.is_contended() || (fs.contended_group() != current_group)) continue;
// non-statics already handled above
if (!fs.access_flags().is_static()) continue;
int real_offset;
FieldAllocationType atype = (FieldAllocationType) fs.allocation_type();
switch (atype) {
case STATIC_BYTE:
next_static_padded_offset = align_size_up(next_static_padded_offset, 1);
real_offset = next_static_padded_offset;
next_static_padded_offset += 1;
break;
case STATIC_SHORT:
next_static_padded_offset = align_size_up(next_static_padded_offset, BytesPerShort);
real_offset = next_static_padded_offset;
next_static_padded_offset += BytesPerShort;
break;
case STATIC_WORD:
next_static_padded_offset = align_size_up(next_static_padded_offset, BytesPerInt);
real_offset = next_static_padded_offset;
next_static_padded_offset += BytesPerInt;
break;
case STATIC_DOUBLE:
next_static_padded_offset = align_size_up(next_static_padded_offset, BytesPerLong);
real_offset = next_static_padded_offset;
next_static_padded_offset += BytesPerLong;
break;
case STATIC_OOP:
next_static_padded_offset = align_size_up(next_static_padded_offset, heapOopSize);
real_offset = next_static_padded_offset;
next_static_padded_offset += heapOopSize;
break;
default:
ShouldNotReachHere();
}
if (fs.contended_group() == 0) {
// Contended group defines the equivalence class over the fields:
// the fields within the same contended group are not inter-padded.
// The only exception is default group, which does not incur the
// equivalence, and so requires intra-padding.
next_static_padded_offset += pad_size;
}
fs.set_offset(real_offset);
} // for
// Start laying out the next group.
// Note that this will effectively pad the last group in the back;
// this is expected to alleviate memory contention effects for
// subclass fields and/or adjacent object.
// If this was the default group, the padding is already in place.
if (current_group != 0) {
next_static_padded_offset += pad_size;
}
}
} // handle contended
// Size of instances
int instance_size;
int notaligned_offset = next_nonstatic_padded_offset;
// Entire class is contended, pad in the back.
// This helps to alleviate memory contention effects for subclass fields
// and/or adjacent object.
if (parsed_annotations.is_contended()) {
notaligned_offset += pad_size;
next_static_padded_offset += pad_size;
}
int next_static_type_offset = align_size_up(next_static_padded_offset, wordSize);
int static_field_size = (next_static_type_offset -
InstanceMirrorKlass::offset_of_static_fields()) / wordSize;
next_nonstatic_type_offset = align_size_up(notaligned_offset, heapOopSize );
nonstatic_field_size = nonstatic_field_size + ((next_nonstatic_type_offset
- first_nonstatic_field_offset)/heapOopSize);
next_nonstatic_type_offset = align_size_up(notaligned_offset, wordSize );
instance_size = align_object_size(next_nonstatic_type_offset / wordSize);
assert(instance_size == align_object_size(align_size_up((instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize), wordSize) / wordSize), "consistent layout helper value");
assert(instance_size == align_object_size(align_size_up(
(instanceOopDesc::base_offset_in_bytes() + nonstatic_field_size*heapOopSize + ((parsed_annotations.is_contended()) ? pad_size : 0)),
wordSize) / wordSize), "consistent layout helper value");
// Number of non-static oop map blocks allocated at end of klass.
const unsigned int total_oop_map_count =
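
The group padding logic above can be summarized with a small model: a hedged sketch with hypothetical names, not HotSpot code. It assumes fields arrive sorted by contended group (which the per-group passes above guarantee) and that group 0 is the default group whose members are each padded individually.

public class ContendedLayoutModel {
    // Round offset up to a power-of-two alignment, like align_size_up().
    static int alignUp(int offset, int alignment) {
        return (offset + alignment - 1) & -alignment;
    }

    // Place contended fields: natural alignment per field, padSize bytes of
    // padding before the block, between distinct groups, and after the last group.
    static int layoutContended(int start, int padSize, int[] sizes, int[] groups) {
        int offset = start + padSize;            // pre-padding before the first field
        for (int i = 0; i < sizes.length; i++) {
            offset = alignUp(offset, sizes[i]);  // natural alignment of the field type
            System.out.printf("field %d -> offset %d%n", i, offset);
            offset += sizes[i];
            boolean packedWithNext = i + 1 < sizes.length
                                  && groups[i] != 0
                                  && groups[i] == groups[i + 1];
            if (!packedWithNext) {
                offset += padSize;               // inter-group / trailing padding
            }
        }
        return offset;                           // cf. next_nonstatic_padded_offset
    }

    public static void main(String[] args) {
        // Two longs in different groups, padSize = 128 (the default
        // ContendedPaddingWidth in this change): fields land at 144 and 280.
        layoutContended(16, 128, new int[]{8, 8}, new int[]{1, 2});
    }
}

In the real code the same walk happens twice: once for non-static fields starting from next_nonstatic_padded_offset, and once for statics starting from next_static_padded_offset.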
@@ -4008,6 +4271,18 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
}
#endif
#ifndef PRODUCT
if (PrintFieldLayout) {
print_field_layout(name,
fields,
cp,
instance_size,
first_nonstatic_field_offset,
next_nonstatic_field_offset,
next_static_type_offset);
}
#endif
// preserve result across HandleMark
preserve_this_klass = this_klass();
}
@@ -4020,6 +4295,38 @@ instanceKlassHandle ClassFileParser::parseClassFile(Symbol* name,
return this_klass;
}
void ClassFileParser::print_field_layout(Symbol* name,
Array<u2>* fields,
constantPoolHandle cp,
int instance_size,
int instance_fields_start,
int instance_fields_end,
int static_fields_end) {
tty->print("%s: field layout\n", name->as_klass_external_name());
tty->print(" @%3d %s\n", instance_fields_start, "--- instance fields start ---");
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
if (!fs.access_flags().is_static()) {
tty->print(" @%3d \"%s\" %s\n",
fs.offset(),
fs.name()->as_klass_external_name(),
fs.signature()->as_klass_external_name());
}
}
tty->print(" @%3d %s\n", instance_fields_end, "--- instance fields end ---");
tty->print(" @%3d %s\n", instance_size * wordSize, "--- instance ends ---");
tty->print(" @%3d %s\n", InstanceMirrorKlass::offset_of_static_fields(), "--- static fields start ---");
for (AllFieldStream fs(fields, cp); !fs.done(); fs.next()) {
if (fs.access_flags().is_static()) {
tty->print(" @%3d \"%s\" %s\n",
fs.offset(),
fs.name()->as_klass_external_name(),
fs.signature()->as_klass_external_name());
}
}
tty->print(" @%3d %s\n", static_fields_end, "--- static fields end ---");
tty->print("\n");
}
unsigned int
ClassFileParser::compute_oop_map_count(instanceKlassHandle super,
unsigned int nonstatic_oop_map_count,
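
The print_field_layout() dump above is reachable in non-product builds via -XX:+PrintFieldLayout (the flag guarding the call site earlier in the diff), which makes it straightforward to check where the contended padding actually landed for a given class.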