8288477: nmethod header size reduction

Reviewed-by: kvn, never
This commit is contained in:
Boris Ulasevich 2022-07-28 19:49:31 +00:00
parent 54a2c5a6d1
commit e052d7f4bc
11 changed files with 80 additions and 73 deletions

View file

@@ -1148,7 +1148,7 @@ void ciEnv::register_method(ciMethod* target,
debug_info(), dependencies(), code_buffer, debug_info(), dependencies(), code_buffer,
frame_words, oop_map_set, frame_words, oop_map_set,
handler_table, inc_table, handler_table, inc_table,
compiler, task()->comp_level()); compiler, CompLevel(task()->comp_level()));
// Free codeBlobs // Free codeBlobs
code_buffer->free_blob(); code_buffer->free_blob();

View file

@@ -79,12 +79,6 @@ unsigned int CodeBlob::allocation_size(CodeBuffer* cb, int header_size) {
} }
CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled) : CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, int frame_complete_offset, int frame_size, ImmutableOopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled) :
_type(type),
_size(layout.size()),
_header_size(layout.header_size()),
_frame_complete_offset(frame_complete_offset),
_data_offset(layout.data_offset()),
_frame_size(frame_size),
_code_begin(layout.code_begin()), _code_begin(layout.code_begin()),
_code_end(layout.code_end()), _code_end(layout.code_end()),
_content_begin(layout.content_begin()), _content_begin(layout.content_begin()),
@@ -92,9 +86,15 @@ CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& la
_relocation_begin(layout.relocation_begin()), _relocation_begin(layout.relocation_begin()),
_relocation_end(layout.relocation_end()), _relocation_end(layout.relocation_end()),
_oop_maps(oop_maps), _oop_maps(oop_maps),
_name(name),
_size(layout.size()),
_header_size(layout.header_size()),
_frame_complete_offset(frame_complete_offset),
_data_offset(layout.data_offset()),
_frame_size(frame_size),
_caller_must_gc_arguments(caller_must_gc_arguments), _caller_must_gc_arguments(caller_must_gc_arguments),
_is_compiled(compiled), _is_compiled(compiled),
_name(name) _type(type)
{ {
assert(is_aligned(layout.size(), oopSize), "unaligned size"); assert(is_aligned(layout.size(), oopSize), "unaligned size");
assert(is_aligned(layout.header_size(), oopSize), "unaligned size"); assert(is_aligned(layout.header_size(), oopSize), "unaligned size");
@@ -108,21 +108,21 @@ CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& la
} }
CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, CodeBuffer* cb /*UNUSED*/, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled) : CodeBlob::CodeBlob(const char* name, CompilerType type, const CodeBlobLayout& layout, CodeBuffer* cb /*UNUSED*/, int frame_complete_offset, int frame_size, OopMapSet* oop_maps, bool caller_must_gc_arguments, bool compiled) :
_type(type),
_size(layout.size()),
_header_size(layout.header_size()),
_frame_complete_offset(frame_complete_offset),
_data_offset(layout.data_offset()),
_frame_size(frame_size),
_code_begin(layout.code_begin()), _code_begin(layout.code_begin()),
_code_end(layout.code_end()), _code_end(layout.code_end()),
_content_begin(layout.content_begin()), _content_begin(layout.content_begin()),
_data_end(layout.data_end()), _data_end(layout.data_end()),
_relocation_begin(layout.relocation_begin()), _relocation_begin(layout.relocation_begin()),
_relocation_end(layout.relocation_end()), _relocation_end(layout.relocation_end()),
_name(name),
_size(layout.size()),
_header_size(layout.header_size()),
_frame_complete_offset(frame_complete_offset),
_data_offset(layout.data_offset()),
_frame_size(frame_size),
_caller_must_gc_arguments(caller_must_gc_arguments), _caller_must_gc_arguments(caller_must_gc_arguments),
_is_compiled(compiled), _is_compiled(compiled),
_name(name) _type(type)
{ {
assert(is_aligned(_size, oopSize), "unaligned size"); assert(is_aligned(_size, oopSize), "unaligned size");
assert(is_aligned(_header_size, oopSize), "unaligned size"); assert(is_aligned(_header_size, oopSize), "unaligned size");

View file

@@ -90,16 +90,7 @@ class CodeBlob {
protected: protected:
const CompilerType _type; // CompilerType // order fields from large to small to minimize padding between fields
int _size; // total size of CodeBlob in bytes
int _header_size; // size of header (depends on subclass)
int _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
// not finished setting up their frame. Beware of pc's in
// that range. There is a similar range(s) on returns
// which we don't detect.
int _data_offset; // offset to where data region begins
int _frame_size; // size of stack frame
address _code_begin; address _code_begin;
address _code_end; address _code_end;
address _content_begin; // address to where content region begins (this includes consts, insts, stubs) address _content_begin; // address to where content region begins (this includes consts, insts, stubs)
@@ -109,13 +100,24 @@ protected:
address _relocation_end; address _relocation_end;
ImmutableOopMapSet* _oop_maps; // OopMap for this CodeBlob ImmutableOopMapSet* _oop_maps; // OopMap for this CodeBlob
bool _caller_must_gc_arguments;
bool _is_compiled;
const char* _name; const char* _name;
S390_ONLY(int _ctable_offset;) S390_ONLY(int _ctable_offset;)
int _size; // total size of CodeBlob in bytes
int _header_size; // size of header (depends on subclass)
int _frame_complete_offset; // instruction offsets in [0.._frame_complete_offset) have
// not finished setting up their frame. Beware of pc's in
// that range. There is a similar range(s) on returns
// which we don't detect.
int _data_offset; // offset to where data region begins
int _frame_size; // size of stack frame
bool _caller_must_gc_arguments;
bool _is_compiled;
const CompilerType _type; // CompilerType
#ifndef PRODUCT #ifndef PRODUCT
AsmRemarks _asm_remarks; AsmRemarks _asm_remarks;
DbgStrings _dbg_strings; DbgStrings _dbg_strings;

View file

@@ -511,7 +511,7 @@ nmethod* nmethod::new_nmethod(const methodHandle& method,
ExceptionHandlerTable* handler_table, ExceptionHandlerTable* handler_table,
ImplicitExceptionTable* nul_chk_table, ImplicitExceptionTable* nul_chk_table,
AbstractCompiler* compiler, AbstractCompiler* compiler,
int comp_level CompLevel comp_level
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
, char* speculations, , char* speculations,
int speculations_len, int speculations_len,
@@ -611,9 +611,9 @@ nmethod::nmethod(
ByteSize basic_lock_sp_offset, ByteSize basic_lock_sp_offset,
OopMapSet* oop_maps ) OopMapSet* oop_maps )
: CompiledMethod(method, "native nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true), : CompiledMethod(method, "native nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
_is_unloading_state(0),
_native_receiver_sp_offset(basic_lock_owner_sp_offset), _native_receiver_sp_offset(basic_lock_owner_sp_offset),
_native_basic_lock_sp_offset(basic_lock_sp_offset) _native_basic_lock_sp_offset(basic_lock_sp_offset),
_is_unloading_state(0)
{ {
{ {
int scopes_data_offset = 0; int scopes_data_offset = 0;
@@ -624,6 +624,7 @@ nmethod::nmethod(
assert_locked_or_safepoint(CodeCache_lock); assert_locked_or_safepoint(CodeCache_lock);
init_defaults(); init_defaults();
_comp_level = CompLevel_none;
_entry_bci = InvocationEntryBci; _entry_bci = InvocationEntryBci;
// We have no exception handler or deopt handler make the // We have no exception handler or deopt handler make the
// values something that will never match a pc like the nmethod vtable entry // values something that will never match a pc like the nmethod vtable entry
@@ -648,7 +649,6 @@ nmethod::nmethod(
_nmethod_end_offset = _nul_chk_table_offset; _nmethod_end_offset = _nul_chk_table_offset;
#endif #endif
_compile_id = compile_id; _compile_id = compile_id;
_comp_level = CompLevel_none;
_entry_point = code_begin() + offsets->value(CodeOffsets::Entry); _entry_point = code_begin() + offsets->value(CodeOffsets::Entry);
_verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry); _verified_entry_point = code_begin() + offsets->value(CodeOffsets::Verified_Entry);
_osr_entry_point = NULL; _osr_entry_point = NULL;
@@ -738,7 +738,7 @@ nmethod::nmethod(
ExceptionHandlerTable* handler_table, ExceptionHandlerTable* handler_table,
ImplicitExceptionTable* nul_chk_table, ImplicitExceptionTable* nul_chk_table,
AbstractCompiler* compiler, AbstractCompiler* compiler,
int comp_level CompLevel comp_level
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
, char* speculations, , char* speculations,
int speculations_len, int speculations_len,
@@ -746,9 +746,9 @@ nmethod::nmethod(
#endif #endif
) )
: CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true), : CompiledMethod(method, "nmethod", type, nmethod_size, sizeof(nmethod), code_buffer, offsets->value(CodeOffsets::Frame_Complete), frame_size, oop_maps, false, true),
_is_unloading_state(0),
_native_receiver_sp_offset(in_ByteSize(-1)), _native_receiver_sp_offset(in_ByteSize(-1)),
_native_basic_lock_sp_offset(in_ByteSize(-1)) _native_basic_lock_sp_offset(in_ByteSize(-1)),
_is_unloading_state(0)
{ {
assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR"); assert(debug_info->oop_recorder() == code_buffer->oop_recorder(), "shared OR");
{ {

View file

@@ -71,11 +71,16 @@ class nmethod : public CompiledMethod {
friend class JVMCINMethodData; friend class JVMCINMethodData;
private: private:
// Shared fields for all nmethod's
int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
uint64_t _gc_epoch; uint64_t _gc_epoch;
// not_entrant method removal. Each mark_sweep pass will update
// this mark to current sweep invocation count if it is seen on the
// stack. An not_entrant method can be removed when there are no
// more activations, i.e., when the _stack_traversal_mark is less than
// current sweep traversal index.
volatile int64_t _stack_traversal_mark;
// To support simple linked-list chaining of nmethods: // To support simple linked-list chaining of nmethods:
nmethod* _osr_link; // from InstanceKlass::osr_nmethods_head nmethod* _osr_link; // from InstanceKlass::osr_nmethods_head
@@ -198,6 +203,9 @@ class nmethod : public CompiledMethod {
address _verified_entry_point; // entry point without class check address _verified_entry_point; // entry point without class check
address _osr_entry_point; // entry point for on stack replacement address _osr_entry_point; // entry point for on stack replacement
// Shared fields for all nmethod's
int _entry_bci; // != InvocationEntryBci if this nmethod is an on-stack replacement method
// Offsets for different nmethod parts // Offsets for different nmethod parts
int _exception_offset; int _exception_offset;
// Offset of the unwind handler if it exists // Offset of the unwind handler if it exists
@@ -225,7 +233,41 @@ class nmethod : public CompiledMethod {
int _orig_pc_offset; int _orig_pc_offset;
int _compile_id; // which compilation made this nmethod int _compile_id; // which compilation made this nmethod
int _comp_level; // compilation level
#if INCLUDE_RTM_OPT
// RTM state at compile time. Used during deoptimization to decide
// whether to restart collecting RTM locking abort statistic again.
RTMState _rtm_state;
#endif
// Nmethod Flushing lock. If non-zero, then the nmethod is not removed
// and is not made into a zombie. However, once the nmethod is made into
// a zombie, it will be locked one final time if CompiledMethodUnload
// event processing needs to be done.
volatile jint _lock_count;
// The _hotness_counter indicates the hotness of a method. The higher
// the value the hotter the method. The hotness counter of a nmethod is
// set to [(ReservedCodeCacheSize / (1024 * 1024)) * 2] each time the method
// is active while stack scanning (do_stack_scanning()). The hotness
// counter is decreased (by 1) while sweeping.
int _hotness_counter;
// These are used for compiled synchronized native methods to
// locate the owner and stack slot for the BasicLock. They are
// needed because there is no debug information for compiled native
// wrappers and the oop maps are insufficient to allow
// frame::retrieve_receiver() to work. Currently they are expected
// to be byte offsets from the Java stack pointer for maximum code
// sharing between platforms. JVMTI's GetLocalInstance() uses these
// offsets to find the receiver for non-static native wrapper frames.
ByteSize _native_receiver_sp_offset;
ByteSize _native_basic_lock_sp_offset;
CompLevel _comp_level; // compilation level
// Local state used to keep track of whether unloading is happening or not
volatile uint8_t _is_unloading_state;
// protected by CodeCache_lock // protected by CodeCache_lock
bool _has_flushed_dependencies; // Used for maintenance of dependencies (CodeCache_lock) bool _has_flushed_dependencies; // Used for maintenance of dependencies (CodeCache_lock)
@@ -241,46 +283,6 @@ class nmethod : public CompiledMethod {
bool _oops_are_stale; // indicates that it's no longer safe to access oops section bool _oops_are_stale; // indicates that it's no longer safe to access oops section
#endif #endif
#if INCLUDE_RTM_OPT
// RTM state at compile time. Used during deoptimization to decide
// whether to restart collecting RTM locking abort statistic again.
RTMState _rtm_state;
#endif
// Nmethod Flushing lock. If non-zero, then the nmethod is not removed
// and is not made into a zombie. However, once the nmethod is made into
// a zombie, it will be locked one final time if CompiledMethodUnload
// event processing needs to be done.
volatile jint _lock_count;
// not_entrant method removal. Each mark_sweep pass will update
// this mark to current sweep invocation count if it is seen on the
// stack. An not_entrant method can be removed when there are no
// more activations, i.e., when the _stack_traversal_mark is less than
// current sweep traversal index.
volatile int64_t _stack_traversal_mark;
// The _hotness_counter indicates the hotness of a method. The higher
// the value the hotter the method. The hotness counter of a nmethod is
// set to [(ReservedCodeCacheSize / (1024 * 1024)) * 2] each time the method
// is active while stack scanning (do_stack_scanning()). The hotness
// counter is decreased (by 1) while sweeping.
int _hotness_counter;
// Local state used to keep track of whether unloading is happening or not
volatile uint8_t _is_unloading_state;
// These are used for compiled synchronized native methods to
// locate the owner and stack slot for the BasicLock. They are
// needed because there is no debug information for compiled native
// wrappers and the oop maps are insufficient to allow
// frame::retrieve_receiver() to work. Currently they are expected
// to be byte offsets from the Java stack pointer for maximum code
// sharing between platforms. JVMTI's GetLocalInstance() uses these
// offsets to find the receiver for non-static native wrapper frames.
ByteSize _native_receiver_sp_offset;
ByteSize _native_basic_lock_sp_offset;
friend class nmethodLocker; friend class nmethodLocker;
// For native wrappers // For native wrappers
@@ -311,7 +313,7 @@ class nmethod : public CompiledMethod {
ExceptionHandlerTable* handler_table, ExceptionHandlerTable* handler_table,
ImplicitExceptionTable* nul_chk_table, ImplicitExceptionTable* nul_chk_table,
AbstractCompiler* compiler, AbstractCompiler* compiler,
int comp_level CompLevel comp_level
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
, char* speculations, , char* speculations,
int speculations_len, int speculations_len,
@@ -359,7 +361,7 @@ class nmethod : public CompiledMethod {
ExceptionHandlerTable* handler_table, ExceptionHandlerTable* handler_table,
ImplicitExceptionTable* nul_chk_table, ImplicitExceptionTable* nul_chk_table,
AbstractCompiler* compiler, AbstractCompiler* compiler,
int comp_level CompLevel comp_level
#if INCLUDE_JVMCI #if INCLUDE_JVMCI
, char* speculations = NULL, , char* speculations = NULL,
int speculations_len = 0, int speculations_len = 0,
@@ -372,9 +374,9 @@ class nmethod : public CompiledMethod {
// Only used for unit tests. // Only used for unit tests.
nmethod() nmethod()
: CompiledMethod(), : CompiledMethod(),
_is_unloading_state(0),
_native_receiver_sp_offset(in_ByteSize(-1)), _native_receiver_sp_offset(in_ByteSize(-1)),
_native_basic_lock_sp_offset(in_ByteSize(-1)) {} _native_basic_lock_sp_offset(in_ByteSize(-1)),
_is_unloading_state(0) {}
static nmethod* new_native_nmethod(const methodHandle& method, static nmethod* new_native_nmethod(const methodHandle& method,

View file

@@ -31,7 +31,7 @@
#include "runtime/globals.hpp" #include "runtime/globals.hpp"
// The (closed set) of concrete compiler classes. // The (closed set) of concrete compiler classes.
enum CompilerType { enum CompilerType : u1 {
compiler_none, compiler_none,
compiler_c1, compiler_c1,
compiler_c2, compiler_c2,
@@ -54,7 +54,7 @@ enum MethodCompilation {
}; };
// Enumeration to distinguish tiers of compilation // Enumeration to distinguish tiers of compilation
enum CompLevel { enum CompLevel : s1 {
CompLevel_any = -1, // Used for querying the state CompLevel_any = -1, // Used for querying the state
CompLevel_all = -1, // Used for changing the state CompLevel_all = -1, // Used for changing the state
CompLevel_none = 0, // Interpreter CompLevel_none = 0, // Interpreter

View file

@@ -2063,7 +2063,7 @@ JVMCI::CodeInstallResult JVMCIRuntime::register_method(JVMCIEnv* JVMCIENV,
int speculations_len) { int speculations_len) {
JVMCI_EXCEPTION_CONTEXT; JVMCI_EXCEPTION_CONTEXT;
nmethod* nm = NULL; nmethod* nm = NULL;
int comp_level = CompLevel_full_optimization; CompLevel comp_level = CompLevel_full_optimization;
char* failure_detail = NULL; char* failure_detail = NULL;
bool install_default = JVMCIENV->get_HotSpotNmethod_isDefault(nmethod_mirror) != 0; bool install_default = JVMCIENV->get_HotSpotNmethod_isDefault(nmethod_mirror) != 0;

View file

@@ -255,7 +255,7 @@
nonstatic_field(MethodData, _jvmci_ir_size, int) \ nonstatic_field(MethodData, _jvmci_ir_size, int) \
\ \
nonstatic_field(nmethod, _verified_entry_point, address) \ nonstatic_field(nmethod, _verified_entry_point, address) \
nonstatic_field(nmethod, _comp_level, int) \ nonstatic_field(nmethod, _comp_level, CompLevel) \
\ \
nonstatic_field(ObjArrayKlass, _element_klass, Klass*) \ nonstatic_field(ObjArrayKlass, _element_klass, Klass*) \
\ \
@@ -366,6 +366,7 @@
declare_unsigned_integer_type(size_t) \ declare_unsigned_integer_type(size_t) \
declare_integer_type(intx) \ declare_integer_type(intx) \
declare_unsigned_integer_type(uintx) \ declare_unsigned_integer_type(uintx) \
declare_integer_type(CompLevel) \
\ \
declare_toplevel_type(BasicLock) \ declare_toplevel_type(BasicLock) \
declare_toplevel_type(CompilerToVM) \ declare_toplevel_type(CompilerToVM) \

View file

@@ -665,7 +665,7 @@
volatile_nonstatic_field(nmethod, _lock_count, jint) \ volatile_nonstatic_field(nmethod, _lock_count, jint) \
volatile_nonstatic_field(nmethod, _stack_traversal_mark, int64_t) \ volatile_nonstatic_field(nmethod, _stack_traversal_mark, int64_t) \
nonstatic_field(nmethod, _compile_id, int) \ nonstatic_field(nmethod, _compile_id, int) \
nonstatic_field(nmethod, _comp_level, int) \ nonstatic_field(nmethod, _comp_level, CompLevel) \
\ \
unchecked_c2_static_field(Deoptimization, _trap_reason_name, void*) \ unchecked_c2_static_field(Deoptimization, _trap_reason_name, void*) \
\ \
@@ -1971,6 +1971,8 @@
declare_integer_type(AccessFlags) /* FIXME: wrong type (not integer) */\ declare_integer_type(AccessFlags) /* FIXME: wrong type (not integer) */\
declare_toplevel_type(address) /* FIXME: should this be an integer type? */\ declare_toplevel_type(address) /* FIXME: should this be an integer type? */\
declare_integer_type(BasicType) /* FIXME: wrong type (not integer) */ \ declare_integer_type(BasicType) /* FIXME: wrong type (not integer) */ \
\
declare_integer_type(CompLevel) \
JVMTI_ONLY(declare_toplevel_type(BreakpointInfo)) \ JVMTI_ONLY(declare_toplevel_type(BreakpointInfo)) \
JVMTI_ONLY(declare_toplevel_type(BreakpointInfo*)) \ JVMTI_ONLY(declare_toplevel_type(BreakpointInfo*)) \
declare_toplevel_type(CodeBlob*) \ declare_toplevel_type(CodeBlob*) \

View file

@@ -459,7 +459,7 @@ final class HotSpotResolvedJavaMethodImpl extends HotSpotMethod implements HotSp
public boolean hasCompiledCodeAtLevel(int level) { public boolean hasCompiledCodeAtLevel(int level) {
long compiledCode = getCompiledCode(); long compiledCode = getCompiledCode();
if (compiledCode != 0) { if (compiledCode != 0) {
return UNSAFE.getInt(compiledCode + config().nmethodCompLevelOffset) == level; return UNSAFE.getByte(compiledCode + config().nmethodCompLevelOffset) == level;
} }
return false; return false;
} }

View file

@@ -182,7 +182,7 @@ class HotSpotVMConfig extends HotSpotVMConfigAccess {
final int methodDataOverflowRecompiles = getFieldOffset("MethodData::_compiler_counters._nof_overflow_recompiles", Integer.class, "uint"); final int methodDataOverflowRecompiles = getFieldOffset("MethodData::_compiler_counters._nof_overflow_recompiles", Integer.class, "uint");
final int methodDataOverflowTraps = getFieldOffset("MethodData::_compiler_counters._nof_overflow_traps", Integer.class, "uint"); final int methodDataOverflowTraps = getFieldOffset("MethodData::_compiler_counters._nof_overflow_traps", Integer.class, "uint");
final int nmethodCompLevelOffset = getFieldOffset("nmethod::_comp_level", Integer.class, "int"); final int nmethodCompLevelOffset = getFieldOffset("nmethod::_comp_level", Integer.class, "CompLevel");
final int compilationLevelNone = getConstant("CompLevel_none", Integer.class); final int compilationLevelNone = getConstant("CompLevel_none", Integer.class);
final int compilationLevelSimple = getConstant("CompLevel_simple", Integer.class); final int compilationLevelSimple = getConstant("CompLevel_simple", Integer.class);