8136525: Generate interpreter entries only once and avoid unnecessary jump to jump

Reviewed-by: coleenp, twisti, aph
Author: Martin Doerr
Date:   2015-09-17 09:03:57 +02:00
parent ff77d8762c
commit 99c37e9ee2
26 changed files with 129 additions and 175 deletions


@@ -42,6 +42,11 @@
 // Implementation of InterpreterMacroAssembler

+void InterpreterMacroAssembler::jump_to_entry(address entry) {
+  assert(entry, "Entry must have been generated by now");
+  b(entry);
+}
+
 #ifndef CC_INTERP
 void InterpreterMacroAssembler::check_and_handle_popframe(Register java_thread) {


@@ -66,6 +66,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
   void load_earlyret_value(TosState state);

+  void jump_to_entry(address entry);
+
 #ifdef CC_INTERP
   void save_bcp()    { /* not needed in c++ interpreter and harmless */ }
   void restore_bcp() { /* not needed in c++ interpreter and harmless */ }


@@ -41,9 +41,8 @@ private:
   address generate_native_entry(bool synchronized);
   address generate_abstract_entry(void);
   address generate_math_entry(AbstractInterpreter::MethodKind kind);
-  address generate_jump_to_normal_entry(void);
-  address generate_accessor_entry(void) { return generate_jump_to_normal_entry(); }
-  address generate_empty_entry(void) { return generate_jump_to_normal_entry(); }
+  address generate_accessor_entry(void) { return NULL; }
+  address generate_empty_entry(void) { return NULL; }
   void generate_transcendental_entry(AbstractInterpreter::MethodKind kind, int fpargs);
   address generate_Reference_get_entry();
   address generate_CRC32_update_entry();


@@ -236,17 +236,6 @@ void InterpreterGenerator::generate_transcendental_entry(AbstractInterpreter::MethodKind kind, int fpargs) {
   __ blrt(rscratch1, gpargs, fpargs, rtype);
 }

-// Jump into normal path for accessor and empty entry to jump to normal entry
-// The "fast" optimization don't update compilation count therefore can disable inlining
-// for these functions that should be inlined.
-address InterpreterGenerator::generate_jump_to_normal_entry(void) {
-  address entry_point = __ pc();
-  assert(Interpreter::entry_for_kind(Interpreter::zerolocals) != NULL, "should already be generated");
-  __ b(Interpreter::entry_for_kind(Interpreter::zerolocals));
-  return entry_point;
-}
-
 // Abstract method entry
 // Attempt to execute abstract method. Throw exception
 address InterpreterGenerator::generate_abstract_entry(void) {
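The deleted helper above is the "jump to jump" from the commit title: accessor and empty entries used to receive a one-branch trampoline that immediately forwarded to the zerolocals entry. A standalone sketch (not HotSpot code, names are illustrative) of why handing out the target entry directly saves a branch on every call:

    #include <cstdio>

    using entry_fn = void (*)();

    static void zerolocals_entry() { std::printf("normal (zerolocals) entry\n"); }

    // Old scheme: a generated stub whose only job was to branch to the normal
    // entry, so every call through it paid for an extra jump.
    static void jump_to_normal_entry_stub() { zerolocals_entry(); }

    int main() {
      entry_fn old_style = jump_to_normal_entry_stub;  // call -> stub -> zerolocals_entry
      entry_fn new_style = zerolocals_entry;           // call -> zerolocals_entry directly
      old_style();
      new_style();
      return 0;
    }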


@@ -721,8 +721,7 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
     // generate a vanilla interpreter entry as the slow path
     __ bind(slow_path);
-    (void) generate_normal_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals));
     return entry;
   }
 #endif // INCLUDE_ALL_GCS

@@ -779,12 +778,10 @@ address InterpreterGenerator::generate_CRC32_update_entry() {
     // generate a vanilla native entry as the slow path
     __ bind(slow_path);
-    (void) generate_native_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

@@ -841,12 +838,10 @@ address InterpreterGenerator::generate_CRC32_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
     // generate a vanilla native entry as the slow path
     __ bind(slow_path);
-    (void) generate_native_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 void InterpreterGenerator::bang_stack_shadow_pages(bool native_call) {
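The pattern in these hunks repeats on every platform below: when the intrinsic is unavailable, the generator now returns NULL instead of emitting its own copy of the native entry, and the shared dispatch in generate_method_entry falls back to the entry that was already generated. A standalone sketch of that convention (illustrative names, not HotSpot code):

    #include <cstdio>

    using address = const void*;

    static bool UseCRC32Intrinsics = false;      // assumed flag for the sketch

    static address shared_native_entry() {       // stands in for the already generated native entry
      static int stub;
      return &stub;
    }

    static address generate_CRC32_update_entry_sketch() {
      if (UseCRC32Intrinsics) {
        static int fast_stub;                    // fast path whose slow path jumps to the native entry
        return &fast_stub;
      }
      return nullptr;                            // decline: no code emitted, caller reuses the native entry
    }

    int main() {
      address entry = generate_CRC32_update_entry_sketch();
      if (entry == nullptr) {
        entry = shared_native_entry();           // fallback chosen by the dispatcher
      }
      std::printf("CRC32.update entry: %p\n", entry);
      return 0;
    }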


@@ -46,7 +46,7 @@ void InterpreterMacroAssembler::null_check_throw(Register a, int offset, Register temp_reg) {
   MacroAssembler::null_check_throw(a, offset, temp_reg, exception_entry);
 }

-void InterpreterMacroAssembler::branch_to_entry(address entry, Register Rscratch) {
+void InterpreterMacroAssembler::jump_to_entry(address entry, Register Rscratch) {
   assert(entry, "Entry must have been generated by now");
   if (is_within_range_of_b(entry, pc())) {
     b(entry);


@@ -39,7 +39,7 @@ class InterpreterMacroAssembler: public MacroAssembler {
   void null_check_throw(Register a, int offset, Register temp_reg);

-  void branch_to_entry(address entry, Register Rscratch);
+  void jump_to_entry(address entry, Register Rscratch);

   // Handy address generation macros.
 #define thread_(field_name) in_bytes(JavaThread::field_name ## _offset()), R16_thread


@@ -31,9 +31,8 @@
  private:
   address generate_abstract_entry(void);
-  address generate_jump_to_normal_entry(void);
-  address generate_accessor_entry(void) { return generate_jump_to_normal_entry(); }
-  address generate_empty_entry(void) { return generate_jump_to_normal_entry(); }
+  address generate_accessor_entry(void) { return NULL; }
+  address generate_empty_entry(void) { return NULL; }
   address generate_Reference_get_entry(void);
   address generate_CRC32_update_entry();


@@ -427,18 +427,6 @@ address AbstractInterpreterGenerator::generate_result_handler_for(BasicType type) {
   return entry;
 }

-// Call an accessor method (assuming it is resolved, otherwise drop into
-// vanilla (slow path) entry.
-address InterpreterGenerator::generate_jump_to_normal_entry(void) {
-  address entry = __ pc();
-
-  address normal_entry = Interpreter::entry_for_kind(Interpreter::zerolocals);
-  assert(normal_entry != NULL, "should already be generated.");
-  __ branch_to_entry(normal_entry, R11_scratch1);
-  __ flush();
-
-  return entry;
-}
-
 // Abstract method entry.
 //
 address InterpreterGenerator::generate_abstract_entry(void) {

@@ -529,12 +517,12 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
   // regular method entry code to generate the NPE.
   //
-  if (UseG1GC) {
   address entry = __ pc();

   const int referent_offset = java_lang_ref_Reference::referent_offset;
   guarantee(referent_offset > 0, "referent offset not initialized");

+  if (UseG1GC) {
     Label slow_path;

     // Debugging not possible, so can't use __ skip_if_jvmti_mode(slow_path, GR31_SCRATCH);

@@ -577,13 +565,11 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
     // Generate regular method entry.
     __ bind(slow_path);
-    __ branch_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals), R11_scratch1);
-    __ flush();
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals), R11_scratch1);
     return entry;
-  } else {
-    return generate_jump_to_normal_entry();
   }
+
+  return NULL;
 }

 void Deoptimization::unwind_callee_save_values(frame* f, vframeArray* vframe_array) {
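The PPC hunks above also change the shape of generate_Reference_get_entry: the entry address is taken at __ pc() before any conditional code, only the G1 fast path is emitted under the flag, and NULL is returned when nothing was generated. A standalone sketch of that structure (illustrative, not HotSpot code):

    #include <cstdio>

    using address = const void*;

    static bool UseG1GC = true;                  // assumed flag for the sketch

    static address generate_Reference_get_entry_sketch() {
      static int fast_stub;
      address entry = &fast_stub;                // entry captured up front, as in the new code
      if (UseG1GC) {
        // here the real code emits the referent fast path; its slow path jumps
        // to the already generated zerolocals entry instead of regenerating it
        return entry;
      }
      return nullptr;                            // no fast path emitted: the dispatcher falls back
    }

    int main() {
      address e = generate_Reference_get_entry_sketch();
      std::printf("Reference.get entry: %p\n", e);
      return 0;
    }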


@@ -620,7 +620,7 @@ inline bool math_entry_available(AbstractInterpreter::MethodKind kind) {
 address TemplateInterpreterGenerator::generate_math_entry(AbstractInterpreter::MethodKind kind) {
   if (!math_entry_available(kind)) {
     NOT_PRODUCT(__ should_not_reach_here();)
-    return Interpreter::entry_for_kind(Interpreter::zerolocals);
+    return NULL;
   }

   address entry = __ pc();
@@ -1126,14 +1126,6 @@ address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
   generate_fixed_frame(false, Rsize_of_parameters, Rsize_of_locals);

-#ifdef FAST_DISPATCH
-  __ unimplemented("Fast dispatch in generate_normal_entry");
-#if 0
-  __ set((intptr_t)Interpreter::dispatch_table(), IdispatchTables);
-  // Set bytecode dispatch table base.
-#endif
-#endif
-
   // --------------------------------------------------------------------------
   // Zero out non-parameter locals.
   // Note: *Always* zero out non-parameter locals as Sparc does. It's not
@@ -1266,9 +1258,8 @@ address TemplateInterpreterGenerator::generate_normal_entry(bool synchronized) {
  * int java.util.zip.CRC32.update(int crc, int b)
  */
 address InterpreterGenerator::generate_CRC32_update_entry() {
-  address start = __ pc();  // Remember stub start address (is rtn value).
-
   if (UseCRC32Intrinsics) {
+    address start = __ pc();  // Remember stub start address (is rtn value).
     Label slow_path;

     // Safepoint check

@@ -1313,11 +1304,11 @@ address InterpreterGenerator::generate_CRC32_update_entry() {
     // Generate a vanilla native entry as the slow path.
     BLOCK_COMMENT("} CRC32_update");
     BIND(slow_path);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native), R11_scratch1);
+    return start;
   }
-
-  (void) generate_native_entry(false);
-
-  return start;
+  return NULL;
 }

 // CRC32 Intrinsics.
@@ -1327,9 +1318,8 @@ address InterpreterGenerator::generate_CRC32_update_entry() {
  * int java.util.zip.CRC32.updateByteBuffer(int crc, long* buf, int off, int len)
  */
 address InterpreterGenerator::generate_CRC32_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
-  address start = __ pc();  // Remember stub start address (is rtn value).
-
   if (UseCRC32Intrinsics) {
+    address start = __ pc();  // Remember stub start address (is rtn value).
     Label slow_path;

     // Safepoint check

@@ -1406,11 +1396,11 @@ address InterpreterGenerator::generate_CRC32_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
     // Generate a vanilla native entry as the slow path.
     BLOCK_COMMENT("} CRC32_updateBytes(Buffer)");
     BIND(slow_path);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native), R11_scratch1);
+    return start;
   }
-
-  (void) generate_native_entry(false);
-
-  return start;
+  return NULL;
 }

 // These should never be compiled since the interpreter will prefer


@@ -468,7 +468,7 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
   // If G1 is not enabled then attempt to go through the accessor entry point
   // Reference.get is an accessor
-  return generate_jump_to_normal_entry();
+  return NULL;
 }

 //


@@ -59,6 +59,13 @@ const Address InterpreterMacroAssembler::d_tmp(FP, (frame::interpreter_frame_d_s
 #endif // CC_INTERP

+void InterpreterMacroAssembler::jump_to_entry(address entry) {
+  assert(entry, "Entry must have been generated by now");
+  AddressLiteral al(entry);
+  jump_to(al, G3_scratch);
+  delayed()->nop();
+}
+
 void InterpreterMacroAssembler::compute_extra_locals_size_in_bytes(Register args_size, Register locals_size, Register delta) {
   // Note: this algorithm is also used by C1's OSR entry sequence.
   // Any changes should also be applied to CodeEmitter::emit_osr_entry().


@@ -80,6 +80,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
   InterpreterMacroAssembler(CodeBuffer* c)
     : MacroAssembler(c) {}

+  void jump_to_entry(address entry);
+
 #ifndef CC_INTERP
   virtual void load_earlyret_value(TosState state);


@@ -34,9 +34,8 @@
   address generate_abstract_entry(void);
   // there are no math intrinsics on sparc
   address generate_math_entry(AbstractInterpreter::MethodKind kind) { return NULL; }
-  address generate_jump_to_normal_entry(void);
-  address generate_accessor_entry(void) { return generate_jump_to_normal_entry(); }
-  address generate_empty_entry(void) { return generate_jump_to_normal_entry(); }
+  address generate_accessor_entry(void) { return NULL; }
+  address generate_empty_entry(void) { return NULL; }
   address generate_Reference_get_entry(void);
   void lock_method(void);
   void save_native_result(void);


@@ -241,15 +241,6 @@ void InterpreterGenerator::generate_counter_overflow(Label& Lcontinue) {
 // Various method entries

-address InterpreterGenerator::generate_jump_to_normal_entry(void) {
-  address entry = __ pc();
-
-  assert(Interpreter::entry_for_kind(Interpreter::zerolocals) != NULL, "should already be generated");
-  AddressLiteral al(Interpreter::entry_for_kind(Interpreter::zerolocals));
-  __ jump_to(al, G3_scratch);
-  __ delayed()->nop();
-
-  return entry;
-}
-
 // Abstract method entry
 // Attempt to execute abstract method. Throw exception
 //


@@ -779,14 +779,14 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
     // Generate regular method entry
     __ bind(slow_path);
-    (void) generate_normal_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals));
     return entry;
   }
 #endif // INCLUDE_ALL_GCS

   // If G1 is not enabled then attempt to go through the accessor entry point
   // Reference.get is an accessor
-  return generate_jump_to_normal_entry();
+  return NULL;
 }

 //


@@ -807,7 +807,7 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
   // If G1 is not enabled then attempt to go through the accessor entry point
   // Reference.get is an accessor
-  return generate_jump_to_normal_entry();
+  return NULL;
 }

 //


@@ -40,6 +40,11 @@
 // Implementation of InterpreterMacroAssembler

+void InterpreterMacroAssembler::jump_to_entry(address entry) {
+  assert(entry, "Entry must have been generated by now");
+  jump(RuntimeAddress(entry));
+}
+
 #ifndef CC_INTERP
 void InterpreterMacroAssembler::profile_obj_type(Register obj, const Address& mdo_addr) {
   Label update, next, none;


@@ -60,6 +60,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
     _locals_register(LP64_ONLY(r14) NOT_LP64(rdi)),
     _bcp_register(LP64_ONLY(r13) NOT_LP64(rsi)) {}

+  void jump_to_entry(address entry);
+
   void load_earlyret_value(TosState state);

 #ifdef CC_INTERP


@@ -31,17 +31,6 @@

 #define __ _masm->

-// Jump into normal path for accessor and empty entry to jump to normal entry
-// The "fast" optimization don't update compilation count therefore can disable inlining
-// for these functions that should be inlined.
-address InterpreterGenerator::generate_jump_to_normal_entry(void) {
-  address entry_point = __ pc();
-  assert(Interpreter::entry_for_kind(Interpreter::zerolocals) != NULL, "should already be generated");
-  __ jump(RuntimeAddress(Interpreter::entry_for_kind(Interpreter::zerolocals)));
-  return entry_point;
-}
-
 // Abstract method entry
 // Attempt to execute abstract method. Throw exception
 address InterpreterGenerator::generate_abstract_entry(void) {


@@ -36,9 +36,8 @@
   address generate_native_entry(bool synchronized);
   address generate_abstract_entry(void);
   address generate_math_entry(AbstractInterpreter::MethodKind kind);
-  address generate_jump_to_normal_entry(void);
-  address generate_accessor_entry(void) { return generate_jump_to_normal_entry(); }
-  address generate_empty_entry(void) { return generate_jump_to_normal_entry(); }
+  address generate_accessor_entry(void) { return NULL; }
+  address generate_empty_entry(void) { return NULL; }
   address generate_Reference_get_entry();
   address generate_CRC32_update_entry();
   address generate_CRC32_updateBytes_entry(AbstractInterpreter::MethodKind kind);


@@ -697,15 +697,14 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
     __ jmp(rdi);

     __ bind(slow_path);
-    (void) generate_normal_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals));
     return entry;
   }
 #endif // INCLUDE_ALL_GCS

   // If G1 is not enabled then attempt to go through the accessor entry point
   // Reference.get is an accessor
-  return generate_jump_to_normal_entry();
+  return NULL;
 }

 /**
@@ -753,12 +752,10 @@ address InterpreterGenerator::generate_CRC32_update_entry() {
     // generate a vanilla native entry as the slow path
     __ bind(slow_path);
-    (void) generate_native_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 /**
@@ -821,12 +818,10 @@ address InterpreterGenerator::generate_CRC32_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
     // generate a vanilla native entry as the slow path
     __ bind(slow_path);
-    (void) generate_native_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 /**
@@ -873,7 +868,7 @@ address InterpreterGenerator::generate_CRC32C_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 /**
@@ -881,10 +876,8 @@ address InterpreterGenerator::generate_CRC32C_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
  * java.lang.Float.intBitsToFloat(int bits)
  */
 address InterpreterGenerator::generate_Float_intBitsToFloat_entry() {
-  address entry;
-
   if (UseSSE >= 1) {
-    entry = __ pc();
+    address entry = __ pc();

     // rsi: the sender's SP

@@ -898,11 +891,10 @@ address InterpreterGenerator::generate_Float_intBitsToFloat_entry() {
     __ pop(rdi);      // get return address
     __ mov(rsp, rsi); // set rsp to the sender's SP
     __ jmp(rdi);
-  } else {
-    entry = generate_native_entry(false);
+    return entry;
   }

-  return entry;
+  return NULL;
 }

 /**
@@ -910,10 +902,8 @@ address InterpreterGenerator::generate_Float_intBitsToFloat_entry() {
  * java.lang.Float.floatToRawIntBits(float value)
  */
 address InterpreterGenerator::generate_Float_floatToRawIntBits_entry() {
-  address entry;
-
   if (UseSSE >= 1) {
-    entry = __ pc();
+    address entry = __ pc();

     // rsi: the sender's SP

@@ -927,11 +917,10 @@ address InterpreterGenerator::generate_Float_floatToRawIntBits_entry() {
     __ pop(rdi);      // get return address
     __ mov(rsp, rsi); // set rsp to the sender's SP
     __ jmp(rdi);
-  } else {
-    entry = generate_native_entry(false);
+    return entry;
   }

-  return entry;
+  return NULL;
 }
@@ -940,10 +929,8 @@ address InterpreterGenerator::generate_Float_floatToRawIntBits_entry() {
  * java.lang.Double.longBitsToDouble(long bits)
  */
 address InterpreterGenerator::generate_Double_longBitsToDouble_entry() {
-  address entry;
-
   if (UseSSE >= 2) {
-    entry = __ pc();
+    address entry = __ pc();

     // rsi: the sender's SP

@@ -957,11 +944,10 @@ address InterpreterGenerator::generate_Double_longBitsToDouble_entry() {
     __ pop(rdi);      // get return address
     __ mov(rsp, rsi); // set rsp to the sender's SP
     __ jmp(rdi);
-  } else {
-    entry = generate_native_entry(false);
+    return entry;
   }

-  return entry;
+  return NULL;
 }

 /**
@@ -969,10 +955,8 @@ address InterpreterGenerator::generate_Double_longBitsToDouble_entry() {
  * java.lang.Double.doubleToRawLongBits(double value)
  */
 address InterpreterGenerator::generate_Double_doubleToRawLongBits_entry() {
-  address entry;
-
   if (UseSSE >= 2) {
-    entry = __ pc();
+    address entry = __ pc();

     // rsi: the sender's SP

@@ -987,11 +971,10 @@ address InterpreterGenerator::generate_Double_doubleToRawLongBits_entry() {
     __ pop(rdi);      // get return address
     __ mov(rsp, rsi); // set rsp to the sender's SP
     __ jmp(rdi);
-  } else {
-    entry = generate_native_entry(false);
+    return entry;
   }

-  return entry;
+  return NULL;
 }

 //


@@ -677,15 +677,14 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
     // generate a vanilla interpreter entry as the slow path
     __ bind(slow_path);
-    (void) generate_normal_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::zerolocals));
     return entry;
   }
 #endif // INCLUDE_ALL_GCS

   // If G1 is not enabled then attempt to go through the accessor entry point
   // Reference.get is an accessor
-  return generate_jump_to_normal_entry();
+  return NULL;
 }

 /**
@@ -733,12 +732,10 @@ address InterpreterGenerator::generate_CRC32_update_entry() {
     // generate a vanilla native entry as the slow path
     __ bind(slow_path);
-    (void) generate_native_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 /**
@@ -796,12 +793,10 @@ address InterpreterGenerator::generate_CRC32_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
     // generate a vanilla native entry as the slow path
     __ bind(slow_path);
-    (void) generate_native_entry(false);
+    __ jump_to_entry(Interpreter::entry_for_kind(Interpreter::native));
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 /**
@@ -852,7 +847,7 @@ address InterpreterGenerator::generate_CRC32C_updateBytes_entry(AbstractInterpreter::MethodKind kind) {
     return entry;
   }
-  return generate_native_entry(false);
+  return NULL;
 }

 // Interpreter stub for calling a native method. (asm interpreter)


@@ -816,7 +816,7 @@ address InterpreterGenerator::generate_Reference_get_entry(void) {
   // If G1 is not enabled then attempt to go through the normal entry point
   // Reference.get could be instrumented by jvmti
-  return generate_normal_entry(false);
+  return NULL;
 }

 address InterpreterGenerator::generate_native_entry(bool synchronized) {


@@ -104,7 +104,10 @@ CodeletMark::~CodeletMark() {
   (*_masm)->flush();

   // Commit Codelet.
-  AbstractInterpreter::code()->commit((*_masm)->code()->pure_insts_size(), (*_masm)->code()->strings());
+  int committed_code_size = (*_masm)->code()->pure_insts_size();
+  if (committed_code_size) {
+    AbstractInterpreter::code()->commit(committed_code_size, (*_masm)->code()->strings());
+  }

   // Make sure nobody can use _masm outside a CodeletMark lifespan.
   *_masm = NULL;
 }
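The guard above keeps empty codelets out of the interpreter's code area: an entry generator that declined (returned NULL and reused a shared entry) emits no instructions, so there is nothing to commit. A standalone sketch of the same guard (illustrative types, not HotSpot code):

    #include <cstdio>
    #include <vector>

    // Stand-in for the interpreter's code area: one size entry per committed codelet.
    static std::vector<int> committed_codelets;

    struct CodeletMarkSketch {
      int insts_size = 0;                        // bytes emitted while this mark was open
      ~CodeletMarkSketch() {
        int committed_code_size = insts_size;
        if (committed_code_size) {               // the new guard: skip empty codelets
          committed_codelets.push_back(committed_code_size);
        }
      }
    };

    int main() {
      { CodeletMarkSketch m; m.insts_size = 128; }  // a real entry was generated
      { CodeletMarkSketch m; }                      // entry was reused, nothing emitted
      std::printf("codelets committed: %zu\n", committed_codelets.size());  // prints 1
      return 0;
    }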
@@ -546,14 +549,15 @@ void AbstractInterpreterGenerator::initialize_method_handle_entries() {
 address InterpreterGenerator::generate_method_entry(
                                         AbstractInterpreter::MethodKind kind) {
   // determine code generation flags
+  bool native = false;
   bool synchronized = false;
   address entry_point = NULL;

   switch (kind) {
   case Interpreter::zerolocals             :                                      break;
   case Interpreter::zerolocals_synchronized: synchronized = true;                 break;
-  case Interpreter::native                 : entry_point = generate_native_entry(false); break;
-  case Interpreter::native_synchronized    : entry_point = generate_native_entry(true);  break;
+  case Interpreter::native                 : native = true;                       break;
+  case Interpreter::native_synchronized    : native = true; synchronized = true;  break;
   case Interpreter::empty                  : entry_point = generate_empty_entry();    break;
   case Interpreter::accessor               : entry_point = generate_accessor_entry(); break;
   case Interpreter::abstract               : entry_point = generate_abstract_entry(); break;
@@ -571,11 +575,11 @@ address InterpreterGenerator::generate_method_entry(
                                            : entry_point = generate_Reference_get_entry(); break;
 #ifndef CC_INTERP
   case Interpreter::java_util_zip_CRC32_update
-                                           : entry_point = generate_CRC32_update_entry(); break;
+                                           : native = true; entry_point = generate_CRC32_update_entry(); break;
   case Interpreter::java_util_zip_CRC32_updateBytes
                                            : // fall thru
   case Interpreter::java_util_zip_CRC32_updateByteBuffer
-                                           : entry_point = generate_CRC32_updateBytes_entry(kind); break;
+                                           : native = true; entry_point = generate_CRC32_updateBytes_entry(kind); break;
   case Interpreter::java_util_zip_CRC32C_updateBytes
                                            : // fall thru
   case Interpreter::java_util_zip_CRC32C_updateDirectByteBuffer
@@ -584,19 +588,19 @@ address InterpreterGenerator::generate_method_entry(
   // On x86_32 platforms, a special entry is generated for the following four methods.
   // On other platforms the normal entry is used to enter these methods.
   case Interpreter::java_lang_Float_intBitsToFloat
-                                           : entry_point = generate_Float_intBitsToFloat_entry(); break;
+                                           : native = true; entry_point = generate_Float_intBitsToFloat_entry(); break;
   case Interpreter::java_lang_Float_floatToRawIntBits
-                                           : entry_point = generate_Float_floatToRawIntBits_entry(); break;
+                                           : native = true; entry_point = generate_Float_floatToRawIntBits_entry(); break;
   case Interpreter::java_lang_Double_longBitsToDouble
-                                           : entry_point = generate_Double_longBitsToDouble_entry(); break;
+                                           : native = true; entry_point = generate_Double_longBitsToDouble_entry(); break;
   case Interpreter::java_lang_Double_doubleToRawLongBits
-                                           : entry_point = generate_Double_doubleToRawLongBits_entry(); break;
+                                           : native = true; entry_point = generate_Double_doubleToRawLongBits_entry(); break;
 #else
   case Interpreter::java_lang_Float_intBitsToFloat:
   case Interpreter::java_lang_Float_floatToRawIntBits:
   case Interpreter::java_lang_Double_longBitsToDouble:
   case Interpreter::java_lang_Double_doubleToRawLongBits:
-    entry_point = generate_native_entry(false);
+    native = true;
     break;
 #endif // defined(TARGET_ARCH_x86) && !defined(_LP64)
 #endif // CC_INTERP
@@ -609,5 +613,18 @@ address InterpreterGenerator::generate_method_entry(
     return entry_point;
   }

-  return generate_normal_entry(synchronized);
+  // We expect the normal and native entry points to be generated first so we can reuse them.
+  if (native) {
+    entry_point = Interpreter::entry_for_kind(synchronized ? Interpreter::native_synchronized : Interpreter::native);
+    if (entry_point == NULL) {
+      entry_point = generate_native_entry(synchronized);
+    }
+  } else {
+    entry_point = Interpreter::entry_for_kind(synchronized ? Interpreter::zerolocals_synchronized : Interpreter::zerolocals);
+    if (entry_point == NULL) {
+      entry_point = generate_normal_entry(synchronized);
+    }
+  }
+
+  return entry_point;
 }
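The block above is the core of the change: instead of generating a fresh normal or native entry for every method kind that falls through the switch, generate_method_entry first asks Interpreter::entry_for_kind for an entry that was already generated and only generates one when it is still missing. A standalone sketch of that reuse (illustrative names, not HotSpot code):

    #include <array>
    #include <cstdio>

    enum Kind { zerolocals, zerolocals_synchronized, native, native_synchronized, kind_count };

    using address = const void*;

    // Stand-in for the interpreter's entry table, filled as entries are generated.
    static std::array<address, kind_count> entry_table{};

    static address generate_entry(Kind k) {      // stand-in for generate_normal/native_entry
      static int stubs[kind_count];
      std::printf("generating entry for kind %d\n", static_cast<int>(k));
      return &stubs[k];
    }

    // Mirrors the new tail of generate_method_entry: reuse an existing entry,
    // generate only when it has not been created yet.
    static address entry_for(Kind k) {
      if (entry_table[k] == nullptr) {
        entry_table[k] = generate_entry(k);
      }
      return entry_table[k];
    }

    int main() {
      address a = entry_for(native);
      address b = entry_for(native);             // second request reuses the first entry
      std::printf("same entry reused: %s\n", a == b ? "yes" : "no");
      return 0;
    }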


@@ -412,6 +412,14 @@ void TemplateInterpreterGenerator::generate_all() {
   method_entry(java_lang_math_pow  )
   method_entry(java_lang_ref_reference_get)

+  initialize_method_handle_entries();
+
+  // all native method kinds (must be one contiguous block)
+  Interpreter::_native_entry_begin = Interpreter::code()->code_end();
+  method_entry(native)
+  method_entry(native_synchronized)
+  Interpreter::_native_entry_end = Interpreter::code()->code_end();
+
   if (UseCRC32Intrinsics) {
     method_entry(java_util_zip_CRC32_update)
     method_entry(java_util_zip_CRC32_updateBytes)

@@ -428,14 +436,6 @@
   method_entry(java_lang_Double_longBitsToDouble);
   method_entry(java_lang_Double_doubleToRawLongBits);

-  initialize_method_handle_entries();
-
-  // all native method kinds (must be one contiguous block)
-  Interpreter::_native_entry_begin = Interpreter::code()->code_end();
-  method_entry(native)
-  method_entry(native_synchronized)
-  Interpreter::_native_entry_end = Interpreter::code()->code_end();
-
 #undef method_entry

 // Bytecodes
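Moving the native entries (and initialize_method_handle_entries) ahead of the CRC32 and Float/Double entries matters because those entries now jump into, or fall back to, the shared native entry, so it must already exist when they are generated. A standalone sketch of that ordering constraint (illustrative, not HotSpot code):

    #include <cassert>
    #include <cstdio>

    using address = const void*;

    static address native_entry = nullptr;       // stand-in for the shared native entry

    static address generate_native_entry_sketch() {
      static int stub;
      return &stub;
    }

    static address generate_CRC32_entry_sketch() {
      // The slow path of this entry branches to the shared native entry,
      // so generate_all() must have produced it already.
      assert(native_entry != nullptr && "native entry must be generated first");
      static int stub;
      return &stub;
    }

    int main() {
      native_entry = generate_native_entry_sketch();   // generated earlier in generate_all()
      address crc = generate_CRC32_entry_sketch();     // safe: the prerequisite exists
      std::printf("CRC32 entry %p reuses native entry %p\n", crc, native_entry);
      return 0;
    }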