8299162: Refactor shared trampoline emission logic

Reviewed-by: fyang, adinn, luhenry
Xiaolin Zheng 2023-02-06 12:38:36 +00:00 committed by Ludovic Henry
parent 522fa13274
commit 773050647e
9 changed files with 83 additions and 62 deletions
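
In brief: instead of having each shared-trampoline emission align the stub section, grow the buffer, and thread a p_succeeded flag through the emission lambda, the refactored code reserves worst-case space for all requested trampolines once, up front, and lets each individual emission assert success. A minimal sketch of that control flow (simplified from the diff below; Requests and emit_one are illustrative placeholders, not HotSpot names):

    // Sketch of the refactored flow: one up-front reservation, then
    // per-request emission that can assume space is already available.
    static bool emit_shared_trampolines_sketch(CodeBuffer* cb, Requests* requests) {
      // Worst case per trampoline: one alignment nop plus the stub itself.
      const int total = MacroAssembler::max_trampoline_stub_size()
                        * requests->number_of_entries();
      // A single expansion check replaces the per-request checks.
      if (cb->stubs()->maybe_expand_to_ensure_remaining(total) && cb->blob() == NULL) {
        ciEnv::current()->record_failure("CodeCache is full");
        return false;
      }
      requests->iterate(emit_one);  // each emission now asserts, never fails
      return true;
    }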

View file

@@ -42,6 +42,8 @@ void CodeBuffer::share_trampoline_for(address dest, int caller_offset) {
   _finalize_stubs = true;
 }
 
+#define __ masm.
+
 static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampolineRequests* requests) {
   if (requests == nullptr) {
     return true;
@@ -49,39 +51,35 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline
 
   MacroAssembler masm(cb);
 
-  bool p_succeeded = true;
   auto emit = [&](address dest, const CodeBuffer::Offsets &offsets) {
-    masm.set_code_section(cb->stubs());
-    if (!is_aligned(masm.offset(), wordSize)) {
-      if (cb->stubs()->maybe_expand_to_ensure_remaining(NativeInstruction::instruction_size) && cb->blob() == NULL) {
-        ciEnv::current()->record_failure("CodeCache is full");
-        p_succeeded = false;
-        return p_succeeded;
-      }
-      masm.align(wordSize);
-    }
+    assert(cb->stubs()->remaining() >= MacroAssembler::max_trampoline_stub_size(), "pre-allocated trampolines");
     LinkedListIterator<int> it(offsets.head());
     int offset = *it.next();
-    for (; !it.is_empty(); offset = *it.next()) {
-      masm.relocate(trampoline_stub_Relocation::spec(cb->insts()->start() + offset));
-    }
-    masm.set_code_section(cb->insts());
+    address stub = __ emit_trampoline_stub(offset, dest);
+    assert(stub, "pre-allocated trampolines");
 
-    address stub = masm.emit_trampoline_stub(offset, dest);
-    if (stub == nullptr) {
-      ciEnv::current()->record_failure("CodeCache is full");
-      p_succeeded = false;
+    address reloc_pc = cb->stubs()->end() - NativeCallTrampolineStub::instruction_size;
+    while (!it.is_empty()) {
+      offset = *it.next();
+      address caller_pc = cb->insts()->start() + offset;
+      cb->stubs()->relocate(reloc_pc, trampoline_stub_Relocation::spec(caller_pc));
     }
-    return p_succeeded;
+    return true;
   };
 
-  requests->iterate(emit);
-  return p_succeeded;
+  assert(requests->number_of_entries() >= 1, "at least one");
+  const int total_requested_size = MacroAssembler::max_trampoline_stub_size() * requests->number_of_entries();
+  if (cb->stubs()->maybe_expand_to_ensure_remaining(total_requested_size) && cb->blob() == NULL) {
+    ciEnv::current()->record_failure("CodeCache is full");
+    return false;
+  }
+
+  requests->iterate(emit);
+  return true;
 }
 
+#undef __
+
 bool CodeBuffer::pd_finalize_stubs() {
   return emit_shared_stubs_to_interp<MacroAssembler>(this, _shared_stub_to_interp_requests)
          && emit_shared_trampolines(this, _shared_trampoline_requests);

View file

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
@@ -71,15 +71,14 @@ address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark)
 #undef __
 
 int CompiledStaticCall::to_interp_stub_size() {
-  // isb; movk; movz; movz; movk; movz; movz; br
-  return 8 * NativeInstruction::instruction_size;
+  return MacroAssembler::static_call_stub_size();
 }
 
 int CompiledStaticCall::to_trampoline_stub_size() {
   // Somewhat pessimistically, we count 3 instructions here (although
   // there are only two) because we sometimes emit an alignment nop.
   // Trampoline stubs are always word aligned.
-  return 3 * NativeInstruction::instruction_size + wordSize;
+  return MacroAssembler::max_trampoline_stub_size();
 }
 
 // Relocation entries for call stub, compiled java to interpreter.

View file

@@ -926,8 +926,7 @@ address MacroAssembler::trampoline_call(Address entry) {
 address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
                                              address dest) {
   // Max stub size: alignment nop, TrampolineStub.
-  address stub = start_a_stub(NativeInstruction::instruction_size
-                 + NativeCallTrampolineStub::instruction_size);
+  address stub = start_a_stub(max_trampoline_stub_size());
   if (stub == NULL) {
     return NULL;  // CodeBuffer::expand failed
   }
@@ -959,6 +958,11 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
   return stub_start_addr;
 }
 
+int MacroAssembler::max_trampoline_stub_size() {
+  // Max stub size: alignment nop, TrampolineStub.
+  return NativeInstruction::instruction_size + NativeCallTrampolineStub::instruction_size;
+}
+
 void MacroAssembler::emit_static_call_stub() {
   // CompiledDirectStaticCall::set_to_interpreted knows the
   // exact layout of this stub.
@@ -971,6 +975,11 @@ void MacroAssembler::emit_static_call_stub() {
   br(rscratch1);
 }
 
+int MacroAssembler::static_call_stub_size() {
+  // isb; movk; movz; movz; movk; movz; movz; br
+  return 8 * NativeInstruction::instruction_size;
+}
+
 void MacroAssembler::c2bool(Register x) {
   // implements x == 0 ? 0 : 1
   // note: must only look at least-significant byte of x

View file

@@ -638,7 +638,9 @@ public:
     return false;
   }
 
   address emit_trampoline_stub(int insts_call_instruction_offset, address target);
+  static int max_trampoline_stub_size();
   void emit_static_call_stub();
+  static int static_call_stub_size();
 
   // The following 4 methods return the offset of the appropriate move instruction

View file

@@ -44,6 +44,8 @@ void CodeBuffer::share_trampoline_for(address dest, int caller_offset) {
   _finalize_stubs = true;
 }
 
+#define __ masm.
+
 static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampolineRequests* requests) {
   if (requests == nullptr) {
     return true;
@@ -51,39 +53,35 @@ static bool emit_shared_trampolines(CodeBuffer* cb, CodeBuffer::SharedTrampoline
 
   MacroAssembler masm(cb);
 
-  bool p_succeeded = true;
   auto emit = [&](address dest, const CodeBuffer::Offsets &offsets) {
-    masm.set_code_section(cb->stubs());
-    if (!is_aligned(masm.offset() + NativeCallTrampolineStub::data_offset, wordSize)) {
-      if (cb->stubs()->maybe_expand_to_ensure_remaining(NativeInstruction::instruction_size) && cb->blob() == NULL) {
-        ciEnv::current()->record_failure("CodeCache is full");
-        p_succeeded = false;
-        return p_succeeded;
-      }
-      masm.align(wordSize, NativeCallTrampolineStub::data_offset);
-    }
+    assert(cb->stubs()->remaining() >= MacroAssembler::max_trampoline_stub_size(), "pre-allocated trampolines");
     LinkedListIterator<int> it(offsets.head());
     int offset = *it.next();
-    for (; !it.is_empty(); offset = *it.next()) {
-      masm.relocate(trampoline_stub_Relocation::spec(cb->insts()->start() + offset));
-    }
-    masm.set_code_section(cb->insts());
+    address stub = __ emit_trampoline_stub(offset, dest);
+    assert(stub, "pre-allocated trampolines");
 
-    address stub = masm.emit_trampoline_stub(offset, dest);
-    if (stub == nullptr) {
-      ciEnv::current()->record_failure("CodeCache is full");
-      p_succeeded = false;
+    address reloc_pc = cb->stubs()->end() - NativeCallTrampolineStub::instruction_size;
+    while (!it.is_empty()) {
+      offset = *it.next();
+      address caller_pc = cb->insts()->start() + offset;
+      cb->stubs()->relocate(reloc_pc, trampoline_stub_Relocation::spec(caller_pc));
     }
-    return p_succeeded;
+    return true;
   };
 
-  requests->iterate(emit);
-  return p_succeeded;
+  assert(requests->number_of_entries() >= 1, "at least one");
+  const int total_requested_size = MacroAssembler::max_trampoline_stub_size() * requests->number_of_entries();
+  if (cb->stubs()->maybe_expand_to_ensure_remaining(total_requested_size) && cb->blob() == NULL) {
+    ciEnv::current()->record_failure("CodeCache is full");
+    return false;
+  }
+
+  requests->iterate(emit);
+  return true;
 }
 
+#undef __
+
 bool CodeBuffer::pd_finalize_stubs() {
   return emit_shared_stubs_to_interp<MacroAssembler>(this, _shared_stub_to_interp_requests)
          && emit_shared_trampolines(this, _shared_trampoline_requests);

View file

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
  * Copyright (c) 2014, 2018, Red Hat Inc. All rights reserved.
  * Copyright (c) 2020, 2021, Huawei Technologies Co., Ltd. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@@ -69,15 +69,14 @@ address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf, address mark)
 #undef __
 
 int CompiledStaticCall::to_interp_stub_size() {
-  // (lui, addi, slli, addi, slli, addi) + (lui, addi, slli, addi, slli) + jalr
-  return 12 * NativeInstruction::instruction_size;
+  return MacroAssembler::static_call_stub_size();
 }
 
 int CompiledStaticCall::to_trampoline_stub_size() {
   // Somewhat pessimistically, we count 4 instructions here (although
   // there are only 3) because we sometimes emit an alignment nop.
   // Trampoline stubs are always word aligned.
-  return NativeInstruction::instruction_size + NativeCallTrampolineStub::instruction_size;
+  return MacroAssembler::max_trampoline_stub_size();
 }
 
 // Relocation entries for call stub, compiled java to interpreter.

View file

@@ -3142,8 +3142,8 @@ address MacroAssembler::ic_call(address entry, jint method_index) {
 address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
                                              address dest) {
-  address stub = start_a_stub(NativeInstruction::instruction_size
-                 + NativeCallTrampolineStub::instruction_size);
+  // Max stub size: alignment nop, TrampolineStub.
+  address stub = start_a_stub(max_trampoline_stub_size());
   if (stub == NULL) {
     return NULL;  // CodeBuffer::expand failed
   }
@@ -3183,6 +3183,16 @@ address MacroAssembler::emit_trampoline_stub(int insts_call_instruction_offset,
   return stub_start_addr;
 }
 
+int MacroAssembler::max_trampoline_stub_size() {
+  // Max stub size: alignment nop, TrampolineStub.
+  return NativeInstruction::instruction_size + NativeCallTrampolineStub::instruction_size;
+}
+
+int MacroAssembler::static_call_stub_size() {
+  // (lui, addi, slli, addi, slli, addi) + (lui, addi, slli, addi, slli) + jalr
+  return 12 * NativeInstruction::instruction_size;
+}
+
 Address MacroAssembler::add_memory_helper(const Address dst, Register tmp) {
   switch (dst.getMode()) {
   case Address::base_plus_offset:

View file

@@ -412,7 +412,9 @@ class MacroAssembler: public Assembler {
   }
 
   address emit_trampoline_stub(int insts_call_instruction_offset, address target);
+  static int max_trampoline_stub_size();
   void emit_static_call_stub();
+  static int static_call_stub_size();
 
   // The following 4 methods return the offset of the appropriate move instruction

View file

@@ -29,6 +29,8 @@
 #include "ci/ciEnv.hpp"
 #include "code/compiledIC.hpp"
 
+#define __ masm.
+
 template <typename MacroAssembler, int relocate_format = 0>
 bool emit_shared_stubs_to_interp(CodeBuffer* cb, SharedStubToInterpRequests* shared_stub_to_interp_requests) {
   if (shared_stub_to_interp_requests == NULL) {
@@ -46,7 +48,7 @@ bool emit_shared_stubs_to_interp(CodeBuffer* cb, SharedStubToInterpRequests* sha
   shared_stub_to_interp_requests->sort(by_shared_method);
   MacroAssembler masm(cb);
   for (int i = 0; i < shared_stub_to_interp_requests->length();) {
-    address stub = masm.start_a_stub(CompiledStaticCall::to_interp_stub_size());
+    address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
     if (stub == NULL) {
       ciEnv::current()->record_failure("CodeCache is full");
       return false;
@@ -55,13 +57,15 @@ bool emit_shared_stubs_to_interp(CodeBuffer* cb, SharedStubToInterpRequests* sha
     ciMethod* method = shared_stub_to_interp_requests->at(i).shared_method();
     do {
       address caller_pc = cb->insts_begin() + shared_stub_to_interp_requests->at(i).call_offset();
-      masm.relocate(static_stub_Relocation::spec(caller_pc), relocate_format);
+      __ relocate(static_stub_Relocation::spec(caller_pc), relocate_format);
       ++i;
     } while (i < shared_stub_to_interp_requests->length() && shared_stub_to_interp_requests->at(i).shared_method() == method);
-    masm.emit_static_call_stub();
-    masm.end_a_stub();
+    __ emit_static_call_stub();
+    __ end_a_stub();
   }
   return true;
 }
 
+#undef __
+
 #endif // SHARE_ASM_CODEBUFFER_INLINE_HPP
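
A note on the #define __ masm. / #undef __ pairs added above: this is the usual HotSpot assembler shorthand, which lets __ foo(...) read as masm.foo(...) within the guarded region. A trivial illustration (emit_example is a made-up function; nop() stands in for any MacroAssembler method):

    #define __ masm.
    
    static void emit_example(MacroAssembler& masm) {
      __ nop();  // the preprocessor rewrites this to masm.nop();
    }
    
    #undef __  // keep the shorthand scoped, as the patch does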