ZJIT: Stop duplicating context-less side exits (#14215)

Takashi Kokubun 2025-08-14 08:31:23 -07:00 committed by GitHub
parent c30d900547
commit a677220aba
5 changed files with 46 additions and 60 deletions
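
The core of the change, condensed from the diff below: Target::SideExit no longer wraps its restore state in an Option<SideExitContext>. The pc, stack, and locals fields now live directly on the variant, so every SideExit carries a full context, and the one context-less user (the exit taken right after a JIT-to-JIT call) jumps to a shared exit trampoline instead. A rough before/after sketch of the variant (paraphrased; field docs trimmed, and the Option<Label> type for label is inferred from how it is matched below):

    // Before: the context could be None for exits right after JIT-to-JIT calls.
    SideExit {
        context: Option<SideExitContext>,
        reason: SideExitReason,
        label: Option<Label>,
    },

    // After: the context is always present; context-less exits instead jump
    // straight to the trampoline returned by ZJITState::get_exit_code().
    SideExit {
        pc: *const VALUE,
        stack: Vec<Opnd>,
        locals: Vec<Opnd>,
        reason: SideExitReason,
        label: Option<Label>,
    },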


@@ -1372,7 +1372,7 @@ impl Assembler
     pub fn compile_with_regs(self, cb: &mut CodeBlock, regs: Vec<Reg>) -> Option<(CodePtr, Vec<CodePtr>)> {
         let asm = self.arm64_split();
         let mut asm = asm.alloc_regs(regs)?;
-        asm.compile_side_exits()?;
+        asm.compile_side_exits();

         // Create label instances in the code block
         for (idx, name) in asm.label_names.iter().enumerate() {


@@ -256,14 +256,6 @@ impl From<VALUE> for Opnd {
     }
 }

-/// Set of things we need to restore for side exits.
-#[derive(Clone, Debug)]
-pub struct SideExitContext {
-    pub pc: *const VALUE,
-    pub stack: Vec<Opnd>,
-    pub locals: Vec<Opnd>,
-}
-
 /// Branch target (something that we can jump to)
 /// for branch instructions
 #[derive(Clone, Debug)]

@@ -275,9 +267,9 @@ pub enum Target
     Label(Label),
     /// Side exit to the interpreter
     SideExit {
-        /// Context to restore on regular side exits. None for side exits right
-        /// after JIT-to-JIT calls because we restore them before the JIT call.
-        context: Option<SideExitContext>,
+        pc: *const VALUE,
+        stack: Vec<Opnd>,
+        locals: Vec<Opnd>,
         /// We use this to enrich asm comments.
         reason: SideExitReason,
         /// Some if the side exit should write this label. We use it for patch points.

@@ -761,7 +753,7 @@ impl<'a> Iterator for InsnOpndIterator<'a> {
             Insn::Label(target) |
             Insn::LeaJumpTarget { target, .. } |
             Insn::PatchPoint(target) => {
-                if let Target::SideExit { context: Some(SideExitContext { stack, locals, .. }), .. } = target {
+                if let Target::SideExit { stack, locals, .. } = target {
                     let stack_idx = self.idx;
                     if stack_idx < stack.len() {
                         let opnd = &stack[stack_idx];

@@ -786,7 +778,7 @@ impl<'a> Iterator for InsnOpndIterator<'a> {
                     return Some(opnd);
                 }

-                if let Target::SideExit { context: Some(SideExitContext { stack, locals, .. }), .. } = target {
+                if let Target::SideExit { stack, locals, .. } = target {
                     let stack_idx = self.idx - 1;
                     if stack_idx < stack.len() {
                         let opnd = &stack[stack_idx];

@@ -917,7 +909,7 @@ impl<'a> InsnOpndMutIterator<'a> {
             Insn::Label(target) |
             Insn::LeaJumpTarget { target, .. } |
             Insn::PatchPoint(target) => {
-                if let Target::SideExit { context: Some(SideExitContext { stack, locals, .. }), .. } = target {
+                if let Target::SideExit { stack, locals, .. } = target {
                     let stack_idx = self.idx;
                     if stack_idx < stack.len() {
                         let opnd = &mut stack[stack_idx];

@@ -942,7 +934,7 @@ impl<'a> InsnOpndMutIterator<'a> {
                     return Some(opnd);
                 }

-                if let Target::SideExit { context: Some(SideExitContext { stack, locals, .. }), .. } = target {
+                if let Target::SideExit { stack, locals, .. } = target {
                     let stack_idx = self.idx - 1;
                     if stack_idx < stack.len() {
                         let opnd = &mut stack[stack_idx];

@@ -1555,8 +1547,7 @@ impl Assembler
     }

     /// Compile Target::SideExit and convert it into Target::CodePtr for all instructions
-    #[must_use]
-    pub fn compile_side_exits(&mut self) -> Option<()> {
+    pub fn compile_side_exits(&mut self) {
         let mut targets = HashMap::new();
         for (idx, insn) in self.insns.iter().enumerate() {
             if let Some(target @ Target::SideExit { .. }) = insn.target() {

@@ -1567,7 +1558,7 @@ impl Assembler
         for (idx, target) in targets {
             // Compile a side exit. Note that this is past the split pass and alloc_regs(),
             // so you can't use a VReg or an instruction that needs to be split.
-            if let Target::SideExit { context, reason, label } = target {
+            if let Target::SideExit { pc, stack, locals, reason, label } = target {
                 asm_comment!(self, "Exit: {reason}");
                 let side_exit_label = if let Some(label) = label {
                     Target::Label(label)

@@ -1578,7 +1569,6 @@ impl Assembler

                 // Restore the PC and the stack for regular side exits. We don't do this for
                 // side exits right after JIT-to-JIT calls, which restore them before the call.
-                if let Some(SideExitContext { pc, stack, locals }) = context {
                     asm_comment!(self, "write stack slots: {stack:?}");
                     for (idx, &opnd) in stack.iter().enumerate() {
                         self.store(Opnd::mem(64, SP, idx as i32 * SIZEOF_VALUE_I32), opnd);

@@ -1597,7 +1587,6 @@ impl Assembler
                     self.lea_into(Opnd::Reg(Assembler::SCRATCH_REG), Opnd::mem(64, SP, stack.len() as i32 * SIZEOF_VALUE_I32));
                     let cfp_sp = Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SP);
                     self.store(cfp_sp, Opnd::Reg(Assembler::SCRATCH_REG));
-                }

                 asm_comment!(self, "exit to the interpreter");
                 self.frame_teardown(&[]); // matching the setup in :bb0-prologue:

@@ -1607,7 +1596,6 @@ impl Assembler
                 *self.insns[idx].target_mut().unwrap() = side_exit_label;
             }
         }
-        Some(())
     }
 }
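Taken together, the lir.rs hunks make compile_side_exits infallible: the Option<()> return value existed only for the context-less case, and with pc, stack, and locals always present the state restore is emitted unconditionally. A comment-level sketch of the per-exit code after this change (paraphrased from the hunks above; the locals and PC stores sit in unchanged code between the hunks and are inferred from the field names and the surrounding comments):

    // For each instruction whose target is Target::SideExit { pc, stack, locals, reason, label }:
    //   1. write every `stack` operand into its VM stack slot relative to SP
    //   2. write the `locals` operands and `pc` back into the frame (unchanged code, not shown in the diff)
    //   3. compute the new top of stack with lea_into() and store it into cfp->sp
    //   4. tear down the JIT frame and return to the interpreter
    // The old `if let Some(SideExitContext { .. }) = context { ... }` guard around the restore
    // is gone, so none of these steps can be skipped and the function no longer returns Option<()>.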


@@ -895,7 +895,7 @@ impl Assembler
     pub fn compile_with_regs(self, cb: &mut CodeBlock, regs: Vec<Reg>) -> Option<(CodePtr, Vec<CodePtr>)> {
         let asm = self.x86_split();
         let mut asm = asm.alloc_regs(regs)?;
-        asm.compile_side_exits()?;
+        asm.compile_side_exits();

         // Create label instances in the code block
         for (idx, name) in asm.label_names.iter().enumerate() {


@@ -9,7 +9,7 @@ use crate::gc::{append_gc_offsets, get_or_create_iseq_payload, get_or_create_ise
 use crate::state::ZJITState;
 use crate::stats::{counter_ptr, with_time_stat, Counter, Counter::compile_time_ns};
 use crate::{asm::CodeBlock, cruby::*, options::debug, virtualmem::CodePtr};
-use crate::backend::lir::{self, asm_comment, asm_ccall, Assembler, Opnd, SideExitContext, Target, CFP, C_ARG_OPNDS, C_RET_OPND, EC, NATIVE_STACK_PTR, NATIVE_BASE_PTR, SP};
+use crate::backend::lir::{self, asm_comment, asm_ccall, Assembler, Opnd, Target, CFP, C_ARG_OPNDS, C_RET_OPND, EC, NATIVE_STACK_PTR, NATIVE_BASE_PTR, SP};
 use crate::hir::{iseq_to_hir, Block, BlockId, BranchEdge, Invariant, RangeType, SideExitReason, SideExitReason::*, SpecialObjectType, SELF_PARAM_IDX};
 use crate::hir::{Const, FrameState, Function, Insn, InsnId};
 use crate::hir_type::{types, Type};

@@ -908,7 +908,7 @@ fn gen_send_without_block_direct(
     asm_comment!(asm, "side-exit if callee side-exits");
     asm.cmp(ret, Qundef.into());
     // Restore the C stack pointer on exit
-    asm.je(Target::SideExit { context: None, reason: CalleeSideExit, label: None });
+    asm.je(ZJITState::get_exit_code().into());

     asm_comment!(asm, "restore SP register for the caller");
     let new_sp = asm.sub(SP, sp_offset.into());

@@ -1339,11 +1339,9 @@ fn build_side_exit(jit: &mut JITState, state: &FrameState, reason: SideExitReaso
     }

     let target = Target::SideExit {
-        context: Some(SideExitContext {
-            pc: state.pc,
-            stack,
-            locals,
-        }),
+        pc: state.pc,
+        stack,
+        locals,
         reason,
         label,
     };

@@ -1414,7 +1412,7 @@ c_callable! {
         if cb.has_dropped_bytes() || payload.status == IseqStatus::CantCompile {
             // Exit to the interpreter
             set_pc_and_sp(iseq, ec, sp);
-            return ZJITState::get_stub_exit().raw_ptr(cb);
+            return ZJITState::get_exit_code().raw_ptr(cb);
         }

         // Otherwise, attempt to compile the ISEQ. We have to mark_all_executable() beyond this point.

@@ -1424,7 +1422,7 @@ c_callable! {
         } else {
             // Exit to the interpreter
             set_pc_and_sp(iseq, ec, sp);
-            ZJITState::get_stub_exit()
+            ZJITState::get_exit_code()
         };
         cb.mark_all_executable();
         code_ptr.raw_ptr(cb)

@@ -1494,12 +1492,12 @@ fn gen_function_stub(cb: &mut CodeBlock, iseq: IseqPtr, branch: Rc<Branch>) -> O
     asm.compile(cb)
 }

-/// Generate a trampoline that is used when a function stub fails to compile the ISEQ
-pub fn gen_stub_exit(cb: &mut CodeBlock) -> Option<CodePtr> {
+/// Generate a trampoline that is used when a function exits without restoring PC and the stack
+pub fn gen_exit(cb: &mut CodeBlock) -> Option<CodePtr> {
     let mut asm = Assembler::new();
     asm_comment!(asm, "exit from function stub");

-    asm.frame_teardown(lir::JIT_PRESERVED_REGS);
+    asm.frame_teardown(&[]); // matching the setup in :bb0-prologue:
     asm.cret(Qundef.into());

     asm.compile(cb).map(|(code_ptr, gc_offsets)| {
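On the codegen side, two exit paths remain after this change; a condensed sketch with identifiers taken from the hunks above (the jnz call site is hypothetical, shown only to illustrate how a branch consumes the target):

    // Regular side exit: carries the full frame state so compile_side_exits()
    // can restore PC, stack, and locals before returning to the interpreter.
    let target = Target::SideExit { pc: state.pc, stack, locals, reason, label };
    asm.jnz(target); // hypothetical use; any branch instruction can take the target

    // Exit right after a JIT-to-JIT call: PC and the stack were already restored
    // before the call, so the code jumps to the shared trampoline built by gen_exit().
    asm.cmp(ret, Qundef.into());
    asm.je(ZJITState::get_exit_code().into());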


@@ -1,4 +1,4 @@
-use crate::codegen::gen_stub_exit;
+use crate::codegen::gen_exit;
 use crate::cruby::{self, rb_bug_panic_hook, rb_vm_insns_count, EcPtr, Qnil, VALUE};
 use crate::cruby_methods;
 use crate::invariants::Invariants;

@@ -33,8 +33,8 @@ pub struct ZJITState {
     /// Properties of core library methods
     method_annotations: cruby_methods::Annotations,

-    /// Side-exit trampoline used when it fails to compile the ISEQ for a function stub
-    stub_exit: CodePtr,
+    /// Trampoline to side-exit without restoring PC or the stack
+    exit_code: CodePtr,
 }

 /// Private singleton instance of the codegen globals

@@ -83,7 +83,7 @@ impl ZJITState {
         #[cfg(test)]
         let mut cb = CodeBlock::new_dummy();

-        let stub_exit = gen_stub_exit(&mut cb).unwrap();
+        let exit_code = gen_exit(&mut cb).unwrap();

         // Initialize the codegen globals instance
         let zjit_state = ZJITState {

@@ -92,7 +92,7 @@ impl ZJITState {
             invariants: Invariants::default(),
             assert_compiles: false,
             method_annotations: cruby_methods::init(),
-            stub_exit,
+            exit_code,
         };
         unsafe { ZJIT_STATE = Some(zjit_state); }
     }

@@ -169,9 +169,9 @@ impl ZJITState {
         }
     }

-    /// Return a code pointer to the side-exit trampoline for function stubs
-    pub fn get_stub_exit() -> CodePtr {
-        ZJITState::get_instance().stub_exit
+    /// Return a code pointer to the side-exit trampoline
+    pub fn get_exit_code() -> CodePtr {
+        ZJITState::get_instance().exit_code
     }
 }