ruby/vm_exec.c
John Hawthorn 4127f413a6
Tailcall threaded VM using musttail
This adds a compile-time option, OPT_TAILCALL_THREADED_CODE, to use musttail for threading in the VM loop.

This works best with the latest LLVM, though the latest GCC also has musttail.

This also attempts to use __attribute__((preserve_none)) when available (LLVM 19 or so?). This changes the calling convention of the instruction methods to save all registers and use what would normally be callee-saved registers to pass arguments. This significantly reduces pushing and popping when we end up calling non-tailcall methods from our VM instructions (which is pretty common for us).

Aside from performance a thing I really like about this is that profilers (like linux perf) are able to show the VM instructions we're spending time inside. It also allows using objdump to print out the source for individual instructions, which is much easier to read.

There have been various popular posts about using this technique:
* https://blog.reverberate.org/2021/04/21/musttail-efficient-interpreters.html
* https://sillycross.github.io/2022/11/22/2022-11-22/
* https://github.com/wasm3/wasm3/blob/main/docs/Interpreter.md#m3-massey-meta-machine
* https://blog.nelhage.com/post/cpython-tail-call/
2025-05-26 20:08:12 -07:00

177 lines
4 KiB
C

/* -*-c-*- */
/**********************************************************************
vm_exec.c -
$Author$
Copyright (C) 2004-2007 Koichi Sasada
**********************************************************************/
#include <math.h>
#if USE_YJIT
// The number of instructions executed on vm_exec_core. --yjit-stats uses this.
uint64_t rb_vm_insns_count = 0;
#endif
#if VM_COLLECT_USAGE_DETAILS
/* Forward declaration for the per-instruction usage-analysis hook;
 * the definition is not in this chunk of the file. */
static void vm_analysis_insn(int insn);
#endif
/* DECL_SC_REG(type, r, reg): declare a "stack cache" variable reg_<r>,
 * pinned to a specific machine register via GCC-style register-asm on
 * architectures where that is supported (the branches below supply the
 * per-arch register-name prefix: "r"/"e"/"x" + the reg suffix).
 * Under VMDEBUG, or on unknown toolchains, no explicit register binding
 * is made and the compiler allocates the variable normally. */
#if VMDEBUG > 0
#define DECL_SC_REG(type, r, reg) register type reg_##r
#elif defined(__GNUC__) && defined(__x86_64__)
#define DECL_SC_REG(type, r, reg) register type reg_##r __asm__("r" reg)
#elif defined(__GNUC__) && defined(__i386__)
#define DECL_SC_REG(type, r, reg) register type reg_##r __asm__("e" reg)
#elif defined(__GNUC__) && (defined(__powerpc64__) || defined(__POWERPC__))
#define DECL_SC_REG(type, r, reg) register type reg_##r __asm__("r" reg)
#elif defined(__GNUC__) && defined(__aarch64__)
#define DECL_SC_REG(type, r, reg) register type reg_##r __asm__("x" reg)
#else
/* Fallback: plain register hint, no fixed machine register. */
#define DECL_SC_REG(type, r, reg) register type reg_##r
#endif
/* #define DECL_SC_REG(r, reg) VALUE reg_##r */
#if !OPT_CALL_THREADED_CODE && !OPT_TAILCALL_THREADED_CODE
/*
 * Main interpreter loop for the non-call-threaded configurations
 * (token/direct threading, or plain dispatch).
 *
 * ec - execution context to run. As a special probe, when called with
 *      ec == 0 under token/direct threading this returns the instruction
 *      address table instead of executing anything (see
 *      rb_vm_get_insns_address_table below).
 *
 * Returns the VALUE produced by the dispatched instructions, or the
 * address table cast to VALUE for the ec == 0 probe.
 */
static VALUE
vm_exec_core(rb_execution_context_t *ec)
{
/* Pin the cached PC and CFP to fixed machine registers on architectures
 * where DECL_SC_REG supports it; otherwise fall back to ordinary locals. */
#if defined(__GNUC__) && defined(__i386__)
DECL_SC_REG(const VALUE *, pc, "di");
DECL_SC_REG(rb_control_frame_t *, cfp, "si");
#define USE_MACHINE_REGS 1
#elif defined(__GNUC__) && defined(__x86_64__)
DECL_SC_REG(const VALUE *, pc, "14");
DECL_SC_REG(rb_control_frame_t *, cfp, "15");
#define USE_MACHINE_REGS 1
#elif defined(__GNUC__) && (defined(__powerpc64__) || defined(__POWERPC__))
DECL_SC_REG(const VALUE *, pc, "14");
DECL_SC_REG(rb_control_frame_t *, cfp, "15");
#define USE_MACHINE_REGS 1
#elif defined(__GNUC__) && defined(__aarch64__)
DECL_SC_REG(const VALUE *, pc, "19");
DECL_SC_REG(rb_control_frame_t *, cfp, "20");
#define USE_MACHINE_REGS 1
#else
register rb_control_frame_t *reg_cfp;
const VALUE *reg_pc;
#define USE_MACHINE_REGS 0
#endif
/* When the PC lives in a pinned register, retarget the VM register
 * accessor macros at the local reg_pc so vm.inc reads/writes it
 * directly, keeping reg_cfp->pc in sync on SET_PC. */
#if USE_MACHINE_REGS
#undef RESTORE_REGS
#define RESTORE_REGS() \
{ \
VM_REG_CFP = ec->cfp; \
reg_pc = reg_cfp->pc; \
}
#undef VM_REG_PC
#define VM_REG_PC reg_pc
#undef GET_PC
#define GET_PC() (reg_pc)
#undef SET_PC
#define SET_PC(x) (reg_cfp->pc = VM_REG_PC = (x))
#endif
#if OPT_TOKEN_THREADED_CODE || OPT_DIRECT_THREADED_CODE
#include "vmtc.inc"
/* Probe mode: with a null execution context, just hand back the
 * dispatch table generated by vmtc.inc. */
if (UNLIKELY(ec == 0)) {
return (VALUE)insns_address_table;
}
#endif
/* Load the cached frame pointer and PC from the execution context,
 * then enter the dispatch loop; vm.inc expands to the instruction
 * bodies between INSN_DISPATCH and END_INSNS_DISPATCH. */
reg_cfp = ec->cfp;
reg_pc = reg_cfp->pc;
first:
INSN_DISPATCH();
/*****************/
#include "vm.inc"
/*****************/
END_INSNS_DISPATCH();
/* unreachable */
rb_bug("vm_eval: unreachable");
goto first;
}
const void **
rb_vm_get_insns_address_table(void)
{
return (const void **)vm_exec_core(0);
}
#else /* OPT_CALL_THREADED_CODE || OPT_TAILCALL_THREADED_CODE */
#if OPT_TAILCALL_THREADED_CODE
/* Tailcall threading keeps the PC in a local reg_pc; retarget the VM
 * register accessor macros at it (mirroring the USE_MACHINE_REGS block
 * in the non-call-threaded vm_exec_core above), keeping reg_cfp->pc in
 * sync whenever SET_PC is used. */
#undef RESTORE_REGS
#define RESTORE_REGS() \
{ \
VM_REG_CFP = ec->cfp; \
reg_pc = reg_cfp->pc; \
}
#undef VM_REG_PC
#define VM_REG_PC reg_pc
#undef GET_PC
#define GET_PC() (reg_pc)
#undef SET_PC
#define SET_PC(x) (reg_cfp->pc = VM_REG_PC = (x))
#endif
#include "vm.inc"
#include "vmtc.inc"
/*
 * Return the VM's instruction dispatch table. In the call/tailcall
 * threaded configurations the table comes directly from vmtc.inc.
 */
const void **
rb_vm_get_insns_address_table(void)
{
    const void **table = (const void **)insns_address_table;
    return table;
}
/*
 * Interpreter entry for the call-threaded and tailcall-threaded
 * configurations: each instruction is a function, and dispatch goes
 * through the function pointer stored at the current PC.
 *
 * ec - execution context to run.
 *
 * Returns the thread's pending retval if the executed instructions set
 * one (clearing it to Qundef), otherwise returns and clears ec->errinfo.
 */
static VALUE
vm_exec_core(rb_execution_context_t *ec)
{
    register rb_control_frame_t *reg_cfp = ec->cfp;
    rb_thread_t *th;

/* Use #if, not #ifdef: OPT_TAILCALL_THREADED_CODE is tested with #if
 * elsewhere in this file (so it may be defined to 0); #ifdef would
 * wrongly select the tailcall path in a call-threaded build. */
#if OPT_TAILCALL_THREADED_CODE
    /* Enter the instruction chain once; instructions tail-call each
     * other, and the chain returns a null frame pointer when execution
     * leaves the VM loop. */
    const VALUE *reg_pc = reg_cfp->pc;
    reg_cfp = ((rb_insn_tailcall_func_t *) (*GET_PC()))(INSN_FUNC_ARGS);
    RUBY_ASSERT_ALWAYS(reg_cfp == 0);
#else
    /* Call-threaded: invoke one instruction function per iteration; a
     * null frame pointer signals that execution finished. */
    while (1) {
        reg_cfp = ((rb_insn_func_t) (*GET_PC()))(ec, reg_cfp);
        if (UNLIKELY(reg_cfp == 0)) {
            break;
        }
    }
#endif

    if (!UNDEF_P((th = rb_ec_thread_ptr(ec))->retval)) {
        VALUE ret = th->retval;
        th->retval = Qundef;
        return ret;
    }
    else {
        VALUE err = ec->errinfo;
        ec->errinfo = Qnil;
        return err;
    }
}
#endif