YJIT: Local variable register allocation (#11157)

* YJIT: Local variable register allocation

* locals are not stack temps

* Rename RegTemps to RegMappings

* Rename RegMapping to RegOpnd

* Rename local_size to num_locals

* s/stack value/operand/

* Rename spill_temps() to spill_regs()

* Clarify when num_locals becomes None

* Mention that InsnOut uses different registers

* Rename get_reg_mapping to get_reg_opnd

* Resurrect --yjit-temp-regs capability

* Use MAX_CTX_TEMPS and MAX_CTX_LOCALS
This commit is contained in:
Takashi Kokubun 2024-07-15 07:56:57 -07:00 committed by GitHub
parent 461a7b8316
commit ec773e15f4
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 415 additions and 301 deletions

View file

@ -381,7 +381,7 @@ impl Assembler
}
let live_ranges: Vec<usize> = take(&mut self.live_ranges);
let mut asm_local = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
let mut asm_local = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits), self.num_locals);
let asm = &mut asm_local;
let mut iterator = self.into_draining_iter();
@ -1383,7 +1383,7 @@ mod tests {
use crate::disasm::*;
fn setup_asm() -> (Assembler, CodeBlock) {
(Assembler::new(), CodeBlock::new_dummy(1024))
(Assembler::new(0), CodeBlock::new_dummy(1024))
}
#[test]
@ -1682,7 +1682,7 @@ mod tests {
#[test]
fn test_bcond_straddling_code_pages() {
const LANDING_PAGE: usize = 65;
let mut asm = Assembler::new();
let mut asm = Assembler::new(0);
let mut cb = CodeBlock::new_dummy_with_freed_pages(vec![0, LANDING_PAGE]);
// Skip to near the end of the page. Room for two instructions.

View file

@ -2,11 +2,11 @@ use std::collections::HashMap;
use std::fmt;
use std::convert::From;
use std::mem::take;
use crate::codegen::{gen_outlined_exit, gen_counted_exit};
use crate::cruby::{vm_stack_canary, SIZEOF_VALUE_I32, VALUE};
use crate::codegen::{gen_counted_exit, gen_outlined_exit};
use crate::cruby::{vm_stack_canary, SIZEOF_VALUE_I32, VALUE, VM_ENV_DATA_SIZE};
use crate::virtualmem::CodePtr;
use crate::asm::{CodeBlock, OutlinedCb};
use crate::core::{Context, RegTemps, MAX_REG_TEMPS};
use crate::core::{Context, RegMapping, RegOpnd, MAX_CTX_TEMPS};
use crate::options::*;
use crate::stats::*;
@ -77,10 +77,12 @@ pub enum Opnd
num_bits: u8,
/// ctx.stack_size when this operand is made. Used with idx for Opnd::Reg.
stack_size: u8,
/// The number of local variables in the current ISEQ. Used only for locals.
num_locals: Option<u32>,
/// ctx.sp_offset when this operand is made. Used with idx for Opnd::Mem.
sp_offset: i8,
/// ctx.reg_temps when this operand is read. Used for register allocation.
reg_temps: Option<RegTemps>
/// ctx.reg_mapping when this operand is read. Used for register allocation.
reg_mapping: Option<RegMapping>
},
// Low-level operands, for lowering
@ -172,7 +174,7 @@ impl Opnd
Opnd::Reg(reg) => Some(Opnd::Reg(reg.with_num_bits(num_bits))),
Opnd::Mem(Mem { base, disp, .. }) => Some(Opnd::Mem(Mem { base, disp, num_bits })),
Opnd::InsnOut { idx, .. } => Some(Opnd::InsnOut { idx, num_bits }),
Opnd::Stack { idx, stack_size, sp_offset, reg_temps, .. } => Some(Opnd::Stack { idx, num_bits, stack_size, sp_offset, reg_temps }),
Opnd::Stack { idx, stack_size, num_locals, sp_offset, reg_mapping, .. } => Some(Opnd::Stack { idx, num_bits, stack_size, num_locals, sp_offset, reg_mapping }),
_ => None,
}
}
@ -227,28 +229,26 @@ impl Opnd
Self::match_num_bits_iter(opnds.iter())
}
/// Calculate Opnd::Stack's index from the stack bottom.
pub fn stack_idx(&self) -> u8 {
self.get_stack_idx().unwrap()
/// Convert Opnd::Stack into RegMapping
pub fn reg_opnd(&self) -> RegOpnd {
self.get_reg_opnd().unwrap()
}
/// Calculate Opnd::Stack's index from the stack bottom if it's Opnd::Stack.
pub fn get_stack_idx(&self) -> Option<u8> {
match self {
Opnd::Stack { idx, stack_size, .. } => {
Some((*stack_size as isize - *idx as isize - 1) as u8)
},
_ => None
}
}
/// Get the index for stack temp registers.
pub fn reg_idx(&self) -> usize {
match self {
Opnd::Stack { .. } => {
self.stack_idx() as usize % get_option!(num_temp_regs)
},
_ => unreachable!(),
/// Convert an operand into RegMapping if it's Opnd::Stack
pub fn get_reg_opnd(&self) -> Option<RegOpnd> {
match *self {
Opnd::Stack { idx, stack_size, num_locals, .. } => Some(
if let Some(num_locals) = num_locals {
let last_idx = stack_size as i32 + VM_ENV_DATA_SIZE as i32 - 1;
assert!(last_idx <= idx, "Local index {} must be >= last local index {}", idx, last_idx);
assert!(idx <= last_idx + num_locals as i32, "Local index {} must be < last local index {} + local size {}", idx, last_idx, num_locals);
RegOpnd::Local((last_idx + num_locals as i32 - idx) as u8)
} else {
assert!(idx < stack_size as i32);
RegOpnd::Stack((stack_size as i32 - idx - 1) as u8)
}
),
_ => None,
}
}
}
@ -974,7 +974,7 @@ pub struct SideExitContext {
/// Context fields used by get_generic_ctx()
pub stack_size: u8,
pub sp_offset: i8,
pub reg_temps: RegTemps,
pub reg_mapping: RegMapping,
pub is_return_landing: bool,
pub is_deferred: bool,
}
@ -986,7 +986,7 @@ impl SideExitContext {
pc,
stack_size: ctx.get_stack_size(),
sp_offset: ctx.get_sp_offset(),
reg_temps: ctx.get_reg_temps(),
reg_mapping: ctx.get_reg_mapping(),
is_return_landing: ctx.is_return_landing(),
is_deferred: ctx.is_deferred(),
};
@ -1002,7 +1002,7 @@ impl SideExitContext {
let mut ctx = Context::default();
ctx.set_stack_size(self.stack_size);
ctx.set_sp_offset(self.sp_offset);
ctx.set_reg_temps(self.reg_temps);
ctx.set_reg_mapping(self.reg_mapping);
if self.is_return_landing {
ctx.set_as_return_landing();
}
@ -1031,6 +1031,13 @@ pub struct Assembler {
/// Context for generating the current insn
pub ctx: Context,
/// The current ISEQ's local table size. asm.local_opnd() uses this, and it's
/// sometimes hard to pass this value, e.g. asm.spill_regs() in asm.ccall().
///
/// `None` means we're not assembling for an ISEQ, or that the local size is
/// not relevant.
pub(super) num_locals: Option<u32>,
/// Side exit caches for each SideExitContext
pub(super) side_exits: HashMap<SideExitContext, CodePtr>,
@ -1046,16 +1053,31 @@ pub struct Assembler {
impl Assembler
{
pub fn new() -> Self {
Self::new_with_label_names(Vec::default(), HashMap::default())
/// Create an Assembler for ISEQ-specific code.
/// It includes all inline code and some outlined code like side exits and stubs.
pub fn new(num_locals: u32) -> Self {
Self::new_with_label_names(Vec::default(), HashMap::default(), Some(num_locals))
}
pub fn new_with_label_names(label_names: Vec<String>, side_exits: HashMap<SideExitContext, CodePtr>) -> Self {
/// Create an Assembler for outlined code that is not specific to any ISEQ,
/// e.g. trampolines that are shared globally.
pub fn new_without_iseq() -> Self {
Self::new_with_label_names(Vec::default(), HashMap::default(), None)
}
/// Create an Assembler with parameters that are populated by another Assembler instance.
/// This API is used for copying an Assembler for the next compiler pass.
pub fn new_with_label_names(
label_names: Vec<String>,
side_exits: HashMap<SideExitContext, CodePtr>,
num_locals: Option<u32>
) -> Self {
Self {
insns: Vec::with_capacity(ASSEMBLER_INSNS_CAPACITY),
live_ranges: Vec::with_capacity(ASSEMBLER_INSNS_CAPACITY),
label_names,
ctx: Context::default(),
num_locals,
side_exits,
side_exit_pc: None,
side_exit_stack_size: None,
@ -1064,11 +1086,16 @@ impl Assembler
}
/// Get the list of registers that can be used for stack temps.
pub fn get_temp_regs() -> &'static [Reg] {
pub fn get_temp_regs2() -> &'static [Reg] {
let num_regs = get_option!(num_temp_regs);
&TEMP_REGS[0..num_regs]
}
/// Get the number of locals for the ISEQ being compiled
pub fn get_num_locals(&self) -> Option<u32> {
self.num_locals
}
/// Set a context for generating side exits
pub fn set_side_exit_context(&mut self, pc: *mut VALUE, stack_size: u8) {
self.side_exit_pc = Some(pc);
@ -1090,31 +1117,32 @@ impl Assembler
let mut opnd_iter = insn.opnd_iter_mut();
while let Some(opnd) = opnd_iter.next() {
match opnd {
match *opnd {
// If we find any InsnOut from previous instructions, we're going to update
// the live range of the previous instruction to point to this one.
Opnd::InsnOut { idx, .. } => {
assert!(*idx < self.insns.len());
self.live_ranges[*idx] = insn_idx;
assert!(idx < self.insns.len());
self.live_ranges[idx] = insn_idx;
}
Opnd::Mem(Mem { base: MemBase::InsnOut(idx), .. }) => {
assert!(*idx < self.insns.len());
self.live_ranges[*idx] = insn_idx;
assert!(idx < self.insns.len());
self.live_ranges[idx] = insn_idx;
}
// Set current ctx.reg_temps to Opnd::Stack.
Opnd::Stack { idx, num_bits, stack_size, sp_offset, reg_temps: None } => {
// Set current ctx.reg_mapping to Opnd::Stack.
Opnd::Stack { idx, num_bits, stack_size, num_locals, sp_offset, reg_mapping: None } => {
assert_eq!(
self.ctx.get_stack_size() as i16 - self.ctx.get_sp_offset() as i16,
*stack_size as i16 - *sp_offset as i16,
stack_size as i16 - sp_offset as i16,
"Opnd::Stack (stack_size: {}, sp_offset: {}) expects a different SP position from asm.ctx (stack_size: {}, sp_offset: {})",
*stack_size, *sp_offset, self.ctx.get_stack_size(), self.ctx.get_sp_offset(),
stack_size, sp_offset, self.ctx.get_stack_size(), self.ctx.get_sp_offset(),
);
*opnd = Opnd::Stack {
idx: *idx,
num_bits: *num_bits,
stack_size: *stack_size,
sp_offset: *sp_offset,
reg_temps: Some(self.ctx.get_reg_temps()),
idx,
num_bits,
stack_size,
num_locals,
sp_offset,
reg_mapping: Some(self.ctx.get_reg_mapping()),
};
}
_ => {}
@ -1141,7 +1169,7 @@ impl Assembler
// Get a cached side exit
let side_exit = match self.side_exits.get(&side_exit_context) {
None => {
let exit_code = gen_outlined_exit(side_exit_context.pc, &side_exit_context.get_ctx(), ocb)?;
let exit_code = gen_outlined_exit(side_exit_context.pc, self.num_locals.unwrap(), &side_exit_context.get_ctx(), ocb)?;
self.side_exits.insert(*side_exit_context, exit_code);
exit_code
}
@ -1175,20 +1203,20 @@ impl Assembler
}
// Convert Opnd::Stack to Opnd::Reg
fn reg_opnd(opnd: &Opnd) -> Opnd {
let regs = Assembler::get_temp_regs();
fn reg_opnd(opnd: &Opnd, reg_idx: usize) -> Opnd {
let regs = Assembler::get_temp_regs2();
if let Opnd::Stack { num_bits, .. } = *opnd {
incr_counter!(temp_reg_opnd);
Opnd::Reg(regs[opnd.reg_idx()]).with_num_bits(num_bits).unwrap()
Opnd::Reg(regs[reg_idx]).with_num_bits(num_bits).unwrap()
} else {
unreachable!()
}
}
match opnd {
Opnd::Stack { reg_temps, .. } => {
if opnd.stack_idx() < MAX_REG_TEMPS && reg_temps.unwrap().get(opnd.stack_idx()) {
reg_opnd(opnd)
Opnd::Stack { reg_mapping, .. } => {
if let Some(reg_idx) = reg_mapping.unwrap().get_reg(opnd.reg_opnd()) {
reg_opnd(opnd, reg_idx)
} else {
mem_opnd(opnd)
}
@ -1198,18 +1226,11 @@ impl Assembler
}
/// Allocate a register to a stack temp if available.
pub fn alloc_temp_reg(&mut self, stack_idx: u8) {
if get_option!(num_temp_regs) == 0 {
return;
}
pub fn alloc_reg(&mut self, mapping: RegOpnd) {
// Allocate a register if there's no conflict.
let mut reg_temps = self.ctx.get_reg_temps();
if reg_temps.conflicts_with(stack_idx) {
assert!(!reg_temps.get(stack_idx));
} else {
reg_temps.set(stack_idx, true);
self.set_reg_temps(reg_temps);
let mut reg_mapping = self.ctx.get_reg_mapping();
if reg_mapping.alloc_reg(mapping) {
self.set_reg_mapping(reg_mapping);
}
}
@ -1220,47 +1241,58 @@ impl Assembler
self.ctx.clear_local_types();
}
/// Spill all live stack temps from registers to the stack
pub fn spill_temps(&mut self) {
/// Spill all live registers to the stack
pub fn spill_regs(&mut self) {
// Forget registers above the stack top
let mut reg_temps = self.ctx.get_reg_temps();
for stack_idx in self.ctx.get_stack_size()..MAX_REG_TEMPS {
reg_temps.set(stack_idx, false);
let mut reg_mapping = self.ctx.get_reg_mapping();
for stack_idx in self.ctx.get_stack_size()..MAX_CTX_TEMPS as u8 {
reg_mapping.dealloc_reg(RegOpnd::Stack(stack_idx));
}
self.set_reg_temps(reg_temps);
self.set_reg_mapping(reg_mapping);
// Spill live stack temps
if self.ctx.get_reg_temps() != RegTemps::default() {
asm_comment!(self, "spill_temps: {:08b} -> {:08b}", self.ctx.get_reg_temps().as_u8(), RegTemps::default().as_u8());
for stack_idx in 0..u8::min(MAX_REG_TEMPS, self.ctx.get_stack_size()) {
if self.ctx.get_reg_temps().get(stack_idx) {
if self.ctx.get_reg_mapping() != RegMapping::default() {
asm_comment!(self, "spill_temps: {:?} -> {:?}", self.ctx.get_reg_mapping(), RegMapping::default());
// Spill stack temps
for stack_idx in 0..u8::min(MAX_CTX_TEMPS as u8, self.ctx.get_stack_size()) {
if reg_mapping.dealloc_reg(RegOpnd::Stack(stack_idx)) {
let idx = self.ctx.get_stack_size() - 1 - stack_idx;
self.spill_temp(self.stack_opnd(idx.into()));
reg_temps.set(stack_idx, false);
}
}
self.ctx.set_reg_temps(reg_temps);
// Spill locals
for local_idx in 0..MAX_CTX_TEMPS as u8 {
if reg_mapping.dealloc_reg(RegOpnd::Local(local_idx)) {
let first_local_ep_offset = self.num_locals.unwrap() + VM_ENV_DATA_SIZE - 1;
let ep_offset = first_local_ep_offset - local_idx as u32;
self.spill_temp(self.local_opnd(ep_offset));
}
}
self.ctx.set_reg_mapping(reg_mapping);
}
// Every stack temp should have been spilled
assert_eq!(self.ctx.get_reg_temps(), RegTemps::default());
assert_eq!(self.ctx.get_reg_mapping(), RegMapping::default());
}
/// Spill a stack temp from a register to the stack
fn spill_temp(&mut self, opnd: Opnd) {
assert!(self.ctx.get_reg_temps().get(opnd.stack_idx()));
assert_ne!(self.ctx.get_reg_mapping().get_reg(opnd.reg_opnd()), None);
// Use different RegTemps for dest and src operands
let reg_temps = self.ctx.get_reg_temps();
let mut mem_temps = reg_temps;
mem_temps.set(opnd.stack_idx(), false);
// Use different RegMappings for dest and src operands
let reg_mapping = self.ctx.get_reg_mapping();
let mut mem_mappings = reg_mapping;
mem_mappings.dealloc_reg(opnd.reg_opnd());
// Move the stack operand from a register to memory
match opnd {
Opnd::Stack { idx, num_bits, stack_size, sp_offset, .. } => {
Opnd::Stack { idx, num_bits, stack_size, num_locals, sp_offset, .. } => {
self.mov(
Opnd::Stack { idx, num_bits, stack_size, sp_offset, reg_temps: Some(mem_temps) },
Opnd::Stack { idx, num_bits, stack_size, sp_offset, reg_temps: Some(reg_temps) },
Opnd::Stack { idx, num_bits, stack_size, num_locals, sp_offset, reg_mapping: Some(mem_mappings) },
Opnd::Stack { idx, num_bits, stack_size, num_locals, sp_offset, reg_mapping: Some(reg_mapping) },
);
}
_ => unreachable!(),
@ -1269,20 +1301,10 @@ impl Assembler
}
/// Update which stack temps are in a register
pub fn set_reg_temps(&mut self, reg_temps: RegTemps) {
if self.ctx.get_reg_temps() != reg_temps {
asm_comment!(self, "reg_temps: {:08b} -> {:08b}", self.ctx.get_reg_temps().as_u8(), reg_temps.as_u8());
self.ctx.set_reg_temps(reg_temps);
self.verify_reg_temps();
}
}
/// Assert there's no conflict in stack temp register allocation
fn verify_reg_temps(&self) {
for stack_idx in 0..MAX_REG_TEMPS {
if self.ctx.get_reg_temps().get(stack_idx) {
assert!(!self.ctx.get_reg_temps().conflicts_with(stack_idx));
}
pub fn set_reg_mapping(&mut self, reg_mapping: RegMapping) {
if self.ctx.get_reg_mapping() != reg_mapping {
asm_comment!(self, "reg_mapping: {:?} -> {:?}", self.ctx.get_reg_mapping(), reg_mapping);
self.ctx.set_reg_mapping(reg_mapping);
}
}
@ -1411,7 +1433,7 @@ impl Assembler
let live_ranges: Vec<usize> = take(&mut self.live_ranges);
// shifted_live_ranges is indexed by mapped indexes in insn operands.
let mut shifted_live_ranges: Vec<usize> = live_ranges.clone();
let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits), self.num_locals);
let mut iterator = self.into_draining_iter();
while let Some((index, mut insn)) = iterator.next_mapped() {
@ -1703,24 +1725,24 @@ impl Assembler {
// Let vm_check_canary() assert this ccall's leafness if leaf_ccall is set
let canary_opnd = self.set_stack_canary(&opnds);
let old_temps = self.ctx.get_reg_temps(); // with registers
let old_temps = self.ctx.get_reg_mapping(); // with registers
// Spill stack temp registers since they are caller-saved registers.
// Note that this doesn't spill stack temps that are already popped
// but may still be used in the C arguments.
self.spill_temps();
let new_temps = self.ctx.get_reg_temps(); // all spilled
self.spill_regs();
let new_temps = self.ctx.get_reg_mapping(); // all spilled
// Temporarily manipulate RegTemps so that we can use registers
// Temporarily manipulate RegMappings so that we can use registers
// to pass stack operands that are already spilled above.
self.ctx.set_reg_temps(old_temps);
self.ctx.set_reg_mapping(old_temps);
// Call a C function
let out = self.next_opnd_out(Opnd::match_num_bits(&opnds));
self.push_insn(Insn::CCall { fptr, opnds, out });
// Registers in old_temps may be clobbered by the above C call,
// so rollback the manipulated RegTemps to a spilled version.
self.ctx.set_reg_temps(new_temps);
// so rollback the manipulated RegMappings to a spilled version.
self.ctx.set_reg_mapping(new_temps);
// Clear the canary after use
if let Some(canary_opnd) = canary_opnd {
@ -1738,7 +1760,7 @@ impl Assembler {
// If the slot is already used, which is a valid optimization to avoid spills,
// give up the verification.
let canary_opnd = if cfg!(debug_assertions) && self.leaf_ccall && opnds.iter().all(|opnd|
opnd.get_stack_idx() != canary_opnd.get_stack_idx()
opnd.get_reg_opnd() != canary_opnd.get_reg_opnd()
) {
asm_comment!(self, "set stack canary");
self.mov(canary_opnd, vm_stack_canary().into());
@ -1767,9 +1789,9 @@ impl Assembler {
pub fn cpop_all(&mut self) {
self.push_insn(Insn::CPopAll);
// Re-enable ccall's RegTemps assertion disabled by cpush_all.
// Re-enable ccall's RegMappings assertion disabled by cpush_all.
// cpush_all + cpop_all preserve all stack temp registers, so it's safe.
self.set_reg_temps(self.ctx.get_reg_temps());
self.set_reg_mapping(self.ctx.get_reg_mapping());
}
pub fn cpop_into(&mut self, opnd: Opnd) {
@ -1787,7 +1809,7 @@ impl Assembler {
// Temps will be marked back as being in registers by cpop_all.
// We assume that cpush_all + cpop_all are used for C functions in utils.rs
// that don't require spill_regs for GC.
self.set_reg_temps(RegTemps::default());
self.set_reg_mapping(RegMapping::default());
}
pub fn cret(&mut self, opnd: Opnd) {

View file

@ -1,19 +1,19 @@
#![cfg(test)]
use crate::asm::{CodeBlock};
use crate::asm::CodeBlock;
use crate::backend::ir::*;
use crate::cruby::*;
use crate::utils::c_callable;
#[test]
fn test_add() {
let mut asm = Assembler::new();
let mut asm = Assembler::new(0);
let out = asm.add(SP, Opnd::UImm(1));
let _ = asm.add(out, Opnd::UImm(2));
}
#[test]
fn test_alloc_regs() {
let mut asm = Assembler::new();
let mut asm = Assembler::new(0);
// Get the first output that we're going to reuse later.
let out1 = asm.add(EC, Opnd::UImm(1));
@ -62,7 +62,7 @@ fn test_alloc_regs() {
fn setup_asm() -> (Assembler, CodeBlock) {
return (
Assembler::new(),
Assembler::new(0),
CodeBlock::new_dummy(1024)
);
}
@ -194,7 +194,7 @@ fn test_c_call()
#[test]
fn test_alloc_ccall_regs() {
let mut asm = Assembler::new();
let mut asm = Assembler::new(0);
let out1 = asm.ccall(0 as *const u8, vec![]);
let out2 = asm.ccall(0 as *const u8, vec![out1]);
asm.mov(EC, out2);
@ -283,8 +283,7 @@ fn test_bake_string() {
#[test]
fn test_draining_iterator() {
let mut asm = Assembler::new();
let mut asm = Assembler::new(0);
let _ = asm.load(Opnd::None);
asm.store(Opnd::None, Opnd::None);
@ -315,7 +314,7 @@ fn test_cmp_8_bit() {
fn test_no_pos_marker_callback_when_compile_fails() {
// When compilation fails (e.g. when out of memory), the code written out is malformed.
// We don't want to invoke the pos_marker callbacks with positions of malformed code.
let mut asm = Assembler::new();
let mut asm = Assembler::new(0);
// Markers around code to exhaust memory limit
let fail_if_called = |_code_ptr, _cb: &_| panic!("pos_marker callback should not be called");

View file

@ -79,7 +79,7 @@ impl From<&Opnd> for X86Opnd {
}
}
/// List of registers that can be used for stack temps.
/// List of registers that can be used for stack temps and locals.
pub static TEMP_REGS: [Reg; 5] = [RSI_REG, RDI_REG, R8_REG, R9_REG, R10_REG];
impl Assembler
@ -112,7 +112,7 @@ impl Assembler
fn x86_split(mut self) -> Assembler
{
let live_ranges: Vec<usize> = take(&mut self.live_ranges);
let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits));
let mut asm = Assembler::new_with_label_names(take(&mut self.label_names), take(&mut self.side_exits), self.num_locals);
let mut iterator = self.into_draining_iter();
while let Some((index, mut insn)) = iterator.next_unmapped() {
@ -895,14 +895,14 @@ impl Assembler
#[cfg(test)]
mod tests {
use crate::disasm::{assert_disasm};
use crate::disasm::assert_disasm;
#[cfg(feature = "disasm")]
use crate::disasm::{unindent, disasm_addr_range};
use super::*;
fn setup_asm() -> (Assembler, CodeBlock) {
(Assembler::new(), CodeBlock::new_dummy(1024))
(Assembler::new(0), CodeBlock::new_dummy(1024))
}
#[test]