diff --git a/gc.c b/gc.c index 63e86d5ca2..4a3623cd75 100644 --- a/gc.c +++ b/gc.c @@ -2238,6 +2238,33 @@ rb_obj_id_p(VALUE obj) return !RB_TYPE_P(obj, T_IMEMO) && rb_shape_obj_has_id(obj); } +/* + * GC implementations should call this function before the GC phase that updates references + * embedded in the machine code generated by JIT compilers. JIT compilers usually enforce the + * "W^X" policy and protect the code memory from being modified during execution. This function + * makes the code memory writeable. + */ +void +rb_gc_before_updating_jit_code(void) +{ +#if USE_YJIT + rb_yjit_mark_all_writeable(); +#endif +} + +/* + * GC implementations should call this function after the GC phase that updates references + * embedded in the machine code generated by JIT compilers. This function makes the code memory + * executable again. + */ +void +rb_gc_after_updating_jit_code(void) +{ +#if USE_YJIT + rb_yjit_mark_all_executable(); +#endif +} + static enum rb_id_table_iterator_result cc_table_memsize_i(VALUE ccs_ptr, void *data_ptr) { diff --git a/gc/default/default.c b/gc/default/default.c index 366a3aaf80..384b3f10f0 100644 --- a/gc/default/default.c +++ b/gc/default/default.c @@ -7068,6 +7068,8 @@ gc_update_references(rb_objspace_t *objspace) { objspace->flags.during_reference_updating = true; + rb_gc_before_updating_jit_code(); + struct heap_page *page = NULL; for (int i = 0; i < HEAP_COUNT; i++) { @@ -7102,6 +7104,8 @@ gc_update_references(rb_objspace_t *objspace) ); } + rb_gc_after_updating_jit_code(); + objspace->flags.during_reference_updating = false; } diff --git a/gc/gc.h b/gc/gc.h index 23086c0aca..fe9aaeb965 100644 --- a/gc/gc.h +++ b/gc/gc.h @@ -94,6 +94,8 @@ MODULAR_GC_FN uint32_t rb_gc_rebuild_shape(VALUE obj, size_t heap_id); MODULAR_GC_FN void rb_gc_prepare_heap_process_object(VALUE obj); MODULAR_GC_FN bool rb_memerror_reentered(void); MODULAR_GC_FN bool rb_obj_id_p(VALUE); +MODULAR_GC_FN void rb_gc_before_updating_jit_code(void); +MODULAR_GC_FN void 
rb_gc_after_updating_jit_code(void); #if USE_MODULAR_GC MODULAR_GC_FN bool rb_gc_event_hook_required_p(rb_event_flag_t event); diff --git a/yjit.h b/yjit.h index 4689655002..cb96ee7838 100644 --- a/yjit.h +++ b/yjit.h @@ -50,6 +50,8 @@ void rb_yjit_show_usage(int help, int highlight, unsigned int width, int columns void rb_yjit_lazy_push_frame(const VALUE *pc); void rb_yjit_invalidate_no_singleton_class(VALUE klass); void rb_yjit_invalidate_ep_is_bp(const rb_iseq_t *iseq); +void rb_yjit_mark_all_writeable(void); +void rb_yjit_mark_all_executable(void); #else // !USE_YJIT diff --git a/yjit/src/asm/mod.rs b/yjit/src/asm/mod.rs index 0320fdd829..4fc6605715 100644 --- a/yjit/src/asm/mod.rs +++ b/yjit/src/asm/mod.rs @@ -590,6 +590,10 @@ impl CodeBlock { self.label_refs = state.label_refs; } + pub fn mark_all_writeable(&mut self) { + self.mem_block.borrow_mut().mark_all_writeable(); + } + pub fn mark_all_executable(&mut self) { self.mem_block.borrow_mut().mark_all_executable(); } diff --git a/yjit/src/core.rs b/yjit/src/core.rs index 6322b56c1c..57756e86ce 100644 --- a/yjit/src/core.rs +++ b/yjit/src/core.rs @@ -2035,13 +2035,6 @@ pub extern "C" fn rb_yjit_iseq_update_references(iseq: IseqPtr) { block_update_references(block, cb, true); } - // Note that we would have returned already if YJIT is off. - cb.mark_all_executable(); - - CodegenGlobals::get_outlined_cb() - .unwrap() - .mark_all_executable(); - return; fn block_update_references(block: &Block, cb: &mut CodeBlock, dead: bool) { @@ -2110,6 +2103,34 @@ pub extern "C" fn rb_yjit_iseq_update_references(iseq: IseqPtr) { } } +/// Mark all code memory as writable. +/// This function is useful for garbage collectors that update references in JIT-compiled code in +/// bulk. 
+#[no_mangle] +pub extern "C" fn rb_yjit_mark_all_writeable() { + if CodegenGlobals::has_instance() { + CodegenGlobals::get_inline_cb().mark_all_writeable(); + + CodegenGlobals::get_outlined_cb() + .unwrap() + .mark_all_writeable(); + } +} + +/// Mark all code memory as executable. +/// This function is useful for garbage collectors that update references in JIT-compiled code in +/// bulk. +#[no_mangle] +pub extern "C" fn rb_yjit_mark_all_executable() { + if CodegenGlobals::has_instance() { + CodegenGlobals::get_inline_cb().mark_all_executable(); + + CodegenGlobals::get_outlined_cb() + .unwrap() + .mark_all_executable(); + } +} + /// Get all blocks for a particular place in an iseq. fn get_version_list(blockid: BlockId) -> Option<&'static mut VersionList> { let insn_idx = blockid.idx.as_usize(); diff --git a/yjit/src/virtualmem.rs b/yjit/src/virtualmem.rs index f56b0d8213..66fb48b699 100644 --- a/yjit/src/virtualmem.rs +++ b/yjit/src/virtualmem.rs @@ -231,6 +231,23 @@ impl VirtualMemory { Ok(()) } + /// Make all the code in the region writeable. + /// Call this during GC before the phase of updating reference fields. + pub fn mark_all_writeable(&mut self) { + self.current_write_page = None; + + let region_start = self.region_start; + let mapped_region_bytes: u32 = self.mapped_region_bytes.try_into().unwrap(); + + // Make mapped region writable + if !self.allocator.mark_writable(region_start.as_ptr(), mapped_region_bytes) { + panic!("Cannot make memory region writable: {:?}-{:?}", + region_start.as_ptr(), + unsafe { region_start.as_ptr().add(mapped_region_bytes as usize)} + ); + } + } + /// Make all the code in the region executable. Call this at the end of a write session. /// See [Self] for usual usage flow. pub fn mark_all_executable(&mut self) {