Use rb_gc_mark_weak for cc->klass.

One of the biggest remaining contention points is `RClass.cc_table`.
The logical solution would be to turn it into a managed object so
we can use an RCU strategy, given it's read-heavy.

However, that's not currently possible because the table can't
be freed before the owning class: the class free function MUST
walk all the CC entries to invalidate them.

But if the `cc->klass` reference is weakly marked, the GC will
take care of setting the reference to `Qundef` when the class is
collected.
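
For illustration, a minimal sketch of the weak-marking contract this
relies on, assuming Ruby's internal `rb_gc_mark_weak(VALUE *)` API and
a hypothetical `cache_entry` struct:

    /* Hypothetical holder of a weak reference. */
    struct cache_entry {
        VALUE klass; /* weak: the GC may overwrite this with Qundef */
    };

    static void
    cache_entry_mark(struct cache_entry *ent)
    {
        /* Registers the slot without keeping klass alive; if klass
         * is collected, the GC writes Qundef into ent->klass. */
        rb_gc_mark_weak(&ent->klass);
    }

    static bool
    cache_entry_valid_p(const struct cache_entry *ent)
    {
        /* Readers must treat Qundef as "referent was collected". */
        return !UNDEF_P(ent->klass);
    }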
Jean Boussier 2025-07-29 15:13:01 +02:00
parent 1055e04e28
commit fc5e1541e4
8 changed files with 49 additions and 42 deletions

debug_counter.h

@@ -49,7 +49,7 @@ RB_DEBUG_COUNTER(cc_temp) // dummy CC (stack-allocated)
 RB_DEBUG_COUNTER(cc_found_in_ccs)     // count for CC lookup success in CCS
 RB_DEBUG_COUNTER(cc_not_found_in_ccs) // count for CC lookup failure in CCS
-RB_DEBUG_COUNTER(cc_ent_invalidate)   // count for invalidating cc (cc->klass = 0)
+RB_DEBUG_COUNTER(cc_ent_invalidate)   // count for invalidating cc (cc->klass = Qundef)
 RB_DEBUG_COUNTER(cc_cme_invalidate)   // count for invalidating CME
 RB_DEBUG_COUNTER(cc_invalidate_leaf)  // count for invalidating klass if klass has no-subclasses

gc.c

@@ -1209,6 +1209,7 @@ classext_free(rb_classext_t *ext, bool is_prime, VALUE namespace, void *arg)
     rb_id_table_free(RCLASSEXT_M_TBL(ext));
+    rb_cc_tbl_free(RCLASSEXT_CC_TBL(ext), args->klass);
     if (!RCLASSEXT_SHARED_CONST_TBL(ext) && (tbl = RCLASSEXT_CONST_TBL(ext)) != NULL) {
         rb_free_const_table(tbl);
     }
@@ -1743,7 +1744,7 @@ rb_objspace_free_objects(void *objspace)
 int
 rb_objspace_garbage_object_p(VALUE obj)
 {
-    return rb_gc_impl_garbage_object_p(rb_gc_get_objspace(), obj);
+    return !SPECIAL_CONST_P(obj) && rb_gc_impl_garbage_object_p(rb_gc_get_objspace(), obj);
 }

 bool
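
Annotation, not part of the diff: this guard and the matching one in
imemo.c below exist because a weakly cleared slot can now hold
`Qundef`, a special constant. Special constants are immediate values
with no heap slot, so they can never be garbage and must not be passed
to the GC implementation. A sketch of the assumption, for illustration
only:

    /* Immediates (Qnil, Qtrue, Qfalse, Qundef, Fixnums, ...) are
     * encoded in the VALUE itself; BUILTIN_TYPE() and the garbage
     * predicate are only meaningful for heap objects. */
    if (SPECIAL_CONST_P(obj)) {
        return false; /* never garbage, never moved */
    }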
@@ -4924,11 +4925,11 @@ rb_raw_obj_info_buitin_type(char *const buff, const size_t buff_size, const VALU
       case imemo_callcache:
         {
             const struct rb_callcache *cc = (const struct rb_callcache *)obj;
-            VALUE class_path = cc->klass ? rb_class_path_cached(cc->klass) : Qnil;
+            VALUE class_path = vm_cc_valid(cc) ? rb_class_path_cached(cc->klass) : Qnil;
             const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

             APPEND_F("(klass:%s cme:%s%s (%p) call:%p",
-                     NIL_P(class_path) ? (cc->klass ? "??" : "<NULL>") : RSTRING_PTR(class_path),
+                     NIL_P(class_path) ? (vm_cc_valid(cc) ? "??" : "<NULL>") : RSTRING_PTR(class_path),
                      cme ? rb_id2name(cme->called_id) : "<NULL>",
                      cme ? (METHOD_ENTRY_INVALIDATED(cme) ? " [inv]" : "") : "",
                      (void *)cme,

imemo.c

@@ -273,7 +273,7 @@ rb_imemo_memsize(VALUE obj)
 static bool
 moved_or_living_object_strictly_p(VALUE obj)
 {
-    return obj && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
+    return !SPECIAL_CONST_P(obj) && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
 }

 static void
@@ -353,25 +353,19 @@ rb_imemo_mark_and_move(VALUE obj, bool reference_updating)
          */
         struct rb_callcache *cc = (struct rb_callcache *)obj;
         if (reference_updating) {
-            if (!cc->klass) {
-                // already invalidated
-            }
-            else {
-                if (moved_or_living_object_strictly_p(cc->klass) &&
-                    moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
+            if (moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
                 *((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
                 *((struct rb_callable_method_entry_struct **)&cc->cme_) =
                     (struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
             }
-            else {
+            else if (vm_cc_valid(cc)) {
                 vm_cc_invalidate(cc);
             }
-            }
         }
         else {
-            if (cc->klass && (vm_cc_super_p(cc) || vm_cc_refinement_p(cc))) {
+            rb_gc_mark_weak((VALUE *)&cc->klass);
+            if ((vm_cc_super_p(cc) || vm_cc_refinement_p(cc))) {
                 rb_gc_mark_movable((VALUE)cc->cme_);
-                rb_gc_mark_movable((VALUE)cc->klass);
             }
         }

iseq.c

@@ -325,8 +325,7 @@ cc_is_active(const struct rb_callcache *cc, bool reference_updating)
             cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
         }
-        if (vm_cc_markable(cc)) {
-            if (cc->klass) { // cc is not invalidated
+        if (vm_cc_markable(cc) && vm_cc_valid(cc)) {
             const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
             if (reference_updating) {
                 cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
@@ -336,7 +335,6 @@ cc_is_active(const struct rb_callcache *cc, bool reference_updating)
             }
         }
     }
-    }
     return false;
 }

vm.c

@@ -607,7 +607,7 @@ rb_serial_t ruby_vm_global_cvar_state = 1;
 static const struct rb_callcache vm_empty_cc = {
     .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
-    .klass = Qfalse,
+    .klass = Qundef,
     .cme_ = NULL,
     .call_ = vm_call_general,
     .aux_ = {
@@ -617,7 +617,7 @@ static const struct rb_callcache vm_empty_cc = {
 static const struct rb_callcache vm_empty_cc_for_super = {
     .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
-    .klass = Qfalse,
+    .klass = Qundef,
     .cme_ = NULL,
     .call_ = vm_call_super_method,
     .aux_ = {
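
Annotation, not part of the diff: in CRuby `Qfalse` is the all-zero
`VALUE`, so the previous `.klass = Qfalse` initializers and the
`cc->klass = 0` invalidation were the same bit pattern. Because
`rb_gc_mark_weak` clears dead slots to `Qundef`, all "invalid" states
are unified on `Qundef` so that the new `vm_cc_valid()` is a single
check. A sketch of that reasoning, for illustration only:

    /* Qfalse == 0 in CRuby, so a 0-cleared klass slot was
     * indistinguishable from the empty-CC sentinel. Qundef is what
     * the GC writes into dead weak slots, so it becomes the single
     * "invalid" sentinel. */
    RUBY_ASSERT(Qfalse == 0);
    RUBY_ASSERT(!UNDEF_P(Qfalse) && UNDEF_P(Qundef));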

vm_callinfo.h

@@ -279,9 +279,7 @@ struct rb_callcache {
     const VALUE flags;

     /* inline cache: key */
-    const VALUE klass; // should not mark it because klass can not be free'd
-                       // because of this marking. When klass is collected,
-                       // cc will be cleared (cc->klass = 0) at vm_ccs_free().
+    const VALUE klass; // Weak reference. When klass is collected, `cc->klass = Qundef`.

     /* inline cache: values */
     const struct rb_callable_method_entry_struct * const cme_;
@@ -324,12 +322,20 @@ vm_cc_attr_index_initialize(const struct rb_callcache *cc, shape_id_t shape_id)
     vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id);
 }

+static inline VALUE
+cc_check_class(VALUE klass)
+{
+    VM_ASSERT(klass == Qundef || RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
+    return klass;
+}
+
 static inline const struct rb_callcache *
 vm_cc_new(VALUE klass,
           const struct rb_callable_method_entry_struct *cme,
           vm_call_handler call,
           enum vm_cc_type type)
 {
+    cc_check_class(klass);
     struct rb_callcache *cc = IMEMO_NEW(struct rb_callcache, imemo_callcache, klass);
     *((struct rb_callable_method_entry_struct **)&cc->cme_) = (struct rb_callable_method_entry_struct *)cme;
     *((vm_call_handler *)&cc->call_) = call;
@@ -374,7 +380,7 @@ vm_cc_refinement_p(const struct rb_callcache *cc)
              (imemo_callcache << FL_USHIFT) | \
              VM_CALLCACHE_UNMARKABLE | \
              VM_CALLCACHE_ON_STACK, \
-    .klass = clazz, \
+    .klass = cc_check_class(clazz), \
     .cme_ = cme, \
     .call_ = call, \
     .aux_ = aux, \
@@ -384,8 +390,7 @@ static inline bool
 vm_cc_class_check(const struct rb_callcache *cc, VALUE klass)
 {
     VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
-    VM_ASSERT(cc->klass == 0 ||
-              RB_TYPE_P(cc->klass, T_CLASS) || RB_TYPE_P(cc->klass, T_ICLASS));
+    VM_ASSERT(cc_check_class(cc->klass));
     return cc->klass == klass;
 }
@@ -396,6 +401,15 @@ vm_cc_markable(const struct rb_callcache *cc)
     return FL_TEST_RAW((VALUE)cc, VM_CALLCACHE_UNMARKABLE) == 0;
 }

+static inline bool
+vm_cc_valid(const struct rb_callcache *cc)
+{
+    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
+    VM_ASSERT(cc_check_class(cc->klass));
+    return !UNDEF_P(cc->klass);
+}
+
 static inline const struct rb_callable_method_entry_struct *
 vm_cc_cme(const struct rb_callcache *cc)
 {
@@ -447,7 +461,7 @@ vm_cc_cmethod_missing_reason(const struct rb_callcache *cc)
 static inline bool
 vm_cc_invalidated_p(const struct rb_callcache *cc)
 {
-    if (cc->klass && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
+    if (vm_cc_valid(cc) && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
         return false;
     }
     else {
@@ -543,9 +557,9 @@ vm_cc_invalidate(const struct rb_callcache *cc)
 {
     VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
     VM_ASSERT(cc != vm_cc_empty());
-    VM_ASSERT(cc->klass != 0); // cc should still be valid here
+    VM_ASSERT(cc->klass != Qundef); // cc should still be valid here

-    *(VALUE *)&cc->klass = 0;
+    *(VALUE *)&cc->klass = Qundef;
     RB_DEBUG_COUNTER_INC(cc_ent_invalidate);
 }
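
Annotation, not part of the diff: the caller-side pattern this header
establishes. A raw truthiness test on `cc->klass` no longer detects
invalidation, because `Qundef` is a nonzero value and therefore truthy
in C. Illustration only:

    /* Before: relied on the invalidated sentinel being 0. */
    if (cc->klass) { /* ... use the cache entry ... */ }

    /* After: covers both explicit invalidation and a weak slot
     * cleared to Qundef by the GC. */
    if (vm_cc_valid(cc)) { /* ... use the cache entry ... */ }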

vm_eval.c

@@ -57,7 +57,7 @@ static inline VALUE vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, i
 VALUE
 rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE *argv, const rb_callable_method_entry_t *cme, int kw_splat)
 {
-    const struct rb_callcache cc = VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
+    const struct rb_callcache cc = VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
     return vm_call0_cc(ec, recv, id, argc, argv, &cc, kw_splat);
 }
@@ -104,7 +104,7 @@ vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
 static VALUE
 vm_call0_cme(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, const rb_callable_method_entry_t *cme)
 {
-    calling->cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
+    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
     return vm_call0_body(ec, calling, argv);
 }

vm_method.c

@@ -409,7 +409,7 @@ invalidate_cc_refinement(st_data_t key, st_data_t data)
     VM_ASSERT(vm_cc_refinement_p(cc));

-    if (cc->klass) {
+    if (vm_cc_valid(cc)) {
         vm_cc_invalidate(cc);
     }
 }