Avoid marking CC children after invalidation

Once klass becomes Qundef, it's disconnected and won't be invalidated
when the CME is. So once that happens we must not mark or attempt to
move the cme_ field.
This commit is contained in:
John Hawthorn 2025-08-06 14:05:03 -07:00
parent ba4a36e226
commit a9f6fe0914

imemo.c — 33 lines changed

@@ -337,28 +337,37 @@ rb_imemo_mark_and_move(VALUE obj, bool reference_updating)
          * cc->klass (klass) should not be marked because if the klass is
          * free'ed, the cc->klass will be cleared by `vm_cc_invalidate()`.
          *
-         * cc->cme (cme) should not be marked because if cc is invalidated
-         * when cme is free'ed.
+         * For "normal" CCs cc->cme (cme) should not be marked because the cc is
+         * invalidated through the klass when the cme is free'd.
          * - klass marks cme if klass uses cme.
-         * - caller classe's ccs->cme marks cc->cme.
-         * - if cc is invalidated (klass doesn't refer the cc),
-         *   cc is invalidated by `vm_cc_invalidate()` and cc->cme is
-         *   not be accessed.
-         * - On the multi-Ractors, cme will be collected with global GC
+         * - caller class's ccs->cme marks cc->cme.
+         * - if cc is invalidated (klass doesn't refer the cc), cc is
+         *   invalidated by `vm_cc_invalidate()` after which cc->cme must not
+         *   be accessed.
+         * - With multi-Ractors, cme will be collected with global GC
          *   so that it is safe if GC is not interleaving while accessing
          *   cc and cme.
-         * - However, cc_type_super and cc_type_refinement are not chained
-         *   from ccs so cc->cme should be marked; the cme might be
-         *   reachable only through cc in these cases.
+         *
+         * However cc_type_super and cc_type_refinement are not chained
+         * from ccs so cc->cme should be marked as long as the cc is valid;
+         * the cme might be reachable only through cc in these cases.
          */
         struct rb_callcache *cc = (struct rb_callcache *)obj;
-        if (reference_updating) {
+        if (UNDEF_P(cc->klass)) {
+            /* If it's invalidated, we must not mark anything.
+             * All fields should are considered invalid
+             */
+        }
+        else if (reference_updating) {
             if (moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
                 *((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
                 *((struct rb_callable_method_entry_struct **)&cc->cme_) =
                     (struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
+                RUBY_ASSERT(RB_TYPE_P(cc->klass, T_CLASS) || RB_TYPE_P(cc->klass, T_ICLASS));
+                RUBY_ASSERT(IMEMO_TYPE_P((VALUE)cc->cme_, imemo_ment));
             }
-            else if (vm_cc_valid(cc)) {
+            else {
                 vm_cc_invalidate(cc);
             }
         }