mirror of
https://github.com/ruby/ruby.git
synced 2025-08-15 13:39:04 +02:00
Use rb_gc_mark_weak
for cc->klass
.
One of the biggest remaining contention points is `RClass.cc_table`. The logical solution would be to turn it into a managed object, so we can use an RCU strategy, given that it is read-heavy. However, that is not currently possible because the table can't be freed before the owning class: the class free function MUST go over all the CC entries to invalidate them. However, if the `CC->klass` reference is weak-marked, then the GC will take care of setting the reference to `Qundef`.
This commit is contained in:
parent
1055e04e28
commit
fc5e1541e4
8 changed files with 49 additions and 42 deletions
|
@ -49,7 +49,7 @@ RB_DEBUG_COUNTER(cc_temp) // dummy CC (stack-allocated)
|
||||||
RB_DEBUG_COUNTER(cc_found_in_ccs) // count for CC lookup success in CCS
|
RB_DEBUG_COUNTER(cc_found_in_ccs) // count for CC lookup success in CCS
|
||||||
RB_DEBUG_COUNTER(cc_not_found_in_ccs) // count for CC lookup success in CCS
|
RB_DEBUG_COUNTER(cc_not_found_in_ccs) // count for CC lookup success in CCS
|
||||||
|
|
||||||
RB_DEBUG_COUNTER(cc_ent_invalidate) // count for invalidating cc (cc->klass = 0)
|
RB_DEBUG_COUNTER(cc_ent_invalidate) // count for invalidating cc (cc->klass = Qundef)
|
||||||
RB_DEBUG_COUNTER(cc_cme_invalidate) // count for invalidating CME
|
RB_DEBUG_COUNTER(cc_cme_invalidate) // count for invalidating CME
|
||||||
|
|
||||||
RB_DEBUG_COUNTER(cc_invalidate_leaf) // count for invalidating klass if klass has no-subclasses
|
RB_DEBUG_COUNTER(cc_invalidate_leaf) // count for invalidating klass if klass has no-subclasses
|
||||||
|
|
7
gc.c
7
gc.c
|
@ -1209,6 +1209,7 @@ classext_free(rb_classext_t *ext, bool is_prime, VALUE namespace, void *arg)
|
||||||
|
|
||||||
rb_id_table_free(RCLASSEXT_M_TBL(ext));
|
rb_id_table_free(RCLASSEXT_M_TBL(ext));
|
||||||
rb_cc_tbl_free(RCLASSEXT_CC_TBL(ext), args->klass);
|
rb_cc_tbl_free(RCLASSEXT_CC_TBL(ext), args->klass);
|
||||||
|
|
||||||
if (!RCLASSEXT_SHARED_CONST_TBL(ext) && (tbl = RCLASSEXT_CONST_TBL(ext)) != NULL) {
|
if (!RCLASSEXT_SHARED_CONST_TBL(ext) && (tbl = RCLASSEXT_CONST_TBL(ext)) != NULL) {
|
||||||
rb_free_const_table(tbl);
|
rb_free_const_table(tbl);
|
||||||
}
|
}
|
||||||
|
@ -1743,7 +1744,7 @@ rb_objspace_free_objects(void *objspace)
|
||||||
int
|
int
|
||||||
rb_objspace_garbage_object_p(VALUE obj)
|
rb_objspace_garbage_object_p(VALUE obj)
|
||||||
{
|
{
|
||||||
return rb_gc_impl_garbage_object_p(rb_gc_get_objspace(), obj);
|
return !SPECIAL_CONST_P(obj) && rb_gc_impl_garbage_object_p(rb_gc_get_objspace(), obj);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
|
@ -4924,11 +4925,11 @@ rb_raw_obj_info_buitin_type(char *const buff, const size_t buff_size, const VALU
|
||||||
case imemo_callcache:
|
case imemo_callcache:
|
||||||
{
|
{
|
||||||
const struct rb_callcache *cc = (const struct rb_callcache *)obj;
|
const struct rb_callcache *cc = (const struct rb_callcache *)obj;
|
||||||
VALUE class_path = cc->klass ? rb_class_path_cached(cc->klass) : Qnil;
|
VALUE class_path = vm_cc_valid(cc) ? rb_class_path_cached(cc->klass) : Qnil;
|
||||||
const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
|
const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
|
||||||
|
|
||||||
APPEND_F("(klass:%s cme:%s%s (%p) call:%p",
|
APPEND_F("(klass:%s cme:%s%s (%p) call:%p",
|
||||||
NIL_P(class_path) ? (cc->klass ? "??" : "<NULL>") : RSTRING_PTR(class_path),
|
NIL_P(class_path) ? (vm_cc_valid(cc) ? "??" : "<NULL>") : RSTRING_PTR(class_path),
|
||||||
cme ? rb_id2name(cme->called_id) : "<NULL>",
|
cme ? rb_id2name(cme->called_id) : "<NULL>",
|
||||||
cme ? (METHOD_ENTRY_INVALIDATED(cme) ? " [inv]" : "") : "",
|
cme ? (METHOD_ENTRY_INVALIDATED(cme) ? " [inv]" : "") : "",
|
||||||
(void *)cme,
|
(void *)cme,
|
||||||
|
|
16
imemo.c
16
imemo.c
|
@ -273,7 +273,7 @@ rb_imemo_memsize(VALUE obj)
|
||||||
static bool
|
static bool
|
||||||
moved_or_living_object_strictly_p(VALUE obj)
|
moved_or_living_object_strictly_p(VALUE obj)
|
||||||
{
|
{
|
||||||
return obj && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
|
return !SPECIAL_CONST_P(obj) && (!rb_objspace_garbage_object_p(obj) || BUILTIN_TYPE(obj) == T_MOVED);
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
|
@ -353,25 +353,19 @@ rb_imemo_mark_and_move(VALUE obj, bool reference_updating)
|
||||||
*/
|
*/
|
||||||
struct rb_callcache *cc = (struct rb_callcache *)obj;
|
struct rb_callcache *cc = (struct rb_callcache *)obj;
|
||||||
if (reference_updating) {
|
if (reference_updating) {
|
||||||
if (!cc->klass) {
|
if (moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
|
||||||
// already invalidated
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if (moved_or_living_object_strictly_p(cc->klass) &&
|
|
||||||
moved_or_living_object_strictly_p((VALUE)cc->cme_)) {
|
|
||||||
*((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
|
*((VALUE *)&cc->klass) = rb_gc_location(cc->klass);
|
||||||
*((struct rb_callable_method_entry_struct **)&cc->cme_) =
|
*((struct rb_callable_method_entry_struct **)&cc->cme_) =
|
||||||
(struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
|
(struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cc->cme_);
|
||||||
}
|
}
|
||||||
else {
|
else if (vm_cc_valid(cc)) {
|
||||||
vm_cc_invalidate(cc);
|
vm_cc_invalidate(cc);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
else {
|
else {
|
||||||
if (cc->klass && (vm_cc_super_p(cc) || vm_cc_refinement_p(cc))) {
|
rb_gc_mark_weak((VALUE *)&cc->klass);
|
||||||
|
if ((vm_cc_super_p(cc) || vm_cc_refinement_p(cc))) {
|
||||||
rb_gc_mark_movable((VALUE)cc->cme_);
|
rb_gc_mark_movable((VALUE)cc->cme_);
|
||||||
rb_gc_mark_movable((VALUE)cc->klass);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
4
iseq.c
4
iseq.c
|
@ -325,8 +325,7 @@ cc_is_active(const struct rb_callcache *cc, bool reference_updating)
|
||||||
cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
|
cc = (const struct rb_callcache *)rb_gc_location((VALUE)cc);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (vm_cc_markable(cc)) {
|
if (vm_cc_markable(cc) && vm_cc_valid(cc)) {
|
||||||
if (cc->klass) { // cc is not invalidated
|
|
||||||
const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
|
const struct rb_callable_method_entry_struct *cme = vm_cc_cme(cc);
|
||||||
if (reference_updating) {
|
if (reference_updating) {
|
||||||
cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
|
cme = (const struct rb_callable_method_entry_struct *)rb_gc_location((VALUE)cme);
|
||||||
|
@ -336,7 +335,6 @@ cc_is_active(const struct rb_callcache *cc, bool reference_updating)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
4
vm.c
4
vm.c
|
@ -607,7 +607,7 @@ rb_serial_t ruby_vm_global_cvar_state = 1;
|
||||||
|
|
||||||
static const struct rb_callcache vm_empty_cc = {
|
static const struct rb_callcache vm_empty_cc = {
|
||||||
.flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
|
.flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
|
||||||
.klass = Qfalse,
|
.klass = Qundef,
|
||||||
.cme_ = NULL,
|
.cme_ = NULL,
|
||||||
.call_ = vm_call_general,
|
.call_ = vm_call_general,
|
||||||
.aux_ = {
|
.aux_ = {
|
||||||
|
@ -617,7 +617,7 @@ static const struct rb_callcache vm_empty_cc = {
|
||||||
|
|
||||||
static const struct rb_callcache vm_empty_cc_for_super = {
|
static const struct rb_callcache vm_empty_cc_for_super = {
|
||||||
.flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
|
.flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
|
||||||
.klass = Qfalse,
|
.klass = Qundef,
|
||||||
.cme_ = NULL,
|
.cme_ = NULL,
|
||||||
.call_ = vm_call_super_method,
|
.call_ = vm_call_super_method,
|
||||||
.aux_ = {
|
.aux_ = {
|
||||||
|
|
|
@ -279,9 +279,7 @@ struct rb_callcache {
|
||||||
const VALUE flags;
|
const VALUE flags;
|
||||||
|
|
||||||
/* inline cache: key */
|
/* inline cache: key */
|
||||||
const VALUE klass; // should not mark it because klass can not be free'd
|
const VALUE klass; // Weak reference. When klass is collected, `cc->klass = Qundef`.
|
||||||
// because of this marking. When klass is collected,
|
|
||||||
// cc will be cleared (cc->klass = 0) at vm_ccs_free().
|
|
||||||
|
|
||||||
/* inline cache: values */
|
/* inline cache: values */
|
||||||
const struct rb_callable_method_entry_struct * const cme_;
|
const struct rb_callable_method_entry_struct * const cme_;
|
||||||
|
@ -324,12 +322,20 @@ vm_cc_attr_index_initialize(const struct rb_callcache *cc, shape_id_t shape_id)
|
||||||
vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id);
|
vm_cc_attr_index_set(cc, (attr_index_t)-1, shape_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static inline VALUE
|
||||||
|
cc_check_class(VALUE klass)
|
||||||
|
{
|
||||||
|
VM_ASSERT(klass == Qundef || RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));
|
||||||
|
return klass;
|
||||||
|
}
|
||||||
|
|
||||||
static inline const struct rb_callcache *
|
static inline const struct rb_callcache *
|
||||||
vm_cc_new(VALUE klass,
|
vm_cc_new(VALUE klass,
|
||||||
const struct rb_callable_method_entry_struct *cme,
|
const struct rb_callable_method_entry_struct *cme,
|
||||||
vm_call_handler call,
|
vm_call_handler call,
|
||||||
enum vm_cc_type type)
|
enum vm_cc_type type)
|
||||||
{
|
{
|
||||||
|
cc_check_class(klass);
|
||||||
struct rb_callcache *cc = IMEMO_NEW(struct rb_callcache, imemo_callcache, klass);
|
struct rb_callcache *cc = IMEMO_NEW(struct rb_callcache, imemo_callcache, klass);
|
||||||
*((struct rb_callable_method_entry_struct **)&cc->cme_) = (struct rb_callable_method_entry_struct *)cme;
|
*((struct rb_callable_method_entry_struct **)&cc->cme_) = (struct rb_callable_method_entry_struct *)cme;
|
||||||
*((vm_call_handler *)&cc->call_) = call;
|
*((vm_call_handler *)&cc->call_) = call;
|
||||||
|
@ -374,7 +380,7 @@ vm_cc_refinement_p(const struct rb_callcache *cc)
|
||||||
(imemo_callcache << FL_USHIFT) | \
|
(imemo_callcache << FL_USHIFT) | \
|
||||||
VM_CALLCACHE_UNMARKABLE | \
|
VM_CALLCACHE_UNMARKABLE | \
|
||||||
VM_CALLCACHE_ON_STACK, \
|
VM_CALLCACHE_ON_STACK, \
|
||||||
.klass = clazz, \
|
.klass = cc_check_class(clazz), \
|
||||||
.cme_ = cme, \
|
.cme_ = cme, \
|
||||||
.call_ = call, \
|
.call_ = call, \
|
||||||
.aux_ = aux, \
|
.aux_ = aux, \
|
||||||
|
@ -384,8 +390,7 @@ static inline bool
|
||||||
vm_cc_class_check(const struct rb_callcache *cc, VALUE klass)
|
vm_cc_class_check(const struct rb_callcache *cc, VALUE klass)
|
||||||
{
|
{
|
||||||
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
|
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
|
||||||
VM_ASSERT(cc->klass == 0 ||
|
VM_ASSERT(cc_check_class(cc->klass));
|
||||||
RB_TYPE_P(cc->klass, T_CLASS) || RB_TYPE_P(cc->klass, T_ICLASS));
|
|
||||||
return cc->klass == klass;
|
return cc->klass == klass;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -396,6 +401,15 @@ vm_cc_markable(const struct rb_callcache *cc)
|
||||||
return FL_TEST_RAW((VALUE)cc, VM_CALLCACHE_UNMARKABLE) == 0;
|
return FL_TEST_RAW((VALUE)cc, VM_CALLCACHE_UNMARKABLE) == 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static inline bool
|
||||||
|
vm_cc_valid(const struct rb_callcache *cc)
|
||||||
|
{
|
||||||
|
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
|
||||||
|
VM_ASSERT(cc_check_class(cc->klass));
|
||||||
|
|
||||||
|
return !UNDEF_P(cc->klass);
|
||||||
|
}
|
||||||
|
|
||||||
static inline const struct rb_callable_method_entry_struct *
|
static inline const struct rb_callable_method_entry_struct *
|
||||||
vm_cc_cme(const struct rb_callcache *cc)
|
vm_cc_cme(const struct rb_callcache *cc)
|
||||||
{
|
{
|
||||||
|
@ -447,7 +461,7 @@ vm_cc_cmethod_missing_reason(const struct rb_callcache *cc)
|
||||||
static inline bool
|
static inline bool
|
||||||
vm_cc_invalidated_p(const struct rb_callcache *cc)
|
vm_cc_invalidated_p(const struct rb_callcache *cc)
|
||||||
{
|
{
|
||||||
if (cc->klass && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
|
if (vm_cc_valid(cc) && !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc))) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
|
@ -543,9 +557,9 @@ vm_cc_invalidate(const struct rb_callcache *cc)
|
||||||
{
|
{
|
||||||
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
|
VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
|
||||||
VM_ASSERT(cc != vm_cc_empty());
|
VM_ASSERT(cc != vm_cc_empty());
|
||||||
VM_ASSERT(cc->klass != 0); // should be enable
|
VM_ASSERT(cc->klass != Qundef); // should be enable
|
||||||
|
|
||||||
*(VALUE *)&cc->klass = 0;
|
*(VALUE *)&cc->klass = Qundef;
|
||||||
RB_DEBUG_COUNTER_INC(cc_ent_invalidate);
|
RB_DEBUG_COUNTER_INC(cc_ent_invalidate);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -57,7 +57,7 @@ static inline VALUE vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, i
|
||||||
VALUE
|
VALUE
|
||||||
rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE *argv, const rb_callable_method_entry_t *cme, int kw_splat)
|
rb_vm_call0(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE *argv, const rb_callable_method_entry_t *cme, int kw_splat)
|
||||||
{
|
{
|
||||||
const struct rb_callcache cc = VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
|
const struct rb_callcache cc = VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
|
||||||
return vm_call0_cc(ec, recv, id, argc, argv, &cc, kw_splat);
|
return vm_call0_cc(ec, recv, id, argc, argv, &cc, kw_splat);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -104,7 +104,7 @@ vm_call0_cc(rb_execution_context_t *ec, VALUE recv, ID id, int argc, const VALUE
|
||||||
static VALUE
|
static VALUE
|
||||||
vm_call0_cme(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, const rb_callable_method_entry_t *cme)
|
vm_call0_cme(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv, const rb_callable_method_entry_t *cme)
|
||||||
{
|
{
|
||||||
calling->cc = &VM_CC_ON_STACK(Qfalse, vm_call_general, {{ 0 }}, cme);
|
calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
|
||||||
return vm_call0_body(ec, calling, argv);
|
return vm_call0_body(ec, calling, argv);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -409,7 +409,7 @@ invalidate_cc_refinement(st_data_t key, st_data_t data)
|
||||||
|
|
||||||
VM_ASSERT(vm_cc_refinement_p(cc));
|
VM_ASSERT(vm_cc_refinement_p(cc));
|
||||||
|
|
||||||
if (cc->klass) {
|
if (vm_cc_valid(cc)) {
|
||||||
vm_cc_invalidate(cc);
|
vm_cc_invalidate(cc);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue