Get rid of TOO_COMPLEX shape type

Instead it's now a `shape_id` flag.

This allows checking if an object is complex without having
to chase the `rb_shape_t` pointer.
This commit is contained in:
Jean Boussier 2025-06-04 09:05:55 +02:00
parent 8d49c05c13
commit 675f33508c
Notes: git 2025-06-04 11:14:02 +00:00
13 changed files with 82 additions and 118 deletions

View File

@ -821,9 +821,6 @@ shape_id_i(shape_id_t shape_id, void *data)
case SHAPE_T_OBJECT: case SHAPE_T_OBJECT:
dump_append(dc, ", \"shape_type\":\"T_OBJECT\""); dump_append(dc, ", \"shape_type\":\"T_OBJECT\"");
break; break;
case SHAPE_OBJ_TOO_COMPLEX:
dump_append(dc, ", \"shape_type\":\"OBJ_TOO_COMPLEX\"");
break;
case SHAPE_OBJ_ID: case SHAPE_OBJ_ID:
dump_append(dc, ", \"shape_type\":\"OBJ_ID\""); dump_append(dc, ", \"shape_type\":\"OBJ_ID\"");
break; break;

136
shape.c
View File

@ -20,8 +20,6 @@
#define SHAPE_DEBUG (VM_CHECK_MODE > 0) #define SHAPE_DEBUG (VM_CHECK_MODE > 0)
#endif #endif
#define ROOT_TOO_COMPLEX_SHAPE_ID 0x1
#define REDBLACK_CACHE_SIZE (SHAPE_BUFFER_SIZE * 32) #define REDBLACK_CACHE_SIZE (SHAPE_BUFFER_SIZE * 32)
/* This depends on that the allocated memory by Ruby's allocator or /* This depends on that the allocated memory by Ruby's allocator or
@ -381,12 +379,6 @@ shape_frozen_p(shape_id_t shape_id)
} }
#endif #endif
static inline bool
shape_too_complex_p(rb_shape_t *shape)
{
return shape->flags & SHAPE_FL_TOO_COMPLEX;
}
void void
rb_shape_each_shape_id(each_shape_callback callback, void *data) rb_shape_each_shape_id(each_shape_callback callback, void *data)
{ {
@ -531,7 +523,6 @@ rb_shape_alloc_new_child(ID id, rb_shape_t *shape, enum shape_type shape_type)
redblack_cache_ancestors(new_shape); redblack_cache_ancestors(new_shape);
} }
break; break;
case SHAPE_OBJ_TOO_COMPLEX:
case SHAPE_ROOT: case SHAPE_ROOT:
case SHAPE_T_OBJECT: case SHAPE_T_OBJECT:
rb_bug("Unreachable"); rb_bug("Unreachable");
@ -541,8 +532,6 @@ rb_shape_alloc_new_child(ID id, rb_shape_t *shape, enum shape_type shape_type)
return new_shape; return new_shape;
} }
static rb_shape_t *shape_transition_too_complex(rb_shape_t *original_shape);
#define RUBY_ATOMIC_VALUE_LOAD(x) (VALUE)(RUBY_ATOMIC_PTR_LOAD(x)) #define RUBY_ATOMIC_VALUE_LOAD(x) (VALUE)(RUBY_ATOMIC_PTR_LOAD(x))
static rb_shape_t * static rb_shape_t *
@ -581,7 +570,7 @@ retry:
// If we're not allowed to create a new variation, or if we're out of shapes // If we're not allowed to create a new variation, or if we're out of shapes
// we return TOO_COMPLEX_SHAPE. // we return TOO_COMPLEX_SHAPE.
if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) { if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
res = shape_transition_too_complex(shape); res = NULL;
} }
else { else {
VALUE new_edges = 0; VALUE new_edges = 0;
@ -623,9 +612,6 @@ retry:
static rb_shape_t * static rb_shape_t *
get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bool *variation_created, bool new_variations_allowed) get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bool *variation_created, bool new_variations_allowed)
{ {
// There should never be outgoing edges from "too complex", except for SHAPE_OBJ_ID
RUBY_ASSERT(!shape_too_complex_p(shape) || shape_type == SHAPE_OBJ_ID);
if (rb_multi_ractor_p()) { if (rb_multi_ractor_p()) {
return get_next_shape_internal_atomic(shape, id, shape_type, variation_created, new_variations_allowed); return get_next_shape_internal_atomic(shape, id, shape_type, variation_created, new_variations_allowed);
} }
@ -660,7 +646,7 @@ get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bo
// If we're not allowed to create a new variation, or if we're out of shapes // If we're not allowed to create a new variation, or if we're out of shapes
// we return TOO_COMPLEX_SHAPE. // we return TOO_COMPLEX_SHAPE.
if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) { if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
res = shape_transition_too_complex(shape); res = NULL;
} }
else { else {
rb_shape_t *new_shape = rb_shape_alloc_new_child(id, shape, shape_type); rb_shape_t *new_shape = rb_shape_alloc_new_child(id, shape, shape_type);
@ -695,6 +681,7 @@ remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
if (shape->parent_id == INVALID_SHAPE_ID) { if (shape->parent_id == INVALID_SHAPE_ID) {
// We've hit the top of the shape tree and couldn't find the // We've hit the top of the shape tree and couldn't find the
// IV we wanted to remove, so return NULL // IV we wanted to remove, so return NULL
*removed_shape = NULL;
return NULL; return NULL;
} }
else { else {
@ -710,23 +697,14 @@ remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
// We found a new parent. Create a child of the new parent that // We found a new parent. Create a child of the new parent that
// has the same attributes as this shape. // has the same attributes as this shape.
if (new_parent) { if (new_parent) {
if (UNLIKELY(shape_too_complex_p(new_parent))) {
return new_parent;
}
bool dont_care; bool dont_care;
rb_shape_t *new_child = get_next_shape_internal(new_parent, shape->edge_name, shape->type, &dont_care, true); rb_shape_t *new_child = get_next_shape_internal(new_parent, shape->edge_name, shape->type, &dont_care, true);
if (UNLIKELY(shape_too_complex_p(new_child))) { RUBY_ASSERT(!new_child || new_child->capacity <= shape->capacity);
return new_child;
}
RUBY_ASSERT(new_child->capacity <= shape->capacity);
return new_child; return new_child;
} }
else { else {
// We went all the way to the top of the shape tree and couldn't // We went all the way to the top of the shape tree and couldn't
// find an IV to remove, so return NULL // find an IV to remove so return NULL.
return NULL; return NULL;
} }
} }
@ -736,19 +714,27 @@ remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
shape_id_t shape_id_t
rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id) rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id)
{ {
shape_id_t shape_id = rb_obj_shape_id(obj); shape_id_t original_shape_id = RBASIC_SHAPE_ID(obj);
rb_shape_t *shape = RSHAPE(shape_id);
RUBY_ASSERT(!shape_too_complex_p(shape)); RUBY_ASSERT(!rb_shape_too_complex_p(original_shape_id));
RUBY_ASSERT(!shape_frozen_p(shape_id)); RUBY_ASSERT(!shape_frozen_p(original_shape_id));
rb_shape_t *removed_shape = NULL; rb_shape_t *removed_shape = NULL;
rb_shape_t *new_shape = remove_shape_recursive(shape, id, &removed_shape); rb_shape_t *new_shape = remove_shape_recursive(RSHAPE(original_shape_id), id, &removed_shape);
if (new_shape) {
if (removed_shape) {
*removed_shape_id = raw_shape_id(removed_shape); *removed_shape_id = raw_shape_id(removed_shape);
return raw_shape_id(new_shape);
} }
return shape_id;
if (new_shape) {
return shape_id(new_shape, original_shape_id);
}
else if (removed_shape) {
// We found the shape to remove, but couldn't create a new variation.
// We must transition to TOO_COMPLEX.
return ROOT_TOO_COMPLEX_SHAPE_ID | (original_shape_id & SHAPE_ID_FLAGS_MASK);
}
return original_shape_id;
} }
shape_id_t shape_id_t
@ -760,24 +746,11 @@ rb_shape_transition_frozen(VALUE obj)
return shape_id | SHAPE_ID_FL_FROZEN; return shape_id | SHAPE_ID_FL_FROZEN;
} }
static rb_shape_t *
shape_transition_too_complex(rb_shape_t *original_shape)
{
rb_shape_t *next_shape = RSHAPE(ROOT_TOO_COMPLEX_SHAPE_ID);
if (original_shape->flags & SHAPE_FL_HAS_OBJECT_ID) {
bool dont_care;
next_shape = get_next_shape_internal(next_shape, ruby_internal_object_id, SHAPE_OBJ_ID, &dont_care, false);
}
return next_shape;
}
shape_id_t shape_id_t
rb_shape_transition_complex(VALUE obj) rb_shape_transition_complex(VALUE obj)
{ {
shape_id_t original_shape_id = RBASIC_SHAPE_ID(obj); shape_id_t original_shape_id = RBASIC_SHAPE_ID(obj);
return shape_id(shape_transition_too_complex(RSHAPE(original_shape_id)), original_shape_id); return ROOT_TOO_COMPLEX_SHAPE_ID | (original_shape_id & SHAPE_ID_FLAGS_MASK);
} }
static inline bool static inline bool
@ -849,7 +822,6 @@ shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
case SHAPE_ROOT: case SHAPE_ROOT:
case SHAPE_T_OBJECT: case SHAPE_T_OBJECT:
return false; return false;
case SHAPE_OBJ_TOO_COMPLEX:
case SHAPE_OBJ_ID: case SHAPE_OBJ_ID:
rb_bug("Ivar should not exist on transition"); rb_bug("Ivar should not exist on transition");
} }
@ -865,9 +837,6 @@ static inline rb_shape_t *
shape_get_next(rb_shape_t *shape, VALUE obj, ID id, bool emit_warnings) shape_get_next(rb_shape_t *shape, VALUE obj, ID id, bool emit_warnings)
{ {
RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id)))); RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
if (UNLIKELY(shape_too_complex_p(shape))) {
return shape;
}
#if RUBY_DEBUG #if RUBY_DEBUG
attr_index_t index; attr_index_t index;
@ -891,6 +860,11 @@ shape_get_next(rb_shape_t *shape, VALUE obj, ID id, bool emit_warnings)
bool variation_created = false; bool variation_created = false;
rb_shape_t *new_shape = get_next_shape_internal(shape, id, SHAPE_IVAR, &variation_created, allow_new_shape); rb_shape_t *new_shape = get_next_shape_internal(shape, id, SHAPE_IVAR, &variation_created, allow_new_shape);
if (!new_shape) {
// We couldn't create a new variation, so transition to TOO_COMPLEX.
return NULL;
}
// Check if we should update max_iv_count on the object's class // Check if we should update max_iv_count on the object's class
if (obj != klass && new_shape->next_field_index > RCLASS_MAX_IV_COUNT(klass)) { if (obj != klass && new_shape->next_field_index > RCLASS_MAX_IV_COUNT(klass)) {
RCLASS_SET_MAX_IV_COUNT(klass, new_shape->next_field_index); RCLASS_SET_MAX_IV_COUNT(klass, new_shape->next_field_index);
@ -1016,11 +990,11 @@ shape_cache_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
bool bool
rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value) rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value)
{ {
rb_shape_t *shape = RSHAPE(shape_id);
// It doesn't make sense to ask for the index of an IV that's stored // It doesn't make sense to ask for the index of an IV that's stored
// on an object that is "too complex" as it uses a hash for storing IVs // on an object that is "too complex" as it uses a hash for storing IVs
RUBY_ASSERT(!shape_too_complex_p(shape)); RUBY_ASSERT(!rb_shape_too_complex_p(shape_id));
rb_shape_t *shape = RSHAPE(shape_id);
if (!shape_cache_get_iv_index(shape, id, value)) { if (!shape_cache_get_iv_index(shape, id, value)) {
// If it wasn't in the ancestor cache, then don't do a linear search // If it wasn't in the ancestor cache, then don't do a linear search
@ -1083,9 +1057,6 @@ shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
case SHAPE_ROOT: case SHAPE_ROOT:
case SHAPE_T_OBJECT: case SHAPE_T_OBJECT:
break; break;
case SHAPE_OBJ_TOO_COMPLEX:
rb_bug("Unreachable");
break;
} }
return next_shape; return next_shape;
@ -1102,20 +1073,17 @@ rb_shape_traverse_from_new_root(shape_id_t initial_shape_id, shape_id_t dest_sha
// Rebuild a similar shape with the same ivars but starting from // Rebuild a similar shape with the same ivars but starting from
// a different SHAPE_T_OBJECT, and don't carry over non-canonical transitions // a different SHAPE_T_OBJECT, and don't carry over non-canonical transitions
// such as SHAPE_OBJ_ID. // such as SHAPE_OBJ_ID.
rb_shape_t * static rb_shape_t *
rb_shape_rebuild_shape(rb_shape_t *initial_shape, rb_shape_t *dest_shape) shape_rebuild(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
{ {
RUBY_ASSERT(raw_shape_id(initial_shape) != ROOT_TOO_COMPLEX_SHAPE_ID);
RUBY_ASSERT(raw_shape_id(dest_shape) != ROOT_TOO_COMPLEX_SHAPE_ID);
rb_shape_t *midway_shape; rb_shape_t *midway_shape;
RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT || initial_shape->type == SHAPE_ROOT); RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT || initial_shape->type == SHAPE_ROOT);
if (dest_shape->type != initial_shape->type) { if (dest_shape->type != initial_shape->type) {
midway_shape = rb_shape_rebuild_shape(initial_shape, RSHAPE(dest_shape->parent_id)); midway_shape = shape_rebuild(initial_shape, RSHAPE(dest_shape->parent_id));
if (UNLIKELY(raw_shape_id(midway_shape) == ROOT_TOO_COMPLEX_SHAPE_ID)) { if (UNLIKELY(!midway_shape)) {
return midway_shape; return NULL;
} }
} }
else { else {
@ -1130,9 +1098,6 @@ rb_shape_rebuild_shape(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
case SHAPE_ROOT: case SHAPE_ROOT:
case SHAPE_T_OBJECT: case SHAPE_T_OBJECT:
break; break;
case SHAPE_OBJ_TOO_COMPLEX:
rb_bug("Unreachable");
break;
} }
return midway_shape; return midway_shape;
@ -1141,7 +1106,10 @@ rb_shape_rebuild_shape(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
shape_id_t shape_id_t
rb_shape_rebuild(shape_id_t initial_shape_id, shape_id_t dest_shape_id) rb_shape_rebuild(shape_id_t initial_shape_id, shape_id_t dest_shape_id)
{ {
return raw_shape_id(rb_shape_rebuild_shape(RSHAPE(initial_shape_id), RSHAPE(dest_shape_id))); RUBY_ASSERT(!rb_shape_too_complex_p(initial_shape_id));
RUBY_ASSERT(!rb_shape_too_complex_p(dest_shape_id));
return raw_shape_id(shape_rebuild(RSHAPE(initial_shape_id), RSHAPE(dest_shape_id)));
} }
void void
@ -1185,18 +1153,6 @@ rb_shape_copy_complex_ivars(VALUE dest, VALUE obj, shape_id_t src_shape_id, st_t
rb_obj_init_too_complex(dest, table); rb_obj_init_too_complex(dest, table);
} }
RUBY_FUNC_EXPORTED bool
rb_shape_obj_too_complex_p(VALUE obj)
{
return shape_too_complex_p(obj_shape(obj));
}
bool
rb_shape_too_complex_p(shape_id_t shape_id)
{
return shape_too_complex_p(RSHAPE(shape_id));
}
size_t size_t
rb_shape_edges_count(shape_id_t shape_id) rb_shape_edges_count(shape_id_t shape_id)
{ {
@ -1233,8 +1189,7 @@ static VALUE
shape_too_complex(VALUE self) shape_too_complex(VALUE self)
{ {
shape_id_t shape_id = NUM2INT(rb_struct_getmember(self, rb_intern("id"))); shape_id_t shape_id = NUM2INT(rb_struct_getmember(self, rb_intern("id")));
rb_shape_t *shape = RSHAPE(shape_id); return RBOOL(rb_shape_too_complex_p(shape_id));
return RBOOL(shape_too_complex_p(shape));
} }
static VALUE static VALUE
@ -1486,13 +1441,6 @@ Init_default_shapes(void)
GET_SHAPE_TREE()->root_shape = root; GET_SHAPE_TREE()->root_shape = root;
RUBY_ASSERT(raw_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID); RUBY_ASSERT(raw_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID);
bool dont_care;
rb_shape_t *too_complex_shape = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
too_complex_shape->type = SHAPE_OBJ_TOO_COMPLEX;
too_complex_shape->flags |= SHAPE_FL_TOO_COMPLEX;
too_complex_shape->heap_index = 0;
RUBY_ASSERT(too_complex_shape == RSHAPE(ROOT_TOO_COMPLEX_SHAPE_ID));
// Make shapes for T_OBJECT // Make shapes for T_OBJECT
size_t *sizes = rb_gc_heap_sizes(); size_t *sizes = rb_gc_heap_sizes();
for (int i = 0; sizes[i] > 0; i++) { for (int i = 0; sizes[i] > 0; i++) {
@ -1504,10 +1452,6 @@ Init_default_shapes(void)
t_object_shape->ancestor_index = LEAF; t_object_shape->ancestor_index = LEAF;
RUBY_ASSERT(t_object_shape == RSHAPE(rb_shape_root(i))); RUBY_ASSERT(t_object_shape == RSHAPE(rb_shape_root(i)));
} }
// Prebuild TOO_COMPLEX variations so that they already exist if we ever need them after we
// ran out of shapes.
get_next_shape_internal(too_complex_shape, ruby_internal_object_id, SHAPE_OBJ_ID, &dont_care, true);
} }
void void

20
shape.h
View File

@ -14,6 +14,7 @@ STATIC_ASSERT(shape_id_num_bits, SHAPE_ID_NUM_BITS == sizeof(shape_id_t) * CHAR_
#define SHAPE_ID_OFFSET_MASK (SHAPE_BUFFER_SIZE - 1) #define SHAPE_ID_OFFSET_MASK (SHAPE_BUFFER_SIZE - 1)
#define SHAPE_ID_FLAGS_MASK (shape_id_t)(((1 << (SHAPE_ID_NUM_BITS - SHAPE_ID_OFFSET_NUM_BITS)) - 1) << SHAPE_ID_OFFSET_NUM_BITS) #define SHAPE_ID_FLAGS_MASK (shape_id_t)(((1 << (SHAPE_ID_NUM_BITS - SHAPE_ID_OFFSET_NUM_BITS)) - 1) << SHAPE_ID_OFFSET_NUM_BITS)
#define SHAPE_ID_FL_FROZEN (SHAPE_FL_FROZEN << SHAPE_ID_OFFSET_NUM_BITS) #define SHAPE_ID_FL_FROZEN (SHAPE_FL_FROZEN << SHAPE_ID_OFFSET_NUM_BITS)
#define SHAPE_ID_FL_TOO_COMPLEX (SHAPE_FL_TOO_COMPLEX << SHAPE_ID_OFFSET_NUM_BITS)
#define SHAPE_ID_READ_ONLY_MASK (~SHAPE_ID_FL_FROZEN) #define SHAPE_ID_READ_ONLY_MASK (~SHAPE_ID_FL_FROZEN)
typedef uint32_t redblack_id_t; typedef uint32_t redblack_id_t;
@ -28,9 +29,9 @@ typedef uint32_t redblack_id_t;
#define ATTR_INDEX_NOT_SET ((attr_index_t)-1) #define ATTR_INDEX_NOT_SET ((attr_index_t)-1)
#define ROOT_SHAPE_ID 0x0 #define ROOT_SHAPE_ID 0x0
// ROOT_TOO_COMPLEX_SHAPE_ID 0x1 #define ROOT_TOO_COMPLEX_SHAPE_ID (ROOT_SHAPE_ID | SHAPE_ID_FL_TOO_COMPLEX)
#define SPECIAL_CONST_SHAPE_ID (ROOT_SHAPE_ID | SHAPE_ID_FL_FROZEN) #define SPECIAL_CONST_SHAPE_ID (ROOT_SHAPE_ID | SHAPE_ID_FL_FROZEN)
#define FIRST_T_OBJECT_SHAPE_ID 0x2 #define FIRST_T_OBJECT_SHAPE_ID 0x1
extern ID ruby_internal_object_id; extern ID ruby_internal_object_id;
@ -62,7 +63,6 @@ enum shape_type {
SHAPE_IVAR, SHAPE_IVAR,
SHAPE_OBJ_ID, SHAPE_OBJ_ID,
SHAPE_T_OBJECT, SHAPE_T_OBJECT,
SHAPE_OBJ_TOO_COMPLEX,
}; };
enum shape_flags { enum shape_flags {
@ -142,8 +142,6 @@ RUBY_FUNC_EXPORTED shape_id_t rb_obj_shape_id(VALUE obj);
shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id); shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value); bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value);
bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint); bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint);
RUBY_FUNC_EXPORTED bool rb_shape_obj_too_complex_p(VALUE obj);
bool rb_shape_too_complex_p(shape_id_t shape_id);
bool rb_shape_has_object_id(shape_id_t shape_id); bool rb_shape_has_object_id(shape_id_t shape_id);
shape_id_t rb_shape_transition_frozen(VALUE obj); shape_id_t rb_shape_transition_frozen(VALUE obj);
@ -159,6 +157,18 @@ shape_id_t rb_shape_rebuild(shape_id_t initial_shape_id, shape_id_t dest_shape_i
void rb_shape_copy_fields(VALUE dest, VALUE *dest_buf, shape_id_t dest_shape_id, VALUE src, VALUE *src_buf, shape_id_t src_shape_id); void rb_shape_copy_fields(VALUE dest, VALUE *dest_buf, shape_id_t dest_shape_id, VALUE src, VALUE *src_buf, shape_id_t src_shape_id);
void rb_shape_copy_complex_ivars(VALUE dest, VALUE obj, shape_id_t src_shape_id, st_table *fields_table); void rb_shape_copy_complex_ivars(VALUE dest, VALUE obj, shape_id_t src_shape_id, st_table *fields_table);
static inline bool
rb_shape_too_complex_p(shape_id_t shape_id)
{
return shape_id & SHAPE_ID_FL_TOO_COMPLEX;
}
static inline bool
rb_shape_obj_too_complex_p(VALUE obj)
{
return !RB_SPECIAL_CONST_P(obj) && rb_shape_too_complex_p(RBASIC_SHAPE_ID(obj));
}
static inline bool static inline bool
rb_shape_canonical_p(shape_id_t shape_id) rb_shape_canonical_p(shape_id_t shape_id)
{ {

View File

@ -2240,10 +2240,6 @@ iterate_over_shapes_with_callback(rb_shape_t *shape, rb_ivar_foreach_callback_fu
} }
} }
return false; return false;
case SHAPE_OBJ_TOO_COMPLEX:
default:
rb_bug("Unreachable");
UNREACHABLE_RETURN(false);
} }
} }

12
yjit.c
View File

@ -781,6 +781,18 @@ rb_object_shape_count(void)
return ULONG2NUM((unsigned long)GET_SHAPE_TREE()->next_shape_id); return ULONG2NUM((unsigned long)GET_SHAPE_TREE()->next_shape_id);
} }
bool
rb_yjit_shape_too_complex_p(shape_id_t shape_id)
{
return rb_shape_too_complex_p(shape_id);
}
bool
rb_yjit_shape_obj_too_complex_p(VALUE obj)
{
return rb_shape_obj_too_complex_p(obj);
}
// Assert that we have the VM lock. Relevant mostly for multi ractor situations. // Assert that we have the VM lock. Relevant mostly for multi ractor situations.
// The GC takes the lock before calling us, and this asserts that it indeed happens. // The GC takes the lock before calling us, and this asserts that it indeed happens.
void void

View File

@ -99,8 +99,8 @@ fn main() {
.allowlist_function("rb_shape_id_offset") .allowlist_function("rb_shape_id_offset")
.allowlist_function("rb_shape_get_iv_index") .allowlist_function("rb_shape_get_iv_index")
.allowlist_function("rb_shape_transition_add_ivar_no_warnings") .allowlist_function("rb_shape_transition_add_ivar_no_warnings")
.allowlist_function("rb_shape_obj_too_complex_p") .allowlist_function("rb_yjit_shape_obj_too_complex_p")
.allowlist_function("rb_shape_too_complex_p") .allowlist_function("rb_yjit_shape_too_complex_p")
.allowlist_var("SHAPE_ID_NUM_BITS") .allowlist_var("SHAPE_ID_NUM_BITS")
// From ruby/internal/intern/object.h // From ruby/internal/intern/object.h

View File

@ -3124,7 +3124,7 @@ fn gen_set_ivar(
// If the VM ran out of shapes, or this class generated too many leaf, // If the VM ran out of shapes, or this class generated too many leaf,
// it may be de-optimized into OBJ_TOO_COMPLEX_SHAPE (hash-table). // it may be de-optimized into OBJ_TOO_COMPLEX_SHAPE (hash-table).
new_shape_too_complex = unsafe { rb_shape_too_complex_p(next_shape_id) }; new_shape_too_complex = unsafe { rb_yjit_shape_too_complex_p(next_shape_id) };
if new_shape_too_complex { if new_shape_too_complex {
Some((next_shape_id, None, 0_usize)) Some((next_shape_id, None, 0_usize))
} else { } else {

View File

@ -441,7 +441,7 @@ impl VALUE {
} }
pub fn shape_too_complex(self) -> bool { pub fn shape_too_complex(self) -> bool {
unsafe { rb_shape_obj_too_complex_p(self) } unsafe { rb_yjit_shape_obj_too_complex_p(self) }
} }
pub fn shape_id_of(self) -> u32 { pub fn shape_id_of(self) -> u32 {

View File

@ -1137,8 +1137,6 @@ extern "C" {
pub fn rb_shape_lookup(shape_id: shape_id_t) -> *mut rb_shape_t; pub fn rb_shape_lookup(shape_id: shape_id_t) -> *mut rb_shape_t;
pub fn rb_obj_shape_id(obj: VALUE) -> shape_id_t; pub fn rb_obj_shape_id(obj: VALUE) -> shape_id_t;
pub fn rb_shape_get_iv_index(shape_id: shape_id_t, id: ID, value: *mut attr_index_t) -> bool; pub fn rb_shape_get_iv_index(shape_id: shape_id_t, id: ID, value: *mut attr_index_t) -> bool;
pub fn rb_shape_obj_too_complex_p(obj: VALUE) -> bool;
pub fn rb_shape_too_complex_p(shape_id: shape_id_t) -> bool;
pub fn rb_shape_transition_add_ivar_no_warnings(obj: VALUE, id: ID) -> shape_id_t; pub fn rb_shape_transition_add_ivar_no_warnings(obj: VALUE, id: ID) -> shape_id_t;
pub fn rb_gvar_get(arg1: ID) -> VALUE; pub fn rb_gvar_get(arg1: ID) -> VALUE;
pub fn rb_gvar_set(arg1: ID, arg2: VALUE) -> VALUE; pub fn rb_gvar_set(arg1: ID, arg2: VALUE) -> VALUE;
@ -1265,6 +1263,8 @@ extern "C" {
line: ::std::os::raw::c_int, line: ::std::os::raw::c_int,
); );
pub fn rb_object_shape_count() -> VALUE; pub fn rb_object_shape_count() -> VALUE;
pub fn rb_yjit_shape_too_complex_p(shape_id: shape_id_t) -> bool;
pub fn rb_yjit_shape_obj_too_complex_p(obj: VALUE) -> bool;
pub fn rb_yjit_assert_holding_vm_lock(); pub fn rb_yjit_assert_holding_vm_lock();
pub fn rb_yjit_sendish_sp_pops(ci: *const rb_callinfo) -> usize; pub fn rb_yjit_sendish_sp_pops(ci: *const rb_callinfo) -> usize;
pub fn rb_yjit_invokeblock_sp_pops(ci: *const rb_callinfo) -> usize; pub fn rb_yjit_invokeblock_sp_pops(ci: *const rb_callinfo) -> usize;

7
zjit.c
View File

@ -330,6 +330,11 @@ rb_zjit_print_exception(void)
rb_warn("Ruby error: %"PRIsVALUE"", rb_funcall(exception, rb_intern("full_message"), 0)); rb_warn("Ruby error: %"PRIsVALUE"", rb_funcall(exception, rb_intern("full_message"), 0));
} }
bool
rb_zjit_shape_obj_too_complex_p(VALUE obj)
{
return rb_shape_obj_too_complex_p(obj);
}
// Preprocessed zjit.rb generated during build // Preprocessed zjit.rb generated during build
#include "zjit.rbinc" #include "zjit.rbinc"

View File

@ -112,7 +112,7 @@ fn main() {
.allowlist_function("rb_shape_id_offset") .allowlist_function("rb_shape_id_offset")
.allowlist_function("rb_shape_get_iv_index") .allowlist_function("rb_shape_get_iv_index")
.allowlist_function("rb_shape_transition_add_ivar_no_warnings") .allowlist_function("rb_shape_transition_add_ivar_no_warnings")
.allowlist_function("rb_shape_obj_too_complex_p") .allowlist_function("rb_zjit_shape_obj_too_complex_p")
.allowlist_var("SHAPE_ID_NUM_BITS") .allowlist_var("SHAPE_ID_NUM_BITS")
// From ruby/internal/intern/object.h // From ruby/internal/intern/object.h

View File

@ -478,7 +478,7 @@ impl VALUE {
} }
pub fn shape_too_complex(self) -> bool { pub fn shape_too_complex(self) -> bool {
unsafe { rb_shape_obj_too_complex_p(self) } unsafe { rb_zjit_shape_obj_too_complex_p(self) }
} }
pub fn shape_id_of(self) -> u32 { pub fn shape_id_of(self) -> u32 {

View File

@ -868,7 +868,6 @@ unsafe extern "C" {
pub fn rb_shape_lookup(shape_id: shape_id_t) -> *mut rb_shape_t; pub fn rb_shape_lookup(shape_id: shape_id_t) -> *mut rb_shape_t;
pub fn rb_obj_shape_id(obj: VALUE) -> shape_id_t; pub fn rb_obj_shape_id(obj: VALUE) -> shape_id_t;
pub fn rb_shape_get_iv_index(shape_id: shape_id_t, id: ID, value: *mut attr_index_t) -> bool; pub fn rb_shape_get_iv_index(shape_id: shape_id_t, id: ID, value: *mut attr_index_t) -> bool;
pub fn rb_shape_obj_too_complex_p(obj: VALUE) -> bool;
pub fn rb_shape_transition_add_ivar_no_warnings(obj: VALUE, id: ID) -> shape_id_t; pub fn rb_shape_transition_add_ivar_no_warnings(obj: VALUE, id: ID) -> shape_id_t;
pub fn rb_gvar_get(arg1: ID) -> VALUE; pub fn rb_gvar_get(arg1: ID) -> VALUE;
pub fn rb_gvar_set(arg1: ID, arg2: VALUE) -> VALUE; pub fn rb_gvar_set(arg1: ID, arg2: VALUE) -> VALUE;
@ -945,6 +944,7 @@ unsafe extern "C" {
pub fn rb_iseq_get_zjit_payload(iseq: *const rb_iseq_t) -> *mut ::std::os::raw::c_void; pub fn rb_iseq_get_zjit_payload(iseq: *const rb_iseq_t) -> *mut ::std::os::raw::c_void;
pub fn rb_iseq_set_zjit_payload(iseq: *const rb_iseq_t, payload: *mut ::std::os::raw::c_void); pub fn rb_iseq_set_zjit_payload(iseq: *const rb_iseq_t, payload: *mut ::std::os::raw::c_void);
pub fn rb_zjit_print_exception(); pub fn rb_zjit_print_exception();
pub fn rb_zjit_shape_obj_too_complex_p(obj: VALUE) -> bool;
pub fn rb_iseq_encoded_size(iseq: *const rb_iseq_t) -> ::std::os::raw::c_uint; pub fn rb_iseq_encoded_size(iseq: *const rb_iseq_t) -> ::std::os::raw::c_uint;
pub fn rb_iseq_pc_at_idx(iseq: *const rb_iseq_t, insn_idx: u32) -> *mut VALUE; pub fn rb_iseq_pc_at_idx(iseq: *const rb_iseq_t, insn_idx: u32) -> *mut VALUE;
pub fn rb_iseq_opcode_at_pc(iseq: *const rb_iseq_t, pc: *const VALUE) -> ::std::os::raw::c_int; pub fn rb_iseq_opcode_at_pc(iseq: *const rb_iseq_t, pc: *const VALUE) -> ::std::os::raw::c_int;