Refactor the last references to rb_shape_t

The type isn't opaque, because Ruby isn't often compiled with LTO,
so for optimization purposes it's better to allow as much inlining
as possible.

However ideally only `shape.c` and `shape.h` should deal with
the actual struct, and everything else should just deal with opaque
`shape_id_t`.
This commit is contained in:
Jean Boussier 2025-06-11 14:32:35 +02:00
parent 4463ac264d
commit 95201299fd
Notes: git 2025-06-11 14:38:51 +00:00
8 changed files with 96 additions and 75 deletions

View File

@ -792,22 +792,21 @@ shape_id_i(shape_id_t shape_id, void *data)
return;
}
rb_shape_t *shape = RSHAPE(shape_id);
dump_append(dc, "{\"address\":");
dump_append_ref(dc, (VALUE)shape);
dump_append_ref(dc, (VALUE)RSHAPE(shape_id));
dump_append(dc, ", \"type\":\"SHAPE\", \"id\":");
dump_append_sizet(dc, shape_id);
if (shape->type != SHAPE_ROOT) {
if (RSHAPE_TYPE(shape_id) != SHAPE_ROOT) {
dump_append(dc, ", \"parent_id\":");
dump_append_lu(dc, shape->parent_id);
dump_append_lu(dc, RSHAPE_PARENT(shape_id));
}
dump_append(dc, ", \"depth\":");
dump_append_sizet(dc, rb_shape_depth(shape_id));
switch((enum shape_type)shape->type) {
switch (RSHAPE_TYPE(shape_id)) {
case SHAPE_ROOT:
dump_append(dc, ", \"shape_type\":\"ROOT\"");
break;
@ -815,7 +814,7 @@ shape_id_i(shape_id_t shape_id, void *data)
dump_append(dc, ", \"shape_type\":\"IVAR\"");
dump_append(dc, ",\"edge_name\":");
dump_append_id(dc, shape->edge_name);
dump_append_id(dc, RSHAPE_EDGE_NAME(shape_id));
break;
case SHAPE_OBJ_ID:

View File

@ -576,7 +576,7 @@ RCLASS_FIELDS_COUNT(VALUE obj)
return count;
}
else {
return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
return RSHAPE_LEN(RBASIC_SHAPE_ID(obj));
}
}

View File

@ -13,7 +13,7 @@
#include "constant.h" /* for rb_const_entry_t */
#include "ruby/internal/stdbool.h" /* for bool */
#include "ruby/ruby.h" /* for VALUE */
#include "shape.h" /* for rb_shape_t */
#include "shape.h" /* for shape_id_t */
/* variable.c */
void rb_gc_mark_global_tbl(void);

View File

@ -340,7 +340,7 @@ rb_obj_copy_ivar(VALUE dest, VALUE obj)
shape_id_t dest_shape_id = src_shape_id;
shape_id_t initial_shape_id = RBASIC_SHAPE_ID(dest);
RUBY_ASSERT(RSHAPE(initial_shape_id)->type == SHAPE_ROOT);
RUBY_ASSERT(RSHAPE_TYPE_P(initial_shape_id, SHAPE_ROOT));
dest_shape_id = rb_shape_rebuild(initial_shape_id, src_shape_id);
if (UNLIKELY(rb_shape_too_complex_p(dest_shape_id))) {

25
shape.c
View File

@ -1184,6 +1184,31 @@ rb_shape_memsize(shape_id_t shape_id)
return memsize;
}
bool
/*
 * Walk the shape transition chain for +initial_shape_id+ from the root
 * outward, invoking +func+ once per transition (oldest first, since the
 * recursion descends to the parent before calling +func+).
 *
 * Returns true if the full chain was visited, false if +func+ returned
 * ST_STOP and the walk was aborted.
 *
 * NOTE(review): shape_id(shape, initial_shape_id) appears to rebuild a
 * tagged shape_id combining the raw shape with flags from
 * initial_shape_id — confirm against shape.c's shape_id() helper.
 */
rb_shape_foreach_field(shape_id_t initial_shape_id, rb_shape_foreach_transition_callback func, void *data)
{
    /* Too-complex shapes store fields in a hash table, not a transition
     * chain, so they cannot be walked this way. */
    RUBY_ASSERT(!rb_shape_too_complex_p(initial_shape_id));
    rb_shape_t *shape = RSHAPE(initial_shape_id);
    if (shape->type == SHAPE_ROOT) {
        /* Root shape has no fields and no parent: nothing to visit. */
        return true;
    }
    /* Recurse to the parent first so callbacks fire in insertion order. */
    shape_id_t parent_id = shape_id(RSHAPE(shape->parent_id), initial_shape_id);
    if (rb_shape_foreach_field(parent_id, func, data)) {
        switch (func(shape_id(shape, initial_shape_id), data)) {
          case ST_STOP:
            return false;          /* caller asked to abort the walk */
          case ST_CHECK:
          case ST_CONTINUE:
            break;
          default:
            rb_bug("unreachable"); /* callback returned an unsupported st_retval */
        }
    }
    return true;
}
#if RUBY_DEBUG
bool
rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id)

17
shape.h
View File

@ -158,6 +158,9 @@ shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value);
bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint);
typedef int rb_shape_foreach_transition_callback(shape_id_t shape_id, void *data);
bool rb_shape_foreach_field(shape_id_t shape_id, rb_shape_foreach_transition_callback func, void *data);
shape_id_t rb_shape_transition_frozen(VALUE obj);
shape_id_t rb_shape_transition_complex(VALUE obj);
shape_id_t rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id);
@ -211,10 +214,22 @@ rb_shape_root(size_t heap_id)
return ROOT_SHAPE_ID | ((heap_index + 1) << SHAPE_ID_HEAP_INDEX_OFFSET);
}
static inline shape_id_t
RSHAPE_PARENT(shape_id_t shape_id)
{
return RSHAPE(shape_id)->parent_id;
}
static inline enum shape_type
/* Return the transition type (SHAPE_ROOT, SHAPE_IVAR, SHAPE_OBJ_ID, ...)
 * of the shape identified by +shape_id+, without exposing rb_shape_t. */
RSHAPE_TYPE(shape_id_t shape_id)
{
    return RSHAPE(shape_id)->type;
}
static inline bool
RSHAPE_TYPE_P(shape_id_t shape_id, enum shape_type type)
{
return RSHAPE(shape_id)->type == type;
return RSHAPE_TYPE(shape_id) == type;
}
static inline attr_index_t

View File

@ -1303,7 +1303,7 @@ VALUE
rb_obj_field_get(VALUE obj, shape_id_t target_shape_id)
{
RUBY_ASSERT(!SPECIAL_CONST_P(obj));
RUBY_ASSERT(RSHAPE(target_shape_id)->type == SHAPE_IVAR || RSHAPE(target_shape_id)->type == SHAPE_OBJ_ID);
RUBY_ASSERT(RSHAPE_TYPE_P(target_shape_id, SHAPE_IVAR) || RSHAPE_TYPE_P(target_shape_id, SHAPE_OBJ_ID));
if (rb_shape_too_complex_p(target_shape_id)) {
st_table *fields_hash;
@ -1325,7 +1325,7 @@ rb_obj_field_get(VALUE obj, shape_id_t target_shape_id)
break;
}
VALUE value = Qundef;
st_lookup(fields_hash, RSHAPE(target_shape_id)->edge_name, &value);
st_lookup(fields_hash, RSHAPE_EDGE_NAME(target_shape_id), &value);
#if RUBY_DEBUG
if (UNDEF_P(value)) {
@ -1337,7 +1337,7 @@ rb_obj_field_get(VALUE obj, shape_id_t target_shape_id)
return value;
}
attr_index_t attr_index = RSHAPE(target_shape_id)->next_field_index - 1;
attr_index_t attr_index = RSHAPE_INDEX(target_shape_id);
VALUE *fields;
switch (BUILTIN_TYPE(obj)) {
case T_CLASS:
@ -1505,7 +1505,7 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef)
goto too_complex;
}
RUBY_ASSERT(RSHAPE(next_shape_id)->next_field_index == RSHAPE(old_shape_id)->next_field_index - 1);
RUBY_ASSERT(RSHAPE_LEN(next_shape_id) == RSHAPE_LEN(old_shape_id) - 1);
VALUE *fields;
switch(BUILTIN_TYPE(obj)) {
@ -1526,9 +1526,9 @@ rb_ivar_delete(VALUE obj, ID id, VALUE undef)
RUBY_ASSERT(removed_shape_id != INVALID_SHAPE_ID);
attr_index_t new_fields_count = RSHAPE(next_shape_id)->next_field_index;
attr_index_t new_fields_count = RSHAPE_LEN(next_shape_id);
attr_index_t removed_index = RSHAPE(removed_shape_id)->next_field_index - 1;
attr_index_t removed_index = RSHAPE_INDEX(removed_shape_id);
val = fields[removed_index];
size_t trailing_fields = new_fields_count - removed_index;
@ -1810,8 +1810,8 @@ generic_fields_lookup_ensure_size(st_data_t *k, st_data_t *v, st_data_t u, int e
if (!existing || fields_lookup->resize) {
if (existing) {
RUBY_ASSERT(RSHAPE(fields_lookup->shape_id)->type == SHAPE_IVAR || RSHAPE(fields_lookup->shape_id)->type == SHAPE_OBJ_ID);
RUBY_ASSERT(RSHAPE_CAPACITY(RSHAPE(fields_lookup->shape_id)->parent_id) < RSHAPE_CAPACITY(fields_lookup->shape_id));
RUBY_ASSERT(RSHAPE_TYPE_P(fields_lookup->shape_id, SHAPE_IVAR) || RSHAPE_TYPE_P(fields_lookup->shape_id, SHAPE_OBJ_ID));
RUBY_ASSERT(RSHAPE_CAPACITY(RSHAPE_PARENT(fields_lookup->shape_id)) < RSHAPE_CAPACITY(fields_lookup->shape_id));
}
else {
FL_SET_RAW((VALUE)*k, FL_EXIVAR);
@ -2186,24 +2186,13 @@ struct iv_itr_data {
bool ivar_only;
};
/*
* Returns a flag to stop iterating depending on the result of +callback+.
*/
static bool
iterate_over_shapes_with_callback(rb_shape_t *shape, rb_ivar_foreach_callback_func *callback, struct iv_itr_data *itr_data)
static int
iterate_over_shapes_callback(shape_id_t shape_id, void *data)
{
switch ((enum shape_type)shape->type) {
case SHAPE_ROOT:
return false;
case SHAPE_OBJ_ID:
if (itr_data->ivar_only) {
return iterate_over_shapes_with_callback(RSHAPE(shape->parent_id), callback, itr_data);
}
// fallthrough
case SHAPE_IVAR:
ASSUME(callback);
if (iterate_over_shapes_with_callback(RSHAPE(shape->parent_id), callback, itr_data)) {
return true;
struct iv_itr_data *itr_data = data;
if (itr_data->ivar_only && !RSHAPE_TYPE_P(shape_id, SHAPE_IVAR)) {
return ST_CONTINUE;
}
VALUE *iv_list;
@ -2221,22 +2210,18 @@ iterate_over_shapes_with_callback(rb_shape_t *shape, rb_ivar_foreach_callback_fu
iv_list = itr_data->fields_tbl->as.shape.fields;
break;
}
VALUE val = iv_list[shape->next_field_index - 1];
if (!UNDEF_P(val)) {
switch (callback(shape->edge_name, val, itr_data->arg)) {
case ST_CHECK:
case ST_CONTINUE:
break;
case ST_STOP:
return true;
default:
rb_bug("unreachable");
}
}
return false;
default:
UNREACHABLE_RETURN(false);
VALUE val = iv_list[RSHAPE_INDEX(shape_id)];
return itr_data->func(RSHAPE_EDGE_NAME(shape_id), val, itr_data->arg);
}
/*
* Returns a flag to stop iterating depending on the result of +callback+.
*/
static void
iterate_over_shapes(shape_id_t shape_id, rb_ivar_foreach_callback_func *callback, struct iv_itr_data *itr_data)
{
rb_shape_foreach_field(shape_id, iterate_over_shapes_callback, itr_data);
}
static int
@ -2262,7 +2247,7 @@ obj_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, b
rb_st_foreach(ROBJECT_FIELDS_HASH(obj), each_hash_iv, (st_data_t)&itr_data);
}
else {
iterate_over_shapes_with_callback(RSHAPE(shape_id), func, &itr_data);
iterate_over_shapes(shape_id, func, &itr_data);
}
}
@ -2285,7 +2270,7 @@ gen_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg, b
rb_st_foreach(fields_tbl->as.complex.table, each_hash_iv, (st_data_t)&itr_data);
}
else {
iterate_over_shapes_with_callback(RSHAPE(shape_id), func, &itr_data);
iterate_over_shapes(shape_id, func, &itr_data);
}
}
@ -2306,7 +2291,7 @@ class_fields_each(VALUE obj, rb_ivar_foreach_callback_func *func, st_data_t arg,
rb_st_foreach(RCLASS_WRITABLE_FIELDS_HASH(obj), each_hash_iv, (st_data_t)&itr_data);
}
else {
iterate_over_shapes_with_callback(RSHAPE(shape_id), func, &itr_data);
iterate_over_shapes(shape_id, func, &itr_data);
}
}
@ -2344,7 +2329,7 @@ rb_copy_generic_ivar(VALUE dest, VALUE obj)
shape_id_t initial_shape_id = rb_obj_shape_id(dest);
if (!rb_shape_canonical_p(src_shape_id)) {
RUBY_ASSERT(RSHAPE(initial_shape_id)->type == SHAPE_ROOT);
RUBY_ASSERT(RSHAPE_TYPE_P(initial_shape_id, SHAPE_ROOT));
dest_shape_id = rb_shape_rebuild(initial_shape_id, src_shape_id);
if (UNLIKELY(rb_shape_too_complex_p(dest_shape_id))) {

View File

@ -1455,9 +1455,7 @@ vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_i
RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
}
else if (dest_shape_id != INVALID_SHAPE_ID) {
rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
if (shape_id == RSHAPE_PARENT(dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));
}
else {
@ -1498,10 +1496,9 @@ vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t i
VM_ASSERT(!rb_ractor_shareable_p(obj));
}
else if (dest_shape_id != INVALID_SHAPE_ID) {
rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
shape_id_t source_shape_id = dest_shape->parent_id;
shape_id_t source_shape_id = RSHAPE_PARENT(dest_shape_id);
if (shape_id == source_shape_id && dest_shape->edge_name == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
if (shape_id == source_shape_id && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
RBASIC_SET_SHAPE_ID(obj, dest_shape_id);