Allocate rb_shape_tree statically

There is no point allocating it during init; it only adds
a useless indirection.
Jean Boussier 2025-06-12 15:18:22 +02:00
parent de4b910381
commit 7c22330cd2
Notes: git 2025-06-12 15:08:38 +00:00
4 changed files with 47 additions and 50 deletions
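
The gist of the change: the rb_shape_tree singleton was previously xcalloc'ed in Init_default_shapes and reached through a global pointer; now the struct itself has static storage duration, so the loader zero-fills it and every access saves a pointer load. A minimal sketch of the before/after pattern (illustrative names, not the actual Ruby declarations):

#include <stdlib.h>

typedef struct { unsigned int next_id; } tree_t;

/* Before: heap-allocated singleton behind a global pointer. Every field
 * access costs two loads: one for the pointer, one for the field. */
tree_t *tree_ptr = NULL;

void init_dynamic(void)
{
    tree_ptr = calloc(1, sizeof(tree_t)); /* extra init step + heap allocation */
}

unsigned int next_id_dynamic(void)
{
    return tree_ptr->next_id; /* load tree_ptr, then load the field */
}

/* After: the struct itself is the global. It lives in zero-initialized
 * static storage, so no init call is needed, and the field sits at a
 * link-time-known address: a single load, no indirection. */
tree_t tree = { 0 };

unsigned int next_id_static(void)
{
    return tree.next_id;
}

Static zero-initialization yields the same starting state the xcalloc call produced, which is why the init-time allocation can simply be dropped.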

shape.c (85 changed lines)

@@ -48,8 +48,8 @@ redblack_left(redblack_node_t *node)
         return LEAF;
     }
     else {
-        RUBY_ASSERT(node->l < rb_shape_tree->cache_size);
-        redblack_node_t *left = &rb_shape_tree->shape_cache[node->l - 1];
+        RUBY_ASSERT(node->l < rb_shape_tree.cache_size);
+        redblack_node_t *left = &rb_shape_tree.shape_cache[node->l - 1];
         return left;
     }
 }
@@ -61,8 +61,8 @@ redblack_right(redblack_node_t *node)
         return LEAF;
     }
     else {
-        RUBY_ASSERT(node->r < rb_shape_tree->cache_size);
-        redblack_node_t *right = &rb_shape_tree->shape_cache[node->r - 1];
+        RUBY_ASSERT(node->r < rb_shape_tree.cache_size);
+        redblack_node_t *right = &rb_shape_tree.shape_cache[node->r - 1];
         return right;
     }
 }
@@ -120,7 +120,7 @@ redblack_id_for(redblack_node_t *node)
         return 0;
     }
     else {
-        redblack_node_t *redblack_nodes = rb_shape_tree->shape_cache;
+        redblack_node_t *redblack_nodes = rb_shape_tree.shape_cache;
         redblack_id_t id = (redblack_id_t)(node - redblack_nodes);
         return id + 1;
     }
@@ -129,7 +129,7 @@ redblack_id_for(redblack_node_t *node)
 static redblack_node_t *
 redblack_new(char color, ID key, rb_shape_t *value, redblack_node_t *left, redblack_node_t *right)
 {
-    if (rb_shape_tree->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
+    if (rb_shape_tree.cache_size + 1 >= REDBLACK_CACHE_SIZE) {
         // We're out of cache, just quit
         return LEAF;
     }
@@ -137,8 +137,8 @@ redblack_new(char color, ID key, rb_shape_t *value, redblack_node_t *left, redbl
     RUBY_ASSERT(left == LEAF || left->key < key);
     RUBY_ASSERT(right == LEAF || right->key > key);

-    redblack_node_t *redblack_nodes = rb_shape_tree->shape_cache;
-    redblack_node_t *node = &redblack_nodes[(rb_shape_tree->cache_size)++];
+    redblack_node_t *redblack_nodes = rb_shape_tree.shape_cache;
+    redblack_node_t *node = &redblack_nodes[(rb_shape_tree.cache_size)++];
     node->key = key;
     node->value = (rb_shape_t *)((uintptr_t)value | color);
     node->l = redblack_id_for(left);
@@ -288,20 +288,20 @@ redblack_insert(redblack_node_t *tree, ID key, rb_shape_t *value)
 }
 #endif

-rb_shape_tree_t *rb_shape_tree = NULL;
+rb_shape_tree_t rb_shape_tree = { 0 };
 static VALUE shape_tree_obj = Qfalse;

 rb_shape_t *
 rb_shape_get_root_shape(void)
 {
-    return rb_shape_tree->root_shape;
+    return rb_shape_tree.root_shape;
 }

 static void
 shape_tree_mark(void *data)
 {
     rb_shape_t *cursor = rb_shape_get_root_shape();
-    rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id - 1);
+    rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id - 1);
     while (cursor < end) {
         if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
             rb_gc_mark_movable(cursor->edges);
@@ -314,7 +314,7 @@ static void
 shape_tree_compact(void *data)
 {
     rb_shape_t *cursor = rb_shape_get_root_shape();
-    rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id - 1);
+    rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id - 1);
     while (cursor < end) {
         if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
             cursor->edges = rb_gc_location(cursor->edges);
@@ -326,7 +326,7 @@ shape_tree_compact(void *data)
 static size_t
 shape_tree_memsize(const void *data)
 {
-    return rb_shape_tree->cache_size * sizeof(redblack_node_t);
+    return rb_shape_tree.cache_size * sizeof(redblack_node_t);
 }

 static const rb_data_type_t shape_tree_type = {
@@ -349,14 +349,14 @@ static inline shape_id_t
 raw_shape_id(rb_shape_t *shape)
 {
     RUBY_ASSERT(shape);
-    return (shape_id_t)(shape - rb_shape_tree->shape_list);
+    return (shape_id_t)(shape - rb_shape_tree.shape_list);
 }

 static inline shape_id_t
 shape_id(rb_shape_t *shape, shape_id_t previous_shape_id)
 {
     RUBY_ASSERT(shape);
-    shape_id_t raw_id = (shape_id_t)(shape - rb_shape_tree->shape_list);
+    shape_id_t raw_id = (shape_id_t)(shape - rb_shape_tree.shape_list);
     return raw_id | (previous_shape_id & SHAPE_ID_FLAGS_MASK);
 }

@@ -373,7 +373,7 @@ rb_shape_each_shape_id(each_shape_callback callback, void *data)
 {
     rb_shape_t *start = rb_shape_get_root_shape();
     rb_shape_t *cursor = start;
-    rb_shape_t *end = RSHAPE(rb_shape_tree->next_shape_id);
+    rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id);
     while (cursor < end) {
         callback((shape_id_t)(cursor - start), data);
         cursor += 1;
@@ -414,14 +414,14 @@ rb_shape_depth(shape_id_t shape_id)
 static rb_shape_t *
 shape_alloc(void)
 {
-    shape_id_t shape_id = (shape_id_t)RUBY_ATOMIC_FETCH_ADD(rb_shape_tree->next_shape_id, 1);
+    shape_id_t shape_id = (shape_id_t)RUBY_ATOMIC_FETCH_ADD(rb_shape_tree.next_shape_id, 1);

     if (shape_id == (MAX_SHAPE_ID + 1)) {
         // TODO: Make an OutOfShapesError ??
         rb_bug("Out of shapes");
     }

-    return &rb_shape_tree->shape_list[shape_id];
+    return &rb_shape_tree.shape_list[shape_id];
 }

 static rb_shape_t *
@@ -485,7 +485,7 @@ redblack_cache_ancestors(rb_shape_t *shape)
 static attr_index_t
 shape_grow_capa(attr_index_t current_capa)
 {
-    const attr_index_t *capacities = rb_shape_tree->capacities;
+    const attr_index_t *capacities = rb_shape_tree.capacities;

     // First try to use the next size that will be embeddable in a larger object slot.
     attr_index_t capa;
@@ -564,7 +564,7 @@ retry:
     if (!res) {
         // If we're not allowed to create a new variation, of if we're out of shapes
         // we return TOO_COMPLEX_SHAPE.
-        if (!new_variations_allowed || rb_shape_tree->next_shape_id > MAX_SHAPE_ID) {
+        if (!new_variations_allowed || rb_shape_tree.next_shape_id > MAX_SHAPE_ID) {
             res = NULL;
         }
         else {
@@ -640,7 +640,7 @@ get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bo
     if (!res) {
         // If we're not allowed to create a new variation, of if we're out of shapes
         // we return TOO_COMPLEX_SHAPE.
-        if (!new_variations_allowed || rb_shape_tree->next_shape_id > MAX_SHAPE_ID) {
+        if (!new_variations_allowed || rb_shape_tree.next_shape_id > MAX_SHAPE_ID) {
             res = NULL;
         }
         else {
@@ -1238,7 +1238,7 @@ rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id)
     uint8_t flags_heap_index = rb_shape_heap_index(shape_id);
     if (RB_TYPE_P(obj, T_OBJECT)) {
-        size_t shape_id_slot_size = rb_shape_tree->capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
+        size_t shape_id_slot_size = rb_shape_tree.capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
         size_t actual_slot_size = rb_gc_obj_slot_size(obj);

         if (shape_id_slot_size != actual_slot_size) {
@@ -1388,7 +1388,7 @@ rb_shape_root_shape(VALUE self)
 static VALUE
 rb_shape_shapes_available(VALUE self)
 {
-    return INT2NUM(MAX_SHAPE_ID - (rb_shape_tree->next_shape_id - 1));
+    return INT2NUM(MAX_SHAPE_ID - (rb_shape_tree.next_shape_id - 1));
 }

 static VALUE
@@ -1396,7 +1396,7 @@ rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
 {
     rb_check_arity(argc, 0, 1);
     int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
-    rb_shape_tree->next_shape_id = MAX_SHAPE_ID - offset + 1;
+    rb_shape_tree.next_shape_id = MAX_SHAPE_ID - offset + 1;
     return Qnil;
 }

@@ -1452,7 +1452,7 @@ static VALUE
 rb_shape_find_by_id(VALUE mod, VALUE id)
 {
     shape_id_t shape_id = NUM2UINT(id);
-    if (shape_id >= rb_shape_tree->next_shape_id) {
+    if (shape_id >= rb_shape_tree.next_shape_id) {
         rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);
     }
     return shape_id_t_to_rb_cShape(shape_id);
@@ -1466,8 +1466,6 @@ rb_shape_find_by_id(VALUE mod, VALUE id)
 void
 Init_default_shapes(void)
 {
-    rb_shape_tree = xcalloc(1, sizeof(rb_shape_tree_t));
-
     size_t *heap_sizes = rb_gc_heap_sizes();
     size_t heaps_count = 0;
     while (heap_sizes[heaps_count]) {
@@ -1479,23 +1477,23 @@ Init_default_shapes(void)
     for (index = 0; index < heaps_count; index++) {
         capacities[index] = (heap_sizes[index] - sizeof(struct RBasic)) / sizeof(VALUE);
     }
-    rb_shape_tree->capacities = capacities;
+    rb_shape_tree.capacities = capacities;

 #ifdef HAVE_MMAP
     size_t shape_list_mmap_size = rb_size_mul_or_raise(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t), rb_eRuntimeError);
-    rb_shape_tree->shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
-                                                   PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
-    if (rb_shape_tree->shape_list == MAP_FAILED) {
-        rb_shape_tree->shape_list = 0;
+    rb_shape_tree.shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
+                                                  PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
+    if (rb_shape_tree.shape_list == MAP_FAILED) {
+        rb_shape_tree.shape_list = 0;
     }
     else {
-        ruby_annotate_mmap(rb_shape_tree->shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
+        ruby_annotate_mmap(rb_shape_tree.shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
     }
 #else
-    rb_shape_tree->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
+    rb_shape_tree.shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
 #endif

-    if (!rb_shape_tree->shape_list) {
+    if (!rb_shape_tree.shape_list) {
         rb_memerror();
     }

@@ -1505,19 +1503,19 @@ Init_default_shapes(void)

 #ifdef HAVE_MMAP
     size_t shape_cache_mmap_size = rb_size_mul_or_raise(REDBLACK_CACHE_SIZE, sizeof(redblack_node_t), rb_eRuntimeError);
-    rb_shape_tree->shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
-                                                         PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
-    rb_shape_tree->cache_size = 0;
+    rb_shape_tree.shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
+                                                        PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
+    rb_shape_tree.cache_size = 0;

     // If mmap fails, then give up on the redblack tree cache.
     // We set the cache size such that the redblack node allocators think
     // the cache is full.
-    if (rb_shape_tree->shape_cache == MAP_FAILED) {
-        rb_shape_tree->shape_cache = 0;
-        rb_shape_tree->cache_size = REDBLACK_CACHE_SIZE;
+    if (rb_shape_tree.shape_cache == MAP_FAILED) {
+        rb_shape_tree.shape_cache = 0;
+        rb_shape_tree.cache_size = REDBLACK_CACHE_SIZE;
     }
     else {
-        ruby_annotate_mmap(rb_shape_tree->shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
+        ruby_annotate_mmap(rb_shape_tree.shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
     }
 #endif
@@ -1528,8 +1526,8 @@ Init_default_shapes(void)
     rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
     root->capacity = 0;
     root->type = SHAPE_ROOT;
-    rb_shape_tree->root_shape = root;
-    RUBY_ASSERT(raw_shape_id(rb_shape_tree->root_shape) == ROOT_SHAPE_ID);
+    rb_shape_tree.root_shape = root;
+    RUBY_ASSERT(raw_shape_id(rb_shape_tree.root_shape) == ROOT_SHAPE_ID);

     rb_shape_t *root_with_obj_id = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
     root_with_obj_id->type = SHAPE_OBJ_ID;
@@ -1541,8 +1539,7 @@ Init_default_shapes(void)
 void
 rb_shape_free_all(void)
 {
-    xfree((void *)rb_shape_tree->capacities);
-    xfree(rb_shape_tree);
+    xfree((void *)rb_shape_tree.capacities);
 }

 void

shape.h (6 changed lines)

@@ -94,7 +94,7 @@ typedef struct {
 } rb_shape_tree_t;

 RUBY_SYMBOL_EXPORT_BEGIN
-RUBY_EXTERN rb_shape_tree_t *rb_shape_tree;
+RUBY_EXTERN rb_shape_tree_t rb_shape_tree;
 RUBY_SYMBOL_EXPORT_END

 union rb_attr_index_cache {
@@ -151,7 +151,7 @@ RSHAPE(shape_id_t shape_id)
     uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
     RUBY_ASSERT(offset != INVALID_SHAPE_ID);

-    return &rb_shape_tree->shape_list[offset];
+    return &rb_shape_tree.shape_list[offset];
 }

 int32_t rb_shape_id_offset(void);
@@ -240,7 +240,7 @@ RSHAPE_EMBEDDED_CAPACITY(shape_id_t shape_id)
 {
     uint8_t heap_index = rb_shape_heap_index(shape_id);
     if (heap_index) {
-        return rb_shape_tree->capacities[heap_index - 1];
+        return rb_shape_tree.capacities[heap_index - 1];
     }
     return 0;
 }
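
Since the exported symbol is now the struct itself rather than a pointer to it, the inline accessors in this header (RSHAPE, RSHAPE_EMBEDDED_CAPACITY) read rb_shape_tree's fields at a fixed offset from the symbol instead of dereferencing a global first; that dereference is the indirection the commit message calls useless.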

vm.c (4 changed lines)

@@ -736,8 +736,8 @@ vm_stat(int argc, VALUE *argv, VALUE self)
     SET(constant_cache_invalidations, ruby_vm_constant_cache_invalidations);
     SET(constant_cache_misses, ruby_vm_constant_cache_misses);
     SET(global_cvar_state, ruby_vm_global_cvar_state);
-    SET(next_shape_id, (rb_serial_t)rb_shape_tree->next_shape_id);
-    SET(shape_cache_size, (rb_serial_t)rb_shape_tree->cache_size);
+    SET(next_shape_id, (rb_serial_t)rb_shape_tree.next_shape_id);
+    SET(shape_cache_size, (rb_serial_t)rb_shape_tree.cache_size);
 #undef SET

 #if USE_DEBUG_COUNTER

yjit.c (2 changed lines)

@@ -778,7 +778,7 @@ VALUE
 rb_object_shape_count(void)
 {
     // next_shape_id starts from 0, so it's the same as the count
-    return ULONG2NUM((unsigned long)rb_shape_tree->next_shape_id);
+    return ULONG2NUM((unsigned long)rb_shape_tree.next_shape_id);
 }

 bool