Real arena implementation

Replace the toy arena implementation with a real one,
based on allocating 8K chunks of memory by default.
This commit is contained in:
Jeremy Hylton 2006-02-28 17:53:04 +00:00
parent 77e42fff31
commit 77f1bb2778
2 changed files with 84 additions and 72 deletions

View File

@@ -23,16 +23,10 @@ extern "C" {
PyAPI_FUNC(void *) PyArena_Malloc(PyArena *, size_t); PyAPI_FUNC(void *) PyArena_Malloc(PyArena *, size_t);
/* The next two routines aren't proper arena allocation routines. /* This routines isn't a proper arena allocation routine. It takes
They exist to experiment with the arena API without making wholesale a PyObject* and records it so that it can be DECREFed when the
changes to the implementation. arena is freed.
The two functions register pointers with the arena id. These
are externally allocated pointers that will be freed when the
arena is freed. One takes a pointer allocated with malloc. The
other takes a PyObject that is DECREFed when the arena is freed.
*/ */
PyAPI_FUNC(int) PyArena_AddMallocPointer(PyArena *, void *);
PyAPI_FUNC(int) PyArena_AddPyObject(PyArena *, PyObject *); PyAPI_FUNC(int) PyArena_AddPyObject(PyArena *, PyObject *);
#ifdef __cplusplus #ifdef __cplusplus

View File

@@ -1,26 +1,27 @@
#include "Python.h" #include "Python.h"
#include "pyarena.h" #include "pyarena.h"
/* An arena list is a linked list that can store either pointers or /* An arena list is a linked list that can store PyObjects. */
PyObjects. The type is clear from context.
*/
typedef struct _arena_list { typedef struct _arena_list {
struct _arena_list *al_next; struct _arena_list *al_next;
void *al_pointer; void *al_pointer;
} PyArenaList; } PyArenaList;
/* There are two linked lists in an arena, one for malloc pointers and /* A simple arena block structure */
one for PyObject. For each list, there is a pointer to the head /* TODO(jhylton): Measurement to justify block size. */
and to the tail. The head is used to free the list. The tail is
used to add a new element to the list.
The list always keeps one un-used node at the end of the list. #define DEFAULT_BLOCK_SIZE 8192
*/ typedef struct _block {
size_t ab_size;
size_t ab_offset;
struct _block *ab_next;
void *ab_mem;
} block;
struct _arena { struct _arena {
PyArenaList *a_malloc_head; block *a_head;
PyArenaList *a_malloc_tail; block *a_cur;
PyArenaList *a_object_head; PyArenaList *a_object_head;
PyArenaList *a_object_tail; PyArenaList *a_object_tail;
}; };
@@ -50,21 +51,52 @@ PyArenaList_FreeObject(PyArenaList *alist)
} }
} }
static void static block *
PyArenaList_FreeMalloc(PyArenaList *alist) block_new(size_t size)
{ {
while (alist) { /* Allocate header and block as one unit. ab_mem points just past header. */
PyArenaList *prev; block *b = (block *)malloc(sizeof(block) + size);
if (alist->al_pointer) { if (!b)
free(alist->al_pointer); return NULL;
b->ab_size = size;
b->ab_mem = (void *)(b + 1);
b->ab_next = NULL;
b->ab_offset = 0;
return b;
} }
alist->al_pointer = NULL;
prev = alist; static void
alist = alist->al_next; block_free(block *b) {
free(prev); while (b) {
block *next = b->ab_next;
free(b);
b = next;
} }
} }
static void *
block_alloc(block *b, size_t size)
{
void *p;
assert(b);
if (b->ab_offset + size > b->ab_size) {
/* If we need to allocate more memory than will fit in the default
block, allocate a one-off block that is exactly the right size. */
/* TODO(jhylton): Think more about space waste at end of block */
block *new = block_new(
size < DEFAULT_BLOCK_SIZE ? DEFAULT_BLOCK_SIZE : size);
if (!new)
return NULL;
assert(!b->ab_next);
b->ab_next = new;
b = new;
}
assert(b->ab_offset + size <= b->ab_size);
p = (void *)(((char *)b->ab_mem) + b->ab_offset);
b->ab_offset += size;
return p;
}
PyArena * PyArena *
PyArena_New() PyArena_New()
@@ -73,10 +105,10 @@ PyArena_New()
if (!arena) if (!arena)
return NULL; return NULL;
arena->a_head = block_new(DEFAULT_BLOCK_SIZE);
arena->a_cur = arena->a_head;
arena->a_object_head = PyArenaList_New(); arena->a_object_head = PyArenaList_New();
arena->a_object_tail = arena->a_object_head; arena->a_object_tail = arena->a_object_head;
arena->a_malloc_head = PyArenaList_New();
arena->a_malloc_tail = arena->a_malloc_head;
return arena; return arena;
} }
@@ -84,36 +116,22 @@ void
PyArena_Free(PyArena *arena) PyArena_Free(PyArena *arena)
{ {
assert(arena); assert(arena);
block_free(arena->a_head);
PyArenaList_FreeObject(arena->a_object_head); PyArenaList_FreeObject(arena->a_object_head);
PyArenaList_FreeMalloc(arena->a_malloc_head);
free(arena); free(arena);
} }
void * void *
PyArena_Malloc(PyArena *arena, size_t size) PyArena_Malloc(PyArena *arena, size_t size)
{ {
/* A better implementation might actually use an arena. The current void *p = block_alloc(arena->a_cur, size);
approach is just a trivial implementation of the API that allows if (!p)
it to be tested. return NULL;
*/ /* Reset cur if we allocated a new block. */
void *p; if (arena->a_cur->ab_next) {
assert(size != 0); arena->a_cur = arena->a_cur->ab_next;
p = malloc(size);
if (p)
PyArena_AddMallocPointer(arena, p);
return p;
} }
return p;
int
PyArena_AddMallocPointer(PyArena *arena, void *pointer)
{
PyArenaList *tail = arena->a_malloc_tail;
assert(pointer);
assert(tail->al_pointer != pointer);
tail->al_next = PyArenaList_New();
tail->al_pointer = pointer;
arena->a_malloc_tail = tail->al_next;
return 1;
} }
int int