GH-127705: Use _PyStackRefs in the default build. (GH-127875)

This commit is contained in:
Mark Shannon 2025-03-10 14:06:56 +00:00 committed by GitHub
parent 7cc99a54b7
commit 2bef8ea8ea
21 changed files with 688 additions and 254 deletions

View File

@ -342,7 +342,7 @@ _Py_eval_breaker_bit_is_set(PyThreadState *tstate, uintptr_t bit)
void _Py_set_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit);
void _Py_unset_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit);
PyAPI_FUNC(PyObject *) _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyStackRef right, double value);
PyAPI_FUNC(_PyStackRef) _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyStackRef right, double value);
#ifdef __cplusplus
}

View File

@ -148,17 +148,26 @@ _PyFrame_NumSlotsForCodeObject(PyCodeObject *code)
static inline void _PyFrame_Copy(_PyInterpreterFrame *src, _PyInterpreterFrame *dest)
{
*dest = *src;
assert(src->stackpointer != NULL);
int stacktop = (int)(src->stackpointer - src->localsplus);
assert(stacktop >= _PyFrame_GetCode(src)->co_nlocalsplus);
dest->stackpointer = dest->localsplus + stacktop;
for (int i = 1; i < stacktop; i++) {
dest->localsplus[i] = src->localsplus[i];
}
dest->f_executable = PyStackRef_MakeHeapSafe(src->f_executable);
// Don't leave a dangling pointer to the old frame when creating generators
// and coroutines:
dest->previous = NULL;
dest->f_funcobj = PyStackRef_MakeHeapSafe(src->f_funcobj);
dest->f_globals = src->f_globals;
dest->f_builtins = src->f_builtins;
dest->f_locals = src->f_locals;
dest->frame_obj = src->frame_obj;
dest->instr_ptr = src->instr_ptr;
#ifdef Py_GIL_DISABLED
dest->tlbc_index = src->tlbc_index;
#endif
assert(src->stackpointer != NULL);
int stacktop = (int)(src->stackpointer - src->localsplus);
assert(stacktop >= 0);
dest->stackpointer = dest->localsplus + stacktop;
for (int i = 0; i < stacktop; i++) {
dest->localsplus[i] = PyStackRef_MakeHeapSafe(src->localsplus[i]);
}
}
#ifdef Py_GIL_DISABLED
@ -393,7 +402,7 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int
PyAPI_FUNC(_PyInterpreterFrame *)
_PyEvalFramePushAndInit(PyThreadState *tstate, _PyStackRef func,
PyObject *locals, _PyStackRef const* args,
PyObject *locals, _PyStackRef const *args,
size_t argcount, PyObject *kwnames,
_PyInterpreterFrame *previous);
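The reworked _PyFrame_Copy above runs every copied value through PyStackRef_MakeHeapSafe: once a frame escapes to the heap (for example when a generator or coroutine is created), its slots must hold references that stay valid after the current C activation ends, so any reference that does not own a refcount is upgraded. A minimal sketch of that pattern, assuming the internal header is available inside the core build (copy_slots is a hypothetical helper, not a CPython function):
#ifdef Py_BUILD_CORE
#include "Python.h"
#include "pycore_stackref.h"
static void
copy_slots(_PyStackRef *dst, const _PyStackRef *src, int n)
{
    for (int i = 0; i < n; i++) {
        /* No-op for refs that already own a refcount (or are NULL/immortal);
         * otherwise takes a new reference so the heap frame owns it. */
        dst[i] = PyStackRef_MakeHeapSafe(src[i]);
    }
}
#endif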

View File

@ -82,7 +82,7 @@ PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *);
#define _PyObject_HEAD_INIT(type) \
{ \
.ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT, \
.ob_flags = _Py_STATICALLY_ALLOCATED_FLAG, \
.ob_flags = _Py_STATIC_FLAG_BITS, \
.ob_type = (type) \
}
#else

View File

@ -2024,7 +2024,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[BINARY_OP_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG },
[BINARY_OP_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG },
[BINARY_OP_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG },
[BINARY_OP_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG },
[BINARY_OP_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG },
[BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@ -2048,9 +2048,9 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[CALL_INTRINSIC_2] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CALL_ISINSTANCE] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[CALL_KW] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[CALL_KW_BOUND_METHOD] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[CALL_KW_BOUND_METHOD] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CALL_KW_NON_PY] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CALL_KW_PY] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[CALL_KW_PY] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CALL_LEN] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[CALL_LIST_APPEND] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CALL_METHOD_DESCRIPTOR_FAST] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },

View File

@ -60,8 +60,6 @@ extern "C" {
#if !defined(Py_GIL_DISABLED) && defined(Py_STACKREF_DEBUG)
typedef union _PyStackRef {
uint64_t index;
} _PyStackRef;
@ -153,6 +151,16 @@ _PyStackRef_CLOSE(_PyStackRef ref, const char *filename, int linenumber)
}
#define PyStackRef_CLOSE(REF) _PyStackRef_CLOSE((REF), __FILE__, __LINE__)
static inline void
PyStackRef_XCLOSE(_PyStackRef ref)
{
if (PyStackRef_IsNull(ref)) {
return;
}
PyObject *obj = _Py_stackref_close(ref);
Py_DECREF(obj);
}
static inline _PyStackRef
_PyStackRef_DUP(_PyStackRef ref, const char *filename, int linenumber)
{
@ -162,7 +170,36 @@ _PyStackRef_DUP(_PyStackRef ref, const char *filename, int linenumber)
}
#define PyStackRef_DUP(REF) _PyStackRef_DUP(REF, __FILE__, __LINE__)
#define PyStackRef_CLOSE_SPECIALIZED(stackref, dealloc) PyStackRef_CLOSE(stackref)
extern void PyStackRef_CLOSE_SPECIALIZED(_PyStackRef ref, destructor destruct);
static inline _PyStackRef
PyStackRef_MakeHeapSafe(_PyStackRef ref)
{
return ref;
}
#define PyStackRef_CLEAR(REF) \
do { \
_PyStackRef *_tmp_op_ptr = &(REF); \
_PyStackRef _tmp_old_op = (*_tmp_op_ptr); \
*_tmp_op_ptr = PyStackRef_NULL; \
PyStackRef_XCLOSE(_tmp_old_op); \
} while (0)
static inline _PyStackRef
_PyStackRef_FromPyObjectStealMortal(PyObject *obj, const char *filename, int linenumber)
{
assert(!_Py_IsImmortal(obj));
return _Py_stackref_create(obj, filename, linenumber);
}
#define PyStackRef_FromPyObjectStealMortal(obj) _PyStackRef_FromPyObjectStealMortal(_PyObject_CAST(obj), __FILE__, __LINE__)
static inline bool
PyStackRef_IsHeapSafe(_PyStackRef ref)
{
return true;
}
#else
@ -171,12 +208,13 @@ typedef union _PyStackRef {
} _PyStackRef;
#ifdef Py_GIL_DISABLED
#define Py_TAG_DEFERRED (1)
#define Py_TAG_PTR ((uintptr_t)0)
#define Py_TAG_BITS ((uintptr_t)1)
#ifdef Py_GIL_DISABLED
static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED};
#define PyStackRef_IsNull(stackref) ((stackref).bits == PyStackRef_NULL.bits)
@ -184,6 +222,11 @@ static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED};
#define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_DEFERRED })
#define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) | Py_TAG_DEFERRED })
// Checks that mask out the deferred bit in the free threading build.
#define PyStackRef_IsNone(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_None)
#define PyStackRef_IsTrue(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_True)
#define PyStackRef_IsFalse(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_False)
static inline PyObject *
PyStackRef_AsPyObjectBorrow(_PyStackRef stackref)
{
@ -220,6 +263,17 @@ _PyStackRef_FromPyObjectSteal(PyObject *obj)
}
# define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj))
static inline _PyStackRef
PyStackRef_FromPyObjectStealMortal(PyObject *obj)
{
assert(obj != NULL);
assert(!_Py_IsImmortal(obj));
// Make sure we don't take an already tagged value.
assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
return (_PyStackRef){ .bits = (uintptr_t)obj };
}
static inline _PyStackRef
PyStackRef_FromPyObjectNew(PyObject *obj)
{
@ -255,6 +309,13 @@ PyStackRef_FromPyObjectImmortal(PyObject *obj)
} \
} while (0)
static inline void
PyStackRef_CLOSE_SPECIALIZED(_PyStackRef ref, destructor destruct)
{
(void)destruct;
PyStackRef_CLOSE(ref);
}
static inline _PyStackRef
PyStackRef_DUP(_PyStackRef stackref)
{
@ -269,6 +330,18 @@ PyStackRef_DUP(_PyStackRef stackref)
return stackref;
}
static inline bool
PyStackRef_IsHeapSafe(_PyStackRef ref)
{
return true;
}
static inline _PyStackRef
PyStackRef_MakeHeapSafe(_PyStackRef ref)
{
return ref;
}
// Convert a possibly deferred reference to a strong reference.
static inline _PyStackRef
PyStackRef_AsStrongReference(_PyStackRef stackref)
@ -276,55 +349,13 @@ PyStackRef_AsStrongReference(_PyStackRef stackref)
return PyStackRef_FromPyObjectSteal(PyStackRef_AsPyObjectSteal(stackref));
}
#define PyStackRef_CLOSE_SPECIALIZED(stackref, dealloc) PyStackRef_CLOSE(stackref)
#else // Py_GIL_DISABLED
// With GIL
static const _PyStackRef PyStackRef_NULL = { .bits = 0 };
#define PyStackRef_IsNull(stackref) ((stackref).bits == 0)
#define PyStackRef_True ((_PyStackRef){.bits = (uintptr_t)&_Py_TrueStruct })
#define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) })
#define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) })
#define PyStackRef_AsPyObjectBorrow(stackref) ((PyObject *)(stackref).bits)
#define PyStackRef_AsPyObjectSteal(stackref) PyStackRef_AsPyObjectBorrow(stackref)
#define PyStackRef_FromPyObjectSteal(obj) ((_PyStackRef){.bits = ((uintptr_t)(obj))})
#define PyStackRef_FromPyObjectNew(obj) ((_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) })
#define PyStackRef_FromPyObjectImmortal(obj) ((_PyStackRef){ .bits = (uintptr_t)(obj) })
#define PyStackRef_CLOSE(stackref) Py_DECREF(PyStackRef_AsPyObjectBorrow(stackref))
#define PyStackRef_DUP(stackref) PyStackRef_FromPyObjectSteal(Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref)))
#define PyStackRef_CLOSE_SPECIALIZED(stackref, dealloc) _Py_DECREF_SPECIALIZED(PyStackRef_AsPyObjectBorrow(stackref), dealloc)
#endif // Py_GIL_DISABLED
// Check if a stackref is exactly the same as another stackref, including the
// the deferred bit. This can only be used safely if you know that the deferred
// bits of `a` and `b` match.
#define PyStackRef_IsExactly(a, b) \
(assert(((a).bits & Py_TAG_BITS) == ((b).bits & Py_TAG_BITS)), (a).bits == (b).bits)
// Checks that mask out the deferred bit in the free threading build.
#define PyStackRef_IsNone(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_None)
#define PyStackRef_IsTrue(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_True)
#define PyStackRef_IsFalse(ref) (PyStackRef_AsPyObjectBorrow(ref) == Py_False)
#endif
// Converts a PyStackRef back to a PyObject *, converting the
// stackref to a new reference.
#define PyStackRef_AsPyObjectNew(stackref) Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref))
#define PyStackRef_TYPE(stackref) Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref))
#define PyStackRef_XCLOSE(stackref) \
do { \
_PyStackRef _tmp = (stackref); \
if (!PyStackRef_IsNull(_tmp)) { \
PyStackRef_CLOSE(_tmp); \
} \
} while (0);
#define PyStackRef_CLEAR(op) \
do { \
@ -336,15 +367,250 @@ static const _PyStackRef PyStackRef_NULL = { .bits = 0 };
} \
} while (0)
#define PyStackRef_XCLOSE(stackref) \
do { \
_PyStackRef _tmp = (stackref); \
if (!PyStackRef_IsNull(_tmp)) { \
PyStackRef_CLOSE(_tmp); \
} \
} while (0);
#define PyStackRef_FromPyObjectNewMortal PyStackRef_FromPyObjectNew
#else // Py_GIL_DISABLED
// With GIL
/* References to immortal objects always have their tag bit set to Py_TAG_REFCNT
* as they can (must) have their reclamation deferred */
#define Py_TAG_BITS 1
#define Py_TAG_REFCNT 1
#if _Py_IMMORTAL_FLAGS != Py_TAG_REFCNT
# error "_Py_IMMORTAL_FLAGS != Py_TAG_REFCNT"
#endif
#define BITS_TO_PTR(REF) ((PyObject *)((REF).bits))
#define BITS_TO_PTR_MASKED(REF) ((PyObject *)(((REF).bits) & (~Py_TAG_BITS)))
#define PyStackRef_NULL_BITS Py_TAG_REFCNT
static const _PyStackRef PyStackRef_NULL = { .bits = PyStackRef_NULL_BITS };
#define PyStackRef_IsNull(ref) ((ref).bits == PyStackRef_NULL_BITS)
#define PyStackRef_True ((_PyStackRef){.bits = ((uintptr_t)&_Py_TrueStruct) | Py_TAG_REFCNT })
#define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_REFCNT })
#define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) | Py_TAG_REFCNT })
#define PyStackRef_IsTrue(REF) ((REF).bits == (((uintptr_t)&_Py_TrueStruct) | Py_TAG_REFCNT))
#define PyStackRef_IsFalse(REF) ((REF).bits == (((uintptr_t)&_Py_FalseStruct) | Py_TAG_REFCNT))
#define PyStackRef_IsNone(REF) ((REF).bits == (((uintptr_t)&_Py_NoneStruct) | Py_TAG_REFCNT))
#ifdef Py_DEBUG
static inline void PyStackRef_CheckValid(_PyStackRef ref) {
assert(ref.bits != 0);
int tag = ref.bits & Py_TAG_BITS;
PyObject *obj = BITS_TO_PTR_MASKED(ref);
switch (tag) {
case 0:
/* Can be immortal if object was made immortal after reference came into existence */
assert(!_Py_IsStaticImmortal(obj));
break;
case Py_TAG_REFCNT:
assert(obj == NULL || _Py_IsImmortal(obj));
break;
default:
assert(0);
}
}
#else
#define PyStackRef_CheckValid(REF) ((void)0)
#endif
#ifdef _WIN32
#define PyStackRef_RefcountOnObject(REF) (((REF).bits & Py_TAG_BITS) == 0)
#define PyStackRef_AsPyObjectBorrow BITS_TO_PTR_MASKED
#else
/* Does this ref not have an embedded refcount and thus not refer to a declared immortal object? */
static inline int
PyStackRef_RefcountOnObject(_PyStackRef ref)
{
return (ref.bits & Py_TAG_BITS) == 0;
}
static inline PyObject *
PyStackRef_AsPyObjectBorrow(_PyStackRef ref)
{
return BITS_TO_PTR_MASKED(ref);
}
#endif
static inline PyObject *
PyStackRef_AsPyObjectSteal(_PyStackRef ref)
{
if (PyStackRef_RefcountOnObject(ref)) {
return BITS_TO_PTR(ref);
}
else {
return Py_NewRef(BITS_TO_PTR_MASKED(ref));
}
}
static inline _PyStackRef
PyStackRef_FromPyObjectSteal(PyObject *obj)
{
assert(obj != NULL);
#if SIZEOF_VOID_P > 4
unsigned int tag = obj->ob_flags & Py_TAG_BITS;
#else
unsigned int tag = _Py_IsImmortal(obj) ? Py_TAG_REFCNT : 0;
#endif
_PyStackRef ref = ((_PyStackRef){.bits = ((uintptr_t)(obj)) | tag});
PyStackRef_CheckValid(ref);
return ref;
}
static inline _PyStackRef
PyStackRef_FromPyObjectStealMortal(PyObject *obj)
{
assert(obj != NULL);
assert(!_Py_IsImmortal(obj));
_PyStackRef ref = ((_PyStackRef){.bits = ((uintptr_t)(obj)) });
PyStackRef_CheckValid(ref);
return ref;
}
// Check if a stackref is exactly the same as another stackref, including the
// the deferred bit. This can only be used safely if you know that the deferred
// bits of `a` and `b` match.
#define PyStackRef_IsExactly(a, b) \
(assert(((a).bits & Py_TAG_BITS) == ((b).bits & Py_TAG_BITS)), (a).bits == (b).bits)
static inline _PyStackRef
_PyStackRef_FromPyObjectNew(PyObject *obj)
{
assert(obj != NULL);
if (_Py_IsImmortal(obj)) {
return (_PyStackRef){ .bits = ((uintptr_t)obj) | Py_TAG_REFCNT};
}
Py_INCREF_MORTAL(obj);
_PyStackRef ref = (_PyStackRef){ .bits = (uintptr_t)obj };
PyStackRef_CheckValid(ref);
return ref;
}
#define PyStackRef_FromPyObjectNew(obj) _PyStackRef_FromPyObjectNew(_PyObject_CAST(obj))
static inline _PyStackRef
_PyStackRef_FromPyObjectNewMortal(PyObject *obj)
{
assert(obj != NULL);
Py_INCREF_MORTAL(obj);
_PyStackRef ref = (_PyStackRef){ .bits = (uintptr_t)obj };
PyStackRef_CheckValid(ref);
return ref;
}
#define PyStackRef_FromPyObjectNewMortal(obj) _PyStackRef_FromPyObjectNewMortal(_PyObject_CAST(obj))
/* Create a new reference from an object with an embedded reference count */
static inline _PyStackRef
PyStackRef_FromPyObjectImmortal(PyObject *obj)
{
assert(_Py_IsImmortal(obj));
return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_REFCNT};
}
/* WARNING: This macro evaluates its argument more than once */
#ifdef _WIN32
#define PyStackRef_DUP(REF) \
(PyStackRef_RefcountOnObject(REF) ? (Py_INCREF_MORTAL(BITS_TO_PTR(REF)), (REF)) : (REF))
#else
static inline _PyStackRef
PyStackRef_DUP(_PyStackRef ref)
{
assert(!PyStackRef_IsNull(ref));
if (PyStackRef_RefcountOnObject(ref)) {
Py_INCREF_MORTAL(BITS_TO_PTR(ref));
}
return ref;
}
#endif
static inline bool
PyStackRef_IsHeapSafe(_PyStackRef ref)
{
return (ref.bits & Py_TAG_BITS) == 0 || ref.bits == PyStackRef_NULL_BITS || _Py_IsImmortal(BITS_TO_PTR_MASKED(ref));
}
static inline _PyStackRef
PyStackRef_MakeHeapSafe(_PyStackRef ref)
{
if (PyStackRef_IsHeapSafe(ref)) {
return ref;
}
PyObject *obj = BITS_TO_PTR_MASKED(ref);
Py_INCREF(obj);
ref.bits = (uintptr_t)obj;
PyStackRef_CheckValid(ref);
return ref;
}
#ifdef _WIN32
#define PyStackRef_CLOSE(REF) \
do { \
_PyStackRef _temp = (REF); \
if (PyStackRef_RefcountOnObject(_temp)) Py_DECREF_MORTAL(BITS_TO_PTR(_temp)); \
} while (0)
#else
static inline void
PyStackRef_CLOSE(_PyStackRef ref)
{
assert(!PyStackRef_IsNull(ref));
if (PyStackRef_RefcountOnObject(ref)) {
Py_DECREF_MORTAL(BITS_TO_PTR(ref));
}
}
#endif
static inline void
PyStackRef_CLOSE_SPECIALIZED(_PyStackRef ref, destructor destruct)
{
assert(!PyStackRef_IsNull(ref));
if (PyStackRef_RefcountOnObject(ref)) {
Py_DECREF_MORTAL_SPECIALIZED(BITS_TO_PTR(ref), destruct);
}
}
#ifdef _WIN32
#define PyStackRef_XCLOSE PyStackRef_CLOSE
#else
static inline void
PyStackRef_XCLOSE(_PyStackRef ref)
{
assert(ref.bits != 0);
if (PyStackRef_RefcountOnObject(ref)) {
assert(!PyStackRef_IsNull(ref));
Py_DECREF_MORTAL(BITS_TO_PTR(ref));
}
}
#endif
#define PyStackRef_CLEAR(REF) \
do { \
_PyStackRef *_tmp_op_ptr = &(REF); \
_PyStackRef _tmp_old_op = (*_tmp_op_ptr); \
*_tmp_op_ptr = PyStackRef_NULL; \
PyStackRef_XCLOSE(_tmp_old_op); \
} while (0)
#endif // Py_GIL_DISABLED
// Note: this is a macro because MSVC (Windows) has trouble inlining it.
#define PyStackRef_Is(a, b) (((a).bits & (~Py_TAG_REFCNT)) == ((b).bits & (~Py_TAG_REFCNT)))
#endif // !defined(Py_GIL_DISABLED) && defined(Py_STACKREF_DEBUG)
#define PyStackRef_TYPE(stackref) Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref))
// Converts a PyStackRef back to a PyObject *, converting the
// stackref to a new reference.
#define PyStackRef_AsPyObjectNew(stackref) Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref))
// StackRef type checks
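Taken together, the default-build API above follows a simple pattern. The sketch below shows the intended lifecycle of a reference; it is a hedged illustration only: push_local is a hypothetical caller, and the snippet compiles only inside the interpreter core, where pycore_stackref.h is available.
#ifdef Py_BUILD_CORE
#include "Python.h"
#include "pycore_stackref.h"
static void
push_local(PyObject *obj, _PyStackRef *slot)
{
    /* New reference: immortal objects just get the tag bit set,
     * mortal objects get a plain refcount increment. */
    _PyStackRef ref = PyStackRef_FromPyObjectNew(obj);
    /* Duplicating increments the refcount only when the ref owns one. */
    _PyStackRef copy = PyStackRef_DUP(ref);
    *slot = copy;              /* the slot now owns `copy` */
    PyStackRef_CLOSE(ref);     /* decrements only if the ref owned a refcount */
}
#endif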

View File

@ -88,7 +88,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_SUBSCR_STR_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG,
[_BINARY_OP_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_SUBSCR_CHECK_FUNC] = HAS_DEOPT_FLAG,
[_BINARY_OP_SUBSCR_INIT_CALL] = 0,
@ -242,7 +242,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_CALL_METHOD_DESCRIPTOR_NOARGS] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_CALL_METHOD_DESCRIPTOR_FAST] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_MAYBE_EXPAND_METHOD_KW] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,
[_PY_FRAME_KW] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_PY_FRAME_KW] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_CHECK_FUNCTION_VERSION_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG,
[_CHECK_METHOD_VERSION_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG,
[_EXPAND_METHOD_KW] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,

View File

@ -124,11 +124,13 @@ struct _object {
PY_INT64_T ob_refcnt_full; /* This field is needed for efficient initialization with Clang on ARM */
struct {
# if PY_BIG_ENDIAN
PY_UINT32_T ob_flags;
PY_UINT32_T ob_refcnt;
uint16_t ob_flags;
uint16_t ob_overflow;
uint32_t ob_refcnt;
# else
PY_UINT32_T ob_refcnt;
PY_UINT32_T ob_flags;
uint32_t ob_refcnt;
uint16_t ob_overflow;
uint16_t ob_flags;
# endif
};
#else

View File

@ -19,8 +19,8 @@ immortal. The latter should be the only instances that require
cleanup during runtime finalization.
*/
/* Leave the low bits for refcount overflow for old stable ABI code */
#define _Py_STATICALLY_ALLOCATED_FLAG (1 << 7)
#define _Py_STATICALLY_ALLOCATED_FLAG 4
#define _Py_IMMORTAL_FLAGS 1
#if SIZEOF_VOID_P > 4
/*
@ -43,7 +43,8 @@ be done by checking the bit sign flag in the lower 32 bits.
*/
#define _Py_IMMORTAL_INITIAL_REFCNT (3UL << 30)
#define _Py_STATIC_IMMORTAL_INITIAL_REFCNT ((Py_ssize_t)(_Py_IMMORTAL_INITIAL_REFCNT | (((Py_ssize_t)_Py_STATICALLY_ALLOCATED_FLAG) << 32)))
#define _Py_STATIC_FLAG_BITS ((Py_ssize_t)(_Py_STATICALLY_ALLOCATED_FLAG | _Py_IMMORTAL_FLAGS))
#define _Py_STATIC_IMMORTAL_INITIAL_REFCNT (((Py_ssize_t)_Py_IMMORTAL_INITIAL_REFCNT) | (_Py_STATIC_FLAG_BITS << 48))
#else
/*
@ -114,7 +115,6 @@ PyAPI_FUNC(Py_ssize_t) Py_REFCNT(PyObject *ob);
#endif
#endif
static inline Py_ALWAYS_INLINE int _Py_IsImmortal(PyObject *op)
{
#if defined(Py_GIL_DISABLED)
@ -242,6 +242,18 @@ PyAPI_FUNC(void) Py_DecRef(PyObject *);
PyAPI_FUNC(void) _Py_IncRef(PyObject *);
PyAPI_FUNC(void) _Py_DecRef(PyObject *);
#ifndef Py_GIL_DISABLED
static inline Py_ALWAYS_INLINE void Py_INCREF_MORTAL(PyObject *op)
{
assert(!_Py_IsStaticImmortal(op));
op->ob_refcnt++;
_Py_INCREF_STAT_INC();
#if defined(Py_REF_DEBUG) && !defined(Py_LIMITED_API)
_Py_INCREF_IncRefTotal();
#endif
}
#endif
static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
{
#if defined(Py_LIMITED_API) && (Py_LIMITED_API+0 >= 0x030c0000 || defined(Py_REF_DEBUG))
@ -372,6 +384,36 @@ static inline void Py_DECREF(PyObject *op)
#define Py_DECREF(op) Py_DECREF(_PyObject_CAST(op))
#elif defined(Py_REF_DEBUG)
static inline void Py_DECREF_MORTAL(const char *filename, int lineno, PyObject *op)
{
if (op->ob_refcnt <= 0) {
_Py_NegativeRefcount(filename, lineno, op);
}
_Py_DECREF_STAT_INC();
assert(!_Py_IsStaticImmortal(op));
_Py_DECREF_DecRefTotal();
if (--op->ob_refcnt == 0) {
_Py_Dealloc(op);
}
}
#define Py_DECREF_MORTAL(op) Py_DECREF_MORTAL(__FILE__, __LINE__, _PyObject_CAST(op))
static inline void _Py_DECREF_MORTAL_SPECIALIZED(const char *filename, int lineno, PyObject *op, destructor destruct)
{
if (op->ob_refcnt <= 0) {
_Py_NegativeRefcount(filename, lineno, op);
}
_Py_DECREF_STAT_INC();
assert(!_Py_IsStaticImmortal(op));
_Py_DECREF_DecRefTotal();
if (--op->ob_refcnt == 0) {
destruct(op);
}
}
#define Py_DECREF_MORTAL_SPECIALIZED(op, destruct) _Py_DECREF_MORTAL_SPECIALIZED(__FILE__, __LINE__, op, destruct)
static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
{
#if SIZEOF_VOID_P > 4
@ -396,6 +438,26 @@ static inline void Py_DECREF(const char *filename, int lineno, PyObject *op)
#define Py_DECREF(op) Py_DECREF(__FILE__, __LINE__, _PyObject_CAST(op))
#else
static inline void Py_DECREF_MORTAL(PyObject *op)
{
assert(!_Py_IsStaticImmortal(op));
_Py_DECREF_STAT_INC();
if (--op->ob_refcnt == 0) {
_Py_Dealloc(op);
}
}
#define Py_DECREF_MORTAL(op) Py_DECREF_MORTAL(_PyObject_CAST(op))
static inline void Py_DECREF_MORTAL_SPECIALIZED(PyObject *op, destructor destruct)
{
assert(!_Py_IsStaticImmortal(op));
_Py_DECREF_STAT_INC();
if (--op->ob_refcnt == 0) {
destruct(op);
}
}
#define Py_DECREF_MORTAL_SPECIALIZED(op, destruct) Py_DECREF_MORTAL_SPECIALIZED(_PyObject_CAST(op), destruct)
static inline Py_ALWAYS_INLINE void Py_DECREF(PyObject *op)
{
// Non-limited C API and limited C API for Python 3.9 and older access
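A hedged sketch of how the mortal-only helpers above are meant to be used (use_item is hypothetical; the real call sites are the _PyStackRef macros): they skip the immortality handling in Py_INCREF/Py_DECREF, so the caller must already know the object is mortal, and they only exist in the default (with-GIL) build.
#include <Python.h>
#ifndef Py_GIL_DISABLED
static void
use_item(PyObject *item)
{
    if (_Py_IsImmortal(item)) {
        return;                  /* immortal objects need no refcounting */
    }
    Py_INCREF_MORTAL(item);      /* plain increment, no immortality check */
    /* ... use the object ... */
    Py_DECREF_MORTAL(item);      /* plain decrement; deallocates on zero */
}
#endif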

View File

@ -2715,18 +2715,15 @@ class ShutdownTest(unittest.TestCase):
class ImmortalTests(unittest.TestCase):
if sys.maxsize < (1 << 32):
if support.Py_GIL_DISABLED:
IMMORTAL_REFCOUNT = 5 << 28
else:
IMMORTAL_REFCOUNT = 7 << 28
IMMORTAL_REFCOUNT_MINIMUM = 1 << 30
else:
IMMORTAL_REFCOUNT = 3 << 30
IMMORTAL_REFCOUNT_MINIMUM = 1 << 31
IMMORTALS = (None, True, False, Ellipsis, NotImplemented, *range(-5, 257))
def assert_immortal(self, immortal):
with self.subTest(immortal):
self.assertEqual(sys.getrefcount(immortal), self.IMMORTAL_REFCOUNT)
self.assertGreater(sys.getrefcount(immortal), self.IMMORTAL_REFCOUNT_MINIMUM)
def test_immortals(self):
for immortal in self.IMMORTALS:
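A quick check of the new bound: on 64-bit builds the immortal initial refcount defined earlier in this change is 3 << 30 = 3,221,225,472, which comfortably exceeds IMMORTAL_REFCOUNT_MINIMUM = 1 << 31 = 2,147,483,648, so assertGreater holds; an exact equality is no longer asserted because the precise refcount of an immortal object is not guaranteed under the new scheme.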

View File

@ -0,0 +1,5 @@
Use tagged references (``_PyStackRef``) for the default build as well as for
the free-threading build. This has a small negative performance impact in the
short term, but it will enable larger speedups later and significantly reduce
maintenance costs by allowing a single implementation of tagged references.
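For readers coming to this change cold, the sketch below illustrates the general idea behind a tagged reference: the low bit of the pointer records whether the reference owns a refcount on the object or refers to an object whose reclamation is deferred (for example an immortal object). It is an illustration only; the demo_* names are hypothetical, and the real definitions live in pycore_stackref.h.
#include <stdint.h>
#include <assert.h>
typedef struct { int refcnt; } demo_object;      /* stand-in for PyObject */
typedef struct { uintptr_t bits; } demo_ref;     /* stand-in for _PyStackRef */
#define DEMO_TAG_REFCNT ((uintptr_t)1)
#define DEMO_TAG_BITS   ((uintptr_t)1)
static demo_ref
demo_ref_new(demo_object *op, int immortal)
{
    assert(((uintptr_t)op & DEMO_TAG_BITS) == 0);   /* pointers are aligned */
    if (immortal) {
        /* tag bit set: no refcount is owned by this reference */
        return (demo_ref){ .bits = (uintptr_t)op | DEMO_TAG_REFCNT };
    }
    op->refcnt++;                                   /* owning reference */
    return (demo_ref){ .bits = (uintptr_t)op };
}
static demo_object *
demo_ref_borrow(demo_ref r)
{
    return (demo_object *)(r.bits & ~DEMO_TAG_BITS);  /* strip the tag bit */
}
static void
demo_ref_close(demo_ref r)
{
    if ((r.bits & DEMO_TAG_BITS) == 0) {            /* only owning refs decref */
        demo_ref_borrow(r)->refcnt--;
    }
}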

View File

@ -136,34 +136,37 @@ PyFloat_FromDouble(double fval)
#ifdef Py_GIL_DISABLED
PyObject *_PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyStackRef right, double value)
_PyStackRef _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyStackRef right, double value)
{
PyStackRef_CLOSE(left);
PyStackRef_CLOSE(right);
return PyFloat_FromDouble(value);
PyStackRef_CLOSE_SPECIALIZED(left, _PyFloat_ExactDealloc);
PyStackRef_CLOSE_SPECIALIZED(right, _PyFloat_ExactDealloc);
return PyStackRef_FromPyObjectSteal(PyFloat_FromDouble(value));
}
#else // Py_GIL_DISABLED
PyObject *_PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyStackRef right, double value)
_PyStackRef _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyStackRef right, double value)
{
PyObject *left_o = PyStackRef_AsPyObjectSteal(left);
PyObject *right_o = PyStackRef_AsPyObjectSteal(right);
PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
if (Py_REFCNT(left_o) == 1) {
((PyFloatObject *)left_o)->ob_fval = value;
_Py_DECREF_SPECIALIZED(right_o, _PyFloat_ExactDealloc);
return left_o;
PyStackRef_CLOSE_SPECIALIZED(right, _PyFloat_ExactDealloc);
return left;
}
else if (Py_REFCNT(right_o) == 1) {
((PyFloatObject *)right_o)->ob_fval = value;
_Py_DECREF_NO_DEALLOC(left_o);
return right_o;
PyStackRef_CLOSE_SPECIALIZED(left, _PyFloat_ExactDealloc);
return right;
}
else {
PyObject *result = PyFloat_FromDouble(value);
_Py_DECREF_NO_DEALLOC(left_o);
_Py_DECREF_NO_DEALLOC(right_o);
return result;
PyStackRef_CLOSE_SPECIALIZED(left, _PyFloat_ExactDealloc);
PyStackRef_CLOSE_SPECIALIZED(right, _PyFloat_ExactDealloc);
if (result == NULL) {
return PyStackRef_NULL;
}
return PyStackRef_FromPyObjectStealMortal(result);
}
}
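The branching above implements a small reuse optimisation: when one operand float is the only reference holder, its allocation is recycled for the result instead of calling the allocator. Sketched below with hypothetical demo_float/demo_mul stand-ins (an illustration only, with error handling reduced to a NULL return):
#include <stdlib.h>
typedef struct { int refcnt; double value; } demo_float;
static void
demo_decref(demo_float *f)
{
    if (--f->refcnt == 0) {
        free(f);
    }
}
/* Consumes the references to both operands and returns a new reference,
 * or NULL on allocation failure. */
static demo_float *
demo_mul(demo_float *left, demo_float *right)
{
    double result = left->value * right->value;
    if (left->refcnt == 1) {            /* sole owner: reuse left in place */
        demo_decref(right);
        left->value = result;
        return left;
    }
    if (right->refcnt == 1) {           /* sole owner: reuse right in place */
        demo_decref(left);
        right->value = result;
        return right;
    }
    demo_decref(left);                  /* neither reusable: allocate fresh */
    demo_decref(right);
    demo_float *fresh = malloc(sizeof(*fresh));
    if (fresh != NULL) {
        fresh->refcnt = 1;
        fresh->value = result;
    }
    return fresh;
}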

View File

@ -2540,6 +2540,9 @@ _Py_SetImmortalUntracked(PyObject *op)
op->ob_ref_local = _Py_IMMORTAL_REFCNT_LOCAL;
op->ob_ref_shared = 0;
_Py_atomic_or_uint8(&op->ob_gc_bits, _PyGC_BITS_DEFERRED);
#elif SIZEOF_VOID_P > 4
op->ob_flags = _Py_IMMORTAL_FLAGS;
op->ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT;
#else
op->ob_refcnt = _Py_IMMORTAL_INITIAL_REFCNT;
#endif

View File

@ -311,7 +311,7 @@ dummy_func(
inst(LOAD_CONST_MORTAL, (-- value)) {
PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
value = PyStackRef_FromPyObjectNew(obj);
value = PyStackRef_FromPyObjectNewMortal(obj);
}
inst(LOAD_CONST_IMMORTAL, (-- value)) {
@ -327,6 +327,10 @@ dummy_func(
}
replicate(8) inst(STORE_FAST, (value --)) {
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
DEAD(value);
@ -338,6 +342,10 @@ dummy_func(
};
inst(STORE_FAST_LOAD_FAST, (value1 -- value2)) {
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value1)
);
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
@ -348,6 +356,14 @@ dummy_func(
}
inst(STORE_FAST_STORE_FAST, (value2, value1 --)) {
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value1)
);
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value2)
);
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
@ -642,10 +658,9 @@ dummy_func(
double dres =
((PyFloatObject *)left_o)->ob_fval *
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
INPUTS_DEAD();
ERROR_IF(res_o == NULL, error);
res = PyStackRef_FromPyObjectSteal(res_o);
ERROR_IF(PyStackRef_IsNull(res), error);
}
pure op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) {
@ -658,10 +673,9 @@ dummy_func(
double dres =
((PyFloatObject *)left_o)->ob_fval +
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
INPUTS_DEAD();
ERROR_IF(res_o == NULL, error);
res = PyStackRef_FromPyObjectSteal(res_o);
ERROR_IF(PyStackRef_IsNull(res), error);
}
pure op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) {
@ -674,10 +688,9 @@ dummy_func(
double dres =
((PyFloatObject *)left_o)->ob_fval -
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
INPUTS_DEAD();
ERROR_IF(res_o == NULL, error);
res = PyStackRef_FromPyObjectSteal(res_o);
ERROR_IF(PyStackRef_IsNull(res), error);
}
macro(BINARY_OP_MULTIPLY_FLOAT) =
@ -733,6 +746,7 @@ dummy_func(
next_oparg = CURRENT_OPERAND0();
#endif
_PyStackRef *target_local = &GETLOCAL(next_oparg);
assert(PyUnicode_CheckExact(left_o));
DEOPT_IF(PyStackRef_AsPyObjectBorrow(*target_local) != left_o);
STAT_INC(BINARY_OP, hit);
/* Handle `left = left + right` or `left += right` for str.
@ -856,17 +870,16 @@ dummy_func(
PyObject *res_o = _PyList_GetItemRef((PyListObject*)list, index);
DEOPT_IF(res_o == NULL);
STAT_INC(BINARY_OP, hit);
res = PyStackRef_FromPyObjectSteal(res_o);
#else
DEOPT_IF(index >= PyList_GET_SIZE(list));
STAT_INC(BINARY_OP, hit);
PyObject *res_o = PyList_GET_ITEM(list, index);
assert(res_o != NULL);
Py_INCREF(res_o);
res = PyStackRef_FromPyObjectNew(res_o);
#endif
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
DEAD(sub_st);
PyStackRef_CLOSE(list_st);
res = PyStackRef_FromPyObjectSteal(res_o);
STAT_INC(BINARY_SUBSCR, hit);
DECREF_INPUTS();
}
inst(BINARY_OP_SUBSCR_STR_INT, (unused/5, str_st, sub_st -- res)) {
@ -886,7 +899,7 @@ dummy_func(
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
DEAD(sub_st);
PyStackRef_CLOSE(str_st);
res = PyStackRef_FromPyObjectSteal(res_o);
res = PyStackRef_FromPyObjectImmortal(res_o);
}
inst(BINARY_OP_SUBSCR_TUPLE_INT, (unused/5, tuple_st, sub_st -- res)) {
@ -903,11 +916,9 @@ dummy_func(
STAT_INC(BINARY_OP, hit);
PyObject *res_o = PyTuple_GET_ITEM(tuple, index);
assert(res_o != NULL);
Py_INCREF(res_o);
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
DEAD(sub_st);
PyStackRef_CLOSE(tuple_st);
res = PyStackRef_FromPyObjectSteal(res_o);
res = PyStackRef_FromPyObjectNew(res_o);
DECREF_INPUTS();
}
inst(BINARY_OP_SUBSCR_DICT, (unused/5, dict_st, sub_st -- res)) {
@ -1094,6 +1105,7 @@ dummy_func(
inst(RETURN_VALUE, (retval -- res)) {
assert(frame->owner != FRAME_OWNED_BY_INTERPRETER);
_PyStackRef temp = retval;
assert(PyStackRef_IsHeapSafe(temp));
DEAD(retval);
SAVE_STACK();
assert(EMPTY());
@ -1855,7 +1867,7 @@ dummy_func(
ERROR_NO_POP();
}
INPUTS_DEAD();
tup = PyStackRef_FromPyObjectSteal(tup_o);
tup = PyStackRef_FromPyObjectStealMortal(tup_o);
}
inst(BUILD_LIST, (values[oparg] -- list)) {
@ -1864,7 +1876,7 @@ dummy_func(
ERROR_NO_POP();
}
INPUTS_DEAD();
list = PyStackRef_FromPyObjectSteal(list_o);
list = PyStackRef_FromPyObjectStealMortal(list_o);
}
inst(LIST_EXTEND, (list_st, unused[oparg-1], iterable_st -- list_st, unused[oparg-1])) {
@ -1913,7 +1925,7 @@ dummy_func(
Py_DECREF(set_o);
ERROR_IF(true, error);
}
set = PyStackRef_FromPyObjectSteal(set_o);
set = PyStackRef_FromPyObjectStealMortal(set_o);
}
inst(BUILD_MAP, (values[oparg*2] -- map)) {
@ -1929,7 +1941,7 @@ dummy_func(
STACKREFS_TO_PYOBJECTS_CLEANUP(values_o);
DECREF_INPUTS();
ERROR_IF(map_o == NULL, error);
map = PyStackRef_FromPyObjectSteal(map_o);
map = PyStackRef_FromPyObjectStealMortal(map_o);
}
inst(SETUP_ANNOTATIONS, (--)) {
@ -3789,7 +3801,7 @@ dummy_func(
DEOPT_IF(callable_o != (PyObject *)&PyType_Type);
DEAD(callable);
STAT_INC(CALL, hit);
res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o)));
res = PyStackRef_FromPyObjectNew(Py_TYPE(arg_o));
PyStackRef_CLOSE(arg);
}
@ -4413,9 +4425,7 @@ dummy_func(
// The frame has stolen all the arguments from the stack,
// so there is no need to clean them up.
SYNC_SP();
if (temp == NULL) {
ERROR_NO_POP();
}
ERROR_IF(temp == NULL, error);
new_frame = temp;
}
@ -4695,7 +4705,7 @@ dummy_func(
frame = tstate->current_frame = prev;
LOAD_IP(frame->return_offset);
RELOAD_STACK();
res = PyStackRef_FromPyObjectSteal((PyObject *)gen);
res = PyStackRef_FromPyObjectStealMortal((PyObject *)gen);
LLTRACE_RESUME_FRAME();
}
@ -4706,7 +4716,7 @@ dummy_func(
PyObject *slice_o = PySlice_New(start_o, stop_o, step_o);
DECREF_INPUTS();
ERROR_IF(slice_o == NULL, error);
slice = PyStackRef_FromPyObjectSteal(slice_o);
slice = PyStackRef_FromPyObjectStealMortal(slice_o);
}
inst(CONVERT_VALUE, (value -- result)) {

View File

@ -133,36 +133,55 @@
#ifdef Py_DEBUG
static void
dump_item(_PyStackRef item)
{
if (PyStackRef_IsNull(item)) {
printf("<NULL>");
return;
}
PyObject *obj = PyStackRef_AsPyObjectBorrow(item);
if (obj == NULL) {
printf("<nil>");
return;
}
if (
obj == Py_None
|| PyBool_Check(obj)
|| PyLong_CheckExact(obj)
|| PyFloat_CheckExact(obj)
|| PyUnicode_CheckExact(obj)
) {
if (PyObject_Print(obj, stdout, 0) == 0) {
return;
}
PyErr_Clear();
}
// Don't call __repr__(), it might recurse into the interpreter.
printf("<%s at %p>", Py_TYPE(obj)->tp_name, (void *)obj);
}
static void
dump_stack(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer)
{
_PyFrame_SetStackPointer(frame, stack_pointer);
_PyStackRef *locals_base = _PyFrame_GetLocalsArray(frame);
_PyStackRef *stack_base = _PyFrame_Stackbase(frame);
PyObject *exc = PyErr_GetRaisedException();
printf(" locals=[");
for (_PyStackRef *ptr = locals_base; ptr < stack_base; ptr++) {
if (ptr != locals_base) {
printf(", ");
}
dump_item(*ptr);
}
printf("]\n");
printf(" stack=[");
for (_PyStackRef *ptr = stack_base; ptr < stack_pointer; ptr++) {
if (ptr != stack_base) {
printf(", ");
}
PyObject *obj = PyStackRef_AsPyObjectBorrow(*ptr);
if (obj == NULL) {
printf("<nil>");
continue;
}
if (
obj == Py_None
|| PyBool_Check(obj)
|| PyLong_CheckExact(obj)
|| PyFloat_CheckExact(obj)
|| PyUnicode_CheckExact(obj)
) {
if (PyObject_Print(obj, stdout, 0) == 0) {
continue;
}
PyErr_Clear();
}
// Don't call __repr__(), it might recurse into the interpreter.
printf("<%s at %p>", Py_TYPE(obj)->tp_name, PyStackRef_AsPyObjectBorrow(*ptr));
dump_item(*ptr);
}
printf("]\n");
fflush(stdout);
@ -1390,7 +1409,6 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
{
PyCodeObject *co = (PyCodeObject*)func->func_code;
const Py_ssize_t total_args = co->co_argcount + co->co_kwonlyargcount;
/* Create a dictionary for keyword parameters (**kwargs) */
PyObject *kwdict;
Py_ssize_t i;

View File

@ -440,28 +440,13 @@ do { \
/* How much scratch space to give stackref to PyObject* conversion. */
#define MAX_STACKREF_SCRATCH 10
#if defined(Py_GIL_DISABLED) || defined(Py_STACKREF_DEBUG)
#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
/* +1 because vectorcall might use -1 to write self */ \
PyObject *NAME##_temp[MAX_STACKREF_SCRATCH+1]; \
PyObject **NAME = _PyObjectArray_FromStackRefArray(ARGS, ARG_COUNT, NAME##_temp + 1);
#else
#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
PyObject **NAME = (PyObject **)ARGS; \
assert(NAME != NULL);
#endif
#if defined(Py_GIL_DISABLED) || defined(Py_STACKREF_DEBUG)
#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
/* +1 because we +1 previously */ \
_PyObjectArray_Free(NAME - 1, NAME##_temp);
#else
#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
(void)(NAME);
#endif
#if defined(Py_GIL_DISABLED) || defined(Py_STACKREF_DEBUG)
#define CONVERSION_FAILED(NAME) ((NAME) == NULL)
#else
#define CONVERSION_FAILED(NAME) (0)
#endif

View File

@ -218,7 +218,7 @@
_PyStackRef value;
oparg = CURRENT_OPARG();
PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
value = PyStackRef_FromPyObjectNew(obj);
value = PyStackRef_FromPyObjectNewMortal(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -306,6 +306,10 @@
oparg = 0;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -321,6 +325,10 @@
oparg = 1;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -336,6 +344,10 @@
oparg = 2;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -351,6 +363,10 @@
oparg = 3;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -366,6 +382,10 @@
oparg = 4;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -381,6 +401,10 @@
oparg = 5;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -396,6 +420,10 @@
oparg = 6;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -411,6 +439,10 @@
oparg = 7;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -425,6 +457,10 @@
_PyStackRef value;
oparg = CURRENT_OPARG();
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -863,13 +899,13 @@
double dres =
((PyFloatObject *)left_o)->ob_fval *
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (res_o == NULL) {
stack_pointer += -2;
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (PyStackRef_IsNull(res)) {
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
JUMP_TO_ERROR();
}
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -890,13 +926,13 @@
double dres =
((PyFloatObject *)left_o)->ob_fval +
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (res_o == NULL) {
stack_pointer += -2;
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (PyStackRef_IsNull(res)) {
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
JUMP_TO_ERROR();
}
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -917,13 +953,13 @@
double dres =
((PyFloatObject *)left_o)->ob_fval -
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (res_o == NULL) {
stack_pointer += -2;
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (PyStackRef_IsNull(res)) {
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
JUMP_TO_ERROR();
}
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -991,6 +1027,7 @@
next_oparg = CURRENT_OPERAND0();
#endif
_PyStackRef *target_local = &GETLOCAL(next_oparg);
assert(PyUnicode_CheckExact(left_o));
if (PyStackRef_AsPyObjectBorrow(*target_local) != left_o) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
@ -1205,6 +1242,7 @@
JUMP_TO_JUMP_TARGET();
}
STAT_INC(BINARY_OP, hit);
res = PyStackRef_FromPyObjectSteal(res_o);
#else
if (index >= PyList_GET_SIZE(list)) {
UOP_STAT_INC(uopcode, miss);
@ -1213,18 +1251,21 @@
STAT_INC(BINARY_OP, hit);
PyObject *res_o = PyList_GET_ITEM(list, index);
assert(res_o != NULL);
Py_INCREF(res_o);
res = PyStackRef_FromPyObjectNew(res_o);
#endif
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
stack_pointer += -2;
STAT_INC(BINARY_SUBSCR, hit);
_PyFrame_SetStackPointer(frame, stack_pointer);
_PyStackRef tmp = list_st;
list_st = res;
stack_pointer[-2] = list_st;
PyStackRef_CLOSE(tmp);
stack_pointer = _PyFrame_GetStackPointer(frame);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(list_st);
PyStackRef_CLOSE(sub_st);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
stack_pointer[-1] = res;
break;
}
@ -1267,7 +1308,7 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(str_st);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal(res_o);
res = PyStackRef_FromPyObjectImmortal(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -1303,17 +1344,17 @@
STAT_INC(BINARY_OP, hit);
PyObject *res_o = PyTuple_GET_ITEM(tuple, index);
assert(res_o != NULL);
Py_INCREF(res_o);
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
stack_pointer += -2;
res = PyStackRef_FromPyObjectNew(res_o);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(tuple_st);
_PyStackRef tmp = tuple_st;
tuple_st = res;
stack_pointer[-1] = tuple_st;
PyStackRef_CLOSE(tmp);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
stack_pointer[-1] = res;
break;
}
@ -1654,6 +1695,7 @@
retval = stack_pointer[-1];
assert(frame->owner != FRAME_OWNED_BY_INTERPRETER);
_PyStackRef temp = retval;
assert(PyStackRef_IsHeapSafe(temp));
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
@ -2534,7 +2576,7 @@
if (tup_o == NULL) {
JUMP_TO_ERROR();
}
tup = PyStackRef_FromPyObjectSteal(tup_o);
tup = PyStackRef_FromPyObjectStealMortal(tup_o);
stack_pointer[-oparg] = tup;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
@ -2552,7 +2594,7 @@
if (list_o == NULL) {
JUMP_TO_ERROR();
}
list = PyStackRef_FromPyObjectSteal(list_o);
list = PyStackRef_FromPyObjectStealMortal(list_o);
stack_pointer[-oparg] = list;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
@ -2666,7 +2708,7 @@
stack_pointer = _PyFrame_GetStackPointer(frame);
JUMP_TO_ERROR();
}
set = PyStackRef_FromPyObjectSteal(set_o);
set = PyStackRef_FromPyObjectStealMortal(set_o);
stack_pointer[0] = set;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -2712,7 +2754,7 @@
if (map_o == NULL) {
JUMP_TO_ERROR();
}
map = PyStackRef_FromPyObjectSteal(map_o);
map = PyStackRef_FromPyObjectStealMortal(map_o);
stack_pointer[0] = map;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -5047,7 +5089,7 @@
JUMP_TO_JUMP_TARGET();
}
STAT_INC(CALL, hit);
res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o)));
res = PyStackRef_FromPyObjectNew(Py_TYPE(arg_o));
stack_pointer[-3] = res;
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
@ -6394,7 +6436,7 @@
frame = tstate->current_frame = prev;
LOAD_IP(frame->return_offset);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal((PyObject *)gen);
res = PyStackRef_FromPyObjectStealMortal((PyObject *)gen);
LLTRACE_RESUME_FRAME();
stack_pointer[0] = res;
stack_pointer += 1;
@ -6424,7 +6466,7 @@
if (slice_o == NULL) {
JUMP_TO_ERROR();
}
slice = PyStackRef_FromPyObjectSteal(slice_o);
slice = PyStackRef_FromPyObjectStealMortal(slice_o);
stack_pointer[0] = slice;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());

View File

@ -50,24 +50,23 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame)
{
assert(frame->owner < FRAME_OWNED_BY_INTERPRETER);
assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT);
Py_ssize_t size = ((char*)frame->stackpointer) - (char *)frame;
memcpy((_PyInterpreterFrame *)f->_f_frame_data, frame, size);
frame = (_PyInterpreterFrame *)f->_f_frame_data;
frame->stackpointer = (_PyStackRef *)(((char *)frame) + size);
frame->f_executable = PyStackRef_DUP(frame->f_executable);
f->f_frame = frame;
frame->owner = FRAME_OWNED_BY_FRAME_OBJECT;
if (_PyFrame_IsIncomplete(frame)) {
_PyInterpreterFrame *new_frame = (_PyInterpreterFrame *)f->_f_frame_data;
_PyFrame_Copy(frame, new_frame);
// _PyFrame_Copy takes the reference to the executable,
// so we need to restore it.
frame->f_executable = PyStackRef_DUP(new_frame->f_executable);
f->f_frame = new_frame;
new_frame->owner = FRAME_OWNED_BY_FRAME_OBJECT;
if (_PyFrame_IsIncomplete(new_frame)) {
// This may be a newly-created generator or coroutine frame. Since it's
// dead anyways, just pretend that the first RESUME ran:
PyCodeObject *code = _PyFrame_GetCode(frame);
frame->instr_ptr =
_PyFrame_GetBytecode(frame) + code->_co_firsttraceable + 1;
PyCodeObject *code = _PyFrame_GetCode(new_frame);
new_frame->instr_ptr =
_PyFrame_GetBytecode(new_frame) + code->_co_firsttraceable + 1;
}
assert(!_PyFrame_IsIncomplete(frame));
assert(!_PyFrame_IsIncomplete(new_frame));
assert(f->f_back == NULL);
_PyInterpreterFrame *prev = _PyFrame_GetFirstComplete(frame->previous);
frame->previous = NULL;
if (prev) {
assert(prev->owner < FRAME_OWNED_BY_INTERPRETER);
/* Link PyFrameObjects.f_back and remove link through _PyInterpreterFrame.previous */

View File

@ -1488,11 +1488,11 @@ mark_stacks(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, b
objects_marked += move_to_reachable(func, &reachable, visited_space);
while (sp > locals) {
sp--;
if (PyStackRef_IsNull(*sp)) {
PyObject *op = PyStackRef_AsPyObjectBorrow(*sp);
if (op == NULL || _Py_IsImmortal(op)) {
continue;
}
PyObject *op = PyStackRef_AsPyObjectBorrow(*sp);
if (!_Py_IsImmortal(op) && _PyObject_IS_GC(op)) {
if (_PyObject_IS_GC(op)) {
PyGC_Head *gc = AS_GC(op);
if (_PyObject_GC_IS_TRACKED(op) &&
gc_old_space(gc) != visited_space) {

View File

@ -123,11 +123,10 @@
double dres =
((PyFloatObject *)left_o)->ob_fval +
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (res_o == NULL) {
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (PyStackRef_IsNull(res)) {
JUMP_TO_LABEL(pop_2_error);
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
stack_pointer[-2] = res;
stack_pointer += -1;
@ -352,6 +351,7 @@
next_oparg = CURRENT_OPERAND0();
#endif
_PyStackRef *target_local = &GETLOCAL(next_oparg);
assert(PyUnicode_CheckExact(left_o));
if (PyStackRef_AsPyObjectBorrow(*target_local) != left_o) {
UPDATE_MISS_STATS(BINARY_OP);
assert(_PyOpcode_Deopt[opcode] == (BINARY_OP));
@ -436,11 +436,10 @@
double dres =
((PyFloatObject *)left_o)->ob_fval *
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (res_o == NULL) {
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (PyStackRef_IsNull(res)) {
JUMP_TO_LABEL(pop_2_error);
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
stack_pointer[-2] = res;
stack_pointer += -1;
@ -691,6 +690,7 @@
JUMP_TO_PREDICTED(BINARY_OP);
}
STAT_INC(BINARY_OP, hit);
res = PyStackRef_FromPyObjectSteal(res_o);
#else
if (index >= PyList_GET_SIZE(list)) {
UPDATE_MISS_STATS(BINARY_OP);
@ -700,18 +700,21 @@
STAT_INC(BINARY_OP, hit);
PyObject *res_o = PyList_GET_ITEM(list, index);
assert(res_o != NULL);
Py_INCREF(res_o);
res = PyStackRef_FromPyObjectNew(res_o);
#endif
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
stack_pointer += -2;
STAT_INC(BINARY_SUBSCR, hit);
_PyFrame_SetStackPointer(frame, stack_pointer);
_PyStackRef tmp = list_st;
list_st = res;
stack_pointer[-2] = list_st;
PyStackRef_CLOSE(tmp);
stack_pointer = _PyFrame_GetStackPointer(frame);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(list_st);
PyStackRef_CLOSE(sub_st);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
stack_pointer[-1] = res;
DISPATCH();
}
@ -770,7 +773,7 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(str_st);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal(res_o);
res = PyStackRef_FromPyObjectImmortal(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -821,17 +824,17 @@
STAT_INC(BINARY_OP, hit);
PyObject *res_o = PyTuple_GET_ITEM(tuple, index);
assert(res_o != NULL);
Py_INCREF(res_o);
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
stack_pointer += -2;
res = PyStackRef_FromPyObjectNew(res_o);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(tuple_st);
_PyStackRef tmp = tuple_st;
tuple_st = res;
stack_pointer[-1] = tuple_st;
PyStackRef_CLOSE(tmp);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
stack_pointer[-1] = res;
DISPATCH();
}
@ -877,11 +880,10 @@
double dres =
((PyFloatObject *)left_o)->ob_fval -
((PyFloatObject *)right_o)->ob_fval;
PyObject *res_o = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (res_o == NULL) {
res = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
if (PyStackRef_IsNull(res)) {
JUMP_TO_LABEL(pop_2_error);
}
res = PyStackRef_FromPyObjectSteal(res_o);
}
stack_pointer[-2] = res;
stack_pointer += -1;
@ -1021,7 +1023,7 @@
if (list_o == NULL) {
JUMP_TO_LABEL(error);
}
list = PyStackRef_FromPyObjectSteal(list_o);
list = PyStackRef_FromPyObjectStealMortal(list_o);
stack_pointer[-oparg] = list;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
@ -1073,7 +1075,7 @@
if (map_o == NULL) {
JUMP_TO_LABEL(error);
}
map = PyStackRef_FromPyObjectSteal(map_o);
map = PyStackRef_FromPyObjectStealMortal(map_o);
stack_pointer[0] = map;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -1131,7 +1133,7 @@
stack_pointer = _PyFrame_GetStackPointer(frame);
JUMP_TO_LABEL(error);
}
set = PyStackRef_FromPyObjectSteal(set_o);
set = PyStackRef_FromPyObjectStealMortal(set_o);
stack_pointer[0] = set;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -1166,7 +1168,7 @@
if (slice_o == NULL) {
JUMP_TO_LABEL(error);
}
slice = PyStackRef_FromPyObjectSteal(slice_o);
slice = PyStackRef_FromPyObjectStealMortal(slice_o);
stack_pointer[0] = slice;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -1235,7 +1237,7 @@
if (tup_o == NULL) {
JUMP_TO_LABEL(error);
}
tup = PyStackRef_FromPyObjectSteal(tup_o);
tup = PyStackRef_FromPyObjectStealMortal(tup_o);
stack_pointer[-oparg] = tup;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
@ -4271,7 +4273,7 @@
JUMP_TO_PREDICTED(CALL);
}
STAT_INC(CALL, hit);
res = PyStackRef_FromPyObjectSteal(Py_NewRef(Py_TYPE(arg_o)));
res = PyStackRef_FromPyObjectNew(Py_TYPE(arg_o));
stack_pointer[-3] = res;
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
@ -7237,6 +7239,7 @@
retval = val;
assert(frame->owner != FRAME_OWNED_BY_INTERPRETER);
_PyStackRef temp = retval;
assert(PyStackRef_IsHeapSafe(temp));
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
@ -8741,7 +8744,7 @@
static_assert(0 == 0, "incorrect cache size");
_PyStackRef value;
PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
value = PyStackRef_FromPyObjectNew(obj);
value = PyStackRef_FromPyObjectNewMortal(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -10254,7 +10257,7 @@
frame = tstate->current_frame = prev;
LOAD_IP(frame->return_offset);
stack_pointer = _PyFrame_GetStackPointer(frame);
res = PyStackRef_FromPyObjectSteal((PyObject *)gen);
res = PyStackRef_FromPyObjectStealMortal((PyObject *)gen);
LLTRACE_RESUME_FRAME();
stack_pointer[0] = res;
stack_pointer += 1;
@ -10275,6 +10278,7 @@
retval = stack_pointer[-1];
assert(frame->owner != FRAME_OWNED_BY_INTERPRETER);
_PyStackRef temp = retval;
assert(PyStackRef_IsHeapSafe(temp));
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
@ -10910,6 +10914,10 @@
INSTRUCTION_STATS(STORE_FAST);
_PyStackRef value;
value = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value)
);
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
@ -10931,6 +10939,10 @@
_PyStackRef value1;
_PyStackRef value2;
value1 = stack_pointer[-1];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value1)
);
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
@ -10955,6 +10967,14 @@
_PyStackRef value1;
value1 = stack_pointer[-1];
value2 = stack_pointer[-2];
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value1)
);
assert(
((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
PyStackRef_IsHeapSafe(value2)
);
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);

View File

@ -1,6 +1,7 @@
#include "Python.h"
#include "pycore_object.h"
#include "pycore_stackref.h"
#if !defined(Py_GIL_DISABLED) && defined(Py_STACKREF_DEBUG)
@ -175,8 +176,16 @@ _Py_stackref_report_leaks(PyInterpreterState *interp)
int leak = 0;
_Py_hashtable_foreach(interp->open_stackrefs_table, report_leak, &leak);
if (leak) {
fflush(stdout);
Py_FatalError("Stackrefs leaked.");
}
}
void
PyStackRef_CLOSE_SPECIALIZED(_PyStackRef ref, destructor destruct)
{
PyObject *obj = _Py_stackref_close(ref);
_Py_DECREF_SPECIALIZED(obj, destruct);
}
#endif
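For context on the debug build handled above (Py_STACKREF_DEBUG), where a stackref is just an index into a table of open references, the standalone sketch below illustrates the idea; handle_table and its helpers are hypothetical, and error handling is pared down.
#include <stdio.h>
#include <stdlib.h>
typedef struct { void *obj; int open; } handle_slot;
typedef struct { handle_slot *slots; size_t used, cap; } handle_table;
/* Hand out an index (the "stackref") and remember the object it refers to. */
static size_t
handle_open(handle_table *t, void *obj)
{
    if (t->used == t->cap) {
        size_t cap = t->cap ? t->cap * 2 : 16;
        handle_slot *slots = realloc(t->slots, cap * sizeof(*slots));
        if (slots == NULL) {
            abort();               /* sketch: no graceful error handling */
        }
        t->slots = slots;
        t->cap = cap;
    }
    t->slots[t->used] = (handle_slot){ .obj = obj, .open = 1 };
    return t->used++;
}
/* Closing a handle returns the object so the caller can drop its reference. */
static void *
handle_close(handle_table *t, size_t idx)
{
    t->slots[idx].open = 0;
    return t->slots[idx].obj;
}
/* Any still-open entry at shutdown is a leaked reference. */
static int
handle_report_leaks(const handle_table *t)
{
    int leaks = 0;
    for (size_t i = 0; i < t->used; i++) {
        if (t->slots[i].open) {
            fprintf(stderr, "leaked handle %zu -> %p\n", i, t->slots[i].obj);
            leaks++;
        }
    }
    return leaks;
}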

View File

@ -586,6 +586,7 @@ NON_ESCAPING_FUNCTIONS = (
"PySlice_New",
"PyStackRef_AsPyObjectBorrow",
"PyStackRef_AsPyObjectNew",
"PyStackRef_FromPyObjectNewMortal",
"PyStackRef_AsPyObjectSteal",
"PyStackRef_CLEAR",
"PyStackRef_CLOSE_SPECIALIZED",
@ -595,7 +596,10 @@ NON_ESCAPING_FUNCTIONS = (
"PyStackRef_FromPyObjectNew",
"PyStackRef_FromPyObjectSteal",
"PyStackRef_IsExactly",
"PyStackRef_FromPyObjectStealMortal",
"PyStackRef_IsNone",
"PyStackRef_Is",
"PyStackRef_IsHeapSafe",
"PyStackRef_IsTrue",
"PyStackRef_IsFalse",
"PyStackRef_IsNull",