mirror of https://github.com/python/cpython
gh-117139: Convert the evaluation stack to stack refs (#118450)
This PR sets up tagged pointers for CPython. The general idea is to introduce a separate struct, _PyStackRef, for everything on the evaluation stack, so that the bits are stored inside the struct. This forces the C compiler to warn us if we try to cast a stack value or pull the bits out of the struct directly.

Only for free threading: we tag the low bit if something is deferred, which means we skip incref and decref operations on it. This behavior may change in the future if Mark's plans to defer all objects in the interpreter loop pan out.

This implies that a strict stack reference discipline is required: ALL incref and decref operations on stackrefs must use the stackref variants. It is unsafe to untag something and then do normal incref/decref operations on it. The new incref and decref variants are called "dup" and "close"; they mimic a "handle" API operating on these stackrefs.

Please read Include/internal/pycore_stackref.h for more information!

---------

Co-authored-by: Mark Shannon <9448417+markshannon@users.noreply.github.com>
Parent: d611c4c8e9
Commit: 22b0de2755
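To make the handle discipline described above concrete, here is a minimal sketch (not part of the commit) of how code is expected to manage references once it holds values as stackrefs. It assumes a core build (Py_BUILD_CORE) with the Include/internal/pycore_stackref.h header introduced in this diff; the helper push_twice_and_drop and its stack/top bookkeeping are hypothetical and exist only for illustration.

#include "Python.h"
#include "pycore_stackref.h"   // _PyStackRef, PyStackRef_* helpers (requires Py_BUILD_CORE)

/* Hypothetical helper: takes ownership of `obj` (assumed non-NULL), pushes it
 * twice onto a caller-provided array of stackrefs, then drops both again. */
static void
push_twice_and_drop(PyObject *obj, _PyStackRef *stack, int *top)
{
    // Entering the stackref world: steal the PyObject* reference into a stackref.
    _PyStackRef first = PyStackRef_FromPyObjectSteal(obj);
    stack[(*top)++] = first;

    // Duplicating must go through DUP, never Py_INCREF, because a deferred
    // (tagged) stackref skips refcount operations entirely.
    stack[(*top)++] = PyStackRef_DUP(first);

    // Dropping must likewise go through CLOSE, never Py_DECREF.
    PyStackRef_CLOSE(stack[--(*top)]);
    PyStackRef_CLOSE(stack[--(*top)]);
}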
@@ -261,8 +261,11 @@ PyAPI_FUNC(void) _PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *c
 PyAPI_FUNC(void) _PyEval_FormatKwargsError(PyThreadState *tstate, PyObject *func, PyObject *kwargs);
 PyAPI_FUNC(PyObject *)_PyEval_MatchClass(PyThreadState *tstate, PyObject *subject, PyObject *type, Py_ssize_t nargs, PyObject *kwargs);
 PyAPI_FUNC(PyObject *)_PyEval_MatchKeys(PyThreadState *tstate, PyObject *map, PyObject *keys);
-PyAPI_FUNC(int) _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v, int argcnt, int argcntafter, PyObject **sp);
+PyAPI_FUNC(int) _PyEval_UnpackIterableStackRef(PyThreadState *tstate, _PyStackRef v, int argcnt, int argcntafter, _PyStackRef *sp);
 PyAPI_FUNC(void) _PyEval_FrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame *frame);
 
+PyAPI_FUNC(PyObject **) _PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_ssize_t nargs, PyObject **scratch);
+
+PyAPI_FUNC(void) _PyObjectArray_Free(PyObject **array, PyObject **scratch);
 
 /* Bits that can be set in PyThreadState.eval_breaker */
@@ -8,6 +8,7 @@ extern "C" {
 # error "this header requires Py_BUILD_CORE define"
 #endif
 
+#include "pycore_stackref.h"    // _PyStackRef
 #include "pycore_lock.h"        // PyMutex
 #include "pycore_backoff.h"     // _Py_BackoffCounter
 
@@ -317,30 +318,30 @@ extern void _PyCode_Clear_Executors(PyCodeObject *code);
 
 /* Specialization functions */
 
-extern void _Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls,
+extern void _Py_Specialize_LoadSuperAttr(_PyStackRef global_super, _PyStackRef cls,
                                          _Py_CODEUNIT *instr, int load_method);
-extern void _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr,
+extern void _Py_Specialize_LoadAttr(_PyStackRef owner, _Py_CODEUNIT *instr,
                                     PyObject *name);
-extern void _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr,
+extern void _Py_Specialize_StoreAttr(_PyStackRef owner, _Py_CODEUNIT *instr,
                                      PyObject *name);
 extern void _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins,
                                       _Py_CODEUNIT *instr, PyObject *name);
-extern void _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container,
+extern void _Py_Specialize_BinarySubscr(_PyStackRef sub, _PyStackRef container,
                                         _Py_CODEUNIT *instr);
-extern void _Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub,
+extern void _Py_Specialize_StoreSubscr(_PyStackRef container, _PyStackRef sub,
                                        _Py_CODEUNIT *instr);
-extern void _Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr,
+extern void _Py_Specialize_Call(_PyStackRef callable, _Py_CODEUNIT *instr,
                                 int nargs);
-extern void _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
-                                    int oparg, PyObject **locals);
-extern void _Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs,
+extern void _Py_Specialize_BinaryOp(_PyStackRef lhs, _PyStackRef rhs, _Py_CODEUNIT *instr,
+                                    int oparg, _PyStackRef *locals);
+extern void _Py_Specialize_CompareOp(_PyStackRef lhs, _PyStackRef rhs,
                                      _Py_CODEUNIT *instr, int oparg);
-extern void _Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr,
+extern void _Py_Specialize_UnpackSequence(_PyStackRef seq, _Py_CODEUNIT *instr,
                                           int oparg);
-extern void _Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg);
-extern void _Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr);
-extern void _Py_Specialize_ToBool(PyObject *value, _Py_CODEUNIT *instr);
-extern void _Py_Specialize_ContainsOp(PyObject *value, _Py_CODEUNIT *instr);
+extern void _Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg);
+extern void _Py_Specialize_Send(_PyStackRef receiver, _Py_CODEUNIT *instr);
+extern void _Py_Specialize_ToBool(_PyStackRef value, _Py_CODEUNIT *instr);
+extern void _Py_Specialize_ContainsOp(_PyStackRef value, _Py_CODEUNIT *instr);
 
 #ifdef Py_STATS
 
@@ -11,6 +11,7 @@ extern "C" {
 #include <stdbool.h>
 #include <stddef.h>               // offsetof()
 #include "pycore_code.h"          // STATS
+#include "pycore_stackref.h"      // _PyStackRef
 
 /* See Objects/frame_layout.md for an explanation of the frame stack
  * including explanation of the PyFrameObject and _PyInterpreterFrame
@@ -67,7 +68,7 @@ typedef struct _PyInterpreterFrame {
     uint16_t return_offset;  /* Only relevant during a function call */
     char owner;
     /* Locals and stack */
-    PyObject *localsplus[1];
+    _PyStackRef localsplus[1];
 } _PyInterpreterFrame;
 
 #define _PyInterpreterFrame_LASTI(IF) \
@@ -78,23 +79,23 @@ static inline PyCodeObject *_PyFrame_GetCode(_PyInterpreterFrame *f) {
     return (PyCodeObject *)f->f_executable;
 }
 
-static inline PyObject **_PyFrame_Stackbase(_PyInterpreterFrame *f) {
-    return f->localsplus + _PyFrame_GetCode(f)->co_nlocalsplus;
+static inline _PyStackRef *_PyFrame_Stackbase(_PyInterpreterFrame *f) {
+    return (f->localsplus + _PyFrame_GetCode(f)->co_nlocalsplus);
 }
 
-static inline PyObject *_PyFrame_StackPeek(_PyInterpreterFrame *f) {
+static inline _PyStackRef _PyFrame_StackPeek(_PyInterpreterFrame *f) {
     assert(f->stacktop > _PyFrame_GetCode(f)->co_nlocalsplus);
-    assert(f->localsplus[f->stacktop-1] != NULL);
+    assert(!PyStackRef_IsNull(f->localsplus[f->stacktop-1]));
     return f->localsplus[f->stacktop-1];
 }
 
-static inline PyObject *_PyFrame_StackPop(_PyInterpreterFrame *f) {
+static inline _PyStackRef _PyFrame_StackPop(_PyInterpreterFrame *f) {
     assert(f->stacktop > _PyFrame_GetCode(f)->co_nlocalsplus);
     f->stacktop--;
     return f->localsplus[f->stacktop];
 }
 
-static inline void _PyFrame_StackPush(_PyInterpreterFrame *f, PyObject *value) {
+static inline void _PyFrame_StackPush(_PyInterpreterFrame *f, _PyStackRef value) {
     f->localsplus[f->stacktop] = value;
     f->stacktop++;
 }
@@ -143,14 +144,14 @@ _PyFrame_Initialize(
     frame->owner = FRAME_OWNED_BY_THREAD;
 
     for (int i = null_locals_from; i < code->co_nlocalsplus; i++) {
-        frame->localsplus[i] = NULL;
+        frame->localsplus[i] = PyStackRef_NULL;
     }
 }
 
 /* Gets the pointer to the locals array
  * that precedes this frame.
  */
-static inline PyObject**
+static inline _PyStackRef*
 _PyFrame_GetLocalsArray(_PyInterpreterFrame *frame)
 {
     return frame->localsplus;
@@ -160,16 +161,16 @@ _PyFrame_GetLocalsArray(_PyInterpreterFrame *frame)
    Having stacktop <= 0 ensures that invalid
    values are not visible to the cycle GC.
    We choose -1 rather than 0 to assist debugging. */
-static inline PyObject**
+static inline _PyStackRef*
 _PyFrame_GetStackPointer(_PyInterpreterFrame *frame)
 {
-    PyObject **sp = frame->localsplus + frame->stacktop;
+    _PyStackRef *sp = frame->localsplus + frame->stacktop;
     frame->stacktop = -1;
     return sp;
 }
 
 static inline void
-_PyFrame_SetStackPointer(_PyInterpreterFrame *frame, PyObject **stack_pointer)
+_PyFrame_SetStackPointer(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer)
 {
     frame->stacktop = (int)(stack_pointer - frame->localsplus);
 }
@@ -309,7 +310,7 @@ _PyFrame_PushTrampolineUnchecked(PyThreadState *tstate, PyCodeObject *code, int
 
 PyAPI_FUNC(_PyInterpreterFrame *)
 _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
-                        PyObject *locals, PyObject* const* args,
+                        PyObject *locals, _PyStackRef const* args,
                         size_t argcount, PyObject *kwnames);
 
 #ifdef __cplusplus
@@ -11,7 +11,7 @@ extern "C" {
 
 #ifdef _Py_JIT
 
-typedef _Py_CODEUNIT *(*jit_func)(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState *tstate);
+typedef _Py_CODEUNIT *(*jit_func)(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer, PyThreadState *tstate);
 
 int _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction *trace, size_t length);
 void _PyJIT_Free(_PyExecutorObject *executor);
@@ -159,21 +159,6 @@ static inline void _Py_ClearImmortal(PyObject *op)
         op = NULL; \
     } while (0)
 
-// Mark an object as supporting deferred reference counting. This is a no-op
-// in the default (with GIL) build. Objects that use deferred reference
-// counting should be tracked by the GC so that they are eventually collected.
-extern void _PyObject_SetDeferredRefcount(PyObject *op);
-
-static inline int
-_PyObject_HasDeferredRefcount(PyObject *op)
-{
-#ifdef Py_GIL_DISABLED
-    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_DEFERRED);
-#else
-    return 0;
-#endif
-}
-
 #if !defined(Py_GIL_DISABLED)
 static inline void
 _Py_DECREF_SPECIALIZED(PyObject *op, const destructor destruct)
@@ -0,0 +1,32 @@
+#ifndef Py_INTERNAL_OBJECT_DEFERRED_H
+#define Py_INTERNAL_OBJECT_DEFERRED_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "pycore_gc.h"
+
+#ifndef Py_BUILD_CORE
+# error "this header requires Py_BUILD_CORE define"
+#endif
+
+// Mark an object as supporting deferred reference counting. This is a no-op
+// in the default (with GIL) build. Objects that use deferred reference
+// counting should be tracked by the GC so that they are eventually collected.
+extern void _PyObject_SetDeferredRefcount(PyObject *op);
+
+static inline int
+_PyObject_HasDeferredRefcount(PyObject *op)
+{
+#ifdef Py_GIL_DISABLED
+    return _PyObject_HAS_GC_BITS(op, _PyGC_BITS_DEFERRED);
+#else
+    return 0;
+#endif
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif // !Py_INTERNAL_OBJECT_DEFERRED_H
@@ -271,7 +271,7 @@ extern int _Py_uop_frame_pop(_Py_UOpsContext *ctx);
 
 PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored);
 
-PyAPI_FUNC(int) _PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, PyObject **stack_pointer, _PyExecutorObject **exec_ptr);
+PyAPI_FUNC(int) _PyOptimizer_Optimize(struct _PyInterpreterFrame *frame, _Py_CODEUNIT *start, _PyStackRef *stack_pointer, _PyExecutorObject **exec_ptr);
 
 #ifdef __cplusplus
 }
@ -8,187 +8,264 @@ extern "C" {
|
||||||
# error "this header requires Py_BUILD_CORE define"
|
# error "this header requires Py_BUILD_CORE define"
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
#include "pycore_object_deferred.h"
|
||||||
|
|
||||||
#include <stddef.h>
|
#include <stddef.h>
|
||||||
|
|
||||||
|
/*
|
||||||
|
This file introduces a new API for handling references on the stack, called
|
||||||
|
_PyStackRef. This API is inspired by HPy.
|
||||||
|
|
||||||
|
There are 3 main operations, that convert _PyStackRef to PyObject* and
|
||||||
|
vice versa:
|
||||||
|
|
||||||
|
1. Borrow (discouraged)
|
||||||
|
2. Steal
|
||||||
|
3. New
|
||||||
|
|
||||||
|
Borrow means that the reference is converted without any change in ownership.
|
||||||
|
This is discouraged because it makes verification much harder. It also makes
|
||||||
|
unboxed integers harder in the future.
|
||||||
|
|
||||||
|
Steal means that ownership is transferred to something else. The total
|
||||||
|
number of references to the object stays the same.
|
||||||
|
|
||||||
|
New creates a new reference from the old reference. The old reference
|
||||||
|
is still valid.
|
||||||
|
|
||||||
|
With these 3 API, a strict stack discipline must be maintained. All
|
||||||
|
_PyStackRef must be operated on by the new reference operations:
|
||||||
|
|
||||||
|
1. DUP
|
||||||
|
2. CLOSE
|
||||||
|
|
||||||
|
DUP is roughly equivalent to Py_NewRef. It creates a new reference from an old
|
||||||
|
reference. The old reference remains unchanged.
|
||||||
|
|
||||||
|
CLOSE is roughly equivalent to Py_DECREF. It destroys a reference.
|
||||||
|
|
||||||
|
Note that it is unsafe to borrow a _PyStackRef and then do normal
|
||||||
|
CPython refcounting operations on it!
|
||||||
|
*/
|
||||||
|
|
||||||
typedef union {
|
typedef union {
|
||||||
uintptr_t bits;
|
uintptr_t bits;
|
||||||
} _PyStackRef;
|
} _PyStackRef;
|
||||||
|
|
||||||
static const _PyStackRef Py_STACKREF_NULL = { .bits = 0 };
|
|
||||||
|
|
||||||
#define Py_TAG_DEFERRED (1)
|
#define Py_TAG_DEFERRED (1)
|
||||||
|
|
||||||
// Gets a PyObject * from a _PyStackRef
|
#define Py_TAG_PTR (0)
|
||||||
#if defined(Py_GIL_DISABLED)
|
#define Py_TAG_BITS (1)
|
||||||
static inline PyObject *
|
|
||||||
PyStackRef_Get(_PyStackRef tagged)
|
#ifdef Py_GIL_DISABLED
|
||||||
{
|
static const _PyStackRef PyStackRef_NULL = { .bits = 0 | Py_TAG_DEFERRED};
|
||||||
PyObject *cleared = ((PyObject *)((tagged).bits & (~Py_TAG_DEFERRED)));
|
|
||||||
return cleared;
|
|
||||||
}
|
|
||||||
#else
|
#else
|
||||||
# define PyStackRef_Get(tagged) ((PyObject *)((tagged).bits))
|
static const _PyStackRef PyStackRef_NULL = { .bits = 0 };
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
// Converts a PyObject * to a PyStackRef, stealing the reference.
|
#define PyStackRef_IsNull(stackref) ((stackref).bits == PyStackRef_NULL.bits)
|
||||||
#if defined(Py_GIL_DISABLED)
|
|
||||||
static inline _PyStackRef
|
|
||||||
_PyStackRef_StealRef(PyObject *obj)
|
#ifdef Py_GIL_DISABLED
|
||||||
{
|
# define PyStackRef_True ((_PyStackRef){.bits = ((uintptr_t)&_Py_TrueStruct) | Py_TAG_DEFERRED })
|
||||||
// Make sure we don't take an already tagged value.
|
|
||||||
assert(((uintptr_t)obj & Py_TAG_DEFERRED) == 0);
|
|
||||||
return ((_PyStackRef){.bits = ((uintptr_t)(obj))});
|
|
||||||
}
|
|
||||||
# define PyStackRef_StealRef(obj) _PyStackRef_StealRef(_PyObject_CAST(obj))
|
|
||||||
#else
|
#else
|
||||||
# define PyStackRef_StealRef(obj) ((_PyStackRef){.bits = ((uintptr_t)(obj))})
|
# define PyStackRef_True ((_PyStackRef){.bits = ((uintptr_t)&_Py_TrueStruct) })
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
|
# define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_DEFERRED })
|
||||||
|
#else
|
||||||
|
# define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) })
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
|
# define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) | Py_TAG_DEFERRED })
|
||||||
|
#else
|
||||||
|
# define PyStackRef_None ((_PyStackRef){.bits = ((uintptr_t)&_Py_NoneStruct) })
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
static inline int
|
||||||
|
PyStackRef_Is(_PyStackRef a, _PyStackRef b) {
|
||||||
|
return a.bits == b.bits;
|
||||||
|
}
|
||||||
|
|
||||||
|
static inline int
|
||||||
|
PyStackRef_IsDeferred(_PyStackRef ref)
|
||||||
|
{
|
||||||
|
return ((ref.bits & Py_TAG_BITS) == Py_TAG_DEFERRED);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets a PyObject * from a _PyStackRef
|
||||||
|
static inline PyObject *
|
||||||
|
PyStackRef_AsPyObjectBorrow(_PyStackRef stackref)
|
||||||
|
{
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
|
PyObject *cleared = ((PyObject *)((stackref).bits & (~Py_TAG_BITS)));
|
||||||
|
return cleared;
|
||||||
|
#else
|
||||||
|
return ((PyObject *)(stackref).bits);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
// Converts a PyStackRef back to a PyObject *, stealing the
|
||||||
|
// PyStackRef.
|
||||||
|
static inline PyObject *
|
||||||
|
PyStackRef_AsPyObjectSteal(_PyStackRef stackref)
|
||||||
|
{
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
|
if (!PyStackRef_IsNull(stackref) && PyStackRef_IsDeferred(stackref)) {
|
||||||
|
return Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
|
}
|
||||||
|
return PyStackRef_AsPyObjectBorrow(stackref);
|
||||||
|
#else
|
||||||
|
return PyStackRef_AsPyObjectBorrow(stackref);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
// Converts a PyStackRef back to a PyObject *, converting the
|
||||||
|
// stackref to a new reference.
|
||||||
|
static inline PyObject *
|
||||||
|
PyStackRef_AsPyObjectNew(_PyStackRef stackref)
|
||||||
|
{
|
||||||
|
return Py_NewRef(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
|
}
|
||||||
|
|
||||||
|
static inline PyTypeObject *
|
||||||
|
PyStackRef_TYPE(_PyStackRef stackref)
|
||||||
|
{
|
||||||
|
return Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Converts a PyObject * to a PyStackRef, stealing the reference
|
||||||
|
static inline _PyStackRef
|
||||||
|
_PyStackRef_FromPyObjectSteal(PyObject *obj)
|
||||||
|
{
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
|
// Make sure we don't take an already tagged value.
|
||||||
|
assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
|
||||||
|
int tag = (obj == NULL || _Py_IsImmortal(obj)) ? (Py_TAG_DEFERRED) : Py_TAG_PTR;
|
||||||
|
return ((_PyStackRef){.bits = ((uintptr_t)(obj)) | tag});
|
||||||
|
#else
|
||||||
|
return ((_PyStackRef){.bits = ((uintptr_t)(obj))});
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
#define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj))
|
||||||
|
|
||||||
|
|
||||||
// Converts a PyObject * to a PyStackRef, with a new reference
|
// Converts a PyObject * to a PyStackRef, with a new reference
|
||||||
#if defined(Py_GIL_DISABLED)
|
|
||||||
static inline _PyStackRef
|
static inline _PyStackRef
|
||||||
_PyStackRef_NewRefDeferred(PyObject *obj)
|
PyStackRef_FromPyObjectNew(PyObject *obj)
|
||||||
{
|
{
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
// Make sure we don't take an already tagged value.
|
// Make sure we don't take an already tagged value.
|
||||||
assert(((uintptr_t)obj & Py_TAG_DEFERRED) == 0);
|
assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
|
||||||
assert(obj != NULL);
|
assert(obj != NULL);
|
||||||
if (_PyObject_HasDeferredRefcount(obj)) {
|
// TODO (gh-117139): Add deferred objects later.
|
||||||
|
if (_Py_IsImmortal(obj)) {
|
||||||
return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED };
|
return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED };
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
return (_PyStackRef){ .bits = (uintptr_t)Py_NewRef(obj) };
|
return (_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) | Py_TAG_PTR };
|
||||||
}
|
}
|
||||||
}
|
|
||||||
# define PyStackRef_NewRefDeferred(obj) _PyStackRef_NewRefDeferred(_PyObject_CAST(obj))
|
|
||||||
#else
|
#else
|
||||||
# define PyStackRef_NewRefDeferred(obj) PyStackRef_NewRef(((_PyStackRef){.bits = ((uintptr_t)(obj))}))
|
return ((_PyStackRef){ .bits = (uintptr_t)(Py_NewRef(obj)) });
|
||||||
#endif
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
#if defined(Py_GIL_DISABLED)
|
#define PyStackRef_FromPyObjectNew(obj) PyStackRef_FromPyObjectNew(_PyObject_CAST(obj))
|
||||||
|
|
||||||
|
// Same as PyStackRef_FromPyObjectNew but only for immortal objects.
|
||||||
static inline _PyStackRef
|
static inline _PyStackRef
|
||||||
_PyStackRef_XNewRefDeferred(PyObject *obj)
|
PyStackRef_FromPyObjectImmortal(PyObject *obj)
|
||||||
{
|
{
|
||||||
|
#ifdef Py_GIL_DISABLED
|
||||||
// Make sure we don't take an already tagged value.
|
// Make sure we don't take an already tagged value.
|
||||||
assert(((uintptr_t)obj & Py_TAG_DEFERRED) == 0);
|
assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
|
||||||
if (obj == NULL) {
|
assert(obj != NULL);
|
||||||
return Py_STACKREF_NULL;
|
assert(_Py_IsImmortal(obj));
|
||||||
}
|
return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED };
|
||||||
return _PyStackRef_NewRefDeferred(obj);
|
|
||||||
}
|
|
||||||
# define PyStackRef_XNewRefDeferred(obj) _PyStackRef_XNewRefDeferred(_PyObject_CAST(obj))
|
|
||||||
#else
|
#else
|
||||||
# define PyStackRef_XNewRefDeferred(obj) PyStackRef_XNewRef(((_PyStackRef){.bits = ((uintptr_t)(obj))}))
|
assert(_Py_IsImmortal(obj));
|
||||||
|
return ((_PyStackRef){ .bits = (uintptr_t)(obj) });
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
// Converts a PyStackRef back to a PyObject *.
|
|
||||||
#if defined(Py_GIL_DISABLED)
|
|
||||||
static inline PyObject *
|
|
||||||
PyStackRef_StealObject(_PyStackRef tagged)
|
|
||||||
{
|
|
||||||
if ((tagged.bits & Py_TAG_DEFERRED) == Py_TAG_DEFERRED) {
|
|
||||||
assert(_PyObject_HasDeferredRefcount(PyStackRef_Get(tagged)));
|
|
||||||
return Py_NewRef(PyStackRef_Get(tagged));
|
|
||||||
}
|
|
||||||
return PyStackRef_Get(tagged);
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
# define PyStackRef_StealObject(tagged) PyStackRef_Get(tagged)
|
|
||||||
#endif
|
|
||||||
|
|
||||||
static inline void
|
|
||||||
_Py_untag_stack_borrowed(PyObject **dst, const _PyStackRef *src, size_t length)
|
|
||||||
{
|
|
||||||
for (size_t i = 0; i < length; i++) {
|
|
||||||
dst[i] = PyStackRef_Get(src[i]);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline void
|
#define PyStackRef_FromPyObjectImmortal(obj) PyStackRef_FromPyObjectImmortal(_PyObject_CAST(obj))
|
||||||
_Py_untag_stack_steal(PyObject **dst, const _PyStackRef *src, size_t length)
|
|
||||||
{
|
|
||||||
for (size_t i = 0; i < length; i++) {
|
|
||||||
dst[i] = PyStackRef_StealObject(src[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
#define PyStackRef_XSETREF(dst, src) \
|
|
||||||
do { \
|
|
||||||
_PyStackRef *_tmp_dst_ptr = &(dst); \
|
|
||||||
_PyStackRef _tmp_old_dst = (*_tmp_dst_ptr); \
|
|
||||||
*_tmp_dst_ptr = (src); \
|
|
||||||
PyStackRef_XDECREF(_tmp_old_dst); \
|
|
||||||
} while (0)
|
|
||||||
|
|
||||||
#define PyStackRef_SETREF(dst, src) \
|
|
||||||
do { \
|
|
||||||
_PyStackRef *_tmp_dst_ptr = &(dst); \
|
|
||||||
_PyStackRef _tmp_old_dst = (*_tmp_dst_ptr); \
|
|
||||||
*_tmp_dst_ptr = (src); \
|
|
||||||
PyStackRef_DECREF(_tmp_old_dst); \
|
|
||||||
} while (0)
|
|
||||||
|
|
||||||
#define PyStackRef_CLEAR(op) \
|
#define PyStackRef_CLEAR(op) \
|
||||||
do { \
|
do { \
|
||||||
_PyStackRef *_tmp_op_ptr = &(op); \
|
_PyStackRef *_tmp_op_ptr = &(op); \
|
||||||
_PyStackRef _tmp_old_op = (*_tmp_op_ptr); \
|
_PyStackRef _tmp_old_op = (*_tmp_op_ptr); \
|
||||||
if (_tmp_old_op.bits != Py_STACKREF_NULL.bits) { \
|
if (!PyStackRef_IsNull(_tmp_old_op)) { \
|
||||||
*_tmp_op_ptr = Py_STACKREF_NULL; \
|
*_tmp_op_ptr = PyStackRef_NULL; \
|
||||||
PyStackRef_DECREF(_tmp_old_op); \
|
PyStackRef_CLOSE(_tmp_old_op); \
|
||||||
} \
|
} \
|
||||||
} while (0)
|
} while (0)
|
||||||
|
|
||||||
#if defined(Py_GIL_DISABLED)
|
|
||||||
static inline void
|
static inline void
|
||||||
PyStackRef_DECREF(_PyStackRef tagged)
|
PyStackRef_CLOSE(_PyStackRef stackref)
|
||||||
{
|
{
|
||||||
if ((tagged.bits & Py_TAG_DEFERRED) == Py_TAG_DEFERRED) {
|
#ifdef Py_GIL_DISABLED
|
||||||
|
if (PyStackRef_IsDeferred(stackref)) {
|
||||||
|
// No assert for being immortal or deferred here.
|
||||||
|
// The GC unsets deferred objects right before clearing.
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
Py_DECREF(PyStackRef_Get(tagged));
|
Py_DECREF(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
}
|
|
||||||
#else
|
#else
|
||||||
# define PyStackRef_DECREF(op) Py_DECREF(PyStackRef_Get(op))
|
Py_DECREF(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if defined(Py_GIL_DISABLED)
|
|
||||||
static inline void
|
|
||||||
PyStackRef_INCREF(_PyStackRef tagged)
|
|
||||||
{
|
|
||||||
if ((tagged.bits & Py_TAG_DEFERRED) == Py_TAG_DEFERRED) {
|
|
||||||
assert(_PyObject_HasDeferredRefcount(PyStackRef_Get(tagged)));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
Py_INCREF(PyStackRef_Get(tagged));
|
|
||||||
}
|
}
|
||||||
#else
|
|
||||||
# define PyStackRef_INCREF(op) Py_INCREF(PyStackRef_Get(op))
|
|
||||||
#endif
|
|
||||||
|
|
||||||
static inline void
|
#define PyStackRef_XCLOSE(stackref) \
|
||||||
PyStackRef_XDECREF(_PyStackRef op)
|
do { \
|
||||||
|
_PyStackRef _tmp = (stackref); \
|
||||||
|
if (!PyStackRef_IsNull(_tmp)) { \
|
||||||
|
PyStackRef_CLOSE(_tmp); \
|
||||||
|
} \
|
||||||
|
} while (0);
|
||||||
|
|
||||||
|
|
||||||
|
static inline _PyStackRef
|
||||||
|
PyStackRef_DUP(_PyStackRef stackref)
|
||||||
{
|
{
|
||||||
if (op.bits != Py_STACKREF_NULL.bits) {
|
#ifdef Py_GIL_DISABLED
|
||||||
PyStackRef_DECREF(op);
|
if (PyStackRef_IsDeferred(stackref)) {
|
||||||
|
assert(PyStackRef_IsNull(stackref) ||
|
||||||
|
_Py_IsImmortal(PyStackRef_AsPyObjectBorrow(stackref)));
|
||||||
|
return stackref;
|
||||||
}
|
}
|
||||||
|
Py_INCREF(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
|
return stackref;
|
||||||
|
#else
|
||||||
|
Py_INCREF(PyStackRef_AsPyObjectBorrow(stackref));
|
||||||
|
return stackref;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline _PyStackRef
|
static inline _PyStackRef
|
||||||
PyStackRef_NewRef(_PyStackRef obj)
|
PyStackRef_XDUP(_PyStackRef stackref)
|
||||||
{
|
{
|
||||||
PyStackRef_INCREF(obj);
|
if (!PyStackRef_IsNull(stackref)) {
|
||||||
return obj;
|
return PyStackRef_DUP(stackref);
|
||||||
|
}
|
||||||
|
return stackref;
|
||||||
}
|
}
|
||||||
|
|
||||||
static inline _PyStackRef
|
|
||||||
PyStackRef_XNewRef(_PyStackRef obj)
|
static inline void
|
||||||
|
_PyObjectStack_FromStackRefStack(PyObject **dst, const _PyStackRef *src, size_t length)
|
||||||
{
|
{
|
||||||
if (obj.bits == Py_STACKREF_NULL.bits) {
|
for (size_t i = 0; i < length; i++) {
|
||||||
return obj;
|
dst[i] = PyStackRef_AsPyObjectBorrow(src[i]);
|
||||||
}
|
}
|
||||||
return PyStackRef_NewRef(obj);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
#ifdef __cplusplus
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
|
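As a usage note for the pycore_stackref.h hunk above: the header distinguishes three conversions between PyObject * and _PyStackRef (borrow, steal, new). The sketch below is not part of the commit; it contrasts the three under the assumption of a core build (Py_BUILD_CORE) with the new header available. The function name convert_examples is hypothetical and obj is assumed non-NULL.

#include "Python.h"
#include "pycore_stackref.h"   // requires Py_BUILD_CORE

static void
convert_examples(PyObject *obj, _PyStackRef ref)
{
    // New: the stackref gets its own reference; `obj` keeps the caller's reference.
    _PyStackRef owned = PyStackRef_FromPyObjectNew(obj);

    // Steal (back to PyObject *): ownership leaves the stackref, so `owned`
    // must not be CLOSEd afterwards; the returned reference is ours to drop.
    PyObject *back = PyStackRef_AsPyObjectSteal(owned);
    Py_DECREF(back);

    // Borrow: no ownership change; valid only while `ref` stays alive, and the
    // result must not be passed to normal Py_INCREF/Py_DECREF.
    PyObject *peek = PyStackRef_AsPyObjectBorrow(ref);
    (void)peek;
}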
@ -147,7 +147,7 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *value;
|
_PyStackRef value;
|
||||||
value = stack_pointer[-1];
|
value = stack_pointer[-1];
|
||||||
spam();
|
spam();
|
||||||
stack_pointer += -1;
|
stack_pointer += -1;
|
||||||
|
@ -168,7 +168,7 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
spam();
|
spam();
|
||||||
stack_pointer[0] = res;
|
stack_pointer[0] = res;
|
||||||
stack_pointer += 1;
|
stack_pointer += 1;
|
||||||
|
@ -189,8 +189,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *value;
|
_PyStackRef value;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
value = stack_pointer[-1];
|
value = stack_pointer[-1];
|
||||||
spam();
|
spam();
|
||||||
stack_pointer[-1] = res;
|
stack_pointer[-1] = res;
|
||||||
|
@ -210,9 +210,9 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
left = stack_pointer[-2];
|
left = stack_pointer[-2];
|
||||||
spam();
|
spam();
|
||||||
|
@ -235,9 +235,9 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
PyObject *result;
|
_PyStackRef result;
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
left = stack_pointer[-2];
|
left = stack_pointer[-2];
|
||||||
spam();
|
spam();
|
||||||
|
@ -263,8 +263,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP1);
|
INSTRUCTION_STATS(OP1);
|
||||||
PREDICTED(OP1);
|
PREDICTED(OP1);
|
||||||
PyObject *arg;
|
_PyStackRef arg;
|
||||||
PyObject *rest;
|
_PyStackRef rest;
|
||||||
arg = stack_pointer[-1];
|
arg = stack_pointer[-1];
|
||||||
stack_pointer[-1] = rest;
|
stack_pointer[-1] = rest;
|
||||||
DISPATCH();
|
DISPATCH();
|
||||||
|
@ -275,8 +275,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP3);
|
INSTRUCTION_STATS(OP3);
|
||||||
static_assert(INLINE_CACHE_ENTRIES_OP1 == 0, "incorrect cache size");
|
static_assert(INLINE_CACHE_ENTRIES_OP1 == 0, "incorrect cache size");
|
||||||
PyObject *arg;
|
_PyStackRef arg;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
arg = stack_pointer[-1];
|
arg = stack_pointer[-1];
|
||||||
DEOPT_IF(xxx, OP1);
|
DEOPT_IF(xxx, OP1);
|
||||||
stack_pointer[-1] = res;
|
stack_pointer[-1] = res;
|
||||||
|
@ -332,9 +332,9 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
left = stack_pointer[-2];
|
left = stack_pointer[-2];
|
||||||
if (cond) goto pop_2_label;
|
if (cond) goto pop_2_label;
|
||||||
|
@ -357,7 +357,7 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
(void)this_instr;
|
(void)this_instr;
|
||||||
next_instr += 4;
|
next_instr += 4;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *value;
|
_PyStackRef value;
|
||||||
value = stack_pointer[-1];
|
value = stack_pointer[-1];
|
||||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||||
(void)counter;
|
(void)counter;
|
||||||
|
@ -408,10 +408,10 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
PREDICTED(OP);
|
PREDICTED(OP);
|
||||||
_Py_CODEUNIT *this_instr = next_instr - 6;
|
_Py_CODEUNIT *this_instr = next_instr - 6;
|
||||||
(void)this_instr;
|
(void)this_instr;
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
PyObject *arg2;
|
_PyStackRef arg2;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
// _OP1
|
// _OP1
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
left = stack_pointer[-2];
|
left = stack_pointer[-2];
|
||||||
|
@ -439,8 +439,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
(void)this_instr;
|
(void)this_instr;
|
||||||
next_instr += 2;
|
next_instr += 2;
|
||||||
INSTRUCTION_STATS(OP1);
|
INSTRUCTION_STATS(OP1);
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
left = stack_pointer[-2];
|
left = stack_pointer[-2];
|
||||||
uint16_t counter = read_u16(&this_instr[1].cache);
|
uint16_t counter = read_u16(&this_instr[1].cache);
|
||||||
|
@ -454,10 +454,10 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
next_instr += 6;
|
next_instr += 6;
|
||||||
INSTRUCTION_STATS(OP3);
|
INSTRUCTION_STATS(OP3);
|
||||||
static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size");
|
static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size");
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
PyObject *arg2;
|
_PyStackRef arg2;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
/* Skip 5 cache entries */
|
/* Skip 5 cache entries */
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
left = stack_pointer[-2];
|
left = stack_pointer[-2];
|
||||||
|
@ -539,9 +539,9 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *above;
|
_PyStackRef above;
|
||||||
PyObject **values;
|
_PyStackRef *values;
|
||||||
PyObject *below;
|
_PyStackRef below;
|
||||||
above = stack_pointer[-1];
|
above = stack_pointer[-1];
|
||||||
values = &stack_pointer[-1 - oparg*2];
|
values = &stack_pointer[-1 - oparg*2];
|
||||||
below = stack_pointer[-2 - oparg*2];
|
below = stack_pointer[-2 - oparg*2];
|
||||||
|
@ -564,9 +564,9 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *below;
|
_PyStackRef below;
|
||||||
PyObject **values;
|
_PyStackRef *values;
|
||||||
PyObject *above;
|
_PyStackRef above;
|
||||||
values = &stack_pointer[-1];
|
values = &stack_pointer[-1];
|
||||||
spam(values, oparg);
|
spam(values, oparg);
|
||||||
stack_pointer[-2] = below;
|
stack_pointer[-2] = below;
|
||||||
|
@ -589,8 +589,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject **values;
|
_PyStackRef *values;
|
||||||
PyObject *above;
|
_PyStackRef above;
|
||||||
values = &stack_pointer[-oparg];
|
values = &stack_pointer[-oparg];
|
||||||
spam(values, oparg);
|
spam(values, oparg);
|
||||||
stack_pointer[0] = above;
|
stack_pointer[0] = above;
|
||||||
|
@ -612,8 +612,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject **values;
|
_PyStackRef *values;
|
||||||
PyObject *extra;
|
_PyStackRef extra;
|
||||||
values = &stack_pointer[-oparg];
|
values = &stack_pointer[-oparg];
|
||||||
extra = stack_pointer[-1 - oparg];
|
extra = stack_pointer[-1 - oparg];
|
||||||
if (oparg == 0) { stack_pointer += -1 - oparg; goto somewhere; }
|
if (oparg == 0) { stack_pointer += -1 - oparg; goto somewhere; }
|
||||||
|
@ -635,12 +635,12 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(OP);
|
INSTRUCTION_STATS(OP);
|
||||||
PyObject *cc;
|
_PyStackRef cc;
|
||||||
PyObject *input = NULL;
|
_PyStackRef input = PyStackRef_NULL;
|
||||||
PyObject *aa;
|
_PyStackRef aa;
|
||||||
PyObject *xx;
|
_PyStackRef xx;
|
||||||
PyObject *output = NULL;
|
_PyStackRef output = PyStackRef_NULL;
|
||||||
PyObject *zz;
|
_PyStackRef zz;
|
||||||
cc = stack_pointer[-1];
|
cc = stack_pointer[-1];
|
||||||
if ((oparg & 1) == 1) { input = stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0)]; }
|
if ((oparg & 1) == 1) { input = stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0)]; }
|
||||||
aa = stack_pointer[-2 - (((oparg & 1) == 1) ? 1 : 0)];
|
aa = stack_pointer[-2 - (((oparg & 1) == 1) ? 1 : 0)];
|
||||||
|
@ -670,12 +670,12 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(M);
|
INSTRUCTION_STATS(M);
|
||||||
PyObject *right;
|
_PyStackRef right;
|
||||||
PyObject *middle;
|
_PyStackRef middle;
|
||||||
PyObject *left;
|
_PyStackRef left;
|
||||||
PyObject *deep;
|
_PyStackRef deep;
|
||||||
PyObject *extra = NULL;
|
_PyStackRef extra = PyStackRef_NULL;
|
||||||
PyObject *res;
|
_PyStackRef res;
|
||||||
// A
|
// A
|
||||||
right = stack_pointer[-1];
|
right = stack_pointer[-1];
|
||||||
middle = stack_pointer[-2];
|
middle = stack_pointer[-2];
|
||||||
|
@ -712,8 +712,8 @@ class TestGeneratedCases(unittest.TestCase):
|
||||||
frame->instr_ptr = next_instr;
|
frame->instr_ptr = next_instr;
|
||||||
next_instr += 1;
|
next_instr += 1;
|
||||||
INSTRUCTION_STATS(M);
|
INSTRUCTION_STATS(M);
|
||||||
PyObject *val1;
|
_PyStackRef val1;
|
||||||
PyObject *val2;
|
_PyStackRef val2;
|
||||||
// A
|
// A
|
||||||
{
|
{
|
||||||
val1 = spam();
|
val1 = spam();
|
||||||
|
|
|
@@ -1198,6 +1198,7 @@ PYTHON_HEADERS= \
 		$(srcdir)/Include/internal/pycore_namespace.h \
 		$(srcdir)/Include/internal/pycore_object.h \
 		$(srcdir)/Include/internal/pycore_object_alloc.h \
+		$(srcdir)/Include/internal/pycore_object_deferred.h \
 		$(srcdir)/Include/internal/pycore_object_stack.h \
 		$(srcdir)/Include/internal/pycore_object_state.h \
 		$(srcdir)/Include/internal/pycore_obmalloc.h \
@@ -0,0 +1,5 @@
+Convert the Python evaluation stack to use internal stack references. The
+purpose is to support tagged pointers. In :pep:`703`, this will
+allow for its form of deferred reference counting. For both
+the default and free-threaded builds, this sets up the infrastructure
+for unboxed integers in the future.
@ -21,10 +21,10 @@
|
||||||
static PyObject *
|
static PyObject *
|
||||||
framelocalsproxy_getval(_PyInterpreterFrame *frame, PyCodeObject *co, int i)
|
framelocalsproxy_getval(_PyInterpreterFrame *frame, PyCodeObject *co, int i)
|
||||||
{
|
{
|
||||||
PyObject **fast = _PyFrame_GetLocalsArray(frame);
|
_PyStackRef *fast = _PyFrame_GetLocalsArray(frame);
|
||||||
_PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
|
_PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
|
||||||
|
|
||||||
PyObject *value = fast[i];
|
PyObject *value = PyStackRef_AsPyObjectBorrow(fast[i]);
|
||||||
PyObject *cell = NULL;
|
PyObject *cell = NULL;
|
||||||
|
|
||||||
if (value == NULL) {
|
if (value == NULL) {
|
||||||
|
@ -136,9 +136,9 @@ static int
|
||||||
framelocalsproxy_setitem(PyObject *self, PyObject *key, PyObject *value)
|
framelocalsproxy_setitem(PyObject *self, PyObject *key, PyObject *value)
|
||||||
{
|
{
|
||||||
/* Merge locals into fast locals */
|
/* Merge locals into fast locals */
|
||||||
PyFrameObject* frame = ((PyFrameLocalsProxyObject*)self)->frame;
|
PyFrameObject *frame = ((PyFrameLocalsProxyObject*)self)->frame;
|
||||||
PyObject** fast = _PyFrame_GetLocalsArray(frame->f_frame);
|
_PyStackRef *fast = _PyFrame_GetLocalsArray(frame->f_frame);
|
||||||
PyCodeObject* co = _PyFrame_GetCode(frame->f_frame);
|
PyCodeObject *co = _PyFrame_GetCode(frame->f_frame);
|
||||||
|
|
||||||
if (value == NULL) {
|
if (value == NULL) {
|
||||||
PyErr_SetString(PyExc_TypeError, "cannot remove variables from FrameLocalsProxy");
|
PyErr_SetString(PyExc_TypeError, "cannot remove variables from FrameLocalsProxy");
|
||||||
|
@ -151,26 +151,28 @@ framelocalsproxy_setitem(PyObject *self, PyObject *key, PyObject *value)
|
||||||
_Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1);
|
_Py_Executors_InvalidateDependency(PyInterpreterState_Get(), co, 1);
|
||||||
|
|
||||||
_PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
|
_PyLocals_Kind kind = _PyLocals_GetKind(co->co_localspluskinds, i);
|
||||||
PyObject *oldvalue = fast[i];
|
_PyStackRef oldvalue = fast[i];
|
||||||
PyObject *cell = NULL;
|
PyObject *cell = NULL;
|
||||||
if (kind == CO_FAST_FREE) {
|
if (kind == CO_FAST_FREE) {
|
||||||
// The cell was set when the frame was created from
|
// The cell was set when the frame was created from
|
||||||
// the function's closure.
|
// the function's closure.
|
||||||
assert(oldvalue != NULL && PyCell_Check(oldvalue));
|
assert(oldvalue.bits != 0 && PyCell_Check(PyStackRef_AsPyObjectBorrow(oldvalue)));
|
||||||
cell = oldvalue;
|
cell = PyStackRef_AsPyObjectBorrow(oldvalue);
|
||||||
} else if (kind & CO_FAST_CELL && oldvalue != NULL) {
|
} else if (kind & CO_FAST_CELL && oldvalue.bits != 0) {
|
||||||
if (PyCell_Check(oldvalue)) {
|
PyObject *as_obj = PyStackRef_AsPyObjectBorrow(oldvalue);
|
||||||
cell = oldvalue;
|
if (PyCell_Check(as_obj)) {
|
||||||
|
cell = as_obj;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (cell != NULL) {
|
if (cell != NULL) {
|
||||||
oldvalue = PyCell_GET(cell);
|
PyObject *oldvalue_o = PyCell_GET(cell);
|
||||||
if (value != oldvalue) {
|
if (value != oldvalue_o) {
|
||||||
PyCell_SET(cell, Py_XNewRef(value));
|
PyCell_SET(cell, Py_XNewRef(value));
|
||||||
Py_XDECREF(oldvalue);
|
Py_XDECREF(oldvalue_o);
|
||||||
}
|
}
|
||||||
} else if (value != oldvalue) {
|
} else if (value != PyStackRef_AsPyObjectBorrow(oldvalue)) {
|
||||||
Py_XSETREF(fast[i], Py_NewRef(value));
|
PyStackRef_XCLOSE(fast[i]);
|
||||||
|
fast[i] = PyStackRef_FromPyObjectNew(value);
|
||||||
}
|
}
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
@ -1511,7 +1513,7 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore
|
||||||
for (int i = 0; i < code->co_nlocalsplus; i++) {
|
for (int i = 0; i < code->co_nlocalsplus; i++) {
|
||||||
// Counting every unbound local is overly-cautious, but a full flow
|
// Counting every unbound local is overly-cautious, but a full flow
|
||||||
// analysis (like we do in the compiler) is probably too expensive:
|
// analysis (like we do in the compiler) is probably too expensive:
|
||||||
unbound += f->f_frame->localsplus[i] == NULL;
|
unbound += PyStackRef_IsNull(f->f_frame->localsplus[i]);
|
||||||
}
|
}
|
||||||
if (unbound) {
|
if (unbound) {
|
||||||
const char *e = "assigning None to %d unbound local%s";
|
const char *e = "assigning None to %d unbound local%s";
|
||||||
|
@ -1522,8 +1524,8 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore
|
||||||
// Do this in a second pass to avoid writing a bunch of Nones when
|
// Do this in a second pass to avoid writing a bunch of Nones when
|
||||||
// warnings are being treated as errors and the previous bit raises:
|
// warnings are being treated as errors and the previous bit raises:
|
||||||
for (int i = 0; i < code->co_nlocalsplus; i++) {
|
for (int i = 0; i < code->co_nlocalsplus; i++) {
|
||||||
if (f->f_frame->localsplus[i] == NULL) {
|
if (PyStackRef_IsNull(f->f_frame->localsplus[i])) {
|
||||||
f->f_frame->localsplus[i] = Py_NewRef(Py_None);
|
f->f_frame->localsplus[i] = PyStackRef_None;
|
||||||
unbound--;
|
unbound--;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1536,14 +1538,13 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore
|
||||||
while (start_stack > best_stack) {
|
while (start_stack > best_stack) {
|
||||||
if (top_of_stack(start_stack) == Except) {
|
if (top_of_stack(start_stack) == Except) {
|
||||||
/* Pop exception stack as well as the evaluation stack */
|
/* Pop exception stack as well as the evaluation stack */
|
||||||
PyObject *exc = _PyFrame_StackPop(f->f_frame);
|
PyObject *exc = PyStackRef_AsPyObjectBorrow(_PyFrame_StackPop(f->f_frame));
|
||||||
assert(PyExceptionInstance_Check(exc) || exc == Py_None);
|
assert(PyExceptionInstance_Check(exc) || exc == Py_None);
|
||||||
PyThreadState *tstate = _PyThreadState_GET();
|
PyThreadState *tstate = _PyThreadState_GET();
|
||||||
Py_XSETREF(tstate->exc_info->exc_value, exc == Py_None ? NULL : exc);
|
Py_XSETREF(tstate->exc_info->exc_value, exc == Py_None ? NULL : exc);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
PyObject *v = _PyFrame_StackPop(f->f_frame);
|
PyStackRef_XCLOSE(_PyFrame_StackPop(f->f_frame));
|
||||||
Py_XDECREF(v);
|
|
||||||
}
|
}
|
||||||
start_stack = pop_value(start_stack);
|
start_stack = pop_value(start_stack);
|
||||||
}
|
}
|
||||||
|
@ -1618,9 +1619,9 @@ frame_dealloc(PyFrameObject *f)
|
||||||
frame->f_executable = NULL;
|
frame->f_executable = NULL;
|
||||||
Py_CLEAR(frame->f_funcobj);
|
Py_CLEAR(frame->f_funcobj);
|
||||||
Py_CLEAR(frame->f_locals);
|
Py_CLEAR(frame->f_locals);
|
||||||
PyObject **locals = _PyFrame_GetLocalsArray(frame);
|
_PyStackRef *locals = _PyFrame_GetLocalsArray(frame);
|
||||||
for (int i = 0; i < frame->stacktop; i++) {
|
for (int i = 0; i < frame->stacktop; i++) {
|
||||||
Py_CLEAR(locals[i]);
|
PyStackRef_CLEAR(locals[i]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Py_CLEAR(f->f_back);
|
Py_CLEAR(f->f_back);
|
||||||
|
@ -1651,10 +1652,10 @@ frame_tp_clear(PyFrameObject *f)
|
||||||
Py_CLEAR(f->f_extra_locals);
|
Py_CLEAR(f->f_extra_locals);
|
||||||
|
|
||||||
/* locals and stack */
|
/* locals and stack */
|
||||||
PyObject **locals = _PyFrame_GetLocalsArray(f->f_frame);
|
_PyStackRef *locals = _PyFrame_GetLocalsArray(f->f_frame);
|
||||||
assert(f->f_frame->stacktop >= 0);
|
assert(f->f_frame->stacktop >= 0);
|
||||||
for (int i = 0; i < f->f_frame->stacktop; i++) {
|
for (int i = 0; i < f->f_frame->stacktop; i++) {
|
||||||
Py_CLEAR(locals[i]);
|
PyStackRef_CLEAR(locals[i]);
|
||||||
}
|
}
|
||||||
f->f_frame->stacktop = 0;
|
f->f_frame->stacktop = 0;
|
||||||
Py_CLEAR(f->f_frame->f_locals);
|
Py_CLEAR(f->f_frame->f_locals);
|
||||||
|
@ -1848,7 +1849,7 @@ frame_init_get_vars(_PyInterpreterFrame *frame)
|
||||||
int offset = PyUnstable_Code_GetFirstFree(co);
|
int offset = PyUnstable_Code_GetFirstFree(co);
|
||||||
for (int i = 0; i < co->co_nfreevars; ++i) {
|
for (int i = 0; i < co->co_nfreevars; ++i) {
|
||||||
PyObject *o = PyTuple_GET_ITEM(closure, i);
|
PyObject *o = PyTuple_GET_ITEM(closure, i);
|
||||||
frame->localsplus[offset + i] = Py_NewRef(o);
|
frame->localsplus[offset + i] = PyStackRef_FromPyObjectNew(o);
|
||||||
}
|
}
|
||||||
// COPY_FREE_VARS doesn't have inline CACHEs, either:
|
// COPY_FREE_VARS doesn't have inline CACHEs, either:
|
||||||
frame->instr_ptr = _PyCode_CODE(_PyFrame_GetCode(frame));
|
frame->instr_ptr = _PyCode_CODE(_PyFrame_GetCode(frame));
|
||||||
|
@ -1873,7 +1874,7 @@ frame_get_var(_PyInterpreterFrame *frame, PyCodeObject *co, int i,
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
PyObject *value = frame->localsplus[i];
|
PyObject *value = PyStackRef_AsPyObjectBorrow(frame->localsplus[i]);
|
||||||
if (frame->stacktop) {
|
if (frame->stacktop) {
|
||||||
if (kind & CO_FAST_FREE) {
|
if (kind & CO_FAST_FREE) {
|
||||||
// The cell was set by COPY_FREE_VARS.
|
// The cell was set by COPY_FREE_VARS.
|
||||||
|
|
|
@ -212,7 +212,7 @@ gen_send_ex2(PyGenObject *gen, PyObject *arg, PyObject **presult,
|
||||||
|
|
||||||
/* Push arg onto the frame's value stack */
|
/* Push arg onto the frame's value stack */
|
||||||
PyObject *arg_obj = arg ? arg : Py_None;
|
PyObject *arg_obj = arg ? arg : Py_None;
|
||||||
_PyFrame_StackPush(frame, Py_NewRef(arg_obj));
|
_PyFrame_StackPush(frame, PyStackRef_FromPyObjectNew(arg_obj));
|
||||||
|
|
||||||
_PyErr_StackItem *prev_exc_info = tstate->exc_info;
|
_PyErr_StackItem *prev_exc_info = tstate->exc_info;
|
||||||
gen->gi_exc_state.previous_item = prev_exc_info;
|
gen->gi_exc_state.previous_item = prev_exc_info;
|
||||||
|
@ -344,7 +344,7 @@ _PyGen_yf(PyGenObject *gen)
|
||||||
_PyInterpreterFrame *frame = &gen->gi_iframe;
|
_PyInterpreterFrame *frame = &gen->gi_iframe;
|
||||||
assert(is_resume(frame->instr_ptr));
|
assert(is_resume(frame->instr_ptr));
|
||||||
assert((frame->instr_ptr->op.arg & RESUME_OPARG_LOCATION_MASK) >= RESUME_AFTER_YIELD_FROM);
|
assert((frame->instr_ptr->op.arg & RESUME_OPARG_LOCATION_MASK) >= RESUME_AFTER_YIELD_FROM);
|
||||||
return Py_NewRef(_PyFrame_StackPeek(frame));
|
return PyStackRef_AsPyObjectNew(_PyFrame_StackPeek(frame));
|
||||||
}
|
}
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
|
|
|
@ -11411,7 +11411,7 @@ super_init_without_args(_PyInterpreterFrame *cframe, PyCodeObject *co,
|
||||||
}
|
}
|
||||||
|
|
||||||
assert(_PyFrame_GetCode(cframe)->co_nlocalsplus > 0);
|
assert(_PyFrame_GetCode(cframe)->co_nlocalsplus > 0);
|
||||||
PyObject *firstarg = _PyFrame_GetLocalsArray(cframe)[0];
|
PyObject *firstarg = PyStackRef_AsPyObjectBorrow(_PyFrame_GetLocalsArray(cframe)[0]);
|
||||||
// The first argument might be a cell.
|
// The first argument might be a cell.
|
||||||
if (firstarg != NULL && (_PyLocals_GetKind(co->co_localspluskinds, 0) & CO_FAST_CELL)) {
|
if (firstarg != NULL && (_PyLocals_GetKind(co->co_localspluskinds, 0) & CO_FAST_CELL)) {
|
||||||
// "firstarg" is a cell here unless (very unlikely) super()
|
// "firstarg" is a cell here unless (very unlikely) super()
|
||||||
|
@ -11439,7 +11439,7 @@ super_init_without_args(_PyInterpreterFrame *cframe, PyCodeObject *co,
|
||||||
PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
|
PyObject *name = PyTuple_GET_ITEM(co->co_localsplusnames, i);
|
||||||
assert(PyUnicode_Check(name));
|
assert(PyUnicode_Check(name));
|
||||||
if (_PyUnicode_Equal(name, &_Py_ID(__class__))) {
|
if (_PyUnicode_Equal(name, &_Py_ID(__class__))) {
|
||||||
PyObject *cell = _PyFrame_GetLocalsArray(cframe)[i];
|
PyObject *cell = PyStackRef_AsPyObjectBorrow(_PyFrame_GetLocalsArray(cframe)[i]);
|
||||||
if (cell == NULL || !PyCell_Check(cell)) {
|
if (cell == NULL || !PyCell_Check(cell)) {
|
||||||
PyErr_SetString(PyExc_RuntimeError,
|
PyErr_SetString(PyExc_RuntimeError,
|
||||||
"super(): bad __class__ cell");
|
"super(): bad __class__ cell");
|
||||||
|
|
|
@ -264,6 +264,7 @@
|
||||||
<ClInclude Include="..\Include\internal\pycore_namespace.h" />
|
<ClInclude Include="..\Include\internal\pycore_namespace.h" />
|
||||||
<ClInclude Include="..\Include\internal\pycore_object.h" />
|
<ClInclude Include="..\Include\internal\pycore_object.h" />
|
||||||
<ClInclude Include="..\Include\internal\pycore_object_alloc.h" />
|
<ClInclude Include="..\Include\internal\pycore_object_alloc.h" />
|
||||||
|
<ClInclude Include="..\Include\internal\pycore_object_deferred.h" />
|
||||||
<ClInclude Include="..\Include\internal\pycore_object_state.h" />
|
<ClInclude Include="..\Include\internal\pycore_object_state.h" />
|
||||||
<ClInclude Include="..\Include\internal\pycore_obmalloc.h" />
|
<ClInclude Include="..\Include\internal\pycore_obmalloc.h" />
|
||||||
<ClInclude Include="..\Include\internal\pycore_obmalloc_init.h" />
|
<ClInclude Include="..\Include\internal\pycore_obmalloc_init.h" />
|
||||||
|
|
|
@ -714,6 +714,9 @@
|
||||||
<ClInclude Include="..\Include\internal\pycore_object_alloc.h">
|
<ClInclude Include="..\Include\internal\pycore_object_alloc.h">
|
||||||
<Filter>Include\internal</Filter>
|
<Filter>Include\internal</Filter>
|
||||||
</ClInclude>
|
</ClInclude>
|
||||||
|
<ClInclude Include="..\Include\internal\pycore_object_deferred.h">
|
||||||
|
<Filter>Include\internal</Filter>
|
||||||
|
</ClInclude>
|
||||||
<ClInclude Include="..\Include\internal\pycore_object_state.h">
|
<ClInclude Include="..\Include\internal\pycore_object_state.h">
|
||||||
<Filter>Include\internal</Filter>
|
<Filter>Include\internal</Filter>
|
||||||
</ClInclude>
|
</ClInclude>
|
||||||
|
|
Python/bytecodes.c (2018 changed lines): file diff suppressed because it is too large.
Python/ceval.c (183 changed lines):
@@ -39,6 +39,7 @@
 #include "opcode.h"
 #include "pydtrace.h"
 #include "setobject.h"
+#include "pycore_stackref.h"

 #include <stdbool.h>              // bool

@@ -104,33 +105,34 @@

 #ifdef LLTRACE
 static void
-dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer)
+dump_stack(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer)
 {
-    PyObject **stack_base = _PyFrame_Stackbase(frame);
+    _PyStackRef *stack_base = _PyFrame_Stackbase(frame);
     PyObject *exc = PyErr_GetRaisedException();
     printf("    stack=[");
-    for (PyObject **ptr = stack_base; ptr < stack_pointer; ptr++) {
+    for (_PyStackRef *ptr = stack_base; ptr < stack_pointer; ptr++) {
         if (ptr != stack_base) {
             printf(", ");
         }
-        if (*ptr == NULL) {
+        PyObject *obj = PyStackRef_AsPyObjectBorrow(*ptr);
+        if (obj == NULL) {
             printf("<nil>");
             continue;
         }
         if (
-            *ptr == Py_None
-            || PyBool_Check(*ptr)
-            || PyLong_CheckExact(*ptr)
-            || PyFloat_CheckExact(*ptr)
-            || PyUnicode_CheckExact(*ptr)
+            obj == Py_None
+            || PyBool_Check(obj)
+            || PyLong_CheckExact(obj)
+            || PyFloat_CheckExact(obj)
+            || PyUnicode_CheckExact(obj)
         ) {
-            if (PyObject_Print(*ptr, stdout, 0) == 0) {
+            if (PyObject_Print(obj, stdout, 0) == 0) {
                 continue;
             }
             PyErr_Clear();
         }
         // Don't call __repr__(), it might recurse into the interpreter.
-        printf("<%s at %p>", Py_TYPE(*ptr)->tp_name, (void *)(*ptr));
+        printf("<%s at %p>", Py_TYPE(obj)->tp_name, (void *)(ptr->bits));
     }
     printf("]\n");
     fflush(stdout);
@@ -139,7 +141,7 @@ dump_stack(_PyInterpreterFrame *frame, PyObject **stack_pointer)

 static void
 lltrace_instruction(_PyInterpreterFrame *frame,
-                    PyObject **stack_pointer,
+                    _PyStackRef *stack_pointer,
                     _Py_CODEUNIT *next_instr,
                     int opcode,
                     int oparg)
@@ -695,6 +697,35 @@ extern void _PyUOpPrint(const _PyUOpInstruction *uop);
 #endif


+PyObject **
+_PyObjectArray_FromStackRefArray(_PyStackRef *input, Py_ssize_t nargs, PyObject **scratch)
+{
+    PyObject **result;
+    if (nargs > MAX_STACKREF_SCRATCH) {
+        // +1 in case PY_VECTORCALL_ARGUMENTS_OFFSET is set.
+        result = PyMem_Malloc((nargs + 1) * sizeof(PyObject *));
+        if (result == NULL) {
+            return NULL;
+        }
+        result++;
+    }
+    else {
+        result = scratch;
+    }
+    for (int i = 0; i < nargs; i++) {
+        result[i] = PyStackRef_AsPyObjectBorrow(input[i]);
+    }
+    return result;
+}
+
+void
+_PyObjectArray_Free(PyObject **array, PyObject **scratch)
+{
+    if (array != scratch) {
+        PyMem_Free(array);
+    }
+}
+
 /* _PyEval_EvalFrameDefault() is a *big* function,
  * so consume 3 units of C stack */
 #define PY_EVAL_C_STACK_UNITS 2
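_PyObjectArray_FromStackRefArray and _PyObjectArray_Free are the slow path behind the STACKREFS_TO_PYOBJECTS macros added later in ceval_macros.h: small argument lists reuse a caller-provided scratch array, larger ones are heap-allocated with one extra slot so vectorcall callers can still write at index -1. A hedged sketch of using the helpers directly (hypothetical caller, not from the diff):

    /* Hypothetical helper: shows the scratch-vs-heap contract of the two
     * functions above. Real call sites use the STACKREFS_TO_PYOBJECTS macros
     * defined later in ceval_macros.h, which hide the pointer bookkeeping. */
    static int
    with_pyobject_view(_PyStackRef *refs, Py_ssize_t n)
    {
        PyObject *scratch[MAX_STACKREF_SCRATCH + 1];
        /* Slot 0 stays free so vectorcall callers could still write at index -1. */
        PyObject **objs = _PyObjectArray_FromStackRefArray(refs, n, scratch + 1);
        if (objs == NULL) {
            PyErr_NoMemory();          /* allocation failed (n > MAX_STACKREF_SCRATCH) */
            return -1;
        }
        /* ... objs[0..n-1] are borrowed PyObject pointers, valid while refs live ... */
        _PyObjectArray_Free(objs - 1, scratch);   /* unshift, mirroring the cleanup macro */
        return 0;
    }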
@@ -773,7 +804,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
     /* Local "register" variables.
      * These are cached values from the frame and code object. */
     _Py_CODEUNIT *next_instr;
-    PyObject **stack_pointer;
+    _PyStackRef *stack_pointer;

 #if defined(_Py_TIER2) && !defined(_Py_JIT)
     /* Tier 2 interpreter state */
@@ -916,10 +947,9 @@ exception_unwind:
             assert(_PyErr_Occurred(tstate));

             /* Pop remaining stack entries. */
-            PyObject **stackbase = _PyFrame_Stackbase(frame);
+            _PyStackRef *stackbase = _PyFrame_Stackbase(frame);
             while (stack_pointer > stackbase) {
-                PyObject *o = POP();
-                Py_XDECREF(o);
+                PyStackRef_XCLOSE(POP());
             }
             assert(STACK_LEVEL() == 0);
             _PyFrame_SetStackPointer(frame, stack_pointer);
@@ -928,10 +958,9 @@ exception_unwind:
         }

         assert(STACK_LEVEL() >= level);
-        PyObject **new_top = _PyFrame_Stackbase(frame) + level;
+        _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level;
         while (stack_pointer > new_top) {
-            PyObject *v = POP();
-            Py_XDECREF(v);
+            PyStackRef_XCLOSE(POP());
         }
         if (lasti) {
             int frame_lasti = _PyInterpreterFrame_LASTI(frame);
@@ -939,7 +968,7 @@ exception_unwind:
             if (lasti == NULL) {
                 goto exception_unwind;
             }
-            PUSH(lasti);
+            PUSH(PyStackRef_FromPyObjectSteal(lasti));
         }

         /* Make the raw exception data
@@ -947,7 +976,7 @@ exception_unwind:
            so a program can emulate the
            Python main loop. */
         PyObject *exc = _PyErr_GetRaisedException(tstate);
-        PUSH(exc);
+        PUSH(PyStackRef_FromPyObjectSteal(exc));
         next_instr = _PyCode_CODE(_PyFrame_GetCode(frame)) + handler;

         if (monitor_handled(tstate, frame, next_instr, exc) < 0) {
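The unwind path needs only two conversions: popped entries are released with PyStackRef_XCLOSE instead of Py_XDECREF, and the fresh strong references being pushed (the lasti value and the raised exception) are stolen into their stack slots, so no separate decref is owed afterwards. Roughly, the push amounts to the following (illustrative fragment, written against the raw stack pointer rather than the PUSH macro):

    PyObject *exc = _PyErr_GetRaisedException(tstate);   // returns a new (strong) reference
    *stack_pointer++ = PyStackRef_FromPyObjectSteal(exc);
    // The slot now owns that reference; it is released later through
    // PyStackRef_CLOSE/XCLOSE when the handler pops or unwinds it.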
@@ -1217,7 +1246,7 @@ format_missing(PyThreadState *tstate, const char *kind,
 static void
 missing_arguments(PyThreadState *tstate, PyCodeObject *co,
                   Py_ssize_t missing, Py_ssize_t defcount,
-                  PyObject **localsplus, PyObject *qualname)
+                  _PyStackRef *localsplus, PyObject *qualname)
 {
     Py_ssize_t i, j = 0;
     Py_ssize_t start, end;
@@ -1238,7 +1267,7 @@ missing_arguments(PyThreadState *tstate, PyCodeObject *co,
         end = start + co->co_kwonlyargcount;
     }
     for (i = start; i < end; i++) {
-        if (localsplus[i] == NULL) {
+        if (PyStackRef_IsNull(localsplus[i])) {
             PyObject *raw = PyTuple_GET_ITEM(co->co_localsplusnames, i);
             PyObject *name = PyObject_Repr(raw);
             if (name == NULL) {
@@ -1256,7 +1285,7 @@ missing_arguments(PyThreadState *tstate, PyCodeObject *co,
 static void
 too_many_positional(PyThreadState *tstate, PyCodeObject *co,
                     Py_ssize_t given, PyObject *defaults,
-                    PyObject **localsplus, PyObject *qualname)
+                    _PyStackRef *localsplus, PyObject *qualname)
 {
     int plural;
     Py_ssize_t kwonly_given = 0;
@@ -1267,7 +1296,7 @@ too_many_positional(PyThreadState *tstate, PyCodeObject *co,
         assert((co->co_flags & CO_VARARGS) == 0);
         /* Count missing keyword-only args. */
         for (i = co_argcount; i < co_argcount + co->co_kwonlyargcount; i++) {
-            if (localsplus[i] != NULL) {
+            if (PyStackRef_AsPyObjectBorrow(localsplus[i]) != NULL) {
                 kwonly_given++;
             }
         }
@@ -1445,7 +1474,7 @@ get_exception_handler(PyCodeObject *code, int index, int *level, int *handler, i

 static int
 initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
-                  PyObject **localsplus, PyObject *const *args,
+                  _PyStackRef *localsplus, _PyStackRef const *args,
                   Py_ssize_t argcount, PyObject *kwnames)
 {
     PyCodeObject *co = (PyCodeObject*)func->func_code;
@@ -1463,8 +1492,8 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         if (co->co_flags & CO_VARARGS) {
             i++;
         }
-        assert(localsplus[i] == NULL);
-        localsplus[i] = kwdict;
+        assert(PyStackRef_IsNull(localsplus[i]));
+        localsplus[i] = PyStackRef_FromPyObjectSteal(kwdict);
     }
     else {
         kwdict = NULL;
@@ -1479,9 +1508,8 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         n = argcount;
     }
     for (j = 0; j < n; j++) {
-        PyObject *x = args[j];
-        assert(localsplus[j] == NULL);
-        localsplus[j] = x;
+        assert(PyStackRef_IsNull(localsplus[j]));
+        localsplus[j] = args[j];
     }

     /* Pack other positional arguments into the *args argument */
@@ -1492,18 +1520,23 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         }
         else {
             assert(args != NULL);
-            u = _PyTuple_FromArraySteal(args + n, argcount - n);
+            STACKREFS_TO_PYOBJECTS((_PyStackRef *)args, argcount, args_o);
+            if (args_o == NULL) {
+                goto fail_pre_positional;
+            }
+            u = _PyTuple_FromArraySteal((args_o + n), argcount - n);
+            STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
         }
         if (u == NULL) {
             goto fail_post_positional;
         }
-        assert(localsplus[total_args] == NULL);
-        localsplus[total_args] = u;
+        assert(PyStackRef_AsPyObjectBorrow(localsplus[total_args]) == NULL);
+        localsplus[total_args] = PyStackRef_FromPyObjectSteal(u);
     }
     else if (argcount > n) {
         /* Too many positional args. Error is reported later */
         for (j = n; j < argcount; j++) {
-            Py_DECREF(args[j]);
+            PyStackRef_CLOSE(args[j]);
         }
     }

@@ -1513,7 +1546,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         for (i = 0; i < kwcount; i++) {
             PyObject **co_varnames;
             PyObject *keyword = PyTuple_GET_ITEM(kwnames, i);
-            PyObject *value = args[i+argcount];
+            _PyStackRef value_stackref = args[i+argcount];
             Py_ssize_t j;

             if (keyword == NULL || !PyUnicode_Check(keyword)) {
@@ -1586,27 +1619,26 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
                 goto kw_fail;
             }

-            if (PyDict_SetItem(kwdict, keyword, value) == -1) {
+            if (PyDict_SetItem(kwdict, keyword, PyStackRef_AsPyObjectSteal(value_stackref)) == -1) {
                 goto kw_fail;
             }
-            Py_DECREF(value);
+            PyStackRef_CLOSE(value_stackref);
             continue;

         kw_fail:
             for (;i < kwcount; i++) {
-                PyObject *value = args[i+argcount];
-                Py_DECREF(value);
+                PyStackRef_CLOSE(args[i+argcount]);
             }
             goto fail_post_args;

         kw_found:
-            if (localsplus[j] != NULL) {
+            if (PyStackRef_AsPyObjectBorrow(localsplus[j]) != NULL) {
                 _PyErr_Format(tstate, PyExc_TypeError,
                               "%U() got multiple values for argument '%S'",
                               func->func_qualname, keyword);
                 goto kw_fail;
             }
-            localsplus[j] = value;
+            localsplus[j] = value_stackref;
         }
     }

@@ -1623,7 +1655,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         Py_ssize_t m = co->co_argcount - defcount;
         Py_ssize_t missing = 0;
         for (i = argcount; i < m; i++) {
-            if (localsplus[i] == NULL) {
+            if (PyStackRef_IsNull(localsplus[i])) {
                 missing++;
             }
         }
@@ -1639,9 +1671,9 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
         if (defcount) {
             PyObject **defs = &PyTuple_GET_ITEM(func->func_defaults, 0);
             for (; i < defcount; i++) {
-                if (localsplus[m+i] == NULL) {
+                if (PyStackRef_AsPyObjectBorrow(localsplus[m+i]) == NULL) {
                     PyObject *def = defs[i];
-                    localsplus[m+i] = Py_NewRef(def);
+                    localsplus[m+i] = PyStackRef_FromPyObjectNew(def);
                 }
             }
         }
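The default-filling loop above is the one spot in this function that must create a reference rather than take one over: Py_NewRef(def) becomes PyStackRef_FromPyObjectNew(def), whereas values the caller already owns keep using PyStackRef_FromPyObjectSteal. A small illustrative contrast (error handling omitted; `def` stands for a borrowed default as above):

    _PyStackRef owned = PyStackRef_FromPyObjectSteal(PyLong_FromLong(42));  /* takes over the fresh reference */
    _PyStackRef dflt  = PyStackRef_FromPyObjectNew(def);                    /* acquires its own reference, like Py_NewRef() did */
    PyStackRef_CLOSE(owned);   /* each slot releases whatever it holds */
    PyStackRef_CLOSE(dflt);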
@@ -1651,7 +1683,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
     if (co->co_kwonlyargcount > 0) {
         Py_ssize_t missing = 0;
         for (i = co->co_argcount; i < total_args; i++) {
-            if (localsplus[i] != NULL)
+            if (PyStackRef_AsPyObjectBorrow(localsplus[i]) != NULL)
                 continue;
             PyObject *varname = PyTuple_GET_ITEM(co->co_localsplusnames, i);
             if (func->func_kwdefaults != NULL) {
@@ -1660,7 +1692,7 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,
                 goto fail_post_args;
             }
             if (def) {
-                localsplus[i] = def;
+                localsplus[i] = PyStackRef_FromPyObjectSteal(def);
                 continue;
             }
         }
@@ -1676,14 +1708,14 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func,

 fail_pre_positional:
     for (j = 0; j < argcount; j++) {
-        Py_DECREF(args[j]);
+        PyStackRef_CLOSE(args[j]);
     }
     /* fall through */
 fail_post_positional:
     if (kwnames) {
         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
         for (j = argcount; j < argcount+kwcount; j++) {
-            Py_DECREF(args[j]);
+            PyStackRef_CLOSE(args[j]);
         }
     }
     /* fall through */
@@ -1738,7 +1770,7 @@ _PyEval_FrameClearAndPop(PyThreadState *tstate, _PyInterpreterFrame * frame)
 /* Consumes references to func, locals and all the args */
 _PyInterpreterFrame *
 _PyEvalFramePushAndInit(PyThreadState *tstate, PyFunctionObject *func,
-                        PyObject *locals, PyObject* const* args,
+                        PyObject *locals, _PyStackRef const* args,
                         size_t argcount, PyObject *kwnames)
 {
     PyCodeObject * code = (PyCodeObject *)func->func_code;
@@ -1759,18 +1791,45 @@ fail:
     Py_DECREF(func);
     Py_XDECREF(locals);
     for (size_t i = 0; i < argcount; i++) {
-        Py_DECREF(args[i]);
+        PyStackRef_CLOSE(args[i]);
     }
     if (kwnames) {
         Py_ssize_t kwcount = PyTuple_GET_SIZE(kwnames);
         for (Py_ssize_t i = 0; i < kwcount; i++) {
-            Py_DECREF(args[i+argcount]);
+            PyStackRef_CLOSE(args[i+argcount]);
         }
     }
     PyErr_NoMemory();
     return NULL;
 }

+static _PyInterpreterFrame *
+_PyEvalFramePushAndInit_UnTagged(PyThreadState *tstate, PyFunctionObject *func,
+                                 PyObject *locals, PyObject *const* args,
+                                 size_t argcount, PyObject *kwnames)
+{
+#if defined(Py_GIL_DISABLED)
+    size_t kw_count = kwnames == NULL ? 0 : PyTuple_GET_SIZE(kwnames);
+    size_t total_argcount = argcount + kw_count;
+    _PyStackRef *tagged_args_buffer = PyMem_Malloc(sizeof(_PyStackRef) * total_argcount);
+    if (tagged_args_buffer == NULL) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+    for (size_t i = 0; i < argcount; i++) {
+        tagged_args_buffer[i] = PyStackRef_FromPyObjectSteal(args[i]);
+    }
+    for (size_t i = 0; i < kw_count; i++) {
+        tagged_args_buffer[argcount + i] = PyStackRef_FromPyObjectSteal(args[argcount + i]);
+    }
+    _PyInterpreterFrame *res = _PyEvalFramePushAndInit(tstate, func, locals, (_PyStackRef const *)tagged_args_buffer, argcount, kwnames);
+    PyMem_Free(tagged_args_buffer);
+    return res;
+#else
+    return _PyEvalFramePushAndInit(tstate, func, locals, (_PyStackRef const *)args, argcount, kwnames);
+#endif
+}
+
 /* Same as _PyEvalFramePushAndInit but takes an args tuple and kwargs dict.
    Steals references to func, callargs and kwargs.
 */
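On the default build the #else branch simply reinterprets the caller's PyObject *const * array as _PyStackRef const *. That shortcut is only sound while a stackref stays exactly one pointer wide; a defensive check one could place next to such a cast might look like this (an assumption-labelled sketch, not part of the diff):

    #include <assert.h>   /* static_assert (C11) */

    /* _PyStackRef wraps a single machine word (its .bits field), so the
     * untagged fast path may alias PyObject * arrays as stackref arrays. */
    static_assert(sizeof(_PyStackRef) == sizeof(PyObject *),
                  "stackrefs must remain pointer-sized for the untagged call path");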
@@ -1795,7 +1854,7 @@ _PyEvalFramePushAndInit_Ex(PyThreadState *tstate, PyFunctionObject *func,
             Py_INCREF(PyTuple_GET_ITEM(callargs, i));
         }
     }
-    _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit(
+    _PyInterpreterFrame *new_frame = _PyEvalFramePushAndInit_UnTagged(
         tstate, (PyFunctionObject *)func, locals,
         newargs, nargs, kwnames
     );
@@ -1833,7 +1892,7 @@ _PyEval_Vector(PyThreadState *tstate, PyFunctionObject *func,
             Py_INCREF(args[i+argcount]);
         }
     }
-    _PyInterpreterFrame *frame = _PyEvalFramePushAndInit(
+    _PyInterpreterFrame *frame = _PyEvalFramePushAndInit_UnTagged(
         tstate, func, locals, args, argcount, kwnames);
     if (frame == NULL) {
         return NULL;
@@ -2085,8 +2144,8 @@ _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type,
 */

 int
-_PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
-                       int argcnt, int argcntafter, PyObject **sp)
+_PyEval_UnpackIterableStackRef(PyThreadState *tstate, _PyStackRef v_stackref,
+                               int argcnt, int argcntafter, _PyStackRef *sp)
 {
     int i = 0, j = 0;
     Py_ssize_t ll = 0;
@@ -2094,6 +2153,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
     PyObject *w;
     PyObject *l = NULL; /* variable list */

+    PyObject *v = PyStackRef_AsPyObjectBorrow(v_stackref);
     assert(v != NULL);

     it = PyObject_GetIter(v);
@@ -2128,7 +2188,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
             }
             goto Error;
         }
-        *--sp = w;
+        *--sp = PyStackRef_FromPyObjectSteal(w);
     }

     if (argcntafter == -1) {
@@ -2150,7 +2210,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
     l = PySequence_List(it);
     if (l == NULL)
         goto Error;
-    *--sp = l;
+    *--sp = PyStackRef_FromPyObjectSteal(l);
     i++;

     ll = PyList_GET_SIZE(l);
@@ -2163,7 +2223,7 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,

     /* Pop the "after-variable" args off the list. */
     for (j = argcntafter; j > 0; j--, i++) {
-        *--sp = PyList_GET_ITEM(l, ll - j);
+        *--sp = PyStackRef_FromPyObjectSteal(PyList_GET_ITEM(l, ll - j));
     }
     /* Resize the list. */
     Py_SET_SIZE(l, ll - argcntafter);
@@ -2171,8 +2231,9 @@ _PyEval_UnpackIterable(PyThreadState *tstate, PyObject *v,
     return 1;

 Error:
-    for (; i > 0; i--, sp++)
-        Py_DECREF(*sp);
+    for (; i > 0; i--, sp++) {
+        PyStackRef_CLOSE(*sp);
+    }
     Py_XDECREF(it);
     return 0;
 }
@@ -264,9 +264,9 @@ GETITEM(PyObject *v, Py_ssize_t i) {
    This is because it is possible that during the DECREF the frame is
    accessed by other code (e.g. a __del__ method or gc.collect()) and the
    variable would be pointing to already-freed memory. */
-#define SETLOCAL(i, value)      do { PyObject *tmp = GETLOCAL(i); \
+#define SETLOCAL(i, value)      do { _PyStackRef tmp = GETLOCAL(i); \
                                      GETLOCAL(i) = value; \
-                                     Py_XDECREF(tmp); } while (0)
+                                     PyStackRef_XCLOSE(tmp); } while (0)

 #define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op)

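SETLOCAL keeps the exact ordering the old macro had, only the release primitive changes: save the old value, install the new one, and close the old stackref last, so a __del__ hook or GC run triggered by the release never sees a half-updated slot. Expanded for slot 0 (illustrative; new_value is a placeholder):

    /* SETLOCAL(0, new_value) expands to roughly: */
    do {
        _PyStackRef tmp = GETLOCAL(0);   /* remember the old contents first       */
        GETLOCAL(0) = new_value;         /* install the replacement               */
        PyStackRef_XCLOSE(tmp);          /* release last, after the slot is valid */
    } while (0);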
@@ -449,3 +449,34 @@ do { \
 #define EXIT_TO_TRACE() goto exit_to_trace
 #define EXIT_TO_TIER1() goto exit_to_tier1
 #define EXIT_TO_TIER1_DYNAMIC() goto exit_to_tier1_dynamic;
+
+/* Stackref macros */
+
+/* How much scratch space to give stackref to PyObject* conversion. */
+#define MAX_STACKREF_SCRATCH 10
+
+#ifdef Py_GIL_DISABLED
+#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
+    /* +1 because vectorcall might use -1 to write self */ \
+    PyObject *NAME##_temp[MAX_STACKREF_SCRATCH+1]; \
+    PyObject **NAME = _PyObjectArray_FromStackRefArray(ARGS, ARG_COUNT, NAME##_temp + 1);
+#else
+#define STACKREFS_TO_PYOBJECTS(ARGS, ARG_COUNT, NAME) \
+    PyObject **NAME = (PyObject **)ARGS; \
+    assert(NAME != NULL);
+#endif
+
+#ifdef Py_GIL_DISABLED
+#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
+    /* +1 because we +1 previously */ \
+    _PyObjectArray_Free(NAME - 1, NAME##_temp);
+#else
+#define STACKREFS_TO_PYOBJECTS_CLEANUP(NAME) \
+    (void)(NAME);
+#endif
+
+#ifdef Py_GIL_DISABLED
+#define CONVERSION_FAILED(NAME) ((NAME) == NULL)
+#else
+#define CONVERSION_FAILED(NAME) (0)
+#endif
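Taken together, these macros give call sites one build-independent idiom: convert the stackref span, bail out on CONVERSION_FAILED (which can only trigger on the free-threaded build, where the conversion may allocate), use the plain PyObject ** view, then clean up. A hedged sketch of that idiom around a vectorcall (the wrapper function is hypothetical):

    static PyObject *
    invoke(PyObject *callable, _PyStackRef *args, Py_ssize_t nargs)
    {
        STACKREFS_TO_PYOBJECTS(args, nargs, args_o);      /* declares PyObject **args_o */
        if (CONVERSION_FAILED(args_o)) {
            return PyErr_NoMemory();                      /* free-threaded build only */
        }
        PyObject *res = PyObject_Vectorcall(callable, args_o, nargs, NULL);
        STACKREFS_TO_PYOBJECTS_CLEANUP(args_o);
        return res;
    }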
File diff suppressed because it is too large
@@ -16,11 +16,11 @@ _PyFrame_Traverse(_PyInterpreterFrame *frame, visitproc visit, void *arg)
     Py_VISIT(frame->f_funcobj);
     Py_VISIT(_PyFrame_GetCode(frame));
     /* locals */
-    PyObject **locals = _PyFrame_GetLocalsArray(frame);
+    _PyStackRef *locals = _PyFrame_GetLocalsArray(frame);
     int i = 0;
     /* locals and stack */
     for (; i <frame->stacktop; i++) {
-        Py_VISIT(locals[i]);
+        Py_VISIT(PyStackRef_AsPyObjectBorrow(locals[i]));
     }
     return 0;
 }
@@ -101,7 +101,7 @@ _PyFrame_ClearLocals(_PyInterpreterFrame *frame)
     int stacktop = frame->stacktop;
     frame->stacktop = 0;
     for (int i = 0; i < stacktop; i++) {
-        Py_XDECREF(frame->localsplus[i]);
+        PyStackRef_XCLOSE(frame->localsplus[i]);
     }
     Py_CLEAR(frame->f_locals);
 }
File diff suppressed because it is too large
@@ -196,7 +196,7 @@ _Py_SetTier2Optimizer(_PyOptimizerObject *optimizer)
 int
 _PyOptimizer_Optimize(
     _PyInterpreterFrame *frame, _Py_CODEUNIT *start,
-    PyObject **stack_pointer, _PyExecutorObject **executor_ptr)
+    _PyStackRef *stack_pointer, _PyExecutorObject **executor_ptr)
 {
     PyCodeObject *code = _PyFrame_GetCode(frame);
     assert(PyCode_Check(code));
@@ -1393,7 +1393,7 @@ counter_optimize(
     _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg;
     _PyUOpInstruction buffer[4] = {
         { .opcode = _START_EXECUTOR, .jump_target = 3, .format=UOP_FORMAT_JUMP },
-        { .opcode = _LOAD_CONST_INLINE_BORROW, .operand = (uintptr_t)self },
+        { .opcode = _LOAD_CONST_INLINE, .operand = (uintptr_t)self },
         { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER },
         { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)), .format=UOP_FORMAT_TARGET }
     };
@@ -955,9 +955,9 @@
         /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */

         case _LOAD_SUPER_ATTR_ATTR: {
-            _Py_UopsSymbol *attr;
-            attr = sym_new_not_null(ctx);
-            stack_pointer[-3] = attr;
+            _Py_UopsSymbol *attr_st;
+            attr_st = sym_new_not_null(ctx);
+            stack_pointer[-3] = attr_st;
             stack_pointer += -2;
             assert(WITHIN_STACK_BOUNDS());
             break;
@@ -1319,9 +1319,9 @@
         }

         case _GET_LEN: {
-            _Py_UopsSymbol *len_o;
-            len_o = sym_new_not_null(ctx);
-            stack_pointer[0] = len_o;
+            _Py_UopsSymbol *len;
+            len = sym_new_not_null(ctx);
+            stack_pointer[0] = len;
             stack_pointer += 1;
             assert(WITHIN_STACK_BOUNDS());
             break;
@@ -1898,9 +1898,9 @@
         }

         case _SET_FUNCTION_ATTRIBUTE: {
-            _Py_UopsSymbol *func;
-            func = sym_new_not_null(ctx);
-            stack_pointer[-2] = func;
+            _Py_UopsSymbol *func_st;
+            func_st = sym_new_not_null(ctx);
+            stack_pointer[-2] = func_st;
             stack_pointer += -1;
             assert(WITHIN_STACK_BOUNDS());
             break;
@@ -679,7 +679,10 @@ specialize_module_load_attr(
 /* Attribute specialization */

 void
-_Py_Specialize_LoadSuperAttr(PyObject *global_super, PyObject *cls, _Py_CODEUNIT *instr, int load_method) {
+_Py_Specialize_LoadSuperAttr(_PyStackRef global_super_st, _PyStackRef cls_st, _Py_CODEUNIT *instr, int load_method) {
+    PyObject *global_super = PyStackRef_AsPyObjectBorrow(global_super_st);
+    PyObject *cls = PyStackRef_AsPyObjectBorrow(cls_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[LOAD_SUPER_ATTR] == INLINE_CACHE_ENTRIES_LOAD_SUPER_ATTR);
     _PySuperAttrCache *cache = (_PySuperAttrCache *)(instr + 1);
@@ -885,8 +888,10 @@ static int specialize_attr_loadclassattr(PyObject* owner, _Py_CODEUNIT* instr, P
 static int specialize_class_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* name);

 void
-_Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
+_Py_Specialize_LoadAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *name)
 {
+    PyObject *owner = PyStackRef_AsPyObjectBorrow(owner_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[LOAD_ATTR] == INLINE_CACHE_ENTRIES_LOAD_ATTR);
     _PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
@@ -1081,8 +1086,10 @@ success:
 }

 void
-_Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name)
+_Py_Specialize_StoreAttr(_PyStackRef owner_st, _Py_CODEUNIT *instr, PyObject *name)
 {
+    PyObject *owner = PyStackRef_AsPyObjectBorrow(owner_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[STORE_ATTR] == INLINE_CACHE_ENTRIES_STORE_ATTR);
     _PyAttrCache *cache = (_PyAttrCache *)(instr + 1);
@@ -1521,8 +1528,11 @@ type_get_version(PyTypeObject *t, int opcode)

 void
 _Py_Specialize_BinarySubscr(
-     PyObject *container, PyObject *sub, _Py_CODEUNIT *instr)
+     _PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr)
 {
+    PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
+    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[BINARY_SUBSCR] ==
            INLINE_CACHE_ENTRIES_BINARY_SUBSCR);
@@ -1621,8 +1631,11 @@ success:
 }

 void
-_Py_Specialize_StoreSubscr(PyObject *container, PyObject *sub, _Py_CODEUNIT *instr)
+_Py_Specialize_StoreSubscr(_PyStackRef container_st, _PyStackRef sub_st, _Py_CODEUNIT *instr)
 {
+    PyObject *container = PyStackRef_AsPyObjectBorrow(container_st);
+    PyObject *sub = PyStackRef_AsPyObjectBorrow(sub_st);
+
     assert(ENABLE_SPECIALIZATION);
     _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)(instr + 1);
     PyTypeObject *container_type = Py_TYPE(container);
@@ -1939,8 +1952,10 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs)
 }

 void
-_Py_Specialize_Call(PyObject *callable, _Py_CODEUNIT *instr, int nargs)
+_Py_Specialize_Call(_PyStackRef callable_st, _Py_CODEUNIT *instr, int nargs)
 {
+    PyObject *callable = PyStackRef_AsPyObjectBorrow(callable_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[CALL] == INLINE_CACHE_ENTRIES_CALL);
     assert(_Py_OPCODE(*instr) != INSTRUMENTED_CALL);
@@ -2056,9 +2071,11 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs)
 #endif   // Py_STATS

 void
-_Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
-                        int oparg, PyObject **locals)
+_Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr,
+                        int oparg, _PyStackRef *locals)
 {
+    PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st);
+    PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP);
     _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1);
@@ -2071,7 +2088,7 @@ _Py_Specialize_BinaryOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
         if (PyUnicode_CheckExact(lhs)) {
             _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_BINARY_OP + 1];
             bool to_store = (next.op.code == STORE_FAST);
-            if (to_store && locals[next.op.arg] == lhs) {
+            if (to_store && PyStackRef_AsPyObjectBorrow(locals[next.op.arg]) == lhs) {
                 instr->op.code = BINARY_OP_INPLACE_ADD_UNICODE;
                 goto success;
             }
@@ -2163,9 +2180,12 @@ compare_op_fail_kind(PyObject *lhs, PyObject *rhs)
 #endif   // Py_STATS

 void
-_Py_Specialize_CompareOp(PyObject *lhs, PyObject *rhs, _Py_CODEUNIT *instr,
+_Py_Specialize_CompareOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr,
                          int oparg)
 {
+    PyObject *lhs = PyStackRef_AsPyObjectBorrow(lhs_st);
+    PyObject *rhs = PyStackRef_AsPyObjectBorrow(rhs_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[COMPARE_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
     // All of these specializations compute boolean values, so they're all valid
@@ -2226,8 +2246,10 @@ unpack_sequence_fail_kind(PyObject *seq)
 #endif   // Py_STATS

 void
-_Py_Specialize_UnpackSequence(PyObject *seq, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_UnpackSequence(_PyStackRef seq_st, _Py_CODEUNIT *instr, int oparg)
 {
+    PyObject *seq = PyStackRef_AsPyObjectBorrow(seq_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[UNPACK_SEQUENCE] ==
            INLINE_CACHE_ENTRIES_UNPACK_SEQUENCE);
@@ -2337,12 +2359,12 @@ int
 #endif   // Py_STATS

 void
-_Py_Specialize_ForIter(PyObject *iter, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg)
 {
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
     _PyForIterCache *cache = (_PyForIterCache *)(instr + 1);
-    PyTypeObject *tp = Py_TYPE(iter);
+    PyTypeObject *tp = Py_TYPE(PyStackRef_AsPyObjectBorrow(iter));
     if (tp == &PyListIter_Type) {
         instr->op.code = FOR_ITER_LIST;
         goto success;
@@ -2379,8 +2401,10 @@ success:
 }

 void
-_Py_Specialize_Send(PyObject *receiver, _Py_CODEUNIT *instr)
+_Py_Specialize_Send(_PyStackRef receiver_st, _Py_CODEUNIT *instr)
 {
+    PyObject *receiver = PyStackRef_AsPyObjectBorrow(receiver_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[SEND] == INLINE_CACHE_ENTRIES_SEND);
     _PySendCache *cache = (_PySendCache *)(instr + 1);
@@ -2406,11 +2430,12 @@ success:
 }

 void
-_Py_Specialize_ToBool(PyObject *value, _Py_CODEUNIT *instr)
+_Py_Specialize_ToBool(_PyStackRef value_o, _Py_CODEUNIT *instr)
 {
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[TO_BOOL] == INLINE_CACHE_ENTRIES_TO_BOOL);
     _PyToBoolCache *cache = (_PyToBoolCache *)(instr + 1);
+    PyObject *value = PyStackRef_AsPyObjectBorrow(value_o);
     if (PyBool_Check(value)) {
         instr->op.code = TO_BOOL_BOOL;
         goto success;
@@ -2520,8 +2545,10 @@ static int containsop_fail_kind(PyObject *value) {
 #endif   // Py_STATS

 void
-_Py_Specialize_ContainsOp(PyObject *value, _Py_CODEUNIT *instr)
+_Py_Specialize_ContainsOp(_PyStackRef value_st, _Py_CODEUNIT *instr)
 {
+    PyObject *value = PyStackRef_AsPyObjectBorrow(value_st);
+
     assert(ENABLE_SPECIALIZATION);
     assert(_PyOpcode_Caches[CONTAINS_OP] == INLINE_CACHE_ENTRIES_COMPARE_OP);
     _PyContainsOpCache *cache = (_PyContainsOpCache *)(instr + 1);
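Every specializer entry point in this file follows the same mechanical change: the signature now takes _PyStackRef operands and the first statements borrow the underlying PyObject pointers, so the body can keep using Py_TYPE and the exact-type checks untouched. In generic form (the function name below is made up):

    void
    _Py_Specialize_Example(_PyStackRef operand_st, _Py_CODEUNIT *instr)
    {
        // Unwrap once at the top; the stackref itself is never incref'd or decref'd here.
        PyObject *operand = PyStackRef_AsPyObjectBorrow(operand_st);

        assert(ENABLE_SPECIALIZATION);
        if (PyLong_CheckExact(operand)) {
            // ... rewrite instr->op.code to the specialized opcode, as before ...
        }
    }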
@@ -111,7 +111,7 @@ class StackItem:
         return f"{type}{self.name}{size}{cond} {self.peek}"

     def is_array(self) -> bool:
-        return self.type == "PyObject **"
+        return self.type == "_PyStackRef *"


 @dataclass
@@ -353,6 +353,21 @@ def has_error_without_pop(op: parser.InstDef) -> bool:


 NON_ESCAPING_FUNCTIONS = (
+    "PyStackRef_FromPyObjectSteal",
+    "PyStackRef_AsPyObjectBorrow",
+    "PyStackRef_AsPyObjectSteal",
+    "PyStackRef_CLOSE",
+    "PyStackRef_DUP",
+    "PyStackRef_CLEAR",
+    "PyStackRef_IsNull",
+    "PyStackRef_TYPE",
+    "PyStackRef_False",
+    "PyStackRef_True",
+    "PyStackRef_None",
+    "PyStackRef_Is",
+    "PyStackRef_FromPyObjectNew",
+    "PyStackRef_AsPyObjectNew",
+    "PyStackRef_FromPyObjectImmortal",
     "Py_INCREF",
     "_PyManagedDictPointer_IsValues",
     "_PyObject_GetManagedDict",
@@ -399,8 +414,6 @@ NON_ESCAPING_FUNCTIONS = (
     "_PyFrame_SetStackPointer",
     "_PyType_HasFeature",
     "PyUnicode_Concat",
-    "_PyList_FromArraySteal",
-    "_PyTuple_FromArraySteal",
     "PySlice_New",
     "_Py_LeaveRecursiveCallPy",
     "CALL_STAT_INC",
@@ -413,6 +426,11 @@ NON_ESCAPING_FUNCTIONS = (
     "PyFloat_AS_DOUBLE",
     "_PyFrame_PushUnchecked",
     "Py_FatalError",
+    "STACKREFS_TO_PYOBJECTS",
+    "STACKREFS_TO_PYOBJECTS_CLEANUP",
+    "CONVERSION_FAILED",
+    "_PyList_FromArraySteal",
+    "_PyTuple_FromArraySteal",
 )

 ESCAPING_FUNCTIONS = (
@@ -128,15 +128,15 @@ def replace_decrefs(
             continue
         if var.size != "1":
             out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n")
-            out.emit(f"Py_DECREF({var.name}[_i]);\n")
+            out.emit(f"PyStackRef_CLOSE({var.name}[_i]);\n")
             out.emit("}\n")
         elif var.condition:
             if var.condition == "1":
-                out.emit(f"Py_DECREF({var.name});\n")
+                out.emit(f"PyStackRef_CLOSE({var.name});\n")
             elif var.condition != "0":
-                out.emit(f"Py_XDECREF({var.name});\n")
+                out.emit(f"PyStackRef_XCLOSE({var.name});\n")
         else:
-            out.emit(f"Py_DECREF({var.name});\n")
+            out.emit(f"PyStackRef_CLOSE({var.name});\n")


 def replace_sync_sp(
@@ -103,7 +103,7 @@ def write_uop(
     is_override = override is not None
     out.start_line()
     for var in reversed(prototype.stack.inputs):
-        res = stack.pop(var)
+        res = stack.pop(var, extract_bits=True)
         if not skip_inputs:
             out.emit(res)
     if not prototype.properties.stores_sp:
@@ -140,7 +140,7 @@ def write_uop(
             if not var.peek or is_override:
                 out.emit(stack.push(var))
         out.start_line()
-        stack.flush(out, cast_type="_Py_UopsSymbol *")
+        stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True)
     except SizeMismatch as ex:
         raise analysis_error(ex.args[0], uop.body[0])

@@ -285,7 +285,7 @@ class Parser(PLexer):
                 if not (size := self.expression()):
                     raise self.make_syntax_error("Expected expression")
                 self.require(lx.RBRACKET)
-                type_text = "PyObject **"
+                type_text = "_PyStackRef *"
                 size_text = size.text.strip()
             return StackEffect(tkn.text, type_text, cond_text, size_text)
         return None
@@ -125,7 +125,7 @@ class Stack:
         self.variables: list[StackItem] = []
         self.defined: set[str] = set()

-    def pop(self, var: StackItem) -> str:
+    def pop(self, var: StackItem, extract_bits: bool = False) -> str:
         self.top_offset.pop(var)
         if not var.peek:
             self.peek_offset.pop(var)
@@ -155,8 +155,9 @@ class Stack:
         else:
             self.defined.add(var.name)
         cast = f"({var.type})" if (not indirect and var.type) else ""
+        bits = ".bits" if cast and not extract_bits else ""
         assign = (
-            f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}];"
+            f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}]{bits};"
        )
         if var.condition:
             if var.condition == "1":
@@ -178,11 +179,12 @@ class Stack:
         self.top_offset.push(var)
         return ""

-    def flush(self, out: CWriter, cast_type: str = "PyObject *") -> None:
+    def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
         out.start_line()
         for var in self.variables:
             if not var.peek:
                 cast = f"({cast_type})" if var.type else ""
+                bits = ".bits" if cast and not extract_bits else ""
                 if var.name not in UNUSED and not var.is_array():
                     if var.condition:
                         if var.condition == "0":
@@ -190,7 +192,7 @@ class Stack:
                         elif var.condition != "1":
                             out.emit(f"if ({var.condition}) ")
                     out.emit(
-                        f"stack_pointer[{self.base_offset.to_c()}] = {cast}{var.name};\n"
+                        f"stack_pointer[{self.base_offset.to_c()}]{bits} = {cast}{var.name};\n"
                     )
                 self.base_offset.push(var)
         if self.base_offset.to_c() != self.top_offset.to_c():
@@ -37,20 +37,25 @@ def declare_variables(inst: Instruction, out: CWriter) -> None:
         if isinstance(uop, Uop):
             for var in reversed(uop.stack.inputs):
                 if var.name not in variables:
-                    type = var.type if var.type else "PyObject *"
                     variables.add(var.name)
+                    type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL")
+                    space = " " if type[-1].isalnum() else ""
                     if var.condition:
-                        out.emit(f"{type}{var.name} = NULL;\n")
+                        out.emit(f"{type}{space}{var.name} = {null};\n")
                     else:
-                        out.emit(f"{type}{var.name};\n")
+                        if var.is_array():
+                            out.emit(f"{var.type}{space}{var.name};\n")
+                        else:
+                            out.emit(f"{type}{space}{var.name};\n")
             for var in uop.stack.outputs:
                 if var.name not in variables:
                     variables.add(var.name)
-                    type = var.type if var.type else "PyObject *"
+                    type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL")
+                    space = " " if type[-1].isalnum() else ""
                     if var.condition:
-                        out.emit(f"{type}{var.name} = NULL;\n")
+                        out.emit(f"{type}{space}{var.name} = {null};\n")
                     else:
-                        out.emit(f"{type}{var.name};\n")
+                        out.emit(f"{type}{space}{var.name};\n")


 def write_uop(
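The effect of these generator changes is only visible in the emitted C: untyped stack slots are declared as stackrefs, and conditional slots get an explicit null-stackref initializer instead of NULL. For a conditional slot named value, the emitted declaration changes roughly as follows (illustrative):

    /* before */ PyObject *value = NULL;
    /* after  */ _PyStackRef value = PyStackRef_NULL;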
@@ -34,16 +34,17 @@ def declare_variable(
 ) -> None:
     if var.name in variables:
         return
-    type = var.type if var.type else "PyObject *"
     variables.add(var.name)
+    type, null = (var.type, "NULL") if var.type else ("_PyStackRef", "PyStackRef_NULL")
+    space = " " if type[-1].isalnum() else ""
     if var.condition:
-        out.emit(f"{type}{var.name} = NULL;\n")
+        out.emit(f"{type}{space}{var.name} = {null};\n")
         if uop.replicates:
             # Replicas may not use all their conditional variables
             # So avoid a compiler warning with a fake use
             out.emit(f"(void){var.name};\n")
     else:
-        out.emit(f"{type}{var.name};\n")
+        out.emit(f"{type}{space}{var.name};\n")


 def declare_variables(uop: Uop, out: CWriter) -> None:
@@ -100,6 +100,8 @@ MAX_OUTPUT_LEN=1024

 hexdigits = "0123456789abcdef"

+USED_TAGS = 0b11
+
 ENCODING = locale.getpreferredencoding()

 FRAME_INFO_OPTIMIZED_OUT = '(frame information optimized out)'
@@ -158,6 +160,8 @@ class PyObjectPtr(object):
     _typename = 'PyObject'

     def __init__(self, gdbval, cast_to=None):
+        # Clear the tagged pointer
+        gdbval = gdb.Value(int(gdbval) & (~USED_TAGS)).cast(gdbval.type)
         if cast_to:
             self._gdbval = gdbval.cast(cast_to)
         else:
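The gdb helper treats the two low bits of every tagged pointer as tag bits and masks them off before interpreting the value as a PyObject *. The same masking, written out in C for reference (an illustrative mirror of the USED_TAGS constant above, not code from the diff):

    #include <stdint.h>
    #include "Python.h"

    #define USED_TAGS ((uintptr_t)0x3)   /* the two low tag bits (0b11 in the gdb helper) */

    /* Recover the untagged PyObject pointer from a raw stackref word. */
    static inline PyObject *
    untag(uintptr_t bits)
    {
        return (PyObject *)(bits & ~USED_TAGS);
    }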
@@ -17,6 +17,7 @@
 #include "pycore_setobject.h"
 #include "pycore_sliceobject.h"
 #include "pycore_descrobject.h"
+#include "pycore_stackref.h"

 #include "ceval_macros.h"

@@ -84,7 +85,7 @@ do { \
 #define WITHIN_STACK_BOUNDS() 1

 _Py_CODEUNIT *
-_JIT_ENTRY(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState *tstate)
+_JIT_ENTRY(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer, PyThreadState *tstate)
 {
     // Locals that the instruction implementations expect to exist:
     PATCH_VALUE(_PyExecutorObject *, current_executor, _JIT_EXECUTOR)
@@ -8,7 +8,7 @@
 // The actual change is patched in while the JIT compiler is being built, in
 // Tools/jit/_targets.py. On other platforms, this function compiles to nothing.
 _Py_CODEUNIT *
-_ENTRY(_PyInterpreterFrame *frame, PyObject **stack_pointer, PyThreadState *tstate)
+_ENTRY(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer, PyThreadState *tstate)
 {
     // This is subtle. The actual trace will return to us once it exits, so we
     // need to make sure that we stay alive until then. If our trace side-exits